StreamingCommunity-1.8.0-py3-none-any.whl → StreamingCommunity-1.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of StreamingCommunity might be problematic.

Files changed (98)
  1. StreamingCommunity/run.py +18 -13
  2. {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/METADATA +157 -63
  3. StreamingCommunity-1.9.2.dist-info/RECORD +7 -0
  4. {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/WHEEL +1 -1
  5. {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/entry_points.txt +1 -0
  6. StreamingCommunity/Src/Api/Player/Helper/Vixcloud/js_parser.py +0 -143
  7. StreamingCommunity/Src/Api/Player/Helper/Vixcloud/util.py +0 -166
  8. StreamingCommunity/Src/Api/Player/ddl.py +0 -89
  9. StreamingCommunity/Src/Api/Player/maxstream.py +0 -151
  10. StreamingCommunity/Src/Api/Player/supervideo.py +0 -194
  11. StreamingCommunity/Src/Api/Player/vixcloud.py +0 -224
  12. StreamingCommunity/Src/Api/Site/1337xx/__init__.py +0 -50
  13. StreamingCommunity/Src/Api/Site/1337xx/costant.py +0 -15
  14. StreamingCommunity/Src/Api/Site/1337xx/site.py +0 -84
  15. StreamingCommunity/Src/Api/Site/1337xx/title.py +0 -66
  16. StreamingCommunity/Src/Api/Site/altadefinizione/__init__.py +0 -50
  17. StreamingCommunity/Src/Api/Site/altadefinizione/costant.py +0 -15
  18. StreamingCommunity/Src/Api/Site/altadefinizione/film.py +0 -69
  19. StreamingCommunity/Src/Api/Site/altadefinizione/site.py +0 -86
  20. StreamingCommunity/Src/Api/Site/animeunity/__init__.py +0 -50
  21. StreamingCommunity/Src/Api/Site/animeunity/anime.py +0 -126
  22. StreamingCommunity/Src/Api/Site/animeunity/costant.py +0 -15
  23. StreamingCommunity/Src/Api/Site/animeunity/film_serie.py +0 -131
  24. StreamingCommunity/Src/Api/Site/animeunity/site.py +0 -165
  25. StreamingCommunity/Src/Api/Site/animeunity/util/ScrapeSerie.py +0 -97
  26. StreamingCommunity/Src/Api/Site/bitsearch/__init__.py +0 -51
  27. StreamingCommunity/Src/Api/Site/bitsearch/costant.py +0 -15
  28. StreamingCommunity/Src/Api/Site/bitsearch/site.py +0 -84
  29. StreamingCommunity/Src/Api/Site/bitsearch/title.py +0 -47
  30. StreamingCommunity/Src/Api/Site/cb01new/__init__.py +0 -51
  31. StreamingCommunity/Src/Api/Site/cb01new/costant.py +0 -15
  32. StreamingCommunity/Src/Api/Site/cb01new/film.py +0 -69
  33. StreamingCommunity/Src/Api/Site/cb01new/site.py +0 -74
  34. StreamingCommunity/Src/Api/Site/ddlstreamitaly/Player/ScrapeSerie.py +0 -83
  35. StreamingCommunity/Src/Api/Site/ddlstreamitaly/__init__.py +0 -57
  36. StreamingCommunity/Src/Api/Site/ddlstreamitaly/costant.py +0 -16
  37. StreamingCommunity/Src/Api/Site/ddlstreamitaly/series.py +0 -142
  38. StreamingCommunity/Src/Api/Site/ddlstreamitaly/site.py +0 -93
  39. StreamingCommunity/Src/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +0 -83
  40. StreamingCommunity/Src/Api/Site/guardaserie/Player/ScrapeSerie.py +0 -110
  41. StreamingCommunity/Src/Api/Site/guardaserie/__init__.py +0 -52
  42. StreamingCommunity/Src/Api/Site/guardaserie/costant.py +0 -15
  43. StreamingCommunity/Src/Api/Site/guardaserie/series.py +0 -195
  44. StreamingCommunity/Src/Api/Site/guardaserie/site.py +0 -84
  45. StreamingCommunity/Src/Api/Site/guardaserie/util/ScrapeSerie.py +0 -110
  46. StreamingCommunity/Src/Api/Site/mostraguarda/__init__.py +0 -48
  47. StreamingCommunity/Src/Api/Site/mostraguarda/costant.py +0 -15
  48. StreamingCommunity/Src/Api/Site/mostraguarda/film.py +0 -94
  49. StreamingCommunity/Src/Api/Site/piratebays/__init__.py +0 -50
  50. StreamingCommunity/Src/Api/Site/piratebays/costant.py +0 -15
  51. StreamingCommunity/Src/Api/Site/piratebays/site.py +0 -89
  52. StreamingCommunity/Src/Api/Site/piratebays/title.py +0 -45
  53. StreamingCommunity/Src/Api/Site/streamingcommunity/__init__.py +0 -55
  54. StreamingCommunity/Src/Api/Site/streamingcommunity/costant.py +0 -15
  55. StreamingCommunity/Src/Api/Site/streamingcommunity/film.py +0 -70
  56. StreamingCommunity/Src/Api/Site/streamingcommunity/series.py +0 -203
  57. StreamingCommunity/Src/Api/Site/streamingcommunity/site.py +0 -126
  58. StreamingCommunity/Src/Api/Site/streamingcommunity/util/ScrapeSerie.py +0 -113
  59. StreamingCommunity/Src/Api/Template/Class/SearchType.py +0 -101
  60. StreamingCommunity/Src/Api/Template/Util/__init__.py +0 -5
  61. StreamingCommunity/Src/Api/Template/Util/get_domain.py +0 -137
  62. StreamingCommunity/Src/Api/Template/Util/manage_ep.py +0 -153
  63. StreamingCommunity/Src/Api/Template/Util/recall_search.py +0 -37
  64. StreamingCommunity/Src/Api/Template/__init__.py +0 -3
  65. StreamingCommunity/Src/Api/Template/site.py +0 -87
  66. StreamingCommunity/Src/Lib/Downloader/HLS/downloader.py +0 -968
  67. StreamingCommunity/Src/Lib/Downloader/HLS/proxyes.py +0 -110
  68. StreamingCommunity/Src/Lib/Downloader/HLS/segments.py +0 -540
  69. StreamingCommunity/Src/Lib/Downloader/MP4/downloader.py +0 -156
  70. StreamingCommunity/Src/Lib/Downloader/TOR/downloader.py +0 -222
  71. StreamingCommunity/Src/Lib/Downloader/__init__.py +0 -5
  72. StreamingCommunity/Src/Lib/Driver/driver_1.py +0 -76
  73. StreamingCommunity/Src/Lib/FFmpeg/__init__.py +0 -4
  74. StreamingCommunity/Src/Lib/FFmpeg/capture.py +0 -170
  75. StreamingCommunity/Src/Lib/FFmpeg/command.py +0 -292
  76. StreamingCommunity/Src/Lib/FFmpeg/util.py +0 -242
  77. StreamingCommunity/Src/Lib/M3U8/__init__.py +0 -6
  78. StreamingCommunity/Src/Lib/M3U8/decryptor.py +0 -129
  79. StreamingCommunity/Src/Lib/M3U8/estimator.py +0 -173
  80. StreamingCommunity/Src/Lib/M3U8/parser.py +0 -666
  81. StreamingCommunity/Src/Lib/M3U8/url_fixer.py +0 -52
  82. StreamingCommunity/Src/Lib/TMBD/__init__.py +0 -2
  83. StreamingCommunity/Src/Lib/TMBD/obj_tmbd.py +0 -39
  84. StreamingCommunity/Src/Lib/TMBD/tmdb.py +0 -346
  85. StreamingCommunity/Src/Upload/update.py +0 -64
  86. StreamingCommunity/Src/Upload/version.py +0 -5
  87. StreamingCommunity/Src/Util/_jsonConfig.py +0 -204
  88. StreamingCommunity/Src/Util/call_stack.py +0 -42
  89. StreamingCommunity/Src/Util/color.py +0 -20
  90. StreamingCommunity/Src/Util/console.py +0 -12
  91. StreamingCommunity/Src/Util/headers.py +0 -147
  92. StreamingCommunity/Src/Util/logger.py +0 -53
  93. StreamingCommunity/Src/Util/message.py +0 -46
  94. StreamingCommunity/Src/Util/os.py +0 -417
  95. StreamingCommunity/Src/Util/table.py +0 -163
  96. StreamingCommunity-1.8.0.dist-info/RECORD +0 -97
  97. {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/LICENSE +0 -0
  98. {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/top_level.txt +0 -0
StreamingCommunity/Src/Lib/Downloader/HLS/proxyes.py
@@ -1,110 +0,0 @@
- # 09.06.24
-
- import os
- import sys
- import logging
- from concurrent.futures import ThreadPoolExecutor
-
-
- # External libraries
- import httpx
-
-
- # Internal utilities
- from StreamingCommunity.Src.Util._jsonConfig import config_manager
- from StreamingCommunity.Src.Util.headers import get_headers
- from StreamingCommunity.Src.Util.os import os_manager
-
-
- class ProxyManager:
-     def __init__(self, proxy_list=None, url=None):
-         """
-         Initialize ProxyManager with a list of proxies and a test URL.
-
-         Parameters:
-             - proxy_list: List of proxy strings
-             - url: URL used to test the proxies
-         """
-         self.proxy_list = proxy_list or []
-         self.verified_proxies = []
-         self.timeout = config_manager.get_float('REQUESTS', 'timeout')
-         self.url = url
-
-     def _check_proxy(self, proxy):
-         """
-         Check if a single proxy is working by making a request to the test URL.
-
-         Parameters:
-             - proxy: Proxy string to be checked
-
-         Returns:
-             - Proxy string if working, None otherwise
-         """
-         protocol = proxy.split(":")[0].lower()
-         protocol = f'{protocol}://'
-         proxy = {protocol: proxy, "https://": proxy}
-
-         try:
-             with httpx.Client(proxies=proxy, verify=False) as client:
-                 response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
-
-                 if response.status_code == 200:
-                     logging.info(f"Proxy {proxy} is working.")
-                     return proxy
-
-         except Exception as e:
-             logging.error(f"Test proxy {proxy} failed: {e}")
-             return None
-
-     def verify_proxies(self):
-         """
-         Verify all proxies in the list and store the working ones.
-         """
-         logging.info("Starting proxy verification...")
-         with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
-             self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
-
-         self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
-         logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
-
-     def get_verified_proxies(self):
-         """
-         Get the validated proxies.
-         """
-
-         if len(self.verified_proxies) > 0:
-             return self.verified_proxies
-
-         else:
-             logging.error("Can't find a valid proxy.")
-             sys.exit(0)
-
-
- def main_test_proxy(url_test):
-
-     path_file_proxt_list = "list_proxy.txt"
-
-     if os_manager.check_file(path_file_proxt_list):
-
-         # Read file
-         with open(path_file_proxt_list, 'r') as file:
-             ip_addresses = file.readlines()
-
-         # Format IPs
-         ip_addresses = [ip.strip() for ip in ip_addresses]
-         formatted_ips = [f"http://{ip}" for ip in ip_addresses]
-
-         # Use the formatted IPs as the proxy list
-         proxy_list = formatted_ips
-
-         # Verify proxies
-         manager = ProxyManager(proxy_list, url_test)
-         manager.verify_proxies()
-
-         # Write the IP list back to the file
-         with open(path_file_proxt_list, 'w') as file:
-             for ip in ip_addresses:
-                 file.write(f"{ip}\n")
-
-         # Return valid proxies
-         return manager.get_verified_proxies()
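
For context, the removed `proxyes.py` implemented concurrent proxy validation: each candidate proxy is tried against a test URL in a thread pool and only the responsive ones are kept. A minimal standalone sketch of that pattern is below; the candidate addresses and test URL are illustrative placeholders, and a hard-coded timeout stands in for the original `config_manager` lookup.

```python
# Minimal sketch of the proxy-validation pattern from the removed proxyes.py.
# The candidate proxies and test URL below are illustrative placeholders.
from concurrent.futures import ThreadPoolExecutor
from typing import Optional

import httpx


def check_proxy(proxy: str, test_url: str, timeout: float = 10.0) -> Optional[str]:
    """Return the proxy string if it can fetch test_url, otherwise None."""
    try:
        with httpx.Client(proxies=proxy, verify=False, timeout=timeout) as client:
            if client.get(test_url).status_code == 200:
                return proxy
    except Exception:
        return None
    return None


def verify_proxies(proxies: list, test_url: str) -> list:
    """Validate all candidates concurrently and keep only the working ones."""
    with ThreadPoolExecutor(max_workers=8) as executor:
        results = executor.map(lambda p: check_proxy(p, test_url), proxies)
    return [p for p in results if p is not None]


if __name__ == "__main__":
    candidates = ["http://203.0.113.10:8080", "http://198.51.100.7:3128"]
    print(verify_proxies(candidates, "https://example.com"))
```

The original module additionally rewrote `list_proxy.txt` after the check and exited the process when no proxy survived validation.
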
StreamingCommunity/Src/Lib/Downloader/HLS/segments.py
@@ -1,540 +0,0 @@
- # 18.04.24
-
- import os
- import sys
- import time
- import queue
- import logging
- import binascii
- import threading
- import signal
- from queue import PriorityQueue
- from urllib.parse import urljoin, urlparse
- from concurrent.futures import ThreadPoolExecutor, as_completed
-
-
- # External libraries
- import httpx
- from tqdm import tqdm
-
-
- # Internal utilities
- from StreamingCommunity.Src.Util.console import console
- from StreamingCommunity.Src.Util.headers import get_headers, random_headers
- from StreamingCommunity.Src.Util.color import Colors
- from StreamingCommunity.Src.Util._jsonConfig import config_manager
- from StreamingCommunity.Src.Util.os import os_manager
- from StreamingCommunity.Src.Util.call_stack import get_call_stack
-
-
- # Logic class
- from ...M3U8 import (
-     M3U8_Decryption,
-     M3U8_Ts_Estimator,
-     M3U8_Parser,
-     M3U8_UrlFix
- )
- from .proxyes import main_test_proxy
-
- # Config
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
- REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
-
-
- # Variable
- headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-
- class M3U8_Segments:
-     def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
-         """
-         Initializes the M3U8_Segments object.
-
-         Parameters:
-             - url (str): The URL of the M3U8 playlist, or its raw content when `is_index_url` is False.
-             - tmp_folder (str): The temporary folder to store downloaded segments.
-             - is_index_url (bool): Flag indicating whether `url` is a URL (default True).
-         """
-         self.url = url
-         self.tmp_folder = tmp_folder
-         self.is_index_url = is_index_url
-         self.expected_real_time = None
-
-         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
-         os.makedirs(self.tmp_folder, exist_ok=True)
-
-         # Util class
-         self.decryption: M3U8_Decryption = None
-         self.class_ts_estimator = M3U8_Ts_Estimator(0)
-         self.class_url_fixer = M3U8_UrlFix(url)
-
-         # Sync
-         self.queue = PriorityQueue()
-         self.stop_event = threading.Event()
-         self.downloaded_segments = set()
-
-         # Stopping
-         self.interrupt_flag = threading.Event()
-         self.download_interrupted = False
-
-     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
-         """
-         Retrieves the encryption key from the M3U8 playlist.
-
-         Parameters:
-             - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
-
-         Returns:
-             bytes: The encryption key in bytes.
-         """
-         headers_index = {'user-agent': get_headers()}
-
-         # Construct the full URL of the key
-         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
-         parsed_url = urlparse(key_uri)
-         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-         logging.info(f"Uri key: {key_uri}")
-
-         # Make request to get the key
-         try:
-             response = httpx.get(
-                 url=key_uri,
-                 headers=headers_index,
-                 timeout=max_timeout
-             )
-             response.raise_for_status()
-
-         except Exception as e:
-             raise Exception(f"Failed to fetch key from {key_uri}: {e}")
-
-         # Convert the content of the response to hexadecimal and then to bytes
-         hex_content = binascii.hexlify(response.content).decode('utf-8')
-         byte_content = bytes.fromhex(hex_content)
-
-         return byte_content
-
-     def parse_data(self, m3u8_content: str) -> None:
-         """
-         Parses the M3U8 content to extract segment information.
-
-         Parameters:
-             - m3u8_content (str): The content of the M3U8 file.
-         """
-         m3u8_parser = M3U8_Parser()
-         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
-
-         self.expected_real_time = m3u8_parser.get_duration(return_string=False)
-         self.expected_real_time_s = m3u8_parser.duration
-
-         # Check if there is an encryption key in the playlist
-         if m3u8_parser.keys is not None:
-             try:
-
-                 # Extract bytes from the key
-                 key = self.__get_key__(m3u8_parser)
-
-             except Exception as e:
-                 raise Exception(f"Failed to retrieve encryption key: {e}")
-
-             iv = m3u8_parser.keys.get('iv')
-             method = m3u8_parser.keys.get('method')
-
-             # Create a decryption object with the key and set the method
-             self.decryption = M3U8_Decryption(key, iv, method)
-
-         # Store the segment information parsed from the playlist
-         self.segments = m3u8_parser.segments
-
-         # Fix URL if it is incomplete (missing 'http')
-         for i in range(len(self.segments)):
-             segment_url = self.segments[i]
-
-             if "http" not in segment_url:
-                 self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
-                 logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
-
-         # Update segments for estimator
-         self.class_ts_estimator.total_segments = len(self.segments)
-         logging.info(f"Segments to download: [{len(self.segments)}]")
-
-         # Proxy
-         if THERE_IS_PROXY_LIST:
-             console.log("[red]Starting proxy validation.")
-             self.valid_proxy = main_test_proxy(self.segments[0])
-             console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
-
-             if len(self.valid_proxy) == 0:
-                 sys.exit(0)
-
-     def get_info(self) -> None:
-         """
-         Makes a request to the index M3U8 file to get information about segments.
-         """
-         headers_index = {'user-agent': get_headers()}
-
-         if self.is_index_url:
-
-             # Send a GET request to retrieve the index M3U8 file
-             response = httpx.get(
-                 self.url,
-                 headers=headers_index,
-                 timeout=max_timeout
-             )
-             response.raise_for_status()
-
-             # Save the M3U8 file to the temporary folder
-             path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
-             open(path_m3u8_file, "w+").write(response.text)
-
-             # Parse the text from the M3U8 index file
-             self.parse_data(response.text)
-
-         else:
-
-             # Parse the index content passed directly to the class
-             self.parse_data(self.url)
-
-     def setup_interrupt_handler(self):
-         """
-         Set up a signal handler for graceful interruption.
-         """
-         def interrupt_handler(signum, frame):
-             if not self.interrupt_flag.is_set():
-                 console.log("\n[red] Stopping download gracefully...")
-                 self.interrupt_flag.set()
-                 self.download_interrupted = True
-                 self.stop_event.set()
-
-         signal.signal(signal.SIGINT, interrupt_handler)
-
-     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, retries: int = 3, backoff_factor: float = 1.5) -> None:
-         """
-         Downloads a TS segment and adds it to the segment queue with retry logic.
-
-         Parameters:
-             - ts_url (str): The URL of the TS segment.
-             - index (int): The index of the segment.
-             - progress_bar (tqdm): Progress counter for tracking download progress.
-             - retries (int): The number of times to retry on failure (default is 3).
-             - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
-         """
-         if self.interrupt_flag.is_set():
-             return
-
-         need_verify = REQUEST_VERIFY
-         min_segment_size = 100  # Minimum acceptable size for a TS segment in bytes
-
-         for attempt in range(retries):
-             if self.interrupt_flag.is_set():
-                 return
-
-             try:
-                 start_time = time.time()
-
-                 # Make request to get content
-                 if THERE_IS_PROXY_LIST:
-
-                     # Get proxy from list
-                     proxy = self.valid_proxy[index % len(self.valid_proxy)]
-                     logging.info(f"Use proxy: {proxy}")
-
-                     with httpx.Client(proxies=proxy, verify=need_verify) as client:
-                         if 'key_base_url' in self.__dict__:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers={'user-agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 else:
-                     with httpx.Client(verify=need_verify) as client_2:
-                         if 'key_base_url' in self.__dict__:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers={'user-agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 # Validate response and content
-                 response.raise_for_status()
-                 segment_content = response.content
-                 content_size = len(segment_content)
-
-                 # Check if segment is too small (possibly corrupted or empty)
-                 if content_size < min_segment_size:
-                     raise httpx.RequestError(f"Segment {index} too small ({content_size} bytes)")
-
-                 duration = time.time() - start_time
-
-                 # Decrypt if needed and verify decrypted content
-                 if self.decryption is not None:
-                     try:
-                         segment_content = self.decryption.decrypt(segment_content)
-                         if len(segment_content) < min_segment_size:
-                             raise Exception(f"Decrypted segment {index} too small ({len(segment_content)} bytes)")
-
-                     except Exception as e:
-                         logging.error(f"Decryption failed for segment {index}: {str(e)}")
-                         raise
-
-                 # Update progress and queue
-                 self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
-
-                 # Add the segment to the queue
-                 self.queue.put((index, segment_content))
-
-                 # Track successfully downloaded segments
-                 self.downloaded_segments.add(index)
-                 progress_bar.update(1)
-
-                 # Break out of the loop on success
-                 return
-
-             except Exception as e:
-                 #logging.error(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
-
-                 if attempt + 1 == retries:
-                     #logging.error(f"Final retry failed for segment {index}")
-                     self.queue.put((index, None))  # Marker for failed segment
-                     progress_bar.update(1)
-                     break
-
-                 sleep_time = backoff_factor * (2 ** attempt)
-                 logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
-                 time.sleep(sleep_time)
-
-     def write_segments_to_file(self):
-         """
-         Writes segments to file with additional verification.
-         """
-         with open(self.tmp_file_path, 'wb') as f:
-             expected_index = 0
-             buffer = {}
-             total_written = 0
-             segments_written = set()
-
-             while not self.stop_event.is_set() or not self.queue.empty():
-
-                 if self.interrupt_flag.is_set():
-                     break
-
-                 try:
-                     index, segment_content = self.queue.get(timeout=1)
-
-                     # Handle failed segments
-                     if segment_content is None:
-                         if index == expected_index:
-                             expected_index += 1
-                         continue
-
-                     # Write segment if it's the next expected one
-                     if index == expected_index:
-                         f.write(segment_content)
-                         total_written += len(segment_content)
-                         segments_written.add(index)
-                         f.flush()
-                         expected_index += 1
-
-                         # Write any buffered segments that are now in order
-                         while expected_index in buffer:
-                             next_segment = buffer.pop(expected_index)
-                             if next_segment is not None:
-                                 f.write(next_segment)
-                                 total_written += len(next_segment)
-                                 segments_written.add(expected_index)
-                                 f.flush()
-                             expected_index += 1
-                     else:
-                         buffer[index] = segment_content
-
-                 except queue.Empty:
-                     if self.stop_event.is_set():
-                         break
-                     continue
-                 except Exception as e:
-                     logging.error(f"Error writing segment {index}: {str(e)}")
-                     continue
-
-             # Final verification
-             if total_written == 0:
-                 raise Exception("No data written to file")
-
-     def download_streams(self, add_desc):
-         """
-         Downloads all TS segments in parallel and writes them to a file.
-
-         Parameters:
-             - add_desc (str): Additional description for the progress bar.
-         """
-         self.setup_interrupt_handler()
-
-         # Get the site config name from the previous stack frames
-         frames = get_call_stack()
-         logging.info(f"Extract info from: {frames}")
-         config_site = str(frames[-4]['folder_base'])
-         logging.info(f"Use frame: {frames[-1]}")
-
-         # Workers to use for downloading
-         TQDM_MAX_WORKER = 0
-
-         # Select video/audio workers from the site config of the calling frame.
-         try:
-             VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
-         except:
-             #VIDEO_WORKERS = os.cpu_count()
-             VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
-
-         try:
-             AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
-         except:
-             #AUDIO_WORKERS = os.cpu_count()
-             AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
-
-         # Different workers for audio and video
-         if "video" in str(add_desc):
-             TQDM_MAX_WORKER = VIDEO_WORKERS
-         if "audio" in str(add_desc):
-             TQDM_MAX_WORKER = AUDIO_WORKERS
-
-         # Custom bar for mobile and pc
-         if TQDM_USE_LARGE_BAR:
-             bar_format = (
-                 f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.MAGENTA}{{bar}} "
-                 f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
-                 f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-         else:
-             bar_format = (
-                 f"{Colors.YELLOW}Proc{Colors.WHITE}: "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.WHITE}| "
-                 f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-
-         # Create progress bar
-         progress_bar = tqdm(
-             total=len(self.segments),
-             unit='s',
-             ascii='░▒█',
-             bar_format=bar_format,
-             mininterval=0.05
-         )
-
-         try:
-
-             # Start writer thread
-             writer_thread = threading.Thread(target=self.write_segments_to_file)
-             writer_thread.daemon = True
-             writer_thread.start()
-
-             # Configure workers and delay
-             max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
-             delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
-
-             # Download segments with completion verification
-             with ThreadPoolExecutor(max_workers=max_workers) as executor:
-                 futures = []
-                 for index, segment_url in enumerate(self.segments):
-                     # Check for interrupt before submitting each task
-                     if self.interrupt_flag.is_set():
-                         break
-
-                     time.sleep(delay)
-                     futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
-
-                 # Wait for futures with interrupt handling
-                 for future in as_completed(futures):
-                     if self.interrupt_flag.is_set():
-                         break
-                     try:
-                         future.result()
-                     except Exception as e:
-                         logging.error(f"Error in download thread: {str(e)}")
-
-                 # Interrupt handling for missing segments
-                 if not self.interrupt_flag.is_set():
-                     total_segments = len(self.segments)
-                     completed_segments = len(self.downloaded_segments)
-
-                     if completed_segments < total_segments:
-                         missing_segments = set(range(total_segments)) - self.downloaded_segments
-                         logging.warning(f"Missing segments: {sorted(missing_segments)}")
-
-                         # Retry missing segments with interrupt check
-                         for index in missing_segments:
-                             if self.interrupt_flag.is_set():
-                                 break
-                             try:
-                                 self.make_requests_stream(self.segments[index], index, progress_bar)
-                             except Exception as e:
-                                 logging.error(f"Failed to retry segment {index}: {str(e)}")
-
-         except Exception as e:
-             logging.error(f"Download failed: {str(e)}")
-             raise
-
-         finally:
-
-             # Clean up resources
-             self.stop_event.set()
-             writer_thread.join(timeout=30)
-             progress_bar.close()
-
-             # Check if download was interrupted
-             if self.download_interrupted:
-                 console.log("[red] Download was manually stopped.")
-
-                 # Optional: Delete partial download
-                 if os.path.exists(self.tmp_file_path):
-                     os.remove(self.tmp_file_path)
-                 sys.exit(0)
-
-         # Clean up
-         self.stop_event.set()
-         writer_thread.join(timeout=30)
-         progress_bar.close()
-
-         # Final verification
-         final_completion = (len(self.downloaded_segments) / total_segments) * 100
-         if final_completion < 99.9:  # Less than 99.9% complete
-             missing = set(range(total_segments)) - self.downloaded_segments
-             raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
-
-         # Verify output file
-         if not os.path.exists(self.tmp_file_path):
-             raise Exception("Output file missing")
-
-         file_size = os.path.getsize(self.tmp_file_path)
-         if file_size == 0:
-             raise Exception("Output file is empty")
-
-         logging.info(f"Download completed. File size: {file_size} bytes")