StreamingCommunity 1.9.1__py3-none-any.whl → 1.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of StreamingCommunity might be problematic.

Files changed (96)
  1. StreamingCommunity/run.py +4 -5
  2. {StreamingCommunity-1.9.1.dist-info → StreamingCommunity-1.9.4.dist-info}/METADATA +37 -7
  3. StreamingCommunity-1.9.4.dist-info/RECORD +7 -0
  4. {StreamingCommunity-1.9.1.dist-info → StreamingCommunity-1.9.4.dist-info}/WHEEL +1 -1
  5. {StreamingCommunity-1.9.1.dist-info → StreamingCommunity-1.9.4.dist-info}/entry_points.txt +1 -0
  6. StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py +0 -143
  7. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +0 -166
  8. StreamingCommunity/Api/Player/ddl.py +0 -89
  9. StreamingCommunity/Api/Player/maxstream.py +0 -151
  10. StreamingCommunity/Api/Player/supervideo.py +0 -194
  11. StreamingCommunity/Api/Player/vixcloud.py +0 -224
  12. StreamingCommunity/Api/Site/1337xx/__init__.py +0 -50
  13. StreamingCommunity/Api/Site/1337xx/costant.py +0 -15
  14. StreamingCommunity/Api/Site/1337xx/site.py +0 -84
  15. StreamingCommunity/Api/Site/1337xx/title.py +0 -66
  16. StreamingCommunity/Api/Site/altadefinizione/__init__.py +0 -50
  17. StreamingCommunity/Api/Site/altadefinizione/costant.py +0 -15
  18. StreamingCommunity/Api/Site/altadefinizione/film.py +0 -69
  19. StreamingCommunity/Api/Site/altadefinizione/site.py +0 -86
  20. StreamingCommunity/Api/Site/animeunity/__init__.py +0 -50
  21. StreamingCommunity/Api/Site/animeunity/costant.py +0 -15
  22. StreamingCommunity/Api/Site/animeunity/film_serie.py +0 -130
  23. StreamingCommunity/Api/Site/animeunity/site.py +0 -165
  24. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +0 -97
  25. StreamingCommunity/Api/Site/bitsearch/__init__.py +0 -51
  26. StreamingCommunity/Api/Site/bitsearch/costant.py +0 -15
  27. StreamingCommunity/Api/Site/bitsearch/site.py +0 -84
  28. StreamingCommunity/Api/Site/bitsearch/title.py +0 -47
  29. StreamingCommunity/Api/Site/cb01new/__init__.py +0 -51
  30. StreamingCommunity/Api/Site/cb01new/costant.py +0 -15
  31. StreamingCommunity/Api/Site/cb01new/film.py +0 -69
  32. StreamingCommunity/Api/Site/cb01new/site.py +0 -74
  33. StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +0 -57
  34. StreamingCommunity/Api/Site/ddlstreamitaly/costant.py +0 -16
  35. StreamingCommunity/Api/Site/ddlstreamitaly/series.py +0 -141
  36. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +0 -93
  37. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +0 -85
  38. StreamingCommunity/Api/Site/guardaserie/__init__.py +0 -52
  39. StreamingCommunity/Api/Site/guardaserie/costant.py +0 -15
  40. StreamingCommunity/Api/Site/guardaserie/series.py +0 -195
  41. StreamingCommunity/Api/Site/guardaserie/site.py +0 -84
  42. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +0 -110
  43. StreamingCommunity/Api/Site/mostraguarda/__init__.py +0 -48
  44. StreamingCommunity/Api/Site/mostraguarda/costant.py +0 -15
  45. StreamingCommunity/Api/Site/mostraguarda/film.py +0 -94
  46. StreamingCommunity/Api/Site/piratebays/__init__.py +0 -50
  47. StreamingCommunity/Api/Site/piratebays/costant.py +0 -15
  48. StreamingCommunity/Api/Site/piratebays/site.py +0 -89
  49. StreamingCommunity/Api/Site/piratebays/title.py +0 -45
  50. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +0 -55
  51. StreamingCommunity/Api/Site/streamingcommunity/costant.py +0 -15
  52. StreamingCommunity/Api/Site/streamingcommunity/film.py +0 -70
  53. StreamingCommunity/Api/Site/streamingcommunity/series.py +0 -205
  54. StreamingCommunity/Api/Site/streamingcommunity/site.py +0 -126
  55. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +0 -113
  56. StreamingCommunity/Api/Template/Class/SearchType.py +0 -101
  57. StreamingCommunity/Api/Template/Util/__init__.py +0 -5
  58. StreamingCommunity/Api/Template/Util/get_domain.py +0 -137
  59. StreamingCommunity/Api/Template/Util/manage_ep.py +0 -153
  60. StreamingCommunity/Api/Template/Util/recall_search.py +0 -37
  61. StreamingCommunity/Api/Template/__init__.py +0 -3
  62. StreamingCommunity/Api/Template/site.py +0 -87
  63. StreamingCommunity/Lib/Downloader/HLS/downloader.py +0 -968
  64. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +0 -110
  65. StreamingCommunity/Lib/Downloader/HLS/segments.py +0 -538
  66. StreamingCommunity/Lib/Downloader/MP4/downloader.py +0 -156
  67. StreamingCommunity/Lib/Downloader/TOR/downloader.py +0 -222
  68. StreamingCommunity/Lib/Downloader/__init__.py +0 -5
  69. StreamingCommunity/Lib/Driver/driver_1.py +0 -76
  70. StreamingCommunity/Lib/FFmpeg/__init__.py +0 -4
  71. StreamingCommunity/Lib/FFmpeg/capture.py +0 -170
  72. StreamingCommunity/Lib/FFmpeg/command.py +0 -292
  73. StreamingCommunity/Lib/FFmpeg/util.py +0 -242
  74. StreamingCommunity/Lib/M3U8/__init__.py +0 -6
  75. StreamingCommunity/Lib/M3U8/decryptor.py +0 -164
  76. StreamingCommunity/Lib/M3U8/estimator.py +0 -176
  77. StreamingCommunity/Lib/M3U8/parser.py +0 -666
  78. StreamingCommunity/Lib/M3U8/url_fixer.py +0 -52
  79. StreamingCommunity/Lib/TMBD/__init__.py +0 -2
  80. StreamingCommunity/Lib/TMBD/obj_tmbd.py +0 -39
  81. StreamingCommunity/Lib/TMBD/tmdb.py +0 -346
  82. StreamingCommunity/Upload/update.py +0 -68
  83. StreamingCommunity/Upload/version.py +0 -5
  84. StreamingCommunity/Util/_jsonConfig.py +0 -204
  85. StreamingCommunity/Util/call_stack.py +0 -42
  86. StreamingCommunity/Util/color.py +0 -20
  87. StreamingCommunity/Util/console.py +0 -12
  88. StreamingCommunity/Util/ffmpeg_installer.py +0 -275
  89. StreamingCommunity/Util/headers.py +0 -147
  90. StreamingCommunity/Util/logger.py +0 -53
  91. StreamingCommunity/Util/message.py +0 -46
  92. StreamingCommunity/Util/os.py +0 -514
  93. StreamingCommunity/Util/table.py +0 -163
  94. StreamingCommunity-1.9.1.dist-info/RECORD +0 -95
  95. {StreamingCommunity-1.9.1.dist-info → StreamingCommunity-1.9.4.dist-info}/LICENSE +0 -0
  96. {StreamingCommunity-1.9.1.dist-info → StreamingCommunity-1.9.4.dist-info}/top_level.txt +0 -0
StreamingCommunity/Lib/Downloader/HLS/proxyes.py
@@ -1,110 +0,0 @@
- # 09.06.24
-
- import os
- import sys
- import logging
- from concurrent.futures import ThreadPoolExecutor
-
-
- # External libraries
- import httpx
-
-
- # Internal utilities
- from StreamingCommunity.Util._jsonConfig import config_manager
- from StreamingCommunity.Util.headers import get_headers
- from StreamingCommunity.Util.os import os_manager
-
-
- class ProxyManager:
-     def __init__(self, proxy_list=None, url=None):
-         """
-         Initialize ProxyManager with a list of proxies and a test URL.
-
-         Parameters:
-             - proxy_list: List of proxy strings
-             - url: The URL used to test the proxies
-         """
-         self.proxy_list = proxy_list or []
-         self.verified_proxies = []
-         self.timeout = config_manager.get_float('REQUESTS', 'timeout')
-         self.url = url
-
-     def _check_proxy(self, proxy):
-         """
-         Check if a single proxy is working by making a request to the configured test URL.
-
-         Parameters:
-             - proxy: Proxy string to be checked
-
-         Returns:
-             - Proxy string if working, None otherwise
-         """
-         protocol = proxy.split(":")[0].lower()
-         protocol = f'{protocol}://'
-         proxy = {protocol: proxy, "https://": proxy}
-
-         try:
-             with httpx.Client(proxies=proxy, verify=False) as client:
-                 response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
-
-                 if response.status_code == 200:
-                     logging.info(f"Proxy {proxy} is working.")
-                     return proxy
-
-         except Exception as e:
-             logging.error(f"Test proxy {proxy} failed: {e}")
-             return None
-
-     def verify_proxies(self):
-         """
-         Verify all proxies in the list and store the working ones.
-         """
-         logging.info("Starting proxy verification...")
-         with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
-             self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
-
-         self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
-         logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
-
-     def get_verified_proxies(self):
-         """
-         Get the validated proxies.
-         """
-
-         if len(self.verified_proxies) > 0:
-             return self.verified_proxies
-
-         else:
-             logging.error("Can't find a valid proxy.")
-             sys.exit(0)
-
-
- def main_test_proxy(url_test):
-
-     path_file_proxt_list = "list_proxy.txt"
-
-     if os_manager.check_file(path_file_proxt_list):
-
-         # Read the file
-         with open(path_file_proxt_list, 'r') as file:
-             ip_addresses = file.readlines()
-
-         # Format the IPs
-         ip_addresses = [ip.strip() for ip in ip_addresses]
-         formatted_ips = [f"http://{ip}" for ip in ip_addresses]
-
-         # Use the formatted IPs as the proxy list
-         proxy_list = formatted_ips
-
-         # Verify the proxies
-         manager = ProxyManager(proxy_list, url_test)
-         manager.verify_proxies()
-
-         # Write the IP list back to the file
-         with open(path_file_proxt_list, 'w') as file:
-             for ip in ip_addresses:
-                 file.write(f"{ip}\n")
-
-         # Return the verified proxies
-         return manager.get_verified_proxies()
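The removed file above implements a simple pattern: probe each candidate proxy concurrently against a real target URL and keep only the responders. A condensed, self-contained sketch of that pattern follows; it assumes an httpx version older than 0.26 (where the `proxies=` mapping argument still exists), and `TEST_URL` plus the function names are illustrative, not from the package.

    import httpx
    from concurrent.futures import ThreadPoolExecutor

    TEST_URL = "https://example.com"  # hypothetical probe target

    def check_proxy(proxy_url: str, timeout: float = 5.0):
        # Route both plain and TLS traffic through the candidate proxy.
        proxy_map = {"http://": proxy_url, "https://": proxy_url}
        try:
            with httpx.Client(proxies=proxy_map, verify=False) as client:
                if client.get(TEST_URL, timeout=timeout).status_code == 200:
                    return proxy_url
        except Exception:
            pass
        return None

    def verify_proxies(candidates: list[str]) -> list[str]:
        # Probe all candidates in parallel; drop the failures (None).
        with ThreadPoolExecutor(max_workers=8) as pool:
            return [p for p in pool.map(check_proxy, candidates) if p]

On newer httpx releases the same idea is expressed with a `mounts=` mapping of per-pattern transports rather than the `proxies=` dict.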
StreamingCommunity/Lib/Downloader/HLS/segments.py
@@ -1,538 +0,0 @@
- # 18.04.24
-
- import os
- import sys
- import time
- import queue
- import logging
- import binascii
- import threading
- import signal
- from queue import PriorityQueue
- from urllib.parse import urljoin, urlparse
- from concurrent.futures import ThreadPoolExecutor, as_completed
-
-
- # External libraries
- import httpx
- from tqdm import tqdm
-
-
- # Internal utilities
- from StreamingCommunity.Util.console import console
- from StreamingCommunity.Util.headers import get_headers, random_headers
- from StreamingCommunity.Util.color import Colors
- from StreamingCommunity.Util._jsonConfig import config_manager
- from StreamingCommunity.Util.os import os_manager
- from StreamingCommunity.Util.call_stack import get_call_stack
-
-
- # Logic class
- from ...M3U8 import (
-     M3U8_Decryption,
-     M3U8_Ts_Estimator,
-     M3U8_Parser,
-     M3U8_UrlFix
- )
- from .proxyes import main_test_proxy
-
- # Config
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
-
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
- REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
-
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
-
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
-
-
-
- # Variable
- headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-
- class M3U8_Segments:
-     def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
-         """
-         Initializes the M3U8_Segments object.
-
-         Parameters:
-             - url (str): The URL of the M3U8 playlist.
-             - tmp_folder (str): The temporary folder to store downloaded segments.
-             - is_index_url (bool): Flag indicating whether `url` is an index URL (default True).
-         """
-         self.url = url
-         self.tmp_folder = tmp_folder
-         self.is_index_url = is_index_url
-         self.expected_real_time = None
-         self.max_timeout = max_timeout
-
-         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
-         os.makedirs(self.tmp_folder, exist_ok=True)
-
-         # Util class
-         self.decryption: M3U8_Decryption = None
-         self.class_ts_estimator = M3U8_Ts_Estimator(0)
-         self.class_url_fixer = M3U8_UrlFix(url)
-
-         # Sync
-         self.queue = PriorityQueue()
-         self.stop_event = threading.Event()
-         self.downloaded_segments = set()
-         self.base_timeout = 1.0
-         self.current_timeout = 5.0
-
-         # Stopping
-         self.interrupt_flag = threading.Event()
-         self.download_interrupted = False
-
-     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
-         """
-         Retrieves the encryption key from the M3U8 playlist.
-
-         Parameters:
-             - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
-
-         Returns:
-             bytes: The encryption key in bytes.
-         """
-         headers_index = {'user-agent': get_headers()}
-
-         # Construct the full URL of the key
-         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
-         parsed_url = urlparse(key_uri)
-         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-         logging.info(f"Uri key: {key_uri}")
-
-         # Make the request to fetch the key
-         try:
-             response = httpx.get(
-                 url=key_uri,
-                 headers=headers_index,
-                 timeout=max_timeout
-             )
-             response.raise_for_status()
-
-         except Exception as e:
-             raise Exception(f"Failed to fetch key from {key_uri}: {e}")
-
-         # Convert the content of the response to hexadecimal and then to bytes
-         hex_content = binascii.hexlify(response.content).decode('utf-8')
-         byte_content = bytes.fromhex(hex_content)
-
-         return byte_content
-
-     def parse_data(self, m3u8_content: str) -> None:
-         """
-         Parses the M3U8 content to extract segment information.
-
-         Parameters:
-             - m3u8_content (str): The content of the M3U8 file.
-         """
-         m3u8_parser = M3U8_Parser()
-         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
-
-         self.expected_real_time = m3u8_parser.get_duration(return_string=False)
-         self.expected_real_time_s = m3u8_parser.duration
-
-         # Check if there is an encryption key in the playlist
-         if m3u8_parser.keys is not None:
-             try:
-
-                 # Extract the key bytes
-                 key = self.__get_key__(m3u8_parser)
-
-             except Exception as e:
-                 raise Exception(f"Failed to retrieve encryption key: {e}")
-
-             iv = m3u8_parser.keys.get('iv')
-             method = m3u8_parser.keys.get('method')
-
-             # Create a decryption object with the key and set the method
-             self.decryption = M3U8_Decryption(key, iv, method)
-
-         # Store the segment information parsed from the playlist
-         self.segments = m3u8_parser.segments
-
-         # Fix URLs that are incomplete (missing 'http')
-         for i in range(len(self.segments)):
-             segment_url = self.segments[i]
-
-             if "http" not in segment_url:
-                 self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
-                 logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
-
-         # Update the segment count for the estimator
-         self.class_ts_estimator.total_segments = len(self.segments)
-         logging.info(f"Segments to download: [{len(self.segments)}]")
-
-         # Proxy
-         if THERE_IS_PROXY_LIST:
-             console.log("[red]Starting proxy validation.")
-             self.valid_proxy = main_test_proxy(self.segments[0])
-             console.log(f"[cyan]Valid IPs: [red]{len(self.valid_proxy)}")
-
-             if len(self.valid_proxy) == 0:
-                 sys.exit(0)
-
-     def get_info(self) -> None:
-         """
-         Makes a request to the index M3U8 file to get information about segments.
-         """
-         headers_index = {'user-agent': get_headers()}
-
-         if self.is_index_url:
-
-             # Send a GET request to retrieve the index M3U8 file
-             response = httpx.get(
-                 self.url,
-                 headers=headers_index,
-                 timeout=max_timeout
-             )
-             response.raise_for_status()
-
-             # Save the M3U8 file to the temporary folder
-             path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
-             open(path_m3u8_file, "w+").write(response.text)
-
-             # Parse the text from the M3U8 index file
-             self.parse_data(response.text)
-
-         else:
-
-             # Parse the index content passed directly to the class
-             self.parse_data(self.url)
-
-     def setup_interrupt_handler(self):
-         """
-         Set up a signal handler for graceful interruption.
-         """
-         def interrupt_handler(signum, frame):
-             if not self.interrupt_flag.is_set():
-                 console.log("\n[red] Stopping download gracefully...")
-                 self.interrupt_flag.set()
-                 self.download_interrupted = True
-                 self.stop_event.set()
-
-         if threading.current_thread() is threading.main_thread():
-             signal.signal(signal.SIGINT, interrupt_handler)
-         else:
-             print("Signal handler must be set in the main thread")
-
-     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
-         """
-         Downloads a TS segment and adds it to the segment queue with retry logic.
-
-         Parameters:
-             - ts_url (str): The URL of the TS segment.
-             - index (int): The index of the segment.
-             - progress_bar (tqdm): Progress counter for tracking download progress.
-             - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
-         (The number of retries is taken from the REQUEST_MAX_RETRY config value.)
-         """
-         for attempt in range(REQUEST_MAX_RETRY):
-             if self.interrupt_flag.is_set():
-                 return
-
-             try:
-                 start_time = time.time()
-
-                 # Make request to get content
-                 if THERE_IS_PROXY_LIST:
-
-                     # Get proxy from list
-                     proxy = self.valid_proxy[index % len(self.valid_proxy)]
-                     logging.info(f"Use proxy: {proxy}")
-
-                     with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
-                         if 'key_base_url' in self.__dict__:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers={'user-agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 else:
-                     with httpx.Client(verify=REQUEST_VERIFY) as client_2:
-                         if 'key_base_url' in self.__dict__:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers={'user-agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 # Validate response and content
-                 response.raise_for_status()
-                 segment_content = response.content
-                 content_size = len(segment_content)
-                 duration = time.time() - start_time
-
-                 # Decrypt if needed and verify decrypted content
-                 if self.decryption is not None:
-                     try:
-                         segment_content = self.decryption.decrypt(segment_content)
-
-                     except Exception as e:
-                         logging.error(f"Decryption failed for segment {index}: {str(e)}")
-                         raise
-
-                 # Update progress and queue
-                 self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
-
-                 # Add the segment to the queue
-                 self.queue.put((index, segment_content))
-
-                 # Track successfully downloaded segments
-                 self.downloaded_segments.add(index)
-                 progress_bar.update(1)
-
-                 # Return on success
-                 return
-
-             except Exception as e:
-                 logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
-
-                 if attempt + 1 == REQUEST_MAX_RETRY:
-                     console.log(f"[red]Final retry failed for segment: {index}")
-                     self.queue.put((index, None))  # Marker for failed segment
-                     progress_bar.update(1)
-                     break
-
-                 sleep_time = backoff_factor * (2 ** attempt)
-                 logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
-                 time.sleep(sleep_time)
-
-     def write_segments_to_file(self):
-         """
-         Writes segments to file with additional verification.
-         """
-         buffer = {}
-         expected_index = 0
-         segments_written = set()
-
-         with open(self.tmp_file_path, 'wb') as f:
-             while not self.stop_event.is_set() or not self.queue.empty():
-                 if self.interrupt_flag.is_set():
-                     break
-
-                 try:
-                     index, segment_content = self.queue.get(timeout=self.current_timeout)
-
-                     # Successful queue retrieval: reduce timeout
-                     self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
-
-                     # Handle failed segments
-                     if segment_content is None:
-                         if index == expected_index:
-                             expected_index += 1
-                         continue
-
-                     # Write segment if it's the next expected one
-                     if index == expected_index:
-                         f.write(segment_content)
-                         segments_written.add(index)
-                         f.flush()
-                         expected_index += 1
-
-                         # Write any buffered segments that are now in order
-                         while expected_index in buffer:
-                             next_segment = buffer.pop(expected_index)
-
-                             if next_segment is not None:
-                                 f.write(next_segment)
-                                 segments_written.add(expected_index)
-                                 f.flush()
-
-                             expected_index += 1
-
-                     else:
-                         buffer[index] = segment_content
-
-                 except queue.Empty:
-                     self.current_timeout = min(self.max_timeout, self.current_timeout * 1.5)
-
-                     if self.stop_event.is_set():
-                         break
-
-                 except Exception as e:
-                     logging.error(f"Error writing segment {index}: {str(e)}")
-
-     def download_streams(self, add_desc):
-         """
-         Downloads all TS segments in parallel and writes them to a file.
-
-         Parameters:
-             - add_desc (str): Additional description for the progress bar.
-         """
-         self.setup_interrupt_handler()
-
-         # Get the site config from the previous call stack
-         frames = get_call_stack()
-         logging.info(f"Extract info from: {frames}")
-         config_site = str(frames[-4]['folder_base'])
-         logging.info(f"Use frame: {frames[-1]}")
-
-         # Workers to use for downloading
-         TQDM_MAX_WORKER = 0
-
-         # Select worker counts from the calling site's config.
-         try:
-             VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
-         except:
-             #VIDEO_WORKERS = os.cpu_count()
-             VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
-
-         try:
-             AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
-         except:
-             #AUDIO_WORKERS = os.cpu_count()
-             AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
-
-         # Different worker counts for audio and video
-         if "video" in str(add_desc):
-             TQDM_MAX_WORKER = VIDEO_WORKERS
-         if "audio" in str(add_desc):
-             TQDM_MAX_WORKER = AUDIO_WORKERS
-
-         # Custom bar for mobile and pc
-         if TQDM_USE_LARGE_BAR:
-             bar_format = (
-                 f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.MAGENTA}{{bar}} "
-                 f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
-                 f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-         else:
-             bar_format = (
-                 f"{Colors.YELLOW}Proc{Colors.WHITE}: "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.WHITE}| "
-                 f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-
-         # Create progress bar
-         progress_bar = tqdm(
-             total=len(self.segments),
-             unit='s',
-             ascii='░▒█',
-             bar_format=bar_format,
-             mininterval=0.05
-         )
-
-         try:
-
-             # Start writer thread
-             writer_thread = threading.Thread(target=self.write_segments_to_file)
-             writer_thread.daemon = True
-             writer_thread.start()
-
-             # Configure workers and delay
-             max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
-             delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
-
-             # Download segments with completion verification
-             with ThreadPoolExecutor(max_workers=max_workers) as executor:
-                 futures = []
-                 for index, segment_url in enumerate(self.segments):
-                     # Check for interrupt before submitting each task
-                     if self.interrupt_flag.is_set():
-                         break
-
-                     time.sleep(delay)
-                     futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
-
-                 # Wait for futures with interrupt handling
-                 for future in as_completed(futures):
-                     if self.interrupt_flag.is_set():
-                         break
-                     try:
-                         future.result()
-                     except Exception as e:
-                         logging.error(f"Error in download thread: {str(e)}")
-
-                 # Retry missing segments if the download was not interrupted
-                 if not self.interrupt_flag.is_set():
-                     total_segments = len(self.segments)
-                     completed_segments = len(self.downloaded_segments)
-
-                     if completed_segments < total_segments:
-                         missing_segments = set(range(total_segments)) - self.downloaded_segments
-                         logging.warning(f"Missing segments: {sorted(missing_segments)}")
-
-                         # Retry missing segments with interrupt check
-                         for index in missing_segments:
-                             if self.interrupt_flag.is_set():
-                                 break
-
-                             try:
-                                 self.make_requests_stream(self.segments[index], index, progress_bar)
-
-                             except Exception as e:
-                                 logging.error(f"Failed to retry segment {index}: {str(e)}")
-
-         except Exception as e:
-             logging.error(f"Download failed: {str(e)}")
-             raise
-
-         finally:
-
-             # Clean up resources
-             self.stop_event.set()
-             writer_thread.join(timeout=30)
-             progress_bar.close()
-
-             # Check if download was interrupted
-             if self.download_interrupted:
-                 console.log("[red] Download was manually stopped.")
-
-                 # Optional: Delete partial download
-                 if os.path.exists(self.tmp_file_path):
-                     os.remove(self.tmp_file_path)
-                 sys.exit(0)
-
-             # Clean up
-             self.stop_event.set()
-             writer_thread.join(timeout=30)
-             progress_bar.close()
-
-             # Final verification (use len(self.segments): total_segments may be unset if an early exception reached this block)
-             final_completion = (len(self.downloaded_segments) / len(self.segments)) * 100
-             if final_completion < 99.9:  # Less than 99.9% complete
-                 missing = set(range(len(self.segments))) - self.downloaded_segments
-                 raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
-
-             # Verify output file
-             if not os.path.exists(self.tmp_file_path):
-                 raise Exception("Output file missing")
-
-             file_size = os.path.getsize(self.tmp_file_path)
-             if file_size == 0:
-                 raise Exception("Output file is empty")
-
-             logging.info(f"Download completed. File size: {file_size} bytes")