StreamingCommunity 1.7.6__py3-none-any.whl → 1.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; consult the package registry's advisory page for more details.

Files changed (101)
  1. StreamingCommunity/{Src/Api → Api}/Player/Helper/Vixcloud/js_parser.py +4 -1
  2. StreamingCommunity/{Src/Api → Api}/Player/Helper/Vixcloud/util.py +166 -166
  3. StreamingCommunity/{Src/Api → Api}/Player/ddl.py +89 -89
  4. StreamingCommunity/{Src/Api → Api}/Player/maxstream.py +151 -151
  5. StreamingCommunity/{Src/Api → Api}/Player/supervideo.py +193 -193
  6. StreamingCommunity/{Src/Api → Api}/Player/vixcloud.py +224 -212
  7. StreamingCommunity/{Src/Api → Api}/Site/1337xx/__init__.py +50 -50
  8. StreamingCommunity/{Src/Api → Api}/Site/1337xx/costant.py +15 -15
  9. StreamingCommunity/{Src/Api → Api}/Site/1337xx/site.py +83 -83
  10. StreamingCommunity/{Src/Api → Api}/Site/1337xx/title.py +66 -66
  11. StreamingCommunity/{Src/Api → Api}/Site/altadefinizione/__init__.py +50 -50
  12. StreamingCommunity/{Src/Api/Site/mostraguarda → Api/Site/altadefinizione}/costant.py +15 -15
  13. StreamingCommunity/{Src/Api → Api}/Site/altadefinizione/film.py +69 -69
  14. StreamingCommunity/{Src/Api → Api}/Site/altadefinizione/site.py +86 -86
  15. StreamingCommunity/{Src/Api → Api}/Site/animeunity/__init__.py +50 -50
  16. StreamingCommunity/{Src/Api/Site/altadefinizione → Api/Site/animeunity}/costant.py +15 -15
  17. StreamingCommunity/{Src/Api → Api}/Site/animeunity/film_serie.py +130 -131
  18. StreamingCommunity/{Src/Api → Api}/Site/animeunity/site.py +164 -164
  19. StreamingCommunity/{Src/Api → Api}/Site/animeunity/util/ScrapeSerie.py +3 -3
  20. StreamingCommunity/{Src/Api → Api}/Site/bitsearch/__init__.py +51 -51
  21. StreamingCommunity/{Src/Api → Api}/Site/bitsearch/costant.py +15 -15
  22. StreamingCommunity/{Src/Api → Api}/Site/bitsearch/site.py +84 -84
  23. StreamingCommunity/{Src/Api → Api}/Site/bitsearch/title.py +47 -47
  24. StreamingCommunity/{Src/Api → Api}/Site/cb01new/__init__.py +51 -51
  25. StreamingCommunity/{Src/Api → Api}/Site/cb01new/costant.py +15 -15
  26. StreamingCommunity/{Src/Api → Api}/Site/cb01new/film.py +69 -69
  27. StreamingCommunity/{Src/Api → Api}/Site/cb01new/site.py +74 -74
  28. StreamingCommunity/{Src/Api → Api}/Site/ddlstreamitaly/__init__.py +57 -57
  29. StreamingCommunity/{Src/Api → Api}/Site/ddlstreamitaly/costant.py +16 -16
  30. StreamingCommunity/{Src/Api → Api}/Site/ddlstreamitaly/series.py +141 -142
  31. StreamingCommunity/{Src/Api → Api}/Site/ddlstreamitaly/site.py +92 -92
  32. StreamingCommunity/{Src/Api → Api}/Site/ddlstreamitaly/util/ScrapeSerie.py +84 -82
  33. StreamingCommunity/{Src/Api → Api}/Site/guardaserie/__init__.py +52 -52
  34. StreamingCommunity/{Src/Api/Site/piratebays → Api/Site/guardaserie}/costant.py +15 -15
  35. StreamingCommunity/{Src/Api → Api}/Site/guardaserie/series.py +195 -195
  36. StreamingCommunity/{Src/Api → Api}/Site/guardaserie/site.py +84 -84
  37. StreamingCommunity/{Src/Api → Api}/Site/guardaserie/util/ScrapeSerie.py +110 -110
  38. StreamingCommunity/{Src/Api → Api}/Site/mostraguarda/__init__.py +48 -48
  39. StreamingCommunity/{Src/Api/Site/animeunity → Api/Site/mostraguarda}/costant.py +15 -15
  40. StreamingCommunity/{Src/Api → Api}/Site/mostraguarda/film.py +94 -94
  41. StreamingCommunity/{Src/Api → Api}/Site/piratebays/__init__.py +50 -50
  42. StreamingCommunity/{Src/Api/Site/guardaserie → Api/Site/piratebays}/costant.py +15 -15
  43. StreamingCommunity/{Src/Api → Api}/Site/piratebays/site.py +88 -88
  44. StreamingCommunity/{Src/Api → Api}/Site/piratebays/title.py +45 -45
  45. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/__init__.py +55 -55
  46. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/costant.py +15 -15
  47. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/film.py +70 -70
  48. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/series.py +205 -203
  49. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/site.py +125 -125
  50. StreamingCommunity/{Src/Api → Api}/Site/streamingcommunity/util/ScrapeSerie.py +3 -3
  51. StreamingCommunity/{Src/Api → Api}/Template/Class/SearchType.py +101 -101
  52. StreamingCommunity/{Src/Api → Api}/Template/Util/__init__.py +4 -4
  53. StreamingCommunity/{Src/Api → Api}/Template/Util/get_domain.py +137 -137
  54. StreamingCommunity/{Src/Api → Api}/Template/Util/manage_ep.py +153 -153
  55. StreamingCommunity/{Src/Api → Api}/Template/Util/recall_search.py +37 -37
  56. StreamingCommunity/Api/Template/__init__.py +3 -0
  57. StreamingCommunity/{Src/Api → Api}/Template/site.py +87 -87
  58. StreamingCommunity/{Src/Lib → Lib}/Downloader/HLS/downloader.py +968 -968
  59. StreamingCommunity/{Src/Lib → Lib}/Downloader/HLS/proxyes.py +110 -110
  60. StreamingCommunity/{Src/Lib → Lib}/Downloader/HLS/segments.py +538 -540
  61. StreamingCommunity/{Src/Lib → Lib}/Downloader/MP4/downloader.py +156 -156
  62. StreamingCommunity/{Src/Lib → Lib}/Downloader/TOR/downloader.py +222 -222
  63. StreamingCommunity/{Src/Lib → Lib}/Downloader/__init__.py +4 -4
  64. StreamingCommunity/{Src/Lib → Lib}/Driver/driver_1.py +76 -76
  65. StreamingCommunity/{Src/Lib → Lib}/FFmpeg/__init__.py +4 -4
  66. StreamingCommunity/{Src/Lib → Lib}/FFmpeg/capture.py +170 -170
  67. StreamingCommunity/{Src/Lib → Lib}/FFmpeg/command.py +292 -292
  68. StreamingCommunity/{Src/Lib → Lib}/FFmpeg/util.py +241 -241
  69. StreamingCommunity/{Src/Lib → Lib}/M3U8/__init__.py +5 -5
  70. StreamingCommunity/{Src/Lib → Lib}/M3U8/decryptor.py +164 -129
  71. StreamingCommunity/{Src/Lib → Lib}/M3U8/estimator.py +175 -172
  72. StreamingCommunity/{Src/Lib → Lib}/M3U8/parser.py +666 -666
  73. StreamingCommunity/{Src/Lib → Lib}/M3U8/url_fixer.py +51 -51
  74. StreamingCommunity/Lib/TMBD/__init__.py +2 -0
  75. StreamingCommunity/{Src/Lib → Lib}/TMBD/obj_tmbd.py +39 -39
  76. StreamingCommunity/{Src/Lib → Lib}/TMBD/tmdb.py +345 -345
  77. StreamingCommunity/{Src/Upload → Upload}/update.py +68 -64
  78. StreamingCommunity/{Src/Upload → Upload}/version.py +5 -5
  79. StreamingCommunity/{Src/Util → Util}/_jsonConfig.py +204 -204
  80. StreamingCommunity/{Src/Util → Util}/call_stack.py +42 -42
  81. StreamingCommunity/{Src/Util → Util}/color.py +20 -20
  82. StreamingCommunity/{Src/Util → Util}/console.py +12 -12
  83. StreamingCommunity/Util/ffmpeg_installer.py +275 -0
  84. StreamingCommunity/{Src/Util → Util}/headers.py +147 -147
  85. StreamingCommunity/{Src/Util → Util}/logger.py +53 -53
  86. StreamingCommunity/{Src/Util → Util}/message.py +46 -46
  87. StreamingCommunity/{Src/Util → Util}/os.py +514 -417
  88. StreamingCommunity/{Src/Util → Util}/table.py +163 -163
  89. StreamingCommunity/run.py +202 -196
  90. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.9.1.dist-info}/METADATA +126 -60
  91. StreamingCommunity-1.9.1.dist-info/RECORD +95 -0
  92. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.9.1.dist-info}/WHEEL +1 -1
  93. StreamingCommunity/Src/Api/Site/animeunity/anime.py +0 -126
  94. StreamingCommunity/Src/Api/Site/ddlstreamitaly/Player/ScrapeSerie.py +0 -83
  95. StreamingCommunity/Src/Api/Site/guardaserie/Player/ScrapeSerie.py +0 -110
  96. StreamingCommunity/Src/Api/Template/__init__.py +0 -3
  97. StreamingCommunity/Src/Lib/TMBD/__init__.py +0 -2
  98. StreamingCommunity-1.7.6.dist-info/RECORD +0 -97
  99. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.9.1.dist-info}/LICENSE +0 -0
  100. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.9.1.dist-info}/entry_points.txt +0 -0
  101. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.9.1.dist-info}/top_level.txt +0 -0
@@ -1,540 +1,538 @@
1
- # 18.04.24
2
-
3
- import os
4
- import sys
5
- import time
6
- import queue
7
- import logging
8
- import binascii
9
- import threading
10
- import signal
11
- from queue import PriorityQueue
12
- from urllib.parse import urljoin, urlparse
13
- from concurrent.futures import ThreadPoolExecutor, as_completed
14
-
15
-
16
- # External libraries
17
- import httpx
18
- from tqdm import tqdm
19
-
20
-
21
- # Internal utilities
22
- from StreamingCommunity.Src.Util.console import console
23
- from StreamingCommunity.Src.Util.headers import get_headers, random_headers
24
- from StreamingCommunity.Src.Util.color import Colors
25
- from StreamingCommunity.Src.Util._jsonConfig import config_manager
26
- from StreamingCommunity.Src.Util.os import os_manager
27
- from StreamingCommunity.Src.Util.call_stack import get_call_stack
28
-
29
-
30
- # Logic class
31
- from ...M3U8 import (
32
- M3U8_Decryption,
33
- M3U8_Ts_Estimator,
34
- M3U8_Parser,
35
- M3U8_UrlFix
36
- )
37
- from .proxyes import main_test_proxy
38
-
39
- # Config
40
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
41
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
42
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
43
- REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
44
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
45
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
46
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
47
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
48
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
49
-
50
-
51
- # Variable
52
- headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
53
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
54
-
55
-
56
-
57
- class M3U8_Segments:
58
- def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
59
- """
60
- Initializes the M3U8_Segments object.
61
-
62
- Parameters:
63
- - url (str): The URL of the M3U8 playlist.
64
- - tmp_folder (str): The temporary folder to store downloaded segments.
65
- - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
66
- """
67
- self.url = url
68
- self.tmp_folder = tmp_folder
69
- self.is_index_url = is_index_url
70
- self.expected_real_time = None
71
-
72
- self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
73
- os.makedirs(self.tmp_folder, exist_ok=True)
74
-
75
- # Util class
76
- self.decryption: M3U8_Decryption = None
77
- self.class_ts_estimator = M3U8_Ts_Estimator(0)
78
- self.class_url_fixer = M3U8_UrlFix(url)
79
-
80
- # Sync
81
- self.queue = PriorityQueue()
82
- self.stop_event = threading.Event()
83
- self.downloaded_segments = set()
84
-
85
- # Stopping
86
- self.interrupt_flag = threading.Event()
87
- self.download_interrupted = False
88
-
89
- def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
90
- """
91
- Retrieves the encryption key from the M3U8 playlist.
92
-
93
- Parameters:
94
- - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
95
-
96
- Returns:
97
- bytes: The encryption key in bytes.
98
- """
99
- headers_index = {'user-agent': get_headers()}
100
-
101
- # Construct the full URL of the key
102
- key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
103
- parsed_url = urlparse(key_uri)
104
- self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
105
- logging.info(f"Uri key: {key_uri}")
106
-
107
- # Make request to get porxy
108
- try:
109
- response = httpx.get(
110
- url=key_uri,
111
- headers=headers_index,
112
- timeout=max_timeout
113
- )
114
- response.raise_for_status()
115
-
116
- except Exception as e:
117
- raise Exception(f"Failed to fetch key from {key_uri}: {e}")
118
-
119
- # Convert the content of the response to hexadecimal and then to bytes
120
- hex_content = binascii.hexlify(response.content).decode('utf-8')
121
- byte_content = bytes.fromhex(hex_content)
122
-
123
- return byte_content
124
-
125
- def parse_data(self, m3u8_content: str) -> None:
126
- """
127
- Parses the M3U8 content to extract segment information.
128
-
129
- Parameters:
130
- - m3u8_content (str): The content of the M3U8 file.
131
- """
132
- m3u8_parser = M3U8_Parser()
133
- m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
134
-
135
- self.expected_real_time = m3u8_parser.get_duration(return_string=False)
136
- self.expected_real_time_s = m3u8_parser.duration
137
-
138
- # Check if there is an encryption key in the playlis
139
- if m3u8_parser.keys is not None:
140
- try:
141
-
142
- # Extract byte from the key
143
- key = self.__get_key__(m3u8_parser)
144
-
145
- except Exception as e:
146
- raise Exception(f"Failed to retrieve encryption key {e}.")
147
-
148
- iv = m3u8_parser.keys.get('iv')
149
- method = m3u8_parser.keys.get('method')
150
-
151
- # Create a decryption object with the key and set the method
152
- self.decryption = M3U8_Decryption(key, iv, method)
153
-
154
- # Store the segment information parsed from the playlist
155
- self.segments = m3u8_parser.segments
156
-
157
- # Fix URL if it is incomplete (missing 'http')
158
- for i in range(len(self.segments)):
159
- segment_url = self.segments[i]
160
-
161
- if "http" not in segment_url:
162
- self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
163
- logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
164
-
165
- # Update segments for estimator
166
- self.class_ts_estimator.total_segments = len(self.segments)
167
- logging.info(f"Segmnets to download: [{len(self.segments)}]")
168
-
169
- # Proxy
170
- if THERE_IS_PROXY_LIST:
171
- console.log("[red]Start validation proxy.")
172
- self.valid_proxy = main_test_proxy(self.segments[0])
173
- console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
174
-
175
- if len(self.valid_proxy) == 0:
176
- sys.exit(0)
177
-
178
- def get_info(self) -> None:
179
- """
180
- Makes a request to the index M3U8 file to get information about segments.
181
- """
182
- headers_index = {'user-agent': get_headers()}
183
-
184
- if self.is_index_url:
185
-
186
- # Send a GET request to retrieve the index M3U8 file
187
- response = httpx.get(
188
- self.url,
189
- headers=headers_index,
190
- timeout=max_timeout
191
- )
192
- response.raise_for_status()
193
-
194
- # Save the M3U8 file to the temporary folder
195
- path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
196
- open(path_m3u8_file, "w+").write(response.text)
197
-
198
- # Parse the text from the M3U8 index file
199
- self.parse_data(response.text)
200
-
201
- else:
202
-
203
- # Parser data of content of index pass in input to class
204
- self.parse_data(self.url)
205
-
206
- def setup_interrupt_handler(self):
207
- """
208
- Set up a signal handler for graceful interruption.
209
- """
210
- def interrupt_handler(signum, frame):
211
- if not self.interrupt_flag.is_set():
212
- console.log("\n[red] Stopping download gracefully...")
213
- self.interrupt_flag.set()
214
- self.download_interrupted = True
215
- self.stop_event.set()
216
-
217
- signal.signal(signal.SIGINT, interrupt_handler)
218
-
219
- def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, retries: int = 3, backoff_factor: float = 1.5) -> None:
220
- """
221
- Downloads a TS segment and adds it to the segment queue with retry logic.
222
-
223
- Parameters:
224
- - ts_url (str): The URL of the TS segment.
225
- - index (int): The index of the segment.
226
- - progress_bar (tqdm): Progress counter for tracking download progress.
227
- - retries (int): The number of times to retry on failure (default is 3).
228
- - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
229
- """
230
- if self.interrupt_flag.is_set():
231
- return
232
-
233
- need_verify = REQUEST_VERIFY
234
- min_segment_size = 100 # Minimum acceptable size for a TS segment in bytes
235
-
236
- for attempt in range(retries):
237
- if self.interrupt_flag.is_set():
238
- return
239
-
240
- try:
241
- start_time = time.time()
242
-
243
- # Make request to get content
244
- if THERE_IS_PROXY_LIST:
245
-
246
- # Get proxy from list
247
- proxy = self.valid_proxy[index % len(self.valid_proxy)]
248
- logging.info(f"Use proxy: {proxy}")
249
-
250
- with httpx.Client(proxies=proxy, verify=need_verify) as client:
251
- if 'key_base_url' in self.__dict__:
252
- response = client.get(
253
- url=ts_url,
254
- headers=random_headers(self.key_base_url),
255
- timeout=max_timeout,
256
- follow_redirects=True
257
- )
258
-
259
- else:
260
- response = client.get(
261
- url=ts_url,
262
- headers={'user-agent': get_headers()},
263
- timeout=max_timeout,
264
- follow_redirects=True
265
- )
266
-
267
- else:
268
- with httpx.Client(verify=need_verify) as client_2:
269
- if 'key_base_url' in self.__dict__:
270
- response = client_2.get(
271
- url=ts_url,
272
- headers=random_headers(self.key_base_url),
273
- timeout=max_timeout,
274
- follow_redirects=True
275
- )
276
-
277
- else:
278
- response = client_2.get(
279
- url=ts_url,
280
- headers={'user-agent': get_headers()},
281
- timeout=max_timeout,
282
- follow_redirects=True
283
- )
284
-
285
- # Validate response and content
286
- response.raise_for_status()
287
- segment_content = response.content
288
- content_size = len(segment_content)
289
-
290
- # Check if segment is too small (possibly corrupted or empty)
291
- if content_size < min_segment_size:
292
- raise httpx.RequestError(f"Segment {index} too small ({content_size} bytes)")
293
-
294
- duration = time.time() - start_time
295
-
296
- # Decrypt if needed and verify decrypted content
297
- if self.decryption is not None:
298
- try:
299
- segment_content = self.decryption.decrypt(segment_content)
300
- if len(segment_content) < min_segment_size:
301
- raise Exception(f"Decrypted segment {index} too small ({len(segment_content)} bytes)")
302
-
303
- except Exception as e:
304
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
305
- raise
306
-
307
- # Update progress and queue
308
- self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
309
-
310
- # Add the segment to the queue
311
- self.queue.put((index, segment_content))
312
-
313
- # Track successfully downloaded segments
314
- self.downloaded_segments.add(index)
315
- progress_bar.update(1)
316
-
317
- # Break out of the loop on success
318
- return
319
-
320
- except Exception as e:
321
- #logging.error(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
322
-
323
- if attempt + 1 == retries:
324
- #logging.error(f"Final retry failed for segment {index}")
325
- self.queue.put((index, None)) # Marker for failed segment
326
- progress_bar.update(1)
327
- break
328
-
329
- sleep_time = backoff_factor * (2 ** attempt)
330
- logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
331
- time.sleep(sleep_time)
332
-
333
- def write_segments_to_file(self):
334
- """
335
- Writes segments to file with additional verification.
336
- """
337
- with open(self.tmp_file_path, 'wb') as f:
338
- expected_index = 0
339
- buffer = {}
340
- total_written = 0
341
- segments_written = set()
342
-
343
- while not self.stop_event.is_set() or not self.queue.empty():
344
-
345
- if self.interrupt_flag.is_set():
346
- break
347
-
348
- try:
349
- index, segment_content = self.queue.get(timeout=1)
350
-
351
- # Handle failed segments
352
- if segment_content is None:
353
- if index == expected_index:
354
- expected_index += 1
355
- continue
356
-
357
- # Write segment if it's the next expected one
358
- if index == expected_index:
359
- f.write(segment_content)
360
- total_written += len(segment_content)
361
- segments_written.add(index)
362
- f.flush()
363
- expected_index += 1
364
-
365
- # Write any buffered segments that are now in order
366
- while expected_index in buffer:
367
- next_segment = buffer.pop(expected_index)
368
- if next_segment is not None:
369
- f.write(next_segment)
370
- total_written += len(next_segment)
371
- segments_written.add(expected_index)
372
- f.flush()
373
- expected_index += 1
374
- else:
375
- buffer[index] = segment_content
376
-
377
- except queue.Empty:
378
- if self.stop_event.is_set():
379
- break
380
- continue
381
- except Exception as e:
382
- logging.error(f"Error writing segment {index}: {str(e)}")
383
- continue
384
-
385
- # Final verification
386
- if total_written == 0:
387
- raise Exception("No data written to file")
388
-
389
- def download_streams(self, add_desc):
390
- """
391
- Downloads all TS segments in parallel and writes them to a file.
392
-
393
- Parameters:
394
- - add_desc (str): Additional description for the progress bar.
395
- """
396
- self.setup_interrupt_handler()
397
-
398
- # Get config site from prev stack
399
- frames = get_call_stack()
400
- logging.info(f"Extract info from: {frames}")
401
- config_site = str(frames[-4]['folder_base'])
402
- logging.info(f"Use frame: {frames[-1]}")
403
-
404
- # Workers to use for downloading
405
- TQDM_MAX_WORKER = 0
406
-
407
- # Select audio workers from folder of frames stack prev call.
408
- try:
409
- VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
410
- except:
411
- #VIDEO_WORKERS = os.cpu_count()
412
- VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
413
-
414
- try:
415
- AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
416
- except:
417
- #AUDIO_WORKERS = os.cpu_count()
418
- AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
419
-
420
- # Differnt workers for audio and video
421
- if "video" in str(add_desc):
422
- TQDM_MAX_WORKER = VIDEO_WORKERS
423
- if "audio" in str(add_desc):
424
- TQDM_MAX_WORKER = AUDIO_WORKERS
425
-
426
- # Custom bar for mobile and pc
427
- if TQDM_USE_LARGE_BAR:
428
- bar_format = (
429
- f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
430
- f"{Colors.RED}{{percentage:.2f}}% "
431
- f"{Colors.MAGENTA}{{bar}} "
432
- f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
433
- f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
434
- )
435
- else:
436
- bar_format = (
437
- f"{Colors.YELLOW}Proc{Colors.WHITE}: "
438
- f"{Colors.RED}{{percentage:.2f}}% "
439
- f"{Colors.WHITE}| "
440
- f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
441
- )
442
-
443
- # Create progress bar
444
- progress_bar = tqdm(
445
- total=len(self.segments),
446
- unit='s',
447
- ascii='░▒█',
448
- bar_format=bar_format,
449
- mininterval=0.05
450
- )
451
-
452
- try:
453
-
454
- # Start writer thread
455
- writer_thread = threading.Thread(target=self.write_segments_to_file)
456
- writer_thread.daemon = True
457
- writer_thread.start()
458
-
459
- # Configure workers and delay
460
- max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
461
- delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
462
-
463
- # Download segments with completion verification
464
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
465
- futures = []
466
- for index, segment_url in enumerate(self.segments):
467
- # Check for interrupt before submitting each task
468
- if self.interrupt_flag.is_set():
469
- break
470
-
471
- time.sleep(delay)
472
- futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
473
-
474
- # Wait for futures with interrupt handling
475
- for future in as_completed(futures):
476
- if self.interrupt_flag.is_set():
477
- break
478
- try:
479
- future.result()
480
- except Exception as e:
481
- logging.error(f"Error in download thread: {str(e)}")
482
-
483
- # Interrupt handling for missing segments
484
- if not self.interrupt_flag.is_set():
485
- total_segments = len(self.segments)
486
- completed_segments = len(self.downloaded_segments)
487
-
488
- if completed_segments < total_segments:
489
- missing_segments = set(range(total_segments)) - self.downloaded_segments
490
- logging.warning(f"Missing segments: {sorted(missing_segments)}")
491
-
492
- # Retry missing segments with interrupt check
493
- for index in missing_segments:
494
- if self.interrupt_flag.is_set():
495
- break
496
- try:
497
- self.make_requests_stream(self.segments[index], index, progress_bar)
498
- except Exception as e:
499
- logging.error(f"Failed to retry segment {index}: {str(e)}")
500
-
501
- except Exception as e:
502
- logging.error(f"Download failed: {str(e)}")
503
- raise
504
-
505
- finally:
506
-
507
- # Clean up resources
508
- self.stop_event.set()
509
- writer_thread.join(timeout=30)
510
- progress_bar.close()
511
-
512
- # Check if download was interrupted
513
- if self.download_interrupted:
514
- console.log("[red] Download was manually stopped.")
515
-
516
- # Optional: Delete partial download
517
- if os.path.exists(self.tmp_file_path):
518
- os.remove(self.tmp_file_path)
519
- sys.exit(0)
520
-
521
- # Clean up
522
- self.stop_event.set()
523
- writer_thread.join(timeout=30)
524
- progress_bar.close()
525
-
526
- # Final verification
527
- final_completion = (len(self.downloaded_segments) / total_segments) * 100
528
- if final_completion < 99.9: # Less than 99.9% complete
529
- missing = set(range(total_segments)) - self.downloaded_segments
530
- raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
531
-
532
- # Verify output file
533
- if not os.path.exists(self.tmp_file_path):
534
- raise Exception("Output file missing")
535
-
536
- file_size = os.path.getsize(self.tmp_file_path)
537
- if file_size == 0:
538
- raise Exception("Output file is empty")
539
-
540
- logging.info(f"Download completed. File size: {file_size} bytes")
1
+ # 18.04.24
2
+
3
+ import os
4
+ import sys
5
+ import time
6
+ import queue
7
+ import logging
8
+ import binascii
9
+ import threading
10
+ import signal
11
+ from queue import PriorityQueue
12
+ from urllib.parse import urljoin, urlparse
13
+ from concurrent.futures import ThreadPoolExecutor, as_completed
14
+
15
+
16
+ # External libraries
17
+ import httpx
18
+ from tqdm import tqdm
19
+
20
+
21
+ # Internal utilities
22
+ from StreamingCommunity.Util.console import console
23
+ from StreamingCommunity.Util.headers import get_headers, random_headers
24
+ from StreamingCommunity.Util.color import Colors
25
+ from StreamingCommunity.Util._jsonConfig import config_manager
26
+ from StreamingCommunity.Util.os import os_manager
27
+ from StreamingCommunity.Util.call_stack import get_call_stack
28
+
29
+
30
+ # Logic class
31
+ from ...M3U8 import (
32
+ M3U8_Decryption,
33
+ M3U8_Ts_Estimator,
34
+ M3U8_Parser,
35
+ M3U8_UrlFix
36
+ )
37
+ from .proxyes import main_test_proxy
38
+
39
+ # Config
40
+ TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
41
+ TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
42
+
43
+ REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
44
+ REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
45
+
46
+ THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
47
+ PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
48
+ PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
49
+
50
+ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
51
+ DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
52
+
53
+
54
+
55
+ # Variable
56
+ headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
57
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
58
+
59
+
60
+
61
+ class M3U8_Segments:
62
+ def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
63
+ """
64
+ Initializes the M3U8_Segments object.
65
+
66
+ Parameters:
67
+ - url (str): The URL of the M3U8 playlist.
68
+ - tmp_folder (str): The temporary folder to store downloaded segments.
69
+ - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
70
+ """
71
+ self.url = url
72
+ self.tmp_folder = tmp_folder
73
+ self.is_index_url = is_index_url
74
+ self.expected_real_time = None
75
+ self.max_timeout = max_timeout
76
+
77
+ self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
78
+ os.makedirs(self.tmp_folder, exist_ok=True)
79
+
80
+ # Util class
81
+ self.decryption: M3U8_Decryption = None
82
+ self.class_ts_estimator = M3U8_Ts_Estimator(0)
83
+ self.class_url_fixer = M3U8_UrlFix(url)
84
+
85
+ # Sync
86
+ self.queue = PriorityQueue()
87
+ self.stop_event = threading.Event()
88
+ self.downloaded_segments = set()
89
+ self.base_timeout = 1.0
90
+ self.current_timeout = 5.0
91
+
92
+ # Stopping
93
+ self.interrupt_flag = threading.Event()
94
+ self.download_interrupted = False
95
+
96
+ def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
97
+ """
98
+ Retrieves the encryption key from the M3U8 playlist.
99
+
100
+ Parameters:
101
+ - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
102
+
103
+ Returns:
104
+ bytes: The encryption key in bytes.
105
+ """
106
+ headers_index = {'user-agent': get_headers()}
107
+
108
+ # Construct the full URL of the key
109
+ key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
110
+ parsed_url = urlparse(key_uri)
111
+ self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
112
+ logging.info(f"Uri key: {key_uri}")
113
+
114
+ # Make request to get porxy
115
+ try:
116
+ response = httpx.get(
117
+ url=key_uri,
118
+ headers=headers_index,
119
+ timeout=max_timeout
120
+ )
121
+ response.raise_for_status()
122
+
123
+ except Exception as e:
124
+ raise Exception(f"Failed to fetch key from {key_uri}: {e}")
125
+
126
+ # Convert the content of the response to hexadecimal and then to bytes
127
+ hex_content = binascii.hexlify(response.content).decode('utf-8')
128
+ byte_content = bytes.fromhex(hex_content)
129
+
130
+ return byte_content
131
+
132
+ def parse_data(self, m3u8_content: str) -> None:
133
+ """
134
+ Parses the M3U8 content to extract segment information.
135
+
136
+ Parameters:
137
+ - m3u8_content (str): The content of the M3U8 file.
138
+ """
139
+ m3u8_parser = M3U8_Parser()
140
+ m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
141
+
142
+ self.expected_real_time = m3u8_parser.get_duration(return_string=False)
143
+ self.expected_real_time_s = m3u8_parser.duration
144
+
145
+ # Check if there is an encryption key in the playlis
146
+ if m3u8_parser.keys is not None:
147
+ try:
148
+
149
+ # Extract byte from the key
150
+ key = self.__get_key__(m3u8_parser)
151
+
152
+ except Exception as e:
153
+ raise Exception(f"Failed to retrieve encryption key {e}.")
154
+
155
+ iv = m3u8_parser.keys.get('iv')
156
+ method = m3u8_parser.keys.get('method')
157
+
158
+ # Create a decryption object with the key and set the method
159
+ self.decryption = M3U8_Decryption(key, iv, method)
160
+
161
+ # Store the segment information parsed from the playlist
162
+ self.segments = m3u8_parser.segments
163
+
164
+ # Fix URL if it is incomplete (missing 'http')
165
+ for i in range(len(self.segments)):
166
+ segment_url = self.segments[i]
167
+
168
+ if "http" not in segment_url:
169
+ self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
170
+ logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
171
+
172
+ # Update segments for estimator
173
+ self.class_ts_estimator.total_segments = len(self.segments)
174
+ logging.info(f"Segmnets to download: [{len(self.segments)}]")
175
+
176
+ # Proxy
177
+ if THERE_IS_PROXY_LIST:
178
+ console.log("[red]Start validation proxy.")
179
+ self.valid_proxy = main_test_proxy(self.segments[0])
180
+ console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
181
+
182
+ if len(self.valid_proxy) == 0:
183
+ sys.exit(0)
184
+
185
+ def get_info(self) -> None:
186
+ """
187
+ Makes a request to the index M3U8 file to get information about segments.
188
+ """
189
+ headers_index = {'user-agent': get_headers()}
190
+
191
+ if self.is_index_url:
192
+
193
+ # Send a GET request to retrieve the index M3U8 file
194
+ response = httpx.get(
195
+ self.url,
196
+ headers=headers_index,
197
+ timeout=max_timeout
198
+ )
199
+ response.raise_for_status()
200
+
201
+ # Save the M3U8 file to the temporary folder
202
+ path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
203
+ open(path_m3u8_file, "w+").write(response.text)
204
+
205
+ # Parse the text from the M3U8 index file
206
+ self.parse_data(response.text)
207
+
208
+ else:
209
+
210
+ # Parser data of content of index pass in input to class
211
+ self.parse_data(self.url)
212
+
213
+ def setup_interrupt_handler(self):
214
+ """
215
+ Set up a signal handler for graceful interruption.
216
+ """
217
+ def interrupt_handler(signum, frame):
218
+ if not self.interrupt_flag.is_set():
219
+ console.log("\n[red] Stopping download gracefully...")
220
+ self.interrupt_flag.set()
221
+ self.download_interrupted = True
222
+ self.stop_event.set()
223
+
224
+ if threading.current_thread() is threading.main_thread():
225
+ signal.signal(signal.SIGINT, interrupt_handler)
226
+ else:
227
+ print("Signal handler must be set in the main thread")
228
+
229
+ def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
230
+ """
231
+ Downloads a TS segment and adds it to the segment queue with retry logic.
232
+
233
+ Parameters:
234
+ - ts_url (str): The URL of the TS segment.
235
+ - index (int): The index of the segment.
236
+ - progress_bar (tqdm): Progress counter for tracking download progress.
237
+ - retries (int): The number of times to retry on failure (default is 3).
238
+ - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
239
+ """
240
+ for attempt in range(REQUEST_MAX_RETRY):
241
+ if self.interrupt_flag.is_set():
242
+ return
243
+
244
+ try:
245
+ start_time = time.time()
246
+
247
+ # Make request to get content
248
+ if THERE_IS_PROXY_LIST:
249
+
250
+ # Get proxy from list
251
+ proxy = self.valid_proxy[index % len(self.valid_proxy)]
252
+ logging.info(f"Use proxy: {proxy}")
253
+
254
+ with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
255
+ if 'key_base_url' in self.__dict__:
256
+ response = client.get(
257
+ url=ts_url,
258
+ headers=random_headers(self.key_base_url),
259
+ timeout=max_timeout,
260
+ follow_redirects=True
261
+ )
262
+
263
+ else:
264
+ response = client.get(
265
+ url=ts_url,
266
+ headers={'user-agent': get_headers()},
267
+ timeout=max_timeout,
268
+ follow_redirects=True
269
+ )
270
+
271
+ else:
272
+ with httpx.Client(verify=REQUEST_VERIFY) as client_2:
273
+ if 'key_base_url' in self.__dict__:
274
+ response = client_2.get(
275
+ url=ts_url,
276
+ headers=random_headers(self.key_base_url),
277
+ timeout=max_timeout,
278
+ follow_redirects=True
279
+ )
280
+
281
+ else:
282
+ response = client_2.get(
283
+ url=ts_url,
284
+ headers={'user-agent': get_headers()},
285
+ timeout=max_timeout,
286
+ follow_redirects=True
287
+ )
288
+
289
+ # Validate response and content
290
+ response.raise_for_status()
291
+ segment_content = response.content
292
+ content_size = len(segment_content)
293
+ duration = time.time() - start_time
294
+
295
+ # Decrypt if needed and verify decrypted content
296
+ if self.decryption is not None:
297
+ try:
298
+ segment_content = self.decryption.decrypt(segment_content)
299
+
300
+ except Exception as e:
301
+ logging.error(f"Decryption failed for segment {index}: {str(e)}")
302
+ raise
303
+
304
+ # Update progress and queue
305
+ self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
306
+
307
+ # Add the segment to the queue
308
+ self.queue.put((index, segment_content))
309
+
310
+ # Track successfully downloaded segments
311
+ self.downloaded_segments.add(index)
312
+ progress_bar.update(1)
313
+
314
+ # Break out of the loop on success
315
+ return
316
+
317
+ except Exception as e:
318
+ logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
319
+
320
+ if attempt + 1 == REQUEST_MAX_RETRY:
321
+ console.log(f"[red]Final retry failed for segment: {index}")
322
+ self.queue.put((index, None)) # Marker for failed segment
323
+ progress_bar.update(1)
324
+ break
325
+
326
+ sleep_time = backoff_factor * (2 ** attempt)
327
+ logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
328
+ time.sleep(sleep_time)
329
+
330
+ def write_segments_to_file(self):
331
+ """
332
+ Writes segments to file with additional verification.
333
+ """
334
+ buffer = {}
335
+ expected_index = 0
336
+ segments_written = set()
337
+
338
+ with open(self.tmp_file_path, 'wb') as f:
339
+ while not self.stop_event.is_set() or not self.queue.empty():
340
+ if self.interrupt_flag.is_set():
341
+ break
342
+
343
+ try:
344
+ index, segment_content = self.queue.get(timeout=self.current_timeout)
345
+
346
+ # Successful queue retrieval: reduce timeout
347
+ self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
348
+
349
+ # Handle failed segments
350
+ if segment_content is None:
351
+ if index == expected_index:
352
+ expected_index += 1
353
+ continue
354
+
355
+ # Write segment if it's the next expected one
356
+ if index == expected_index:
357
+ f.write(segment_content)
358
+ segments_written.add(index)
359
+ f.flush()
360
+ expected_index += 1
361
+
362
+ # Write any buffered segments that are now in order
363
+ while expected_index in buffer:
364
+ next_segment = buffer.pop(expected_index)
365
+
366
+ if next_segment is not None:
367
+ f.write(next_segment)
368
+ segments_written.add(expected_index)
369
+ f.flush()
370
+
371
+ expected_index += 1
372
+
373
+ else:
374
+ buffer[index] = segment_content
375
+
376
+ except queue.Empty:
377
+ self.current_timeout = min(self.max_timeout, self.current_timeout * 1.5)
378
+
379
+ if self.stop_event.is_set():
380
+ break
381
+
382
+ except Exception as e:
383
+ logging.error(f"Error writing segment {index}: {str(e)}")
384
+
385
    def download_streams(self, add_desc):
        """
        Download all TS segments in parallel and write them to a file.

        Spawns a writer thread, fans segment downloads out over a thread
        pool (worker count taken from per-site config, falling back to
        defaults), retries any segments still missing after the first pass,
        and finally verifies completeness (>= 99.9%) and that the output
        file exists and is non-empty.

        Parameters:
            - add_desc (str): Additional description for the progress bar;
              also selects video vs. audio worker counts.
        """
        self.setup_interrupt_handler()

        # Get config site from prev stack
        # NOTE(review): frames[-4] hard-codes the call depth — fragile if the
        # call chain changes; verify against actual callers.
        frames = get_call_stack()
        logging.info(f"Extract info from: {frames}")
        config_site = str(frames[-4]['folder_base'])
        logging.info(f"Use frame: {frames[-1]}")

        # Workers to use for downloading
        TQDM_MAX_WORKER = 0

        # Select video workers from per-site config; fall back to default.
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        # — consider `except Exception:`.
        try:
            VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
        except:
            #VIDEO_WORKERS = os.cpu_count()
            VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS

        # Same lookup for audio workers.
        try:
            AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
        except:
            #AUDIO_WORKERS = os.cpu_count()
            AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS

        # Different worker counts for audio and video streams
        if "video" in str(add_desc):
            TQDM_MAX_WORKER = VIDEO_WORKERS
        if "audio" in str(add_desc):
            TQDM_MAX_WORKER = AUDIO_WORKERS

        # Custom bar for mobile and pc
        if TQDM_USE_LARGE_BAR:
            bar_format = (
                f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
                f"{Colors.RED}{{percentage:.2f}}% "
                f"{Colors.MAGENTA}{{bar}} "
                f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
                f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
            )
        else:
            bar_format = (
                f"{Colors.YELLOW}Proc{Colors.WHITE}: "
                f"{Colors.RED}{{percentage:.2f}}% "
                f"{Colors.WHITE}| "
                f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
            )

        # Create progress bar
        progress_bar = tqdm(
            total=len(self.segments),
            unit='s',
            ascii='░▒█',
            bar_format=bar_format,
            mininterval=0.05
        )

        try:

            # Start writer thread (daemon so it cannot block interpreter exit)
            writer_thread = threading.Thread(target=self.write_segments_to_file)
            writer_thread.daemon = True
            writer_thread.start()

            # Configure workers and inter-submit delay; with proxies the pool
            # size matches the validated proxy list
            max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
            delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER

            # Download segments with completion verification
            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                futures = []
                for index, segment_url in enumerate(self.segments):
                    # Check for interrupt before submitting each task
                    if self.interrupt_flag.is_set():
                        break

                    time.sleep(delay)
                    futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))

                # Wait for futures with interrupt handling
                for future in as_completed(futures):
                    if self.interrupt_flag.is_set():
                        break
                    try:
                        future.result()
                    except Exception as e:
                        logging.error(f"Error in download thread: {str(e)}")

                # Retry pass for segments missing after the first sweep
                if not self.interrupt_flag.is_set():
                    total_segments = len(self.segments)
                    completed_segments = len(self.downloaded_segments)

                    if completed_segments < total_segments:
                        missing_segments = set(range(total_segments)) - self.downloaded_segments
                        logging.warning(f"Missing segments: {sorted(missing_segments)}")

                        # Retry missing segments with interrupt check
                        for index in missing_segments:
                            if self.interrupt_flag.is_set():
                                break

                            try:
                                self.make_requests_stream(self.segments[index], index, progress_bar)

                            except Exception as e:
                                logging.error(f"Failed to retry segment {index}: {str(e)}")

        except Exception as e:
            logging.error(f"Download failed: {str(e)}")
            raise

        finally:

            # Clean up resources: stop the writer and close the bar
            self.stop_event.set()
            writer_thread.join(timeout=30)
            progress_bar.close()

            # Check if download was interrupted
            # NOTE(review): sys.exit inside `finally` will also swallow any
            # in-flight exception from the try block — confirm intended.
            if self.download_interrupted:
                console.log("[red] Download was manually stopped.")

                # Optional: Delete partial download
                if os.path.exists(self.tmp_file_path):
                    os.remove(self.tmp_file_path)
                sys.exit(0)

            # Clean up
            # NOTE(review): duplicates the cleanup above (set/join/close are
            # idempotent, so harmless, but likely unintentional).
            self.stop_event.set()
            writer_thread.join(timeout=30)
            progress_bar.close()

            # Final verification
            # NOTE(review): total_segments is only bound inside the
            # not-interrupted branch above; on an early exception this line
            # raises NameError and masks the original error.
            final_completion = (len(self.downloaded_segments) / total_segments) * 100
            if final_completion < 99.9:  # Less than 99.9% complete
                missing = set(range(total_segments)) - self.downloaded_segments
                raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")

            # Verify output file
            if not os.path.exists(self.tmp_file_path):
                raise Exception("Output file missing")

            file_size = os.path.getsize(self.tmp_file_path)
            if file_size == 0:
                raise Exception("Output file is empty")

            logging.info(f"Download completed. File size: {file_size} bytes")