StreamingCommunity 1.7.6__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; see the advisory details on the registry page for more information.

Files changed (93):
  1. StreamingCommunity/Src/Api/Player/Helper/Vixcloud/js_parser.py +4 -1
  2. StreamingCommunity/Src/Api/Player/Helper/Vixcloud/util.py +166 -166
  3. StreamingCommunity/Src/Api/Player/ddl.py +89 -89
  4. StreamingCommunity/Src/Api/Player/maxstream.py +151 -151
  5. StreamingCommunity/Src/Api/Player/supervideo.py +193 -193
  6. StreamingCommunity/Src/Api/Player/vixcloud.py +224 -212
  7. StreamingCommunity/Src/Api/Site/1337xx/__init__.py +50 -50
  8. StreamingCommunity/Src/Api/Site/1337xx/costant.py +14 -14
  9. StreamingCommunity/Src/Api/Site/1337xx/site.py +83 -83
  10. StreamingCommunity/Src/Api/Site/1337xx/title.py +66 -66
  11. StreamingCommunity/Src/Api/Site/altadefinizione/__init__.py +50 -50
  12. StreamingCommunity/Src/Api/Site/altadefinizione/costant.py +14 -14
  13. StreamingCommunity/Src/Api/Site/altadefinizione/film.py +69 -69
  14. StreamingCommunity/Src/Api/Site/altadefinizione/site.py +86 -86
  15. StreamingCommunity/Src/Api/Site/animeunity/__init__.py +50 -50
  16. StreamingCommunity/Src/Api/Site/animeunity/costant.py +15 -15
  17. StreamingCommunity/Src/Api/Site/animeunity/film_serie.py +131 -131
  18. StreamingCommunity/Src/Api/Site/animeunity/site.py +164 -164
  19. StreamingCommunity/Src/Api/Site/bitsearch/__init__.py +51 -51
  20. StreamingCommunity/Src/Api/Site/bitsearch/costant.py +15 -15
  21. StreamingCommunity/Src/Api/Site/bitsearch/site.py +84 -84
  22. StreamingCommunity/Src/Api/Site/bitsearch/title.py +47 -47
  23. StreamingCommunity/Src/Api/Site/cb01new/__init__.py +51 -51
  24. StreamingCommunity/Src/Api/Site/cb01new/costant.py +15 -15
  25. StreamingCommunity/Src/Api/Site/cb01new/film.py +69 -69
  26. StreamingCommunity/Src/Api/Site/cb01new/site.py +74 -74
  27. StreamingCommunity/Src/Api/Site/ddlstreamitaly/__init__.py +57 -57
  28. StreamingCommunity/Src/Api/Site/ddlstreamitaly/costant.py +16 -16
  29. StreamingCommunity/Src/Api/Site/ddlstreamitaly/series.py +142 -142
  30. StreamingCommunity/Src/Api/Site/ddlstreamitaly/site.py +92 -92
  31. StreamingCommunity/Src/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +82 -82
  32. StreamingCommunity/Src/Api/Site/guardaserie/__init__.py +52 -52
  33. StreamingCommunity/Src/Api/Site/guardaserie/costant.py +15 -15
  34. StreamingCommunity/Src/Api/Site/guardaserie/series.py +195 -195
  35. StreamingCommunity/Src/Api/Site/guardaserie/site.py +84 -84
  36. StreamingCommunity/Src/Api/Site/guardaserie/util/ScrapeSerie.py +110 -110
  37. StreamingCommunity/Src/Api/Site/mostraguarda/__init__.py +48 -48
  38. StreamingCommunity/Src/Api/Site/mostraguarda/costant.py +14 -14
  39. StreamingCommunity/Src/Api/Site/mostraguarda/film.py +94 -94
  40. StreamingCommunity/Src/Api/Site/piratebays/__init__.py +50 -50
  41. StreamingCommunity/Src/Api/Site/piratebays/costant.py +14 -14
  42. StreamingCommunity/Src/Api/Site/piratebays/site.py +88 -88
  43. StreamingCommunity/Src/Api/Site/piratebays/title.py +45 -45
  44. StreamingCommunity/Src/Api/Site/streamingcommunity/__init__.py +55 -55
  45. StreamingCommunity/Src/Api/Site/streamingcommunity/costant.py +14 -14
  46. StreamingCommunity/Src/Api/Site/streamingcommunity/film.py +70 -70
  47. StreamingCommunity/Src/Api/Site/streamingcommunity/series.py +203 -203
  48. StreamingCommunity/Src/Api/Site/streamingcommunity/site.py +125 -125
  49. StreamingCommunity/Src/Api/Template/Class/SearchType.py +101 -101
  50. StreamingCommunity/Src/Api/Template/Util/__init__.py +4 -4
  51. StreamingCommunity/Src/Api/Template/Util/get_domain.py +137 -137
  52. StreamingCommunity/Src/Api/Template/Util/manage_ep.py +153 -153
  53. StreamingCommunity/Src/Api/Template/Util/recall_search.py +37 -37
  54. StreamingCommunity/Src/Api/Template/__init__.py +2 -2
  55. StreamingCommunity/Src/Api/Template/site.py +87 -87
  56. StreamingCommunity/Src/Lib/Downloader/HLS/downloader.py +968 -968
  57. StreamingCommunity/Src/Lib/Downloader/HLS/proxyes.py +110 -110
  58. StreamingCommunity/Src/Lib/Downloader/HLS/segments.py +540 -540
  59. StreamingCommunity/Src/Lib/Downloader/MP4/downloader.py +156 -156
  60. StreamingCommunity/Src/Lib/Downloader/TOR/downloader.py +222 -222
  61. StreamingCommunity/Src/Lib/Downloader/__init__.py +4 -4
  62. StreamingCommunity/Src/Lib/Driver/driver_1.py +76 -76
  63. StreamingCommunity/Src/Lib/FFmpeg/__init__.py +4 -4
  64. StreamingCommunity/Src/Lib/FFmpeg/capture.py +170 -170
  65. StreamingCommunity/Src/Lib/FFmpeg/command.py +292 -292
  66. StreamingCommunity/Src/Lib/FFmpeg/util.py +241 -241
  67. StreamingCommunity/Src/Lib/M3U8/__init__.py +5 -5
  68. StreamingCommunity/Src/Lib/M3U8/decryptor.py +128 -128
  69. StreamingCommunity/Src/Lib/M3U8/estimator.py +172 -172
  70. StreamingCommunity/Src/Lib/M3U8/parser.py +666 -666
  71. StreamingCommunity/Src/Lib/M3U8/url_fixer.py +51 -51
  72. StreamingCommunity/Src/Lib/TMBD/__init__.py +1 -1
  73. StreamingCommunity/Src/Lib/TMBD/obj_tmbd.py +39 -39
  74. StreamingCommunity/Src/Lib/TMBD/tmdb.py +345 -345
  75. StreamingCommunity/Src/Upload/update.py +64 -64
  76. StreamingCommunity/Src/Upload/version.py +5 -5
  77. StreamingCommunity/Src/Util/_jsonConfig.py +204 -204
  78. StreamingCommunity/Src/Util/call_stack.py +42 -42
  79. StreamingCommunity/Src/Util/color.py +20 -20
  80. StreamingCommunity/Src/Util/console.py +12 -12
  81. StreamingCommunity/Src/Util/headers.py +147 -147
  82. StreamingCommunity/Src/Util/logger.py +53 -53
  83. StreamingCommunity/Src/Util/message.py +46 -46
  84. StreamingCommunity/Src/Util/os.py +417 -417
  85. StreamingCommunity/Src/Util/table.py +163 -163
  86. StreamingCommunity/run.py +196 -196
  87. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.8.0.dist-info}/METADATA +1 -1
  88. StreamingCommunity-1.8.0.dist-info/RECORD +97 -0
  89. StreamingCommunity-1.7.6.dist-info/RECORD +0 -97
  90. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.8.0.dist-info}/LICENSE +0 -0
  91. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.8.0.dist-info}/WHEEL +0 -0
  92. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.8.0.dist-info}/entry_points.txt +0 -0
  93. {StreamingCommunity-1.7.6.dist-info → StreamingCommunity-1.8.0.dist-info}/top_level.txt +0 -0
@@ -1,540 +1,540 @@
1
- # 18.04.24
2
-
3
- import os
4
- import sys
5
- import time
6
- import queue
7
- import logging
8
- import binascii
9
- import threading
10
- import signal
11
- from queue import PriorityQueue
12
- from urllib.parse import urljoin, urlparse
13
- from concurrent.futures import ThreadPoolExecutor, as_completed
14
-
15
-
16
- # External libraries
17
- import httpx
18
- from tqdm import tqdm
19
-
20
-
21
- # Internal utilities
22
- from StreamingCommunity.Src.Util.console import console
23
- from StreamingCommunity.Src.Util.headers import get_headers, random_headers
24
- from StreamingCommunity.Src.Util.color import Colors
25
- from StreamingCommunity.Src.Util._jsonConfig import config_manager
26
- from StreamingCommunity.Src.Util.os import os_manager
27
- from StreamingCommunity.Src.Util.call_stack import get_call_stack
28
-
29
-
30
- # Logic class
31
- from ...M3U8 import (
32
- M3U8_Decryption,
33
- M3U8_Ts_Estimator,
34
- M3U8_Parser,
35
- M3U8_UrlFix
36
- )
37
- from .proxyes import main_test_proxy
38
-
39
- # Config
40
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
41
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
42
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
43
- REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
44
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
45
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
46
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
47
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
48
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
49
-
50
-
51
- # Variable
52
- headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
53
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
54
-
55
-
56
-
57
- class M3U8_Segments:
58
- def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
59
- """
60
- Initializes the M3U8_Segments object.
61
-
62
- Parameters:
63
- - url (str): The URL of the M3U8 playlist.
64
- - tmp_folder (str): The temporary folder to store downloaded segments.
65
- - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
66
- """
67
- self.url = url
68
- self.tmp_folder = tmp_folder
69
- self.is_index_url = is_index_url
70
- self.expected_real_time = None
71
-
72
- self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
73
- os.makedirs(self.tmp_folder, exist_ok=True)
74
-
75
- # Util class
76
- self.decryption: M3U8_Decryption = None
77
- self.class_ts_estimator = M3U8_Ts_Estimator(0)
78
- self.class_url_fixer = M3U8_UrlFix(url)
79
-
80
- # Sync
81
- self.queue = PriorityQueue()
82
- self.stop_event = threading.Event()
83
- self.downloaded_segments = set()
84
-
85
- # Stopping
86
- self.interrupt_flag = threading.Event()
87
- self.download_interrupted = False
88
-
89
- def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
90
- """
91
- Retrieves the encryption key from the M3U8 playlist.
92
-
93
- Parameters:
94
- - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
95
-
96
- Returns:
97
- bytes: The encryption key in bytes.
98
- """
99
- headers_index = {'user-agent': get_headers()}
100
-
101
- # Construct the full URL of the key
102
- key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
103
- parsed_url = urlparse(key_uri)
104
- self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
105
- logging.info(f"Uri key: {key_uri}")
106
-
107
- # Make request to get porxy
108
- try:
109
- response = httpx.get(
110
- url=key_uri,
111
- headers=headers_index,
112
- timeout=max_timeout
113
- )
114
- response.raise_for_status()
115
-
116
- except Exception as e:
117
- raise Exception(f"Failed to fetch key from {key_uri}: {e}")
118
-
119
- # Convert the content of the response to hexadecimal and then to bytes
120
- hex_content = binascii.hexlify(response.content).decode('utf-8')
121
- byte_content = bytes.fromhex(hex_content)
122
-
123
- return byte_content
124
-
125
- def parse_data(self, m3u8_content: str) -> None:
126
- """
127
- Parses the M3U8 content to extract segment information.
128
-
129
- Parameters:
130
- - m3u8_content (str): The content of the M3U8 file.
131
- """
132
- m3u8_parser = M3U8_Parser()
133
- m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
134
-
135
- self.expected_real_time = m3u8_parser.get_duration(return_string=False)
136
- self.expected_real_time_s = m3u8_parser.duration
137
-
138
- # Check if there is an encryption key in the playlis
139
- if m3u8_parser.keys is not None:
140
- try:
141
-
142
- # Extract byte from the key
143
- key = self.__get_key__(m3u8_parser)
144
-
145
- except Exception as e:
146
- raise Exception(f"Failed to retrieve encryption key {e}.")
147
-
148
- iv = m3u8_parser.keys.get('iv')
149
- method = m3u8_parser.keys.get('method')
150
-
151
- # Create a decryption object with the key and set the method
152
- self.decryption = M3U8_Decryption(key, iv, method)
153
-
154
- # Store the segment information parsed from the playlist
155
- self.segments = m3u8_parser.segments
156
-
157
- # Fix URL if it is incomplete (missing 'http')
158
- for i in range(len(self.segments)):
159
- segment_url = self.segments[i]
160
-
161
- if "http" not in segment_url:
162
- self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
163
- logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
164
-
165
- # Update segments for estimator
166
- self.class_ts_estimator.total_segments = len(self.segments)
167
- logging.info(f"Segmnets to download: [{len(self.segments)}]")
168
-
169
- # Proxy
170
- if THERE_IS_PROXY_LIST:
171
- console.log("[red]Start validation proxy.")
172
- self.valid_proxy = main_test_proxy(self.segments[0])
173
- console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
174
-
175
- if len(self.valid_proxy) == 0:
176
- sys.exit(0)
177
-
178
- def get_info(self) -> None:
179
- """
180
- Makes a request to the index M3U8 file to get information about segments.
181
- """
182
- headers_index = {'user-agent': get_headers()}
183
-
184
- if self.is_index_url:
185
-
186
- # Send a GET request to retrieve the index M3U8 file
187
- response = httpx.get(
188
- self.url,
189
- headers=headers_index,
190
- timeout=max_timeout
191
- )
192
- response.raise_for_status()
193
-
194
- # Save the M3U8 file to the temporary folder
195
- path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
196
- open(path_m3u8_file, "w+").write(response.text)
197
-
198
- # Parse the text from the M3U8 index file
199
- self.parse_data(response.text)
200
-
201
- else:
202
-
203
- # Parser data of content of index pass in input to class
204
- self.parse_data(self.url)
205
-
206
- def setup_interrupt_handler(self):
207
- """
208
- Set up a signal handler for graceful interruption.
209
- """
210
- def interrupt_handler(signum, frame):
211
- if not self.interrupt_flag.is_set():
212
- console.log("\n[red] Stopping download gracefully...")
213
- self.interrupt_flag.set()
214
- self.download_interrupted = True
215
- self.stop_event.set()
216
-
217
- signal.signal(signal.SIGINT, interrupt_handler)
218
-
219
- def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, retries: int = 3, backoff_factor: float = 1.5) -> None:
220
- """
221
- Downloads a TS segment and adds it to the segment queue with retry logic.
222
-
223
- Parameters:
224
- - ts_url (str): The URL of the TS segment.
225
- - index (int): The index of the segment.
226
- - progress_bar (tqdm): Progress counter for tracking download progress.
227
- - retries (int): The number of times to retry on failure (default is 3).
228
- - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
229
- """
230
- if self.interrupt_flag.is_set():
231
- return
232
-
233
- need_verify = REQUEST_VERIFY
234
- min_segment_size = 100 # Minimum acceptable size for a TS segment in bytes
235
-
236
- for attempt in range(retries):
237
- if self.interrupt_flag.is_set():
238
- return
239
-
240
- try:
241
- start_time = time.time()
242
-
243
- # Make request to get content
244
- if THERE_IS_PROXY_LIST:
245
-
246
- # Get proxy from list
247
- proxy = self.valid_proxy[index % len(self.valid_proxy)]
248
- logging.info(f"Use proxy: {proxy}")
249
-
250
- with httpx.Client(proxies=proxy, verify=need_verify) as client:
251
- if 'key_base_url' in self.__dict__:
252
- response = client.get(
253
- url=ts_url,
254
- headers=random_headers(self.key_base_url),
255
- timeout=max_timeout,
256
- follow_redirects=True
257
- )
258
-
259
- else:
260
- response = client.get(
261
- url=ts_url,
262
- headers={'user-agent': get_headers()},
263
- timeout=max_timeout,
264
- follow_redirects=True
265
- )
266
-
267
- else:
268
- with httpx.Client(verify=need_verify) as client_2:
269
- if 'key_base_url' in self.__dict__:
270
- response = client_2.get(
271
- url=ts_url,
272
- headers=random_headers(self.key_base_url),
273
- timeout=max_timeout,
274
- follow_redirects=True
275
- )
276
-
277
- else:
278
- response = client_2.get(
279
- url=ts_url,
280
- headers={'user-agent': get_headers()},
281
- timeout=max_timeout,
282
- follow_redirects=True
283
- )
284
-
285
- # Validate response and content
286
- response.raise_for_status()
287
- segment_content = response.content
288
- content_size = len(segment_content)
289
-
290
- # Check if segment is too small (possibly corrupted or empty)
291
- if content_size < min_segment_size:
292
- raise httpx.RequestError(f"Segment {index} too small ({content_size} bytes)")
293
-
294
- duration = time.time() - start_time
295
-
296
- # Decrypt if needed and verify decrypted content
297
- if self.decryption is not None:
298
- try:
299
- segment_content = self.decryption.decrypt(segment_content)
300
- if len(segment_content) < min_segment_size:
301
- raise Exception(f"Decrypted segment {index} too small ({len(segment_content)} bytes)")
302
-
303
- except Exception as e:
304
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
305
- raise
306
-
307
- # Update progress and queue
308
- self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
309
-
310
- # Add the segment to the queue
311
- self.queue.put((index, segment_content))
312
-
313
- # Track successfully downloaded segments
314
- self.downloaded_segments.add(index)
315
- progress_bar.update(1)
316
-
317
- # Break out of the loop on success
318
- return
319
-
320
- except Exception as e:
321
- #logging.error(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
322
-
323
- if attempt + 1 == retries:
324
- #logging.error(f"Final retry failed for segment {index}")
325
- self.queue.put((index, None)) # Marker for failed segment
326
- progress_bar.update(1)
327
- break
328
-
329
- sleep_time = backoff_factor * (2 ** attempt)
330
- logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
331
- time.sleep(sleep_time)
332
-
333
- def write_segments_to_file(self):
334
- """
335
- Writes segments to file with additional verification.
336
- """
337
- with open(self.tmp_file_path, 'wb') as f:
338
- expected_index = 0
339
- buffer = {}
340
- total_written = 0
341
- segments_written = set()
342
-
343
- while not self.stop_event.is_set() or not self.queue.empty():
344
-
345
- if self.interrupt_flag.is_set():
346
- break
347
-
348
- try:
349
- index, segment_content = self.queue.get(timeout=1)
350
-
351
- # Handle failed segments
352
- if segment_content is None:
353
- if index == expected_index:
354
- expected_index += 1
355
- continue
356
-
357
- # Write segment if it's the next expected one
358
- if index == expected_index:
359
- f.write(segment_content)
360
- total_written += len(segment_content)
361
- segments_written.add(index)
362
- f.flush()
363
- expected_index += 1
364
-
365
- # Write any buffered segments that are now in order
366
- while expected_index in buffer:
367
- next_segment = buffer.pop(expected_index)
368
- if next_segment is not None:
369
- f.write(next_segment)
370
- total_written += len(next_segment)
371
- segments_written.add(expected_index)
372
- f.flush()
373
- expected_index += 1
374
- else:
375
- buffer[index] = segment_content
376
-
377
- except queue.Empty:
378
- if self.stop_event.is_set():
379
- break
380
- continue
381
- except Exception as e:
382
- logging.error(f"Error writing segment {index}: {str(e)}")
383
- continue
384
-
385
- # Final verification
386
- if total_written == 0:
387
- raise Exception("No data written to file")
388
-
389
- def download_streams(self, add_desc):
390
- """
391
- Downloads all TS segments in parallel and writes them to a file.
392
-
393
- Parameters:
394
- - add_desc (str): Additional description for the progress bar.
395
- """
396
- self.setup_interrupt_handler()
397
-
398
- # Get config site from prev stack
399
- frames = get_call_stack()
400
- logging.info(f"Extract info from: {frames}")
401
- config_site = str(frames[-4]['folder_base'])
402
- logging.info(f"Use frame: {frames[-1]}")
403
-
404
- # Workers to use for downloading
405
- TQDM_MAX_WORKER = 0
406
-
407
- # Select audio workers from folder of frames stack prev call.
408
- try:
409
- VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
410
- except:
411
- #VIDEO_WORKERS = os.cpu_count()
412
- VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
413
-
414
- try:
415
- AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
416
- except:
417
- #AUDIO_WORKERS = os.cpu_count()
418
- AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
419
-
420
- # Differnt workers for audio and video
421
- if "video" in str(add_desc):
422
- TQDM_MAX_WORKER = VIDEO_WORKERS
423
- if "audio" in str(add_desc):
424
- TQDM_MAX_WORKER = AUDIO_WORKERS
425
-
426
- # Custom bar for mobile and pc
427
- if TQDM_USE_LARGE_BAR:
428
- bar_format = (
429
- f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
430
- f"{Colors.RED}{{percentage:.2f}}% "
431
- f"{Colors.MAGENTA}{{bar}} "
432
- f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
433
- f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
434
- )
435
- else:
436
- bar_format = (
437
- f"{Colors.YELLOW}Proc{Colors.WHITE}: "
438
- f"{Colors.RED}{{percentage:.2f}}% "
439
- f"{Colors.WHITE}| "
440
- f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
441
- )
442
-
443
- # Create progress bar
444
- progress_bar = tqdm(
445
- total=len(self.segments),
446
- unit='s',
447
- ascii='░▒█',
448
- bar_format=bar_format,
449
- mininterval=0.05
450
- )
451
-
452
- try:
453
-
454
- # Start writer thread
455
- writer_thread = threading.Thread(target=self.write_segments_to_file)
456
- writer_thread.daemon = True
457
- writer_thread.start()
458
-
459
- # Configure workers and delay
460
- max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
461
- delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
462
-
463
- # Download segments with completion verification
464
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
465
- futures = []
466
- for index, segment_url in enumerate(self.segments):
467
- # Check for interrupt before submitting each task
468
- if self.interrupt_flag.is_set():
469
- break
470
-
471
- time.sleep(delay)
472
- futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
473
-
474
- # Wait for futures with interrupt handling
475
- for future in as_completed(futures):
476
- if self.interrupt_flag.is_set():
477
- break
478
- try:
479
- future.result()
480
- except Exception as e:
481
- logging.error(f"Error in download thread: {str(e)}")
482
-
483
- # Interrupt handling for missing segments
484
- if not self.interrupt_flag.is_set():
485
- total_segments = len(self.segments)
486
- completed_segments = len(self.downloaded_segments)
487
-
488
- if completed_segments < total_segments:
489
- missing_segments = set(range(total_segments)) - self.downloaded_segments
490
- logging.warning(f"Missing segments: {sorted(missing_segments)}")
491
-
492
- # Retry missing segments with interrupt check
493
- for index in missing_segments:
494
- if self.interrupt_flag.is_set():
495
- break
496
- try:
497
- self.make_requests_stream(self.segments[index], index, progress_bar)
498
- except Exception as e:
499
- logging.error(f"Failed to retry segment {index}: {str(e)}")
500
-
501
- except Exception as e:
502
- logging.error(f"Download failed: {str(e)}")
503
- raise
504
-
505
- finally:
506
-
507
- # Clean up resources
508
- self.stop_event.set()
509
- writer_thread.join(timeout=30)
510
- progress_bar.close()
511
-
512
- # Check if download was interrupted
513
- if self.download_interrupted:
514
- console.log("[red] Download was manually stopped.")
515
-
516
- # Optional: Delete partial download
517
- if os.path.exists(self.tmp_file_path):
518
- os.remove(self.tmp_file_path)
519
- sys.exit(0)
520
-
521
- # Clean up
522
- self.stop_event.set()
523
- writer_thread.join(timeout=30)
524
- progress_bar.close()
525
-
526
- # Final verification
527
- final_completion = (len(self.downloaded_segments) / total_segments) * 100
528
- if final_completion < 99.9: # Less than 99.9% complete
529
- missing = set(range(total_segments)) - self.downloaded_segments
530
- raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
531
-
532
- # Verify output file
533
- if not os.path.exists(self.tmp_file_path):
534
- raise Exception("Output file missing")
535
-
536
- file_size = os.path.getsize(self.tmp_file_path)
537
- if file_size == 0:
538
- raise Exception("Output file is empty")
539
-
540
- logging.info(f"Download completed. File size: {file_size} bytes")
1
+ # 18.04.24
2
+
3
+ import os
4
+ import sys
5
+ import time
6
+ import queue
7
+ import logging
8
+ import binascii
9
+ import threading
10
+ import signal
11
+ from queue import PriorityQueue
12
+ from urllib.parse import urljoin, urlparse
13
+ from concurrent.futures import ThreadPoolExecutor, as_completed
14
+
15
+
16
+ # External libraries
17
+ import httpx
18
+ from tqdm import tqdm
19
+
20
+
21
+ # Internal utilities
22
+ from StreamingCommunity.Src.Util.console import console
23
+ from StreamingCommunity.Src.Util.headers import get_headers, random_headers
24
+ from StreamingCommunity.Src.Util.color import Colors
25
+ from StreamingCommunity.Src.Util._jsonConfig import config_manager
26
+ from StreamingCommunity.Src.Util.os import os_manager
27
+ from StreamingCommunity.Src.Util.call_stack import get_call_stack
28
+
29
+
30
+ # Logic class
31
+ from ...M3U8 import (
32
+ M3U8_Decryption,
33
+ M3U8_Ts_Estimator,
34
+ M3U8_Parser,
35
+ M3U8_UrlFix
36
+ )
37
+ from .proxyes import main_test_proxy
38
+
39
+ # Config
40
+ TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
41
+ TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
42
+ REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
43
+ REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
44
+ THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
45
+ PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
46
+ PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
47
+ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
48
+ DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
49
+
50
+
51
+ # Variable
52
+ headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
53
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
54
+
55
+
56
+
57
+ class M3U8_Segments:
58
+ def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
59
+ """
60
+ Initializes the M3U8_Segments object.
61
+
62
+ Parameters:
63
+ - url (str): The URL of the M3U8 playlist.
64
+ - tmp_folder (str): The temporary folder to store downloaded segments.
65
+ - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
66
+ """
67
+ self.url = url
68
+ self.tmp_folder = tmp_folder
69
+ self.is_index_url = is_index_url
70
+ self.expected_real_time = None
71
+
72
+ self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
73
+ os.makedirs(self.tmp_folder, exist_ok=True)
74
+
75
+ # Util class
76
+ self.decryption: M3U8_Decryption = None
77
+ self.class_ts_estimator = M3U8_Ts_Estimator(0)
78
+ self.class_url_fixer = M3U8_UrlFix(url)
79
+
80
+ # Sync
81
+ self.queue = PriorityQueue()
82
+ self.stop_event = threading.Event()
83
+ self.downloaded_segments = set()
84
+
85
+ # Stopping
86
+ self.interrupt_flag = threading.Event()
87
+ self.download_interrupted = False
88
+
89
+ def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
90
+ """
91
+ Retrieves the encryption key from the M3U8 playlist.
92
+
93
+ Parameters:
94
+ - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
95
+
96
+ Returns:
97
+ bytes: The encryption key in bytes.
98
+ """
99
+ headers_index = {'user-agent': get_headers()}
100
+
101
+ # Construct the full URL of the key
102
+ key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
103
+ parsed_url = urlparse(key_uri)
104
+ self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
105
+ logging.info(f"Uri key: {key_uri}")
106
+
107
+ # Make request to get porxy
108
+ try:
109
+ response = httpx.get(
110
+ url=key_uri,
111
+ headers=headers_index,
112
+ timeout=max_timeout
113
+ )
114
+ response.raise_for_status()
115
+
116
+ except Exception as e:
117
+ raise Exception(f"Failed to fetch key from {key_uri}: {e}")
118
+
119
+ # Convert the content of the response to hexadecimal and then to bytes
120
+ hex_content = binascii.hexlify(response.content).decode('utf-8')
121
+ byte_content = bytes.fromhex(hex_content)
122
+
123
+ return byte_content
124
+
125
+ def parse_data(self, m3u8_content: str) -> None:
126
+ """
127
+ Parses the M3U8 content to extract segment information.
128
+
129
+ Parameters:
130
+ - m3u8_content (str): The content of the M3U8 file.
131
+ """
132
+ m3u8_parser = M3U8_Parser()
133
+ m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
134
+
135
+ self.expected_real_time = m3u8_parser.get_duration(return_string=False)
136
+ self.expected_real_time_s = m3u8_parser.duration
137
+
138
+ # Check if there is an encryption key in the playlis
139
+ if m3u8_parser.keys is not None:
140
+ try:
141
+
142
+ # Extract byte from the key
143
+ key = self.__get_key__(m3u8_parser)
144
+
145
+ except Exception as e:
146
+ raise Exception(f"Failed to retrieve encryption key {e}.")
147
+
148
+ iv = m3u8_parser.keys.get('iv')
149
+ method = m3u8_parser.keys.get('method')
150
+
151
+ # Create a decryption object with the key and set the method
152
+ self.decryption = M3U8_Decryption(key, iv, method)
153
+
154
+ # Store the segment information parsed from the playlist
155
+ self.segments = m3u8_parser.segments
156
+
157
+ # Fix URL if it is incomplete (missing 'http')
158
+ for i in range(len(self.segments)):
159
+ segment_url = self.segments[i]
160
+
161
+ if "http" not in segment_url:
162
+ self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
163
+ logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
164
+
165
+ # Update segments for estimator
166
+ self.class_ts_estimator.total_segments = len(self.segments)
167
+ logging.info(f"Segmnets to download: [{len(self.segments)}]")
168
+
169
+ # Proxy
170
+ if THERE_IS_PROXY_LIST:
171
+ console.log("[red]Start validation proxy.")
172
+ self.valid_proxy = main_test_proxy(self.segments[0])
173
+ console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
174
+
175
+ if len(self.valid_proxy) == 0:
176
+ sys.exit(0)
177
+
178
+ def get_info(self) -> None:
179
+ """
180
+ Makes a request to the index M3U8 file to get information about segments.
181
+ """
182
+ headers_index = {'user-agent': get_headers()}
183
+
184
+ if self.is_index_url:
185
+
186
+ # Send a GET request to retrieve the index M3U8 file
187
+ response = httpx.get(
188
+ self.url,
189
+ headers=headers_index,
190
+ timeout=max_timeout
191
+ )
192
+ response.raise_for_status()
193
+
194
+ # Save the M3U8 file to the temporary folder
195
+ path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
196
+ open(path_m3u8_file, "w+").write(response.text)
197
+
198
+ # Parse the text from the M3U8 index file
199
+ self.parse_data(response.text)
200
+
201
+ else:
202
+
203
+ # Parser data of content of index pass in input to class
204
+ self.parse_data(self.url)
205
+
206
+ def setup_interrupt_handler(self):
207
+ """
208
+ Set up a signal handler for graceful interruption.
209
+ """
210
+ def interrupt_handler(signum, frame):
211
+ if not self.interrupt_flag.is_set():
212
+ console.log("\n[red] Stopping download gracefully...")
213
+ self.interrupt_flag.set()
214
+ self.download_interrupted = True
215
+ self.stop_event.set()
216
+
217
+ signal.signal(signal.SIGINT, interrupt_handler)
218
+
219
def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, retries: int = 3, backoff_factor: float = 1.5) -> None:
    """
    Downloads a TS segment and adds it to the segment queue with retry logic.

    Parameters:
        - ts_url (str): The URL of the TS segment.
        - index (int): The index of the segment.
        - progress_bar (tqdm): Progress counter for tracking download progress.
        - retries (int): The number of times to retry on failure (default is 3).
        - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
    """
    if self.interrupt_flag.is_set():
        return

    need_verify = REQUEST_VERIFY
    min_segment_size = 100  # Minimum acceptable size for a TS segment in bytes

    # Route through a proxy (round-robin over the validated list) if enabled.
    # The proxy choice depends only on `index`, so it can be hoisted out of
    # the retry loop; it was previously duplicated across four branches.
    client_kwargs = {'verify': need_verify}
    if THERE_IS_PROXY_LIST:
        proxy = self.valid_proxy[index % len(self.valid_proxy)]
        logging.info(f"Use proxy: {proxy}")
        client_kwargs['proxies'] = proxy

    for attempt in range(retries):
        if self.interrupt_flag.is_set():
            return

        try:
            start_time = time.time()

            # Headers are rebuilt each attempt so random_headers() still
            # yields fresh values per retry, matching the old behavior.
            if 'key_base_url' in self.__dict__:
                request_headers = random_headers(self.key_base_url)
            else:
                request_headers = {'user-agent': get_headers()}

            # Make request to get content (single code path replaces the
            # four duplicated proxy/no-proxy x key/no-key branches).
            with httpx.Client(**client_kwargs) as client:
                response = client.get(
                    url=ts_url,
                    headers=request_headers,
                    timeout=max_timeout,
                    follow_redirects=True
                )

            # Validate response and content
            response.raise_for_status()
            segment_content = response.content
            content_size = len(segment_content)

            # Check if segment is too small (possibly corrupted or empty)
            if content_size < min_segment_size:
                raise httpx.RequestError(f"Segment {index} too small ({content_size} bytes)")

            duration = time.time() - start_time

            # Decrypt if needed and verify decrypted content
            if self.decryption is not None:
                try:
                    segment_content = self.decryption.decrypt(segment_content)
                    if len(segment_content) < min_segment_size:
                        raise Exception(f"Decrypted segment {index} too small ({len(segment_content)} bytes)")

                except Exception as e:
                    logging.error(f"Decryption failed for segment {index}: {str(e)}")
                    raise

            # Update speed/size estimator and hand the segment to the writer.
            self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
            self.queue.put((index, segment_content))

            # Track successfully downloaded segments
            self.downloaded_segments.add(index)
            progress_bar.update(1)

            # Success: stop retrying
            return

        except Exception as e:
            # Previously this failure was swallowed silently (logging was
            # commented out); keep a warning so failures are diagnosable.
            logging.warning(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")

            if attempt + 1 == retries:
                self.queue.put((index, None))  # Marker for failed segment
                progress_bar.update(1)
                break

            sleep_time = backoff_factor * (2 ** attempt)
            logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
            time.sleep(sleep_time)
332
+
333
def write_segments_to_file(self):
    """
    Writes downloaded segments to the output file in index order.

    Segments arrive on the queue out of order; out-of-order ones (including
    `None` failure markers) are buffered until their index becomes the next
    expected one. Failure markers are skipped without writing. Raises if no
    data was written at all.
    """
    with open(self.tmp_file_path, 'wb') as f:
        expected_index = 0
        buffer = {}
        total_written = 0
        segments_written = set()

        # Keep draining until the producer signals stop AND the queue is empty.
        while not self.stop_event.is_set() or not self.queue.empty():

            if self.interrupt_flag.is_set():
                break

            # Fetch outside the write-handling try so `index` is always
            # bound when the generic except below formats its message
            # (previously a non-Empty error here caused a NameError).
            try:
                index, segment_content = self.queue.get(timeout=1)
            except queue.Empty:
                if self.stop_event.is_set():
                    break
                continue

            try:
                # Handle failed segments that are already next in line
                if segment_content is None:
                    if index == expected_index:
                        expected_index += 1
                        continue

                # Write segment if it's the next expected one
                if index == expected_index:
                    f.write(segment_content)
                    total_written += len(segment_content)
                    segments_written.add(index)
                    f.flush()
                    expected_index += 1

                    # Write any buffered segments that are now in order
                    while expected_index in buffer:
                        next_segment = buffer.pop(expected_index)
                        if next_segment is not None:
                            f.write(next_segment)
                            total_written += len(next_segment)
                            segments_written.add(expected_index)
                            f.flush()
                        expected_index += 1
                else:
                    # Out of order: hold it (even a failure marker) for later
                    buffer[index] = segment_content

            except Exception as e:
                logging.error(f"Error writing segment {index}: {str(e)}")
                continue

    # Final verification
    if total_written == 0:
        raise Exception("No data written to file")
388
+
389
def download_streams(self, add_desc):
    """
    Downloads all TS segments in parallel and writes them to a file.

    Parameters:
        - add_desc (str): Additional description for the progress bar
          (containing "video" or "audio" selects the worker pool size).

    Raises:
        Exception: if fewer than 99.9% of segments completed, or the output
        file is missing or empty.

    Exits the process (sys.exit(0)) if the user interrupted the download.
    """
    self.setup_interrupt_handler()

    # Get config site from prev stack
    frames = get_call_stack()
    logging.info(f"Extract info from: {frames}")
    config_site = str(frames[-4]['folder_base'])
    logging.info(f"Use frame: {frames[-1]}")

    # Select workers from the site config; fall back to defaults on any
    # lookup/parse error (previously a bare `except:`).
    try:
        VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
    except Exception:
        VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS

    try:
        AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
    except Exception:
        AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS

    # Different workers for audio and video. Default to 1 so an unexpected
    # description can never hand ThreadPoolExecutor max_workers=0
    # (which raises ValueError).
    TQDM_MAX_WORKER = 1
    if "video" in str(add_desc):
        TQDM_MAX_WORKER = VIDEO_WORKERS
    if "audio" in str(add_desc):
        TQDM_MAX_WORKER = AUDIO_WORKERS

    # Custom bar for mobile and pc
    if TQDM_USE_LARGE_BAR:
        bar_format = (
            f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{add_desc}{Colors.WHITE}): "
            f"{Colors.RED}{{percentage:.2f}}% "
            f"{Colors.MAGENTA}{{bar}} "
            f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
            f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
        )
    else:
        bar_format = (
            f"{Colors.YELLOW}Proc{Colors.WHITE}: "
            f"{Colors.RED}{{percentage:.2f}}% "
            f"{Colors.WHITE}| "
            f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
        )

    # Create progress bar
    progress_bar = tqdm(
        total=len(self.segments),
        unit='s',
        ascii='░▒█',
        bar_format=bar_format,
        mininterval=0.05
    )

    # Bind these up front so the final verification can never hit an
    # unbound name (previously `total_segments` was only assigned inside
    # the try block, yielding a NameError on some interrupt/error paths).
    total_segments = len(self.segments)
    writer_thread = None

    try:
        # Start writer thread
        writer_thread = threading.Thread(target=self.write_segments_to_file)
        writer_thread.daemon = True
        writer_thread.start()

        # Configure workers and delay
        max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
        delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER

        # Download segments with completion verification
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = []
            for index, segment_url in enumerate(self.segments):
                # Check for interrupt before submitting each task
                if self.interrupt_flag.is_set():
                    break

                time.sleep(delay)
                futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))

            # Wait for futures with interrupt handling
            for future in as_completed(futures):
                if self.interrupt_flag.is_set():
                    break
                try:
                    future.result()
                except Exception as e:
                    logging.error(f"Error in download thread: {str(e)}")

        # Retry any segments that never completed (unless interrupted)
        if not self.interrupt_flag.is_set():
            completed_segments = len(self.downloaded_segments)

            if completed_segments < total_segments:
                missing_segments = set(range(total_segments)) - self.downloaded_segments
                logging.warning(f"Missing segments: {sorted(missing_segments)}")

                # Retry missing segments with interrupt check
                for index in missing_segments:
                    if self.interrupt_flag.is_set():
                        break
                    try:
                        self.make_requests_stream(self.segments[index], index, progress_bar)
                    except Exception as e:
                        logging.error(f"Failed to retry segment {index}: {str(e)}")

    except Exception as e:
        logging.error(f"Download failed: {str(e)}")
        raise

    finally:
        # Clean up resources exactly once (this was previously duplicated,
        # and mixing verification into `finally` could mask the original
        # exception with a secondary one).
        self.stop_event.set()
        if writer_thread is not None:
            writer_thread.join(timeout=30)
        progress_bar.close()

    # Check if download was interrupted
    if self.download_interrupted:
        console.log("[red] Download was manually stopped.")

        # Optional: Delete partial download
        if os.path.exists(self.tmp_file_path):
            os.remove(self.tmp_file_path)
        sys.exit(0)

    # Final verification (guard against an empty playlist to avoid
    # division by zero).
    final_completion = (len(self.downloaded_segments) / total_segments) * 100 if total_segments else 0.0
    if final_completion < 99.9:  # Less than 99.9% complete
        missing = set(range(total_segments)) - self.downloaded_segments
        raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")

    # Verify output file
    if not os.path.exists(self.tmp_file_path):
        raise Exception("Output file missing")

    file_size = os.path.getsize(self.tmp_file_path)
    if file_size == 0:
        raise Exception("Output file is empty")

    logging.info(f"Download completed. File size: {file_size} bytes")