StreamingCommunity 3.3.8__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of StreamingCommunity might be problematic.

Files changed (64)
  1. StreamingCommunity/Api/Player/hdplayer.py +0 -5
  2. StreamingCommunity/Api/Player/mediapolisvod.py +4 -13
  3. StreamingCommunity/Api/Player/supervideo.py +3 -8
  4. StreamingCommunity/Api/Player/sweetpixel.py +1 -9
  5. StreamingCommunity/Api/Player/vixcloud.py +5 -16
  6. StreamingCommunity/Api/Site/altadefinizione/film.py +4 -15
  7. StreamingCommunity/Api/Site/altadefinizione/site.py +2 -7
  8. StreamingCommunity/Api/Site/altadefinizione/util/ScrapeSerie.py +2 -7
  9. StreamingCommunity/Api/Site/animeunity/site.py +9 -24
  10. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +11 -27
  11. StreamingCommunity/Api/Site/animeworld/film.py +4 -2
  12. StreamingCommunity/Api/Site/animeworld/site.py +3 -11
  13. StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py +1 -4
  14. StreamingCommunity/Api/Site/crunchyroll/film.py +17 -8
  15. StreamingCommunity/Api/Site/crunchyroll/series.py +8 -9
  16. StreamingCommunity/Api/Site/crunchyroll/site.py +14 -16
  17. StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +18 -65
  18. StreamingCommunity/Api/Site/crunchyroll/util/get_license.py +97 -106
  19. StreamingCommunity/Api/Site/guardaserie/site.py +4 -12
  20. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +3 -10
  21. StreamingCommunity/Api/Site/mediasetinfinity/film.py +11 -12
  22. StreamingCommunity/Api/Site/mediasetinfinity/series.py +1 -2
  23. StreamingCommunity/Api/Site/mediasetinfinity/site.py +3 -11
  24. StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +39 -50
  25. StreamingCommunity/Api/Site/mediasetinfinity/util/fix_mpd.py +3 -3
  26. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +8 -26
  27. StreamingCommunity/Api/Site/raiplay/film.py +6 -7
  28. StreamingCommunity/Api/Site/raiplay/series.py +1 -12
  29. StreamingCommunity/Api/Site/raiplay/site.py +8 -24
  30. StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +15 -22
  31. StreamingCommunity/Api/Site/raiplay/util/get_license.py +3 -12
  32. StreamingCommunity/Api/Site/streamingcommunity/film.py +5 -16
  33. StreamingCommunity/Api/Site/streamingcommunity/site.py +3 -22
  34. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +11 -26
  35. StreamingCommunity/Api/Site/streamingwatch/__init__.py +1 -0
  36. StreamingCommunity/Api/Site/streamingwatch/film.py +4 -2
  37. StreamingCommunity/Api/Site/streamingwatch/series.py +1 -1
  38. StreamingCommunity/Api/Site/streamingwatch/site.py +4 -18
  39. StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +0 -3
  40. StreamingCommunity/Api/Template/config_loader.py +0 -7
  41. StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +8 -3
  42. StreamingCommunity/Lib/Downloader/DASH/decrypt.py +55 -1
  43. StreamingCommunity/Lib/Downloader/DASH/downloader.py +139 -55
  44. StreamingCommunity/Lib/Downloader/DASH/parser.py +458 -101
  45. StreamingCommunity/Lib/Downloader/DASH/segments.py +131 -74
  46. StreamingCommunity/Lib/Downloader/HLS/downloader.py +31 -50
  47. StreamingCommunity/Lib/Downloader/HLS/segments.py +266 -365
  48. StreamingCommunity/Lib/Downloader/MP4/downloader.py +1 -1
  49. StreamingCommunity/Lib/FFmpeg/capture.py +37 -5
  50. StreamingCommunity/Lib/FFmpeg/command.py +35 -93
  51. StreamingCommunity/Lib/M3U8/estimator.py +0 -1
  52. StreamingCommunity/Lib/TMBD/tmdb.py +2 -4
  53. StreamingCommunity/TelegramHelp/config.json +0 -1
  54. StreamingCommunity/Upload/version.py +1 -1
  55. StreamingCommunity/Util/config_json.py +28 -21
  56. StreamingCommunity/Util/http_client.py +28 -0
  57. StreamingCommunity/Util/os.py +16 -6
  58. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/METADATA +1 -3
  59. streamingcommunity-3.4.0.dist-info/RECORD +111 -0
  60. streamingcommunity-3.3.8.dist-info/RECORD +0 -111
  61. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/WHEEL +0 -0
  62. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/entry_points.txt +0 -0
  63. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/licenses/LICENSE +0 -0
  64. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/top_level.txt +0 -0
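The largest change sits in StreamingCommunity/Lib/Downloader/HLS/segments.py (entry 47 above), whose diff is reproduced below. Version 3.4.0 drops the thread-pool downloader, the PriorityQueue-fed writer thread, and the SIGINT handler in favor of an asyncio pipeline: concurrency is capped by a semaphore, each completed segment is written to its own temp file, and the files are concatenated in index order at the end, so no in-memory reordering buffer is needed. A minimal sketch of that pattern, assuming httpx and using illustrative names rather than the package's actual API:

    import asyncio
    import os

    import httpx


    async def fetch_segment(client: httpx.AsyncClient, url: str, index: int,
                            temp_dir: str, semaphore: asyncio.Semaphore) -> bool:
        # The semaphore caps how many requests are in flight at once
        async with semaphore:
            try:
                response = await client.get(url, follow_redirects=True)
                response.raise_for_status()
            except httpx.HTTPError:
                return False
            # One file per segment: out-of-order completion needs no reordering
            with open(os.path.join(temp_dir, f"seg_{index:06d}.ts"), "wb") as f:
                f.write(response.content)
            return True


    async def download_all(urls: list, temp_dir: str, output_path: str, workers: int = 12) -> None:
        os.makedirs(temp_dir, exist_ok=True)
        semaphore = asyncio.Semaphore(workers)
        async with httpx.AsyncClient() as client:
            results = await asyncio.gather(
                *(fetch_segment(client, u, i, temp_dir, semaphore)
                  for i, u in enumerate(urls))
            )
        # Concatenate in index order, skipping segments that failed
        with open(output_path, "wb") as out:
            for i, ok in enumerate(results):
                if not ok:
                    continue
                part = os.path.join(temp_dir, f"seg_{i:06d}.ts")
                with open(part, "rb") as f:
                    out.write(f.read())
                os.remove(part)

Writing each segment to disk as soon as it arrives is what lets the diff delete write_segments_to_file and its buffer/expected_index bookkeeping.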
@@ -1,16 +1,11 @@
 # 18.04.24
 
 import os
-import sys
 import time
-import queue
-import signal
 import logging
 import binascii
-import threading
-from queue import PriorityQueue
+import asyncio
 from urllib.parse import urljoin, urlparse
-from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import Dict, Optional
 
 
@@ -23,7 +18,7 @@ from rich.console import Console
 # Internal utilities
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.headers import get_userAgent
-from StreamingCommunity.Util.http_client import create_client
+from StreamingCommunity.Util.http_client import create_client_curl
 from StreamingCommunity.Util.config_json import config_manager
 
 
@@ -35,16 +30,17 @@ from ...M3U8 import (
     M3U8_UrlFix
 )
 
+
 # Config
-TQDM_DELAY_WORKER = 0.01
 REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
 REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
 DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workers')
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
-MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
+MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
 SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
-TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
-MAX_INTERRUPT_COUNT = 3
+LIMIT_SEGMENT = config_manager.get_int('M3U8_DOWNLOAD', 'limit_segment')
+ENABLE_RETRY = config_manager.get_bool('M3U8_DOWNLOAD', 'enable_retry')
+
 
 # Variable
 console = Console()
@@ -58,78 +54,51 @@ class M3U8_Segments:
         Parameters:
             - url (str): The URL of the M3U8 playlist.
             - tmp_folder (str): The temporary folder to store downloaded segments.
-            - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
-            - limit_segments (int): Optional limit for number of segments to process.
+            - is_index_url (bool): Flag indicating if url is a URL (default True).
+            - limit_segments (int): Optional limit for number of segments (overrides LIMIT_SEGMENT if provided).
             - custom_headers (Dict[str, str]): Optional custom headers to use for all requests.
         """
         self.url = url
         self.tmp_folder = tmp_folder
         self.is_index_url = is_index_url
-        self.limit_segments = limit_segments
         self.custom_headers = custom_headers if custom_headers else {'User-Agent': get_userAgent()}
-        self.expected_real_time = None
-        self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
+        self.final_output_path = os.path.join(self.tmp_folder, "0.ts")
         os.makedirs(self.tmp_folder, exist_ok=True)
 
+        # Use LIMIT_SEGMENT from config if limit_segments not specified or is 0
+        if limit_segments is None or limit_segments == 0:
+            self.limit_segments = LIMIT_SEGMENT if LIMIT_SEGMENT > 0 else None
+        else:
+            self.limit_segments = limit_segments
+
+        self.enable_retry = ENABLE_RETRY
+
         # Util class
         self.decryption: M3U8_Decryption = None
         self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
         self.class_url_fixer = M3U8_UrlFix(url)
 
-        # Sync
-        self.queue = PriorityQueue(maxsize=20)
-        self.buffer = {}
-        self.expected_index = 0
-        self.write_buffer = bytearray()
-        self.write_batch_size = 50
-
-        self.stop_event = threading.Event()
+        # Stats
         self.downloaded_segments = set()
-        self.base_timeout = 1.0
-        self.current_timeout = 3.0
-
-        # Stopping
-        self.interrupt_flag = threading.Event()
         self.download_interrupted = False
-        self.interrupt_count = 0
-        self.force_stop = False
-        self.interrupt_lock = threading.Lock()
-
-        # HTTP Client
-        self._client = None
-        self._client_lock = threading.Lock()
-
-        # OTHER INFO
         self.info_maxRetry = 0
         self.info_nRetry = 0
         self.info_nFailed = 0
-        self.active_retries = 0
-        self.active_retries_lock = threading.Lock()
 
+        # Progress throttling
         self._last_progress_update = 0
        self._progress_update_interval = 0.1
 
     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
         """
         Fetches the encryption key from the M3U8 playlist.
-
-        Args:
-            m3u8_parser (M3U8_Parser): An instance of M3U8_Parser containing parsed M3U8 data.
-
-        Returns:
-            bytes: The decryption key in byte format.
         """
         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
         parsed_url = urlparse(key_uri)
         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
 
         try:
-            client_params = {
-                'headers': self.custom_headers,
-                'timeout': MAX_TIMEOOUT,
-                'verify': REQUEST_VERIFY
-            }
-            response = httpx.get(url=key_uri, **client_params)
+            response = create_client_curl(headers=self.custom_headers).get(key_uri)
             response.raise_for_status()
 
             hex_content = binascii.hexlify(response.content).decode('utf-8')
@@ -139,12 +108,7 @@ class M3U8_Segments:
             raise Exception(f"Failed to fetch key: {e}")
 
     def parse_data(self, m3u8_content: str) -> None:
-        """
-        Parses the M3U8 content and extracts necessary data.
-
-        Args:
-            m3u8_content (str): The raw M3U8 playlist content.
-        """
+        """Parses the M3U8 content and extracts necessary data."""
         m3u8_parser = M3U8_Parser()
         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
 
@@ -157,11 +121,11 @@ class M3U8_Segments:
             self.decryption = M3U8_Decryption(key, m3u8_parser.keys.get('iv'), m3u8_parser.keys.get('method'))
 
         segments = [
-            self.class_url_fixer.generate_full_url(seg)
-            if "http" not in seg else seg
+            self.class_url_fixer.generate_full_url(seg) if "http" not in seg else seg
             for seg in m3u8_parser.segments
         ]
 
+        # Apply segment limit
         if self.limit_segments and len(segments) > self.limit_segments:
             logging.info(f"Limiting segments from {len(segments)} to {self.limit_segments}")
             segments = segments[:self.limit_segments]
@@ -181,12 +145,7 @@ class M3U8_Segments:
         """
         if self.is_index_url:
             try:
-                client_params = {
-                    'headers': self.custom_headers,
-                    'timeout': MAX_TIMEOOUT,
-                    'verify': REQUEST_VERIFY
-                }
-                response = httpx.get(self.url, **client_params, follow_redirects=True)
+                response = create_client_curl(headers=self.custom_headers).get(self.url)
                 response.raise_for_status()
 
                 self.parse_data(response.text)
@@ -195,205 +154,29 @@ class M3U8_Segments:
 
             except Exception as e:
                 raise RuntimeError(f"M3U8 info retrieval failed: {e}")
-
-    def setup_interrupt_handler(self):
-        """
-        Set up a signal handler for graceful interruption.
-        """
-        def interrupt_handler(signum, frame):
-            with self.interrupt_lock:
-                self.interrupt_count += 1
-                if self.interrupt_count >= MAX_INTERRUPT_COUNT:
-                    self.force_stop = True
-
-            if self.force_stop:
-                console.print("\n[red]Force stop triggered! Exiting immediately.")
-                self._cleanup_client()
-
-            else:
-                if not self.interrupt_flag.is_set():
-                    remaining = MAX_INTERRUPT_COUNT - self.interrupt_count
-                    console.print(f"\n[red]- Stopping gracefully... (Ctrl+C {remaining}x to force)")
-                    self.download_interrupted = True
 
-                    if remaining == 1:
-                        self.interrupt_flag.set()
-
-        if threading.current_thread() is threading.main_thread():
-            signal.signal(signal.SIGINT, interrupt_handler)
-        else:
-            print("Signal handler must be set in the main thread")
-
-    def _get_http_client(self):
-        """
-        Get a reusable HTTP client using the centralized factory.
-        Uses optimized settings for segment downloading with custom headers.
+    def _throttled_progress_update(self, content_size: int, progress_bar: tqdm):
         """
-        if self._client is None:
-            with self._client_lock:
-                self._client = create_client(
-                    headers=self.custom_headers,
-                    timeout=SEGMENT_MAX_TIMEOUT
-                )
-
-        return self._client
-
-    def _cleanup_client(self):
-        """Pulizia client"""
-        if self._client:
-            try:
-                self._client.close()
-            except Exception:
-                pass
-            self._client = None
-
-    def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.02) -> None:
+        Throttled progress update to reduce CPU usage.
         """
-        Downloads a TS segment
-
-        Parameters:
-            - ts_url (str): The URL of the TS segment.
-            - index (int): The index of the segment.
-            - progress_bar (tqdm): Progress counter for tracking download progress.
-            - backoff_factor (float): Backoff factor.
-        """
-        for attempt in range(REQUEST_MAX_RETRY):
-            if self.interrupt_flag.is_set():
-                return
-
-            try:
-                client = self._get_http_client()
-                timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 5)
-
-                # Make request with custom headers
-                response = client.get(ts_url, timeout=timeout, headers=self.custom_headers)
-                response.raise_for_status()
-                segment_content = response.content
-                content_size = len(segment_content)
-
-                # Decrypt if needed
-                if self.decryption is not None:
-                    try:
-                        segment_content = self.decryption.decrypt(segment_content)
-                    except Exception as e:
-                        logging.error(f"Decryption failed for segment {index}: {str(e)}")
-
-                        if attempt + 1 == REQUEST_MAX_RETRY:
-                            self.interrupt_flag.set()
-                            self.stop_event.set()
-
-                        raise e
-
-                current_time = time.time()
-                if current_time - self._last_progress_update > self._progress_update_interval:
-                    self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
-                    self._last_progress_update = current_time
-
-                try:
-                    self.queue.put((index, segment_content), timeout=0.05)
-                    self.downloaded_segments.add(index)
-                    progress_bar.update(1)
-                    return
-
-                except queue.Full:
-                    time.sleep(0.02)
-
-                    try:
-                        self.queue.put((index, segment_content), timeout=0.1)
-                        self.downloaded_segments.add(index)
-                        progress_bar.update(1)
-                        return
-
-                    except queue.Full:
-                        self.queue.put((index, segment_content))
-                        self.downloaded_segments.add(index)
-                        progress_bar.update(1)
-                        return
-
-            except Exception:
-
-                if attempt > self.info_maxRetry:
-                    self.info_maxRetry = attempt + 1
-                self.info_nRetry += 1
-
-                if attempt + 1 == REQUEST_MAX_RETRY:
-                    console.print(f" -- [red]Final retry failed for segment: {index}")
-
-                    try:
-                        self.queue.put((index, None), timeout=0.1)
-                    except queue.Full:
-                        time.sleep(0.02)
-                        self.queue.put((index, None))
-
-                    progress_bar.update(1)
-                    self.info_nFailed += 1
-                    return
-
-                if attempt < 2:
-                    sleep_time = 0.5 + attempt * 0.5
-                else:
-                    sleep_time = min(3.0, backoff_factor ** attempt)
-
-                time.sleep(sleep_time)
+        current_time = time.time()
+        if current_time - self._last_progress_update > self._progress_update_interval:
+            self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
+            self._last_progress_update = current_time
 
-    def write_segments_to_file(self):
+    def _get_temp_segment_path(self, temp_dir: str, index: int) -> str:
         """
-        Writes segments to file with additional verification.
+        Get the file path for a temporary segment.
         """
-        with open(self.tmp_file_path, 'wb') as f:
-            while not self.stop_event.is_set() or not self.queue.empty():
-                if self.interrupt_flag.is_set():
-                    break
-
-                try:
-                    index, segment_content = self.queue.get(timeout=self.current_timeout)
-
-                    # Successful queue retrieval: reduce timeout
-                    self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
-
-                    # Handle failed segments
-                    if segment_content is None:
-                        if index == self.expected_index:
-                            self.expected_index += 1
-                        continue
-
-                    # Write segment if it's the next expected one
-                    if index == self.expected_index:
-                        f.write(segment_content)
-                        f.flush()
-                        self.expected_index += 1
-
-                        # Write any buffered segments that are now in order
-                        while self.expected_index in self.buffer:
-                            next_segment = self.buffer.pop(self.expected_index)
-
-                            if next_segment is not None:
-                                f.write(next_segment)
-                                f.flush()
+        return os.path.join(temp_dir, f"seg_{index:06d}.ts")
 
-                            self.expected_index += 1
-
-                    else:
-                        self.buffer[index] = segment_content
-
-                except queue.Empty:
-                    self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
-                    time.sleep(0.05)
-
-                    if self.stop_event.is_set():
-                        break
-
-                except Exception as e:
-                    logging.error(f"Error writing segment {index}: {str(e)}")
-
-    def download_init_segment(self) -> bool:
+    async def _download_init_segment(self, client: httpx.AsyncClient, output_path: str, progress_bar: tqdm) -> bool:
         """
-        Downloads the initialization segment if available.
-
-        Returns:
-            bool: True if init segment was downloaded successfully, False otherwise
+        Downloads the initialization segment and writes to output file.
         """
         if not self.has_init_segment:
+            with open(output_path, 'wb') as f:
+                pass
             return False
 
         init_url = self.segment_init_url
@@ -401,139 +184,260 @@ class M3U8_Segments:
             init_url = self.class_url_fixer.generate_full_url(init_url)
 
         try:
-            client = self._get_http_client()
-            response = client.get(
-                init_url,
-                timeout=SEGMENT_MAX_TIMEOUT,
-                headers=self.custom_headers
-            )
+            response = await client.get(init_url, timeout=SEGMENT_MAX_TIMEOUT, headers=self.custom_headers)
             response.raise_for_status()
             init_content = response.content
 
-            # Decrypt if needed (although init segments are typically not encrypted)
+            # Decrypt if needed
             if self.decryption is not None:
                 try:
                     init_content = self.decryption.decrypt(init_content)
-
                 except Exception as e:
                     logging.error(f"Decryption failed for init segment: {str(e)}")
                     return False
 
-            # Put init segment in queue with highest priority (0)
-            self.queue.put((0, init_content))
-            self.downloaded_segments.add(0)
+            # Write init segment to output file
+            with open(output_path, 'wb') as f:
+                f.write(init_content)
 
-            # Adjust expected_index to 1 since we've handled index 0 separately
-            self.expected_index = 0
+            progress_bar.update(1)
+            self._throttled_progress_update(len(init_content), progress_bar)
             logging.info("Init segment downloaded successfully")
             return True
 
         except Exception as e:
             logging.error(f"Failed to download init segment: {str(e)}")
+            with open(output_path, 'wb') as f:
+                pass
             return False
-
-    def download_streams(self, description: str, type: str):
+
+    async def _download_single_segment(self, client: httpx.AsyncClient, ts_url: str, index: int, temp_dir: str,
+                                       semaphore: asyncio.Semaphore, max_retry: int) -> tuple:
+        """
+        Downloads a single TS segment and saves to temp file IMMEDIATELY.
+
+        Returns:
+            tuple: (index, success, retry_count, file_size)
+        """
+        async with semaphore:
+            temp_file = self._get_temp_segment_path(temp_dir, index)
+
+            for attempt in range(max_retry):
+                if self.download_interrupted:
+                    return index, False, attempt, 0
+
+                try:
+                    timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 5)
+                    response = await client.get(ts_url, timeout=timeout, headers=self.custom_headers, follow_redirects=True)
+                    response.raise_for_status()
+                    segment_content = response.content
+
+                    # Decrypt if needed
+                    if self.decryption is not None:
+                        try:
+                            segment_content = self.decryption.decrypt(segment_content)
+                        except Exception as e:
+                            logging.error(f"Decryption failed for segment {index}: {str(e)}")
+                            if attempt + 1 == max_retry:
+                                return index, False, attempt, 0
+                            raise e
+
+                    # Write segment to temp file IMMEDIATELY
+                    with open(temp_file, 'wb') as f:
+                        f.write(segment_content)
+
+                    size = len(segment_content)
+                    del segment_content
+                    return index, True, attempt, size
+
+                except Exception:
+                    if attempt + 1 == max_retry:
+                        console.print(f" -- [red]Final retry failed for segment: {index}")
+                        return index, False, max_retry, 0
+
+                    sleep_time = 0.5 + attempt * 0.5 if attempt < 2 else min(3.0, 1.02 ** attempt)
+                    await asyncio.sleep(sleep_time)
+
+            return index, False, max_retry, 0
+
+    async def _download_all_segments(self, client: httpx.AsyncClient, temp_dir: str, semaphore: asyncio.Semaphore, progress_bar: tqdm):
+        """
+        Download all segments in parallel with automatic retry.
+        """
+
+        # First pass: download all segments
+        tasks = [
+            self._download_single_segment(client, url, i, temp_dir, semaphore, REQUEST_MAX_RETRY)
+            for i, url in enumerate(self.segments)
+        ]
+
+        for coro in asyncio.as_completed(tasks):
+            try:
+                idx, success, nretry, size = await coro
+
+                if success:
+                    self.downloaded_segments.add(idx)
+                else:
+                    self.info_nFailed += 1
+
+                if nretry > self.info_maxRetry:
+                    self.info_maxRetry = nretry
+                self.info_nRetry += nretry
+
+                progress_bar.update(1)
+                self._throttled_progress_update(size, progress_bar)
+
+            except KeyboardInterrupt:
+                self.download_interrupted = True
+                console.print("\n[red]Download interrupted by user (Ctrl+C).")
+                break
+
+        # Retry failed segments only if enabled
+        if self.enable_retry and not self.download_interrupted:
+            await self._retry_failed_segments(client, temp_dir, semaphore, progress_bar)
+
+    async def _retry_failed_segments(self, client: httpx.AsyncClient, temp_dir: str, semaphore: asyncio.Semaphore,
+                                     progress_bar: tqdm):
+        """
+        Retry failed segments up to 3 times.
+        """
+        max_global_retries = 3
+        global_retry_count = 0
+
+        while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
+            failed_indices = [i for i in range(len(self.segments)) if i not in self.downloaded_segments]
+            if not failed_indices:
+                break
+
+            console.print(f" -- [yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")
+
+            retry_tasks = [
+                self._download_single_segment(client, self.segments[i], i, temp_dir, semaphore, REQUEST_MAX_RETRY)
+                for i in failed_indices
+            ]
+
+            nFailed_this_round = 0
+            for coro in asyncio.as_completed(retry_tasks):
+                try:
+                    idx, success, nretry, size = await coro
+
+                    if success:
+                        self.downloaded_segments.add(idx)
+                    else:
+                        nFailed_this_round += 1
+
+                    if nretry > self.info_maxRetry:
+                        self.info_maxRetry = nretry
+                    self.info_nRetry += nretry
+
+                    progress_bar.update(0)
+                    self._throttled_progress_update(size, progress_bar)
+
+                except KeyboardInterrupt:
+                    self.download_interrupted = True
+                    console.print("\n[red]Download interrupted by user (Ctrl+C).")
+                    break
+
+            self.info_nFailed = nFailed_this_round
+            global_retry_count += 1
+
+    async def _concatenate_segments(self, output_path: str, temp_dir: str):
         """
-        Downloads all TS segments in parallel and writes them to a file.
+        Concatenate all segment files in order to the final output file.
+        """
+        with open(output_path, 'ab') as outfile:
+            for idx in range(len(self.segments)):
+                temp_file = self._get_temp_segment_path(temp_dir, idx)
+
+                if os.path.exists(temp_file):
+                    with open(temp_file, 'rb') as infile:
+                        outfile.write(infile.read())
+                    os.remove(temp_file)
+
+    async def download_segments_async(self, description: str, type: str):
+        """
+        Downloads all TS segments asynchronously.
 
         Parameters:
             - description: Description to insert on tqdm bar
             - type (str): Type of download: 'video' or 'audio'
         """
-        if TELEGRAM_BOT:
-            console.log("####")
-
         self.get_info()
-        self.setup_interrupt_handler()
 
+        # Setup directories
+        temp_dir = os.path.join(self.tmp_folder, "segments_temp")
+        os.makedirs(temp_dir, exist_ok=True)
+
+        # Initialize progress bar
+        total_segments = len(self.segments) + (1 if self.has_init_segment else 0)
         progress_bar = tqdm(
-            total=len(self.segments) + (1 if self.has_init_segment else 0),
-            bar_format=self._get_bar_format(description),
-            file=sys.stdout,
+            total=total_segments,
+            bar_format=self._get_bar_format(description)
         )
 
+        # Reset stats
+        self.downloaded_segments = set()
+        self.info_nFailed = 0
+        self.info_nRetry = 0
+        self.info_maxRetry = 0
+        self.download_interrupted = False
+
         try:
-            self.class_ts_estimator.total_segments = len(self.segments)
-
-            writer_thread = threading.Thread(target=self.write_segments_to_file)
-            writer_thread.daemon = True
-            writer_thread.start()
-            max_workers = self._get_worker_count(type)
-
-            # First download the init segment if available
-            if self.has_init_segment:
-                if self.download_init_segment():
-                    progress_bar.update(1)
+            # Configure HTTP client
+            timeout_config = httpx.Timeout(SEGMENT_MAX_TIMEOUT, connect=10.0)
+            limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)
 
-            with ThreadPoolExecutor(max_workers=max_workers) as executor:
-                futures = []
+            async with httpx.AsyncClient(timeout=timeout_config, limits=limits, verify=REQUEST_VERIFY) as client:
 
-                # Start segment indices from 1 if we have an init segment
-                start_idx = 1 if self.has_init_segment else 0
+                # Download init segment first (writes to 0.ts)
+                await self._download_init_segment(client, self.final_output_path, progress_bar)
 
-                for index, segment_url in enumerate(self.segments):
-                    if self.interrupt_flag.is_set():
-                        break
-
-                    # Adjust index if we have an init segment
-                    queue_index = index + start_idx
-
-                    # Delay every 200 submissions to reduce CPU usage
-                    if index % 200 == 0 and index > 0:
-                        time.sleep(TQDM_DELAY_WORKER)
-
-                    futures.append(executor.submit(self.download_segment, segment_url, queue_index, progress_bar))
-
-                # Process completed futures
-                for future in as_completed(futures):
-                    if self.interrupt_flag.is_set():
-                        break
-                    try:
-                        future.result(timeout=1.0)
-                    except Exception as e:
-                        logging.error(f"Error in download thread: {str(e)}")
-
-                # Retry missing segments if necessary
-                if not self.interrupt_flag.is_set():
-                    total_segments = len(self.segments)
-                    completed_segments = len(self.downloaded_segments)
-
-                    if completed_segments < total_segments:
-                        missing_segments = set(range(total_segments)) - self.downloaded_segments
-                        logging.warning(f"Missing {len(missing_segments)} segments")
-
-                        # Retry missing segments with interrupt check
-                        retry_workers = min(2, len(missing_segments))
-                        if retry_workers > 0:
-                            retry_futures = []
-                            for index in missing_segments:
-                                if self.interrupt_flag.is_set():
-                                    break
-                                retry_futures.append(executor.submit(self.download_segment, self.segments[index], index, progress_bar))
-
-                            for future in as_completed(retry_futures):
-                                if self.interrupt_flag.is_set():
-                                    break
-                                try:
-                                    future.result(timeout=2.0)
-                                except Exception as e:
-                                    logging.error(f"Failed to retry segment: {str(e)}")
+                # Determine worker count based on type
+                max_workers = self._get_worker_count(type)
+                semaphore = asyncio.Semaphore(max_workers)
+
+                # Update estimator
+                self.class_ts_estimator.total_segments = len(self.segments)
+
+                # Download all segments to temp files
+                await self._download_all_segments(client, temp_dir, semaphore, progress_bar)
+
+                # Concatenate all segments to 0.ts
+                if not self.download_interrupted:
+                    await self._concatenate_segments(self.final_output_path, temp_dir)
 
+        except KeyboardInterrupt:
+            self.download_interrupted = True
+            console.print("\n[red]Download interrupted by user (Ctrl+C).")
+
         finally:
-            self._cleanup_resources(writer_thread, progress_bar)
+            self._cleanup_resources(temp_dir, progress_bar)
 
-        if not self.interrupt_flag.is_set():
+        if not self.download_interrupted:
             self._verify_download_completion()
 
         return self._generate_results(type)
-
-
-    def _get_bar_format(self, description: str) -> str:
+
+    def download_streams(self, description: str, type: str):
         """
-        Generate platform-appropriate progress bar format.
+        Synchronous wrapper for download_segments_async.
+
+        Parameters:
+            - description: Description to insert on tqdm bar
+            - type (str): Type of download: 'video' or 'audio'
         """
+        try:
+            return asyncio.run(self.download_segments_async(description, type))
+
+        except KeyboardInterrupt:
+            self.download_interrupted = True
+            console.print("\n[red]Download interrupted by user (Ctrl+C).")
+            return self._generate_results(type)
+
+    def _get_bar_format(self, description: str) -> str:
+        """Generate platform-appropriate progress bar format."""
         return (
-            f"{Colors.YELLOW}[HLS]{Colors.CYAN} {description}{Colors.WHITE}: "
+            f"{Colors.YELLOW}HLS{Colors.CYAN} {description}{Colors.WHITE}: "
             f"{Colors.MAGENTA}{{bar:40}} "
             f"{Colors.LIGHT_GREEN}{{n_fmt}}{Colors.WHITE}/{Colors.CYAN}{{total_fmt}} {Colors.LIGHT_MAGENTA}TS {Colors.WHITE}"
             f"{Colors.DARK_GRAY}[{Colors.YELLOW}{{elapsed}}{Colors.WHITE} < {Colors.CYAN}{{remaining}}{Colors.DARK_GRAY}] "
@@ -541,9 +445,7 @@ class M3U8_Segments:
         )
 
     def _get_worker_count(self, stream_type: str) -> int:
-        """
-        Return parallel workers based on stream type and infrastructure.
-        """
+        """Return parallel workers based on stream type."""
         return {
             'video': DEFAULT_VIDEO_WORKERS,
             'audio': DEFAULT_AUDIO_WORKERS
@@ -564,26 +466,25 @@ class M3U8_Segments:
             missing = sorted(set(range(total)) - self.downloaded_segments)
             raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
 
-    def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
+    def _cleanup_resources(self, temp_dir: str, progress_bar: tqdm) -> None:
         """Ensure resource cleanup and final reporting."""
-        self.stop_event.set()
-        writer_thread.join(timeout=30)
         progress_bar.close()
-        self._cleanup_client()
+
+        # Delete temp directory if exists
+        if temp_dir and os.path.exists(temp_dir):
+            try:
+                # Remove any remaining files (in case of interruption)
+                for file in os.listdir(temp_dir):
+                    os.remove(os.path.join(temp_dir, file))
+                os.rmdir(temp_dir)
+            except Exception as e:
+                console.print(f"[yellow]Warning: Could not clean temp directory: {e}")
 
         if self.info_nFailed > 0:
             self._display_error_summary()
 
-        self.buffer = {}
-        self.write_buffer.clear()
-        self.expected_index = 0
-
     def _display_error_summary(self) -> None:
         """Generate final error report."""
-        console.print(f"\n[cyan]Retry Summary: "
-                      f"[white]Max retries: [green]{self.info_maxRetry} "
-                      f"[white]Total retries: [green]{self.info_nRetry} "
-                      f"[white]Failed segments: [red]{self.info_nFailed}")
-
-        if self.info_nRetry > len(self.segments) * 0.3:
-            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
+        console.print(f" [cyan]Max retries: [red]{self.info_maxRetry} [white] | "
+                      f"[cyan]Total retries: [red]{self.info_nRetry} [white] | "
+                      f"[cyan]Failed segments: [red]{self.info_nFailed}")