StreamingCommunity 3.3.8__py3-none-any.whl → 3.3.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic. Click here for more details.

Files changed (26)
  1. StreamingCommunity/Api/Player/supervideo.py +1 -1
  2. StreamingCommunity/Api/Site/crunchyroll/film.py +13 -3
  3. StreamingCommunity/Api/Site/crunchyroll/series.py +6 -6
  4. StreamingCommunity/Api/Site/crunchyroll/site.py +13 -8
  5. StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +16 -41
  6. StreamingCommunity/Api/Site/crunchyroll/util/get_license.py +107 -101
  7. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +1 -1
  8. StreamingCommunity/Api/Site/raiplay/series.py +1 -10
  9. StreamingCommunity/Api/Site/raiplay/site.py +5 -13
  10. StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +12 -12
  11. StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +8 -3
  12. StreamingCommunity/Lib/Downloader/DASH/decrypt.py +1 -0
  13. StreamingCommunity/Lib/Downloader/DASH/downloader.py +9 -2
  14. StreamingCommunity/Lib/Downloader/DASH/parser.py +456 -98
  15. StreamingCommunity/Lib/Downloader/DASH/segments.py +109 -64
  16. StreamingCommunity/Lib/Downloader/HLS/segments.py +261 -355
  17. StreamingCommunity/Lib/Downloader/MP4/downloader.py +1 -1
  18. StreamingCommunity/Lib/FFmpeg/command.py +3 -3
  19. StreamingCommunity/Lib/M3U8/estimator.py +0 -1
  20. StreamingCommunity/Upload/version.py +1 -1
  21. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/METADATA +1 -1
  22. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/RECORD +26 -26
  23. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/WHEEL +0 -0
  24. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/entry_points.txt +0 -0
  25. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/licenses/LICENSE +0 -0
  26. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.3.9.dist-info}/top_level.txt +0 -0
@@ -1,16 +1,11 @@
1
1
  # 18.04.24
2
2
 
3
3
  import os
4
- import sys
5
4
  import time
6
- import queue
7
- import signal
8
5
  import logging
9
6
  import binascii
10
- import threading
11
- from queue import PriorityQueue
7
+ import asyncio
12
8
  from urllib.parse import urljoin, urlparse
13
- from concurrent.futures import ThreadPoolExecutor, as_completed
14
9
  from typing import Dict, Optional
15
10
 
16
11
 
@@ -23,7 +18,6 @@ from rich.console import Console
23
18
  # Internal utilities
24
19
  from StreamingCommunity.Util.color import Colors
25
20
  from StreamingCommunity.Util.headers import get_userAgent
26
- from StreamingCommunity.Util.http_client import create_client
27
21
  from StreamingCommunity.Util.config_json import config_manager
28
22
 
29
23
 
@@ -35,16 +29,16 @@ from ...M3U8 import (
35
29
  M3U8_UrlFix
36
30
  )
37
31
 
32
+
38
33
  # Config
39
- TQDM_DELAY_WORKER = 0.01
40
34
  REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
41
35
  REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
42
36
  DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workers')
43
37
  DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
44
- MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
38
+ MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
45
39
  SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
46
- TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
47
- MAX_INTERRUPT_COUNT = 3
40
+ LIMIT_SEGMENT = config_manager.get_int('M3U8_DOWNLOAD', 'limit_segment')
41
+
48
42
 
49
43
  # Variable
50
44
  console = Console()
@@ -58,66 +52,42 @@ class M3U8_Segments:
58
52
  Parameters:
59
53
  - url (str): The URL of the M3U8 playlist.
60
54
  - tmp_folder (str): The temporary folder to store downloaded segments.
61
- - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
62
- - limit_segments (int): Optional limit for number of segments to process.
55
+ - is_index_url (bool): Flag indicating if url is a URL (default True).
56
+ - limit_segments (int): Optional limit for number of segments (overrides LIMIT_SEGMENT if provided).
63
57
  - custom_headers (Dict[str, str]): Optional custom headers to use for all requests.
64
58
  """
65
59
  self.url = url
66
60
  self.tmp_folder = tmp_folder
67
61
  self.is_index_url = is_index_url
68
- self.limit_segments = limit_segments
69
62
  self.custom_headers = custom_headers if custom_headers else {'User-Agent': get_userAgent()}
70
- self.expected_real_time = None
71
- self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
63
+ self.final_output_path = os.path.join(self.tmp_folder, "0.ts")
72
64
  os.makedirs(self.tmp_folder, exist_ok=True)
73
65
 
66
+ # Use LIMIT_SEGMENT from config if limit_segments not specified or is 0
67
+ if limit_segments is None or limit_segments == 0:
68
+ self.limit_segments = LIMIT_SEGMENT if LIMIT_SEGMENT > 0 else None
69
+ else:
70
+ self.limit_segments = limit_segments
71
+
74
72
  # Util class
75
73
  self.decryption: M3U8_Decryption = None
76
74
  self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
77
75
  self.class_url_fixer = M3U8_UrlFix(url)
78
76
 
79
- # Sync
80
- self.queue = PriorityQueue(maxsize=20)
81
- self.buffer = {}
82
- self.expected_index = 0
83
- self.write_buffer = bytearray()
84
- self.write_batch_size = 50
85
-
86
- self.stop_event = threading.Event()
77
+ # Stats
87
78
  self.downloaded_segments = set()
88
- self.base_timeout = 1.0
89
- self.current_timeout = 3.0
90
-
91
- # Stopping
92
- self.interrupt_flag = threading.Event()
93
79
  self.download_interrupted = False
94
- self.interrupt_count = 0
95
- self.force_stop = False
96
- self.interrupt_lock = threading.Lock()
97
-
98
- # HTTP Client
99
- self._client = None
100
- self._client_lock = threading.Lock()
101
-
102
- # OTHER INFO
103
80
  self.info_maxRetry = 0
104
81
  self.info_nRetry = 0
105
82
  self.info_nFailed = 0
106
- self.active_retries = 0
107
- self.active_retries_lock = threading.Lock()
108
83
 
84
+ # Progress throttling
109
85
  self._last_progress_update = 0
110
86
  self._progress_update_interval = 0.1
111
87
 
112
88
  def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
113
89
  """
114
90
  Fetches the encryption key from the M3U8 playlist.
115
-
116
- Args:
117
- m3u8_parser (M3U8_Parser): An instance of M3U8_Parser containing parsed M3U8 data.
118
-
119
- Returns:
120
- bytes: The decryption key in byte format.
121
91
  """
122
92
  key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
123
93
  parsed_url = urlparse(key_uri)
@@ -126,7 +96,7 @@ class M3U8_Segments:
126
96
  try:
127
97
  client_params = {
128
98
  'headers': self.custom_headers,
129
- 'timeout': MAX_TIMEOOUT,
99
+ 'timeout': MAX_TIMEOUT,
130
100
  'verify': REQUEST_VERIFY
131
101
  }
132
102
  response = httpx.get(url=key_uri, **client_params)
@@ -139,12 +109,7 @@ class M3U8_Segments:
139
109
  raise Exception(f"Failed to fetch key: {e}")
140
110
 
141
111
  def parse_data(self, m3u8_content: str) -> None:
142
- """
143
- Parses the M3U8 content and extracts necessary data.
144
-
145
- Args:
146
- m3u8_content (str): The raw M3U8 playlist content.
147
- """
112
+ """Parses the M3U8 content and extracts necessary data."""
148
113
  m3u8_parser = M3U8_Parser()
149
114
  m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
150
115
 
@@ -157,11 +122,11 @@ class M3U8_Segments:
157
122
  self.decryption = M3U8_Decryption(key, m3u8_parser.keys.get('iv'), m3u8_parser.keys.get('method'))
158
123
 
159
124
  segments = [
160
- self.class_url_fixer.generate_full_url(seg)
161
- if "http" not in seg else seg
125
+ self.class_url_fixer.generate_full_url(seg) if "http" not in seg else seg
162
126
  for seg in m3u8_parser.segments
163
127
  ]
164
128
 
129
+ # Apply segment limit
165
130
  if self.limit_segments and len(segments) > self.limit_segments:
166
131
  logging.info(f"Limiting segments from {len(segments)} to {self.limit_segments}")
167
132
  segments = segments[:self.limit_segments]
@@ -183,7 +148,7 @@ class M3U8_Segments:
183
148
  try:
184
149
  client_params = {
185
150
  'headers': self.custom_headers,
186
- 'timeout': MAX_TIMEOOUT,
151
+ 'timeout': MAX_TIMEOUT,
187
152
  'verify': REQUEST_VERIFY
188
153
  }
189
154
  response = httpx.get(self.url, **client_params, follow_redirects=True)
@@ -195,205 +160,29 @@ class M3U8_Segments:
195
160
 
196
161
  except Exception as e:
197
162
  raise RuntimeError(f"M3U8 info retrieval failed: {e}")
198
-
199
- def setup_interrupt_handler(self):
200
- """
201
- Set up a signal handler for graceful interruption.
202
- """
203
- def interrupt_handler(signum, frame):
204
- with self.interrupt_lock:
205
- self.interrupt_count += 1
206
- if self.interrupt_count >= MAX_INTERRUPT_COUNT:
207
- self.force_stop = True
208
-
209
- if self.force_stop:
210
- console.print("\n[red]Force stop triggered! Exiting immediately.")
211
- self._cleanup_client()
212
-
213
- else:
214
- if not self.interrupt_flag.is_set():
215
- remaining = MAX_INTERRUPT_COUNT - self.interrupt_count
216
- console.print(f"\n[red]- Stopping gracefully... (Ctrl+C {remaining}x to force)")
217
- self.download_interrupted = True
218
163
 
219
- if remaining == 1:
220
- self.interrupt_flag.set()
221
-
222
- if threading.current_thread() is threading.main_thread():
223
- signal.signal(signal.SIGINT, interrupt_handler)
224
- else:
225
- print("Signal handler must be set in the main thread")
226
-
227
- def _get_http_client(self):
164
+ def _throttled_progress_update(self, content_size: int, progress_bar: tqdm):
228
165
  """
229
- Get a reusable HTTP client using the centralized factory.
230
- Uses optimized settings for segment downloading with custom headers.
231
- """
232
- if self._client is None:
233
- with self._client_lock:
234
- self._client = create_client(
235
- headers=self.custom_headers,
236
- timeout=SEGMENT_MAX_TIMEOUT
237
- )
238
-
239
- return self._client
240
-
241
- def _cleanup_client(self):
242
- """Pulizia client"""
243
- if self._client:
244
- try:
245
- self._client.close()
246
- except Exception:
247
- pass
248
- self._client = None
249
-
250
- def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.02) -> None:
166
+ Throttled progress update to reduce CPU usage.
251
167
  """
252
- Downloads a TS segment
253
-
254
- Parameters:
255
- - ts_url (str): The URL of the TS segment.
256
- - index (int): The index of the segment.
257
- - progress_bar (tqdm): Progress counter for tracking download progress.
258
- - backoff_factor (float): Backoff factor.
259
- """
260
- for attempt in range(REQUEST_MAX_RETRY):
261
- if self.interrupt_flag.is_set():
262
- return
263
-
264
- try:
265
- client = self._get_http_client()
266
- timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 5)
267
-
268
- # Make request with custom headers
269
- response = client.get(ts_url, timeout=timeout, headers=self.custom_headers)
270
- response.raise_for_status()
271
- segment_content = response.content
272
- content_size = len(segment_content)
273
-
274
- # Decrypt if needed
275
- if self.decryption is not None:
276
- try:
277
- segment_content = self.decryption.decrypt(segment_content)
278
- except Exception as e:
279
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
280
-
281
- if attempt + 1 == REQUEST_MAX_RETRY:
282
- self.interrupt_flag.set()
283
- self.stop_event.set()
284
-
285
- raise e
286
-
287
- current_time = time.time()
288
- if current_time - self._last_progress_update > self._progress_update_interval:
289
- self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
290
- self._last_progress_update = current_time
291
-
292
- try:
293
- self.queue.put((index, segment_content), timeout=0.05)
294
- self.downloaded_segments.add(index)
295
- progress_bar.update(1)
296
- return
297
-
298
- except queue.Full:
299
- time.sleep(0.02)
300
-
301
- try:
302
- self.queue.put((index, segment_content), timeout=0.1)
303
- self.downloaded_segments.add(index)
304
- progress_bar.update(1)
305
- return
306
-
307
- except queue.Full:
308
- self.queue.put((index, segment_content))
309
- self.downloaded_segments.add(index)
310
- progress_bar.update(1)
311
- return
168
+ current_time = time.time()
169
+ if current_time - self._last_progress_update > self._progress_update_interval:
170
+ self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
171
+ self._last_progress_update = current_time
312
172
 
313
- except Exception:
314
-
315
- if attempt > self.info_maxRetry:
316
- self.info_maxRetry = attempt + 1
317
- self.info_nRetry += 1
318
-
319
- if attempt + 1 == REQUEST_MAX_RETRY:
320
- console.print(f" -- [red]Final retry failed for segment: {index}")
321
-
322
- try:
323
- self.queue.put((index, None), timeout=0.1)
324
- except queue.Full:
325
- time.sleep(0.02)
326
- self.queue.put((index, None))
327
-
328
- progress_bar.update(1)
329
- self.info_nFailed += 1
330
- return
331
-
332
- if attempt < 2:
333
- sleep_time = 0.5 + attempt * 0.5
334
- else:
335
- sleep_time = min(3.0, backoff_factor ** attempt)
336
-
337
- time.sleep(sleep_time)
338
-
339
- def write_segments_to_file(self):
173
+ def _get_temp_segment_path(self, temp_dir: str, index: int) -> str:
340
174
  """
341
- Writes segments to file with additional verification.
175
+ Get the file path for a temporary segment.
342
176
  """
343
- with open(self.tmp_file_path, 'wb') as f:
344
- while not self.stop_event.is_set() or not self.queue.empty():
345
- if self.interrupt_flag.is_set():
346
- break
347
-
348
- try:
349
- index, segment_content = self.queue.get(timeout=self.current_timeout)
350
-
351
- # Successful queue retrieval: reduce timeout
352
- self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
353
-
354
- # Handle failed segments
355
- if segment_content is None:
356
- if index == self.expected_index:
357
- self.expected_index += 1
358
- continue
359
-
360
- # Write segment if it's the next expected one
361
- if index == self.expected_index:
362
- f.write(segment_content)
363
- f.flush()
364
- self.expected_index += 1
365
-
366
- # Write any buffered segments that are now in order
367
- while self.expected_index in self.buffer:
368
- next_segment = self.buffer.pop(self.expected_index)
369
-
370
- if next_segment is not None:
371
- f.write(next_segment)
372
- f.flush()
373
-
374
- self.expected_index += 1
375
-
376
- else:
377
- self.buffer[index] = segment_content
378
-
379
- except queue.Empty:
380
- self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
381
- time.sleep(0.05)
177
+ return os.path.join(temp_dir, f"seg_{index:06d}.tmp")
382
178
 
383
- if self.stop_event.is_set():
384
- break
385
-
386
- except Exception as e:
387
- logging.error(f"Error writing segment {index}: {str(e)}")
388
-
389
- def download_init_segment(self) -> bool:
179
+ async def _download_init_segment(self, client: httpx.AsyncClient, output_path: str, progress_bar: tqdm) -> bool:
390
180
  """
391
- Downloads the initialization segment if available.
392
-
393
- Returns:
394
- bool: True if init segment was downloaded successfully, False otherwise
181
+ Downloads the initialization segment and writes to output file.
395
182
  """
396
183
  if not self.has_init_segment:
184
+ with open(output_path, 'wb') as f:
185
+ pass
397
186
  return False
398
187
 
399
188
  init_url = self.segment_init_url
@@ -401,139 +190,257 @@ class M3U8_Segments:
401
190
  init_url = self.class_url_fixer.generate_full_url(init_url)
402
191
 
403
192
  try:
404
- client = self._get_http_client()
405
- response = client.get(
406
- init_url,
407
- timeout=SEGMENT_MAX_TIMEOUT,
408
- headers=self.custom_headers
409
- )
193
+ response = await client.get(init_url, timeout=SEGMENT_MAX_TIMEOUT, headers=self.custom_headers)
410
194
  response.raise_for_status()
411
195
  init_content = response.content
412
196
 
413
- # Decrypt if needed (although init segments are typically not encrypted)
197
+ # Decrypt if needed
414
198
  if self.decryption is not None:
415
199
  try:
416
200
  init_content = self.decryption.decrypt(init_content)
417
-
418
201
  except Exception as e:
419
202
  logging.error(f"Decryption failed for init segment: {str(e)}")
420
203
  return False
421
204
 
422
- # Put init segment in queue with highest priority (0)
423
- self.queue.put((0, init_content))
424
- self.downloaded_segments.add(0)
205
+ # Write init segment to output file
206
+ with open(output_path, 'wb') as f:
207
+ f.write(init_content)
425
208
 
426
- # Adjust expected_index to 1 since we've handled index 0 separately
427
- self.expected_index = 0
209
+ progress_bar.update(1)
210
+ self._throttled_progress_update(len(init_content), progress_bar)
428
211
  logging.info("Init segment downloaded successfully")
429
212
  return True
430
213
 
431
214
  except Exception as e:
432
215
  logging.error(f"Failed to download init segment: {str(e)}")
216
+ with open(output_path, 'wb') as f:
217
+ pass
433
218
  return False
434
-
435
- def download_streams(self, description: str, type: str):
219
+
220
+ async def _download_single_segment(self, client: httpx.AsyncClient, ts_url: str, index: int, temp_dir: str,
221
+ semaphore: asyncio.Semaphore, max_retry: int) -> tuple:
222
+ """
223
+ Downloads a single TS segment and saves to temp file.
224
+
225
+ Returns:
226
+ tuple: (index, success, retry_count, file_size)
227
+ """
228
+ async with semaphore:
229
+ temp_file = self._get_temp_segment_path(temp_dir, index)
230
+
231
+ for attempt in range(max_retry):
232
+ if self.download_interrupted:
233
+ return index, False, attempt, 0
234
+
235
+ try:
236
+ timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 5)
237
+ response = await client.get(ts_url, timeout=timeout, headers=self.custom_headers, follow_redirects=True)
238
+ response.raise_for_status()
239
+ segment_content = response.content
240
+
241
+ # Decrypt if needed
242
+ if self.decryption is not None:
243
+ try:
244
+ segment_content = self.decryption.decrypt(segment_content)
245
+ except Exception as e:
246
+ logging.error(f"Decryption failed for segment {index}: {str(e)}")
247
+ if attempt + 1 == max_retry:
248
+ return index, False, attempt, 0
249
+ raise e
250
+
251
+ # Write to temp file
252
+ with open(temp_file, 'wb') as f:
253
+ f.write(segment_content)
254
+
255
+ return index, True, attempt, len(segment_content)
256
+
257
+ except Exception:
258
+ if attempt + 1 == max_retry:
259
+ console.print(f" -- [red]Final retry failed for segment: {index}")
260
+ return index, False, max_retry, 0
261
+
262
+ sleep_time = 0.5 + attempt * 0.5 if attempt < 2 else min(3.0, 1.02 ** attempt)
263
+ await asyncio.sleep(sleep_time)
264
+
265
+ return index, False, max_retry, 0
266
+
267
+ async def _download_all_segments(self, client: httpx.AsyncClient, temp_dir: str, semaphore: asyncio.Semaphore, progress_bar: tqdm):
268
+ """
269
+ Download all segments in parallel with automatic retry.
270
+ """
271
+
272
+ # First pass: download all segments
273
+ tasks = [
274
+ self._download_single_segment(client, url, i, temp_dir, semaphore, REQUEST_MAX_RETRY)
275
+ for i, url in enumerate(self.segments)
276
+ ]
277
+
278
+ for coro in asyncio.as_completed(tasks):
279
+ try:
280
+ idx, success, nretry, size = await coro
281
+
282
+ if success:
283
+ self.downloaded_segments.add(idx)
284
+ else:
285
+ self.info_nFailed += 1
286
+
287
+ if nretry > self.info_maxRetry:
288
+ self.info_maxRetry = nretry
289
+ self.info_nRetry += nretry
290
+
291
+ progress_bar.update(1)
292
+ self._throttled_progress_update(size, progress_bar)
293
+
294
+ except KeyboardInterrupt:
295
+ self.download_interrupted = True
296
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
297
+ break
298
+
299
+ # Retry failed segments
300
+ if not self.download_interrupted:
301
+ await self._retry_failed_segments(client, temp_dir, semaphore, progress_bar)
302
+
303
+ async def _retry_failed_segments(self, client: httpx.AsyncClient, temp_dir: str, semaphore: asyncio.Semaphore,
304
+ progress_bar: tqdm):
305
+ """
306
+ Retry failed segments up to 3 times.
307
+ """
308
+ max_global_retries = 3
309
+ global_retry_count = 0
310
+
311
+ while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
312
+ failed_indices = [i for i in range(len(self.segments)) if i not in self.downloaded_segments]
313
+ if not failed_indices:
314
+ break
315
+
316
+ console.print(f" -- [yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")
317
+
318
+ retry_tasks = [
319
+ self._download_single_segment(client, self.segments[i], i, temp_dir, semaphore, REQUEST_MAX_RETRY)
320
+ for i in failed_indices
321
+ ]
322
+
323
+ nFailed_this_round = 0
324
+ for coro in asyncio.as_completed(retry_tasks):
325
+ try:
326
+ idx, success, nretry, size = await coro
327
+
328
+ if success:
329
+ self.downloaded_segments.add(idx)
330
+ else:
331
+ nFailed_this_round += 1
332
+
333
+ if nretry > self.info_maxRetry:
334
+ self.info_maxRetry = nretry
335
+ self.info_nRetry += nretry
336
+
337
+ progress_bar.update(0)
338
+ self._throttled_progress_update(size, progress_bar)
339
+
340
+ except KeyboardInterrupt:
341
+ self.download_interrupted = True
342
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
343
+ break
344
+
345
+ self.info_nFailed = nFailed_this_round
346
+ global_retry_count += 1
347
+
348
+ async def _concatenate_segments(self, output_path: str, temp_dir: str):
436
349
  """
437
- Downloads all TS segments in parallel and writes them to a file.
350
+ Concatenate all segment files in order to the final output file.
351
+ """
352
+ with open(output_path, 'ab') as outfile:
353
+ for idx in range(len(self.segments)):
354
+ temp_file = self._get_temp_segment_path(temp_dir, idx)
355
+
356
+ if os.path.exists(temp_file):
357
+ with open(temp_file, 'rb') as infile:
358
+ outfile.write(infile.read())
359
+
360
+ async def download_segments_async(self, description: str, type: str):
361
+ """
362
+ Downloads all TS segments asynchronously.
438
363
 
439
364
  Parameters:
440
365
  - description: Description to insert on tqdm bar
441
366
  - type (str): Type of download: 'video' or 'audio'
442
367
  """
443
- if TELEGRAM_BOT:
444
- console.log("####")
445
-
446
368
  self.get_info()
447
- self.setup_interrupt_handler()
448
369
 
370
+ # Setup directories
371
+ temp_dir = os.path.join(self.tmp_folder, "segments_temp")
372
+ os.makedirs(temp_dir, exist_ok=True)
373
+
374
+ # Initialize progress bar
375
+ total_segments = len(self.segments) + (1 if self.has_init_segment else 0)
449
376
  progress_bar = tqdm(
450
- total=len(self.segments) + (1 if self.has_init_segment else 0),
451
- bar_format=self._get_bar_format(description),
452
- file=sys.stdout,
377
+ total=total_segments,
378
+ bar_format=self._get_bar_format(description)
453
379
  )
454
380
 
381
+ # Reset stats
382
+ self.downloaded_segments = set()
383
+ self.info_nFailed = 0
384
+ self.info_nRetry = 0
385
+ self.info_maxRetry = 0
386
+ self.download_interrupted = False
387
+
455
388
  try:
456
- self.class_ts_estimator.total_segments = len(self.segments)
457
-
458
- writer_thread = threading.Thread(target=self.write_segments_to_file)
459
- writer_thread.daemon = True
460
- writer_thread.start()
461
- max_workers = self._get_worker_count(type)
462
-
463
- # First download the init segment if available
464
- if self.has_init_segment:
465
- if self.download_init_segment():
466
- progress_bar.update(1)
389
+ # Configure HTTP client
390
+ timeout_config = httpx.Timeout(SEGMENT_MAX_TIMEOUT, connect=10.0)
391
+ limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)
467
392
 
468
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
469
- futures = []
393
+ async with httpx.AsyncClient(timeout=timeout_config, limits=limits, verify=REQUEST_VERIFY) as client:
470
394
 
471
- # Start segment indices from 1 if we have an init segment
472
- start_idx = 1 if self.has_init_segment else 0
395
+ # Download init segment first (writes to 0.ts)
396
+ await self._download_init_segment(client, self.final_output_path, progress_bar)
473
397
 
474
- for index, segment_url in enumerate(self.segments):
475
- if self.interrupt_flag.is_set():
476
- break
477
-
478
- # Adjust index if we have an init segment
479
- queue_index = index + start_idx
480
-
481
- # Delay every 200 submissions to reduce CPU usage
482
- if index % 200 == 0 and index > 0:
483
- time.sleep(TQDM_DELAY_WORKER)
484
-
485
- futures.append(executor.submit(self.download_segment, segment_url, queue_index, progress_bar))
486
-
487
- # Process completed futures
488
- for future in as_completed(futures):
489
- if self.interrupt_flag.is_set():
490
- break
491
- try:
492
- future.result(timeout=1.0)
493
- except Exception as e:
494
- logging.error(f"Error in download thread: {str(e)}")
495
-
496
- # Retry missing segments if necessary
497
- if not self.interrupt_flag.is_set():
498
- total_segments = len(self.segments)
499
- completed_segments = len(self.downloaded_segments)
500
-
501
- if completed_segments < total_segments:
502
- missing_segments = set(range(total_segments)) - self.downloaded_segments
503
- logging.warning(f"Missing {len(missing_segments)} segments")
504
-
505
- # Retry missing segments with interrupt check
506
- retry_workers = min(2, len(missing_segments))
507
- if retry_workers > 0:
508
- retry_futures = []
509
- for index in missing_segments:
510
- if self.interrupt_flag.is_set():
511
- break
512
- retry_futures.append(executor.submit(self.download_segment, self.segments[index], index, progress_bar))
513
-
514
- for future in as_completed(retry_futures):
515
- if self.interrupt_flag.is_set():
516
- break
517
- try:
518
- future.result(timeout=2.0)
519
- except Exception as e:
520
- logging.error(f"Failed to retry segment: {str(e)}")
398
+ # Determine worker count based on type
399
+ max_workers = self._get_worker_count(type)
400
+ semaphore = asyncio.Semaphore(max_workers)
401
+
402
+ # Update estimator
403
+ self.class_ts_estimator.total_segments = len(self.segments)
404
+
405
+ # Download all segments to temp files
406
+ await self._download_all_segments(client, temp_dir, semaphore, progress_bar)
407
+
408
+ # Concatenate all segments to 0.ts
409
+ if not self.download_interrupted:
410
+ await self._concatenate_segments(self.final_output_path, temp_dir)
521
411
 
412
+ except KeyboardInterrupt:
413
+ self.download_interrupted = True
414
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
415
+
522
416
  finally:
523
- self._cleanup_resources(writer_thread, progress_bar)
417
+ self._cleanup_resources(temp_dir, progress_bar)
524
418
 
525
- if not self.interrupt_flag.is_set():
419
+ if not self.download_interrupted:
526
420
  self._verify_download_completion()
527
421
 
528
422
  return self._generate_results(type)
529
-
530
-
531
- def _get_bar_format(self, description: str) -> str:
423
+
424
+ def download_streams(self, description: str, type: str):
532
425
  """
533
- Generate platform-appropriate progress bar format.
426
+ Synchronous wrapper for download_segments_async.
427
+
428
+ Parameters:
429
+ - description: Description to insert on tqdm bar
430
+ - type (str): Type of download: 'video' or 'audio'
534
431
  """
432
+ try:
433
+ return asyncio.run(self.download_segments_async(description, type))
434
+
435
+ except KeyboardInterrupt:
436
+ self.download_interrupted = True
437
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
438
+ return self._generate_results(type)
439
+
440
+ def _get_bar_format(self, description: str) -> str:
441
+ """Generate platform-appropriate progress bar format."""
535
442
  return (
536
- f"{Colors.YELLOW}[HLS]{Colors.CYAN} {description}{Colors.WHITE}: "
443
+ f"{Colors.YELLOW}HLS{Colors.CYAN} {description}{Colors.WHITE}: "
537
444
  f"{Colors.MAGENTA}{{bar:40}} "
538
445
  f"{Colors.LIGHT_GREEN}{{n_fmt}}{Colors.WHITE}/{Colors.CYAN}{{total_fmt}} {Colors.LIGHT_MAGENTA}TS {Colors.WHITE}"
539
446
  f"{Colors.DARK_GRAY}[{Colors.YELLOW}{{elapsed}}{Colors.WHITE} < {Colors.CYAN}{{remaining}}{Colors.DARK_GRAY}] "
@@ -541,9 +448,7 @@ class M3U8_Segments:
541
448
  )
542
449
 
543
450
  def _get_worker_count(self, stream_type: str) -> int:
544
- """
545
- Return parallel workers based on stream type and infrastructure.
546
- """
451
+ """Return parallel workers based on stream type."""
547
452
  return {
548
453
  'video': DEFAULT_VIDEO_WORKERS,
549
454
  'audio': DEFAULT_AUDIO_WORKERS
@@ -564,26 +469,27 @@ class M3U8_Segments:
564
469
  missing = sorted(set(range(total)) - self.downloaded_segments)
565
470
  raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
566
471
 
567
- def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
472
+ def _cleanup_resources(self, temp_dir: str, progress_bar: tqdm) -> None:
568
473
  """Ensure resource cleanup and final reporting."""
569
- self.stop_event.set()
570
- writer_thread.join(timeout=30)
571
474
  progress_bar.close()
572
- self._cleanup_client()
475
+
476
+ # Delete temp segment files
477
+ if temp_dir and os.path.exists(temp_dir):
478
+ try:
479
+ for idx in range(len(self.segments)):
480
+ temp_file = self._get_temp_segment_path(temp_dir, idx)
481
+ if os.path.exists(temp_file):
482
+ os.remove(temp_file)
483
+ os.rmdir(temp_dir)
484
+ except Exception as e:
485
+ console.print(f"[yellow]Warning: Could not clean temp directory: {e}")
573
486
 
574
487
  if self.info_nFailed > 0:
575
488
  self._display_error_summary()
576
489
 
577
- self.buffer = {}
578
- self.write_buffer.clear()
579
- self.expected_index = 0
580
-
581
490
  def _display_error_summary(self) -> None:
582
491
  """Generate final error report."""
583
- console.print(f"\n[cyan]Retry Summary: "
584
- f"[white]Max retries: [green]{self.info_maxRetry} "
585
- f"[white]Total retries: [green]{self.info_nRetry} "
586
- f"[white]Failed segments: [red]{self.info_nFailed}")
587
-
588
- if self.info_nRetry > len(self.segments) * 0.3:
589
- console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
492
+ console.print(f"\n[green]Retry Summary: "
493
+ f"[cyan]Max retries: [red]{self.info_maxRetry} "
494
+ f"[cyan]Total retries: [red]{self.info_nRetry} "
495
+ f"[cyan]Failed segments: [red]{self.info_nFailed}")