StreamingCommunity 3.3.8__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; see the registry's advisory page for more details.

Files changed (64)
  1. StreamingCommunity/Api/Player/hdplayer.py +0 -5
  2. StreamingCommunity/Api/Player/mediapolisvod.py +4 -13
  3. StreamingCommunity/Api/Player/supervideo.py +3 -8
  4. StreamingCommunity/Api/Player/sweetpixel.py +1 -9
  5. StreamingCommunity/Api/Player/vixcloud.py +5 -16
  6. StreamingCommunity/Api/Site/altadefinizione/film.py +4 -15
  7. StreamingCommunity/Api/Site/altadefinizione/site.py +2 -7
  8. StreamingCommunity/Api/Site/altadefinizione/util/ScrapeSerie.py +2 -7
  9. StreamingCommunity/Api/Site/animeunity/site.py +9 -24
  10. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +11 -27
  11. StreamingCommunity/Api/Site/animeworld/film.py +4 -2
  12. StreamingCommunity/Api/Site/animeworld/site.py +3 -11
  13. StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py +1 -4
  14. StreamingCommunity/Api/Site/crunchyroll/film.py +17 -8
  15. StreamingCommunity/Api/Site/crunchyroll/series.py +8 -9
  16. StreamingCommunity/Api/Site/crunchyroll/site.py +14 -16
  17. StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +18 -65
  18. StreamingCommunity/Api/Site/crunchyroll/util/get_license.py +97 -106
  19. StreamingCommunity/Api/Site/guardaserie/site.py +4 -12
  20. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +3 -10
  21. StreamingCommunity/Api/Site/mediasetinfinity/film.py +11 -12
  22. StreamingCommunity/Api/Site/mediasetinfinity/series.py +1 -2
  23. StreamingCommunity/Api/Site/mediasetinfinity/site.py +3 -11
  24. StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +39 -50
  25. StreamingCommunity/Api/Site/mediasetinfinity/util/fix_mpd.py +3 -3
  26. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +8 -26
  27. StreamingCommunity/Api/Site/raiplay/film.py +6 -7
  28. StreamingCommunity/Api/Site/raiplay/series.py +1 -12
  29. StreamingCommunity/Api/Site/raiplay/site.py +8 -24
  30. StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +15 -22
  31. StreamingCommunity/Api/Site/raiplay/util/get_license.py +3 -12
  32. StreamingCommunity/Api/Site/streamingcommunity/film.py +5 -16
  33. StreamingCommunity/Api/Site/streamingcommunity/site.py +3 -22
  34. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +11 -26
  35. StreamingCommunity/Api/Site/streamingwatch/__init__.py +1 -0
  36. StreamingCommunity/Api/Site/streamingwatch/film.py +4 -2
  37. StreamingCommunity/Api/Site/streamingwatch/series.py +1 -1
  38. StreamingCommunity/Api/Site/streamingwatch/site.py +4 -18
  39. StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +0 -3
  40. StreamingCommunity/Api/Template/config_loader.py +0 -7
  41. StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +8 -3
  42. StreamingCommunity/Lib/Downloader/DASH/decrypt.py +55 -1
  43. StreamingCommunity/Lib/Downloader/DASH/downloader.py +139 -55
  44. StreamingCommunity/Lib/Downloader/DASH/parser.py +458 -101
  45. StreamingCommunity/Lib/Downloader/DASH/segments.py +131 -74
  46. StreamingCommunity/Lib/Downloader/HLS/downloader.py +31 -50
  47. StreamingCommunity/Lib/Downloader/HLS/segments.py +266 -365
  48. StreamingCommunity/Lib/Downloader/MP4/downloader.py +1 -1
  49. StreamingCommunity/Lib/FFmpeg/capture.py +37 -5
  50. StreamingCommunity/Lib/FFmpeg/command.py +35 -93
  51. StreamingCommunity/Lib/M3U8/estimator.py +0 -1
  52. StreamingCommunity/Lib/TMBD/tmdb.py +2 -4
  53. StreamingCommunity/TelegramHelp/config.json +0 -1
  54. StreamingCommunity/Upload/version.py +1 -1
  55. StreamingCommunity/Util/config_json.py +28 -21
  56. StreamingCommunity/Util/http_client.py +28 -0
  57. StreamingCommunity/Util/os.py +16 -6
  58. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/METADATA +1 -3
  59. streamingcommunity-3.4.0.dist-info/RECORD +111 -0
  60. streamingcommunity-3.3.8.dist-info/RECORD +0 -111
  61. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/WHEEL +0 -0
  62. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/entry_points.txt +0 -0
  63. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/licenses/LICENSE +0 -0
  64. {streamingcommunity-3.3.8.dist-info → streamingcommunity-3.4.0.dist-info}/top_level.txt +0 -0
@@ -8,6 +8,7 @@ import time
8
8
  # External libraries
9
9
  import httpx
10
10
  from tqdm import tqdm
11
+ from rich.console import Console
11
12
 
12
13
 
13
14
  # Internal utilities
@@ -22,6 +23,12 @@ REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
22
23
  DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workers')
23
24
  DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
24
25
  SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
26
+ LIMIT_SEGMENT = config_manager.get_int('M3U8_DOWNLOAD', 'limit_segment')
27
+ ENABLE_RETRY = config_manager.get_bool('M3U8_DOWNLOAD', 'enable_retry')
28
+
29
+
30
+ # Variable
31
+ console = Console()
25
32
 
26
33
 
27
34
  class MPD_Segments:
@@ -38,7 +45,14 @@ class MPD_Segments:
38
45
  self.tmp_folder = tmp_folder
39
46
  self.selected_representation = representation
40
47
  self.pssh = pssh
41
- self.limit_segments = limit_segments
48
+
49
+ # Use LIMIT_SEGMENT from config if limit_segments is not specified or is 0
50
+ if limit_segments is None or limit_segments == 0:
51
+ self.limit_segments = LIMIT_SEGMENT if LIMIT_SEGMENT > 0 else None
52
+ else:
53
+ self.limit_segments = limit_segments
54
+
55
+ self.enable_retry = ENABLE_RETRY
42
56
  self.download_interrupted = False
43
57
  self.info_nFailed = 0
44
58
 
@@ -50,6 +64,10 @@ class MPD_Segments:
50
64
  # Progress
51
65
  self._last_progress_update = 0
52
66
  self._progress_update_interval = 0.1
67
+
68
+ # Segment tracking - store only metadata, not content
69
+ self.segment_status = {} # {idx: {'downloaded': bool, 'size': int}}
70
+ self.segments_lock = asyncio.Lock()
53
71
 
54
72
  def get_concat_path(self, output_dir: str = None):
55
73
  """
@@ -78,10 +96,9 @@ class MPD_Segments:
78
96
  if self.limit_segments is not None:
79
97
  orig_count = len(self.selected_representation.get('segment_urls', []))
80
98
  if orig_count > self.limit_segments:
81
-
99
+
82
100
  # Limit segment URLs
83
101
  self.selected_representation['segment_urls'] = self.selected_representation['segment_urls'][:self.limit_segments]
84
- print(f"[yellow]Limiting segments from {orig_count} to {self.limit_segments}")
85
102
 
86
103
  # Run async download in sync mode
87
104
  try:
@@ -89,7 +106,7 @@ class MPD_Segments:
89
106
 
90
107
  except KeyboardInterrupt:
91
108
  self.download_interrupted = True
92
- print("\n[red]Download interrupted by user (Ctrl+C).")
109
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
93
110
 
94
111
  return {
95
112
  "concat_path": concat_path,
@@ -99,7 +116,7 @@ class MPD_Segments:
99
116
 
100
117
  async def download_segments(self, output_dir: str = None, concurrent_downloads: int = None, description: str = "DASH"):
101
118
  """
102
- Download and concatenate all segments (including init) asynchronously and in order.
119
+ Download segments to temporary files, then concatenate them in order.
103
120
 
104
121
  Parameters:
105
122
  - output_dir (str): Output directory for segments
@@ -113,6 +130,9 @@ class MPD_Segments:
113
130
 
114
131
  os.makedirs(output_dir or self.tmp_folder, exist_ok=True)
115
132
  concat_path = os.path.join(output_dir or self.tmp_folder, f"{rep_id}_encrypted.m4s")
133
+
134
+ temp_dir = os.path.join(output_dir or self.tmp_folder, f"{rep_id}_segments")
135
+ os.makedirs(temp_dir, exist_ok=True)
116
136
 
117
137
  # Determine stream type (video/audio) for progress bar
118
138
  stream_type = description
@@ -132,7 +152,7 @@ class MPD_Segments:
132
152
  # Initialize estimator
133
153
  estimator = M3U8_Ts_Estimator(total_segments=len(segment_urls) + 1)
134
154
 
135
- results = [None] * len(segment_urls)
155
+ self.segment_status = {}
136
156
  self.downloaded_segments = set()
137
157
  self.info_nFailed = 0
138
158
  self.download_interrupted = False
@@ -148,25 +168,26 @@ class MPD_Segments:
148
168
  # Download init segment
149
169
  await self._download_init_segment(client, init_url, concat_path, estimator, progress_bar)
150
170
 
151
- # Download all segments (first batch)
171
+ # Download all segments to temp files
152
172
  await self._download_segments_batch(
153
- client, segment_urls, results, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
173
+ client, segment_urls, temp_dir, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
154
174
  )
155
175
 
156
- # Retry failed segments
157
- await self._retry_failed_segments(
158
- client, segment_urls, results, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
159
- )
176
+ # Retry failed segments only if enabled
177
+ if self.enable_retry:
178
+ await self._retry_failed_segments(
179
+ client, segment_urls, temp_dir, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
180
+ )
160
181
 
161
- # Write all results to file
162
- self._write_results_to_file(concat_path, results)
182
+ # Concatenate all segments IN ORDER
183
+ await self._concatenate_segments_in_order(temp_dir, concat_path, len(segment_urls))
163
184
 
164
185
  except KeyboardInterrupt:
165
186
  self.download_interrupted = True
166
- print("\n[red]Download interrupted by user (Ctrl+C).")
187
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
167
188
 
168
189
  finally:
169
- self._cleanup_resources(None, progress_bar)
190
+ self._cleanup_resources(temp_dir, progress_bar)
170
191
 
171
192
  self._verify_download_completion()
172
193
  return self._generate_results(stream_type)
@@ -187,12 +208,9 @@ class MPD_Segments:
187
208
  with open(concat_path, 'wb') as outfile:
188
209
  if response.status_code == 200:
189
210
  outfile.write(response.content)
190
- # Update estimator with init segment size
191
211
  estimator.add_ts_file(len(response.content))
192
212
 
193
213
  progress_bar.update(1)
194
-
195
- # Update progress bar with estimated info
196
214
  self._throttled_progress_update(len(response.content), estimator, progress_bar)
197
215
 
198
216
  except Exception as e:
@@ -208,24 +226,35 @@ class MPD_Segments:
208
226
  estimator.update_progress_bar(content_size, progress_bar)
209
227
  self._last_progress_update = current_time
210
228
 
211
- async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
229
+ async def _download_segments_batch(self, client, segment_urls, temp_dir, semaphore, max_retry, estimator, progress_bar):
212
230
  """
213
- Download a batch of segments and update results.
231
+ Download segments to temporary files - write immediately to disk, not memory.
214
232
  """
215
233
  async def download_single(url, idx):
216
234
  async with semaphore:
217
235
  headers = {'User-Agent': get_userAgent()}
236
+ temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
218
237
 
219
238
  for attempt in range(max_retry):
220
239
  if self.download_interrupted:
221
- return idx, b'', attempt
240
+ return idx, False, attempt, 0
222
241
 
223
242
  try:
224
243
  timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 3)
225
244
  resp = await client.get(url, headers=headers, follow_redirects=True, timeout=timeout)
226
245
 
246
+ # Write directly to temp file
227
247
  if resp.status_code == 200:
228
- return idx, resp.content, attempt
248
+ content_size = len(resp.content)
249
+ with open(temp_file, 'wb') as f:
250
+ f.write(resp.content)
251
+
252
+ # Update status
253
+ async with self.segments_lock:
254
+ self.segment_status[idx] = {'downloaded': True, 'size': content_size}
255
+ self.downloaded_segments.add(idx)
256
+
257
+ return idx, True, attempt, content_size
229
258
  else:
230
259
  if attempt < 2:
231
260
  sleep_time = 0.5 + attempt * 0.5
@@ -236,20 +265,21 @@ class MPD_Segments:
236
265
  except Exception:
237
266
  sleep_time = min(2.0, 1.1 * (2 ** attempt))
238
267
  await asyncio.sleep(sleep_time)
268
+
269
+ # Mark as failed
270
+ async with self.segments_lock:
271
+ self.segment_status[idx] = {'downloaded': False, 'size': 0}
239
272
 
240
- return idx, b'', max_retry
273
+ return idx, False, max_retry, 0
241
274
 
242
- # Initial download attempt
275
+ # Download all segments concurrently
243
276
  tasks = [download_single(url, i) for i, url in enumerate(segment_urls)]
244
277
 
245
278
  for coro in asyncio.as_completed(tasks):
246
279
  try:
247
- idx, data, nretry = await coro
248
- results[idx] = data
280
+ idx, success, nretry, size = await coro
249
281
 
250
- if data and len(data) > 0:
251
- self.downloaded_segments.add(idx)
252
- else:
282
+ if not success:
253
283
  self.info_nFailed += 1
254
284
 
255
285
  if nretry > self.info_maxRetry:
@@ -257,19 +287,15 @@ class MPD_Segments:
257
287
  self.info_nRetry += nretry
258
288
 
259
289
  progress_bar.update(1)
260
-
261
- # Update estimator with segment size
262
- estimator.add_ts_file(len(data))
263
-
264
- # Update progress bar with estimated info and segment count
265
- self._throttled_progress_update(len(data), estimator, progress_bar)
290
+ estimator.add_ts_file(size)
291
+ self._throttled_progress_update(size, estimator, progress_bar)
266
292
 
267
293
  except KeyboardInterrupt:
268
294
  self.download_interrupted = True
269
- print("\n[red]Download interrupted by user (Ctrl+C).")
295
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
270
296
  break
271
297
 
272
- async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
298
+ async def _retry_failed_segments(self, client, segment_urls, temp_dir, semaphore, max_retry, estimator, progress_bar):
273
299
  """
274
300
  Retry failed segments up to 3 times.
275
301
  """
@@ -277,79 +303,93 @@ class MPD_Segments:
277
303
  global_retry_count = 0
278
304
 
279
305
  while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
280
- failed_indices = [i for i, data in enumerate(results) if not data or len(data) == 0]
306
+ failed_indices = [i for i in range(len(segment_urls)) if i not in self.downloaded_segments]
281
307
  if not failed_indices:
282
308
  break
283
-
284
- print(f"[yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")
285
309
 
286
310
  async def download_single(url, idx):
287
311
  async with semaphore:
288
312
  headers = {'User-Agent': get_userAgent()}
313
+ temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
289
314
 
290
315
  for attempt in range(max_retry):
291
316
  if self.download_interrupted:
292
- return idx, b'', attempt
317
+ return idx, False, attempt, 0
293
318
 
294
319
  try:
295
320
  timeout = min(SEGMENT_MAX_TIMEOUT, 15 + attempt * 5)
296
321
  resp = await client.get(url, headers=headers, timeout=timeout)
297
322
 
323
+ # Write directly to temp file
298
324
  if resp.status_code == 200:
299
- return idx, resp.content, attempt
325
+ content_size = len(resp.content)
326
+ with open(temp_file, 'wb') as f:
327
+ f.write(resp.content)
328
+
329
+ async with self.segments_lock:
330
+ self.segment_status[idx] = {'downloaded': True, 'size': content_size}
331
+ self.downloaded_segments.add(idx)
332
+
333
+ return idx, True, attempt, content_size
300
334
  else:
301
335
  await asyncio.sleep(1.5 * (2 ** attempt))
302
336
 
303
337
  except Exception:
304
338
  await asyncio.sleep(1.5 * (2 ** attempt))
305
339
 
306
- return idx, b'', max_retry
340
+ return idx, False, max_retry, 0
307
341
 
308
342
  retry_tasks = [download_single(segment_urls[i], i) for i in failed_indices]
309
343
 
310
- # Reset nFailed for this round
311
344
  nFailed_this_round = 0
312
345
  for coro in asyncio.as_completed(retry_tasks):
313
346
  try:
314
- idx, data, nretry = await coro
347
+ idx, success, nretry, size = await coro
315
348
 
316
- if data and len(data) > 0:
317
- results[idx] = data
318
- self.downloaded_segments.add(idx)
319
- else:
349
+ if not success:
320
350
  nFailed_this_round += 1
321
351
 
322
352
  if nretry > self.info_maxRetry:
323
353
  self.info_maxRetry = nretry
324
354
  self.info_nRetry += nretry
325
355
 
326
- progress_bar.update(0) # No progress bar increment, already counted
327
- estimator.add_ts_file(len(data))
328
- self._throttled_progress_update(len(data), estimator, progress_bar)
356
+ progress_bar.update(0)
357
+ estimator.add_ts_file(size)
358
+ self._throttled_progress_update(size, estimator, progress_bar)
329
359
 
330
360
  except KeyboardInterrupt:
331
361
  self.download_interrupted = True
332
- print("\n[red]Download interrupted by user (Ctrl+C).")
362
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
333
363
  break
334
364
 
335
365
  self.info_nFailed = nFailed_this_round
336
366
  global_retry_count += 1
337
367
 
338
- def _write_results_to_file(self, concat_path, results):
368
+ async def _concatenate_segments_in_order(self, temp_dir, concat_path, total_segments):
339
369
  """
340
- Write all downloaded segments to the output file.
370
+ Concatenate all segment files IN ORDER to the final output file.
341
371
  """
342
372
  with open(concat_path, 'ab') as outfile:
343
- for data in results:
344
- if data:
345
- outfile.write(data)
373
+ for idx in range(total_segments):
374
+ temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
375
+
376
+ # Only concatenate successfully downloaded segments
377
+ if idx in self.downloaded_segments and os.path.exists(temp_file):
378
+ with open(temp_file, 'rb') as infile:
379
+
380
+ # Read and write in chunks to avoid memory issues
381
+ while True:
382
+ chunk = infile.read(8192) # 8KB chunks
383
+ if not chunk:
384
+ break
385
+ outfile.write(chunk)
346
386
 
347
387
  def _get_bar_format(self, description: str) -> str:
348
388
  """
349
389
  Generate platform-appropriate progress bar format.
350
390
  """
351
391
  return (
352
- f"{Colors.YELLOW}[DASH]{Colors.CYAN} {description}{Colors.WHITE}: "
392
+ f"{Colors.YELLOW}DASH{Colors.CYAN} {description}{Colors.WHITE}: "
353
393
  f"{Colors.MAGENTA}{{bar:40}} "
354
394
  f"{Colors.LIGHT_GREEN}{{n_fmt}}{Colors.WHITE}/{Colors.CYAN}{{total_fmt}} {Colors.LIGHT_MAGENTA}TS {Colors.WHITE}"
355
395
  f"{Colors.DARK_GRAY}[{Colors.YELLOW}{{elapsed}}{Colors.WHITE} < {Colors.CYAN}{{remaining}}{Colors.DARK_GRAY}] "
@@ -378,43 +418,60 @@ class MPD_Segments:
378
418
 
379
419
  def _verify_download_completion(self) -> None:
380
420
  """
381
- Validate final download integrity.
421
+ Validate final download integrity - allow partial downloads.
382
422
  """
383
423
  total = len(self.selected_representation['segment_urls'])
384
424
  completed = getattr(self, 'downloaded_segments', set())
385
425
 
386
- # If interrupted, skip raising error
387
426
  if self.download_interrupted:
388
427
  return
389
428
 
390
429
  if total == 0:
391
430
  return
392
431
 
393
- if len(completed) / total < 0.999:
432
+ completion_rate = len(completed) / total
433
+ missing_count = total - len(completed)
434
+
435
+ # Allow downloads with up to 30 missing segments or 90% completion rate
436
+ if completion_rate >= 0.90 or missing_count <= 30:
437
+ return
438
+
439
+ else:
394
440
  missing = sorted(set(range(total)) - completed)
395
- raise RuntimeError(f"Download incomplete ({len(completed)/total:.1%}). Missing segments: {missing}")
441
+ console.print(f"[red]Missing segments: {missing[:10]}..." if len(missing) > 10 else f"[red]Missing segments: {missing}")
396
442
 
397
- def _cleanup_resources(self, writer_thread, progress_bar: tqdm) -> None:
443
+ def _cleanup_resources(self, temp_dir, progress_bar: tqdm) -> None:
398
444
  """
399
445
  Ensure resource cleanup and final reporting.
400
446
  """
401
447
  progress_bar.close()
448
+
449
+ # Delete temp segment files
450
+ if temp_dir and os.path.exists(temp_dir):
451
+ try:
452
+ for idx in range(len(self.selected_representation.get('segment_urls', []))):
453
+ temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
454
+ if os.path.exists(temp_file):
455
+ os.remove(temp_file)
456
+ os.rmdir(temp_dir)
457
+
458
+ except Exception as e:
459
+ print(f"[yellow]Warning: Could not clean temp directory: {e}")
460
+
402
461
  if getattr(self, 'info_nFailed', 0) > 0:
403
462
  self._display_error_summary()
404
463
 
405
464
  # Clear memory
406
- self.buffer = {}
407
- self.expected_index = 0
465
+ self.segment_status = {}
408
466
 
409
467
  def _display_error_summary(self) -> None:
410
468
  """
411
469
  Generate final error report.
412
470
  """
413
471
  total_segments = len(self.selected_representation.get('segment_urls', []))
414
- print(f"\n[cyan]Retry Summary: "
415
- f"[white]Max retries: [green]{getattr(self, 'info_maxRetry', 0)} "
416
- f"[white]Total retries: [green]{getattr(self, 'info_nRetry', 0)} "
417
- f"[white]Failed segments: [red]{getattr(self, 'info_nFailed', 0)}")
418
-
419
- if getattr(self, 'info_nRetry', 0) > total_segments * 0.3:
420
- print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
472
+ failed_indices = [i for i in range(total_segments) if i not in self.downloaded_segments]
473
+
474
+ console.print(f" [cyan]Max retries: [red]{getattr(self, 'info_maxRetry', 0)} [white]| "
475
+ f"[cyan]Total retries: [red]{getattr(self, 'info_nRetry', 0)} [white]| "
476
+ f"[cyan]Failed segments: [red]{getattr(self, 'info_nFailed', 0)} [white]| "
477
+ f"[cyan]Failed indices: [red]{failed_indices}")
@@ -1,14 +1,12 @@
1
1
  # 17.10.24
2
2
 
3
3
  import os
4
- import time
5
4
  import logging
6
5
  import shutil
7
- from typing import Any, Dict, List, Optional
6
+ from typing import Any, Dict, List, Optional, Union
8
7
 
9
8
 
10
9
  # External libraries
11
- import httpx
12
10
  from rich.console import Console
13
11
  from rich.panel import Panel
14
12
  from rich.table import Table
@@ -17,7 +15,7 @@ from rich.table import Table
17
15
  # Internal utilities
18
16
  from StreamingCommunity.Util.config_json import config_manager
19
17
  from StreamingCommunity.Util.headers import get_userAgent
20
- from StreamingCommunity.Util.http_client import create_client
18
+ from StreamingCommunity.Util.http_client import fetch
21
19
  from StreamingCommunity.Util.os import os_manager, internet_manager
22
20
 
23
21
 
@@ -33,15 +31,13 @@ from .segments import M3U8_Segments
33
31
 
34
32
 
35
33
  # Config
36
- ENABLE_SUBTITLE = config_manager.get_bool('M3U8_DOWNLOAD', 'download_subtitle')
37
34
  DOWNLOAD_SPECIFIC_AUDIO = config_manager.get_list('M3U8_DOWNLOAD', 'specific_list_audio')
38
35
  DOWNLOAD_SPECIFIC_SUBTITLE = config_manager.get_list('M3U8_DOWNLOAD', 'specific_list_subtitles')
39
36
  MERGE_SUBTITLE = config_manager.get_bool('M3U8_DOWNLOAD', 'merge_subs')
40
37
  CLEANUP_TMP = config_manager.get_bool('M3U8_DOWNLOAD', 'cleanup_tmp_folder')
41
38
  GET_ONLY_LINK = config_manager.get_int('M3U8_DOWNLOAD', 'get_only_link')
42
39
  FILTER_CUSTOM_RESOLUTION = str(config_manager.get('M3U8_CONVERSION', 'force_resolution')).strip().lower()
43
- RETRY_LIMIT = config_manager.get_int('REQUESTS', 'max_retry')
44
- MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
40
+ EXTENSION_OUTPUT = config_manager.get("M3U8_CONVERSION", "extension")
45
41
 
46
42
  console = Console()
47
43
 
@@ -51,9 +47,9 @@ class HLSClient:
51
47
  def __init__(self, custom_headers: Optional[Dict[str, str]] = None):
52
48
  self.headers = custom_headers if custom_headers else {'User-Agent': get_userAgent()}
53
49
 
54
- def request(self, url: str, return_content: bool = False) -> Optional[httpx.Response]:
50
+ def request(self, url: str, return_content: bool = False) -> Optional[Union[str, bytes]]:
55
51
  """
56
- Makes HTTP GET requests with retry logic.
52
+ Makes HTTP GET requests with retry logic using http_client.
57
53
 
58
54
  Args:
59
55
  url: Target URL to request
@@ -67,21 +63,12 @@ class HLSClient:
67
63
  logging.error("URL is None or empty, cannot make request")
68
64
  return None
69
65
 
70
- client = create_client(headers=self.headers)
71
-
72
- for attempt in range(RETRY_LIMIT):
73
- try:
74
- response = client.get(url)
75
- response.raise_for_status()
76
- return response.content if return_content else response.text
77
-
78
- except Exception as e:
79
- logging.error(f"Attempt {attempt+1} failed for URL {url}: {str(e)}")
80
- if attempt < RETRY_LIMIT - 1: # Don't sleep on last attempt
81
- time.sleep(1.5 ** attempt)
82
-
83
- logging.error(f"All {RETRY_LIMIT} attempts failed for URL: {url}")
84
- return None
66
+ return fetch(
67
+ url,
68
+ method="GET",
69
+ headers=self.headers,
70
+ return_content=return_content
71
+ )
85
72
 
86
73
 
87
74
  class PathManager:
@@ -94,7 +81,7 @@ class PathManager:
94
81
  """
95
82
  self.m3u8_url = m3u8_url
96
83
  self.output_path = self._sanitize_output_path(output_path)
97
- base_name = os.path.basename(self.output_path).replace(".mp4", "")
84
+ base_name = os.path.basename(self.output_path).replace(EXTENSION_OUTPUT, "")
98
85
  self.temp_dir = os.path.join(os.path.dirname(self.output_path), f"{base_name}_tmp")
99
86
 
100
87
  def _sanitize_output_path(self, path: Optional[str]) -> str:
@@ -103,10 +90,10 @@ class PathManager:
103
90
  Creates a hash-based filename if no path is provided.
104
91
  """
105
92
  if not path:
106
- path = "download.mp4"
93
+ path = f"download{EXTENSION_OUTPUT}"
107
94
 
108
- if not path.endswith(".mp4"):
109
- path += ".mp4"
95
+ if not path.endswith(EXTENSION_OUTPUT):
96
+ path += EXTENSION_OUTPUT
110
97
 
111
98
  return os_manager.get_sanitize_path(path)
112
99
 
@@ -182,6 +169,11 @@ class M3U8Manager:
182
169
  elif str(FILTER_CUSTOM_RESOLUTION).replace("p", "").replace("px", "").isdigit():
183
170
  resolution_value = int(str(FILTER_CUSTOM_RESOLUTION).replace("p", "").replace("px", ""))
184
171
  self.video_url, self.video_res = self.parser._video.get_custom_uri(resolution_value)
172
+
173
+ # Fallback to best if custom resolution not found
174
+ if self.video_url is None:
175
+ self.video_url, self.video_res = self.parser._video.get_best_uri()
176
+
185
177
  else:
186
178
  logging.error("Resolution not recognized.")
187
179
  self.video_url, self.video_res = self.parser._video.get_best_uri()
@@ -192,15 +184,15 @@ class M3U8Manager:
192
184
  if s.get('language') in DOWNLOAD_SPECIFIC_AUDIO
193
185
  ]
194
186
 
187
+ # Subtitle info
195
188
  self.sub_streams = []
196
- if ENABLE_SUBTITLE:
197
- if "*" in DOWNLOAD_SPECIFIC_SUBTITLE:
198
- self.sub_streams = self.parser._subtitle.get_all_uris_and_names() or []
199
- else:
200
- self.sub_streams = [
201
- s for s in (self.parser._subtitle.get_all_uris_and_names() or [])
202
- if s.get('language') in DOWNLOAD_SPECIFIC_SUBTITLE
203
- ]
189
+ if "*" in DOWNLOAD_SPECIFIC_SUBTITLE:
190
+ self.sub_streams = self.parser._subtitle.get_all_uris_and_names() or []
191
+ else:
192
+ self.sub_streams = [
193
+ s for s in (self.parser._subtitle.get_all_uris_and_names() or [])
194
+ if s.get('language') in DOWNLOAD_SPECIFIC_SUBTITLE
195
+ ]
204
196
 
205
197
  def log_selection(self):
206
198
  """Log the stream selection information in a formatted table."""
@@ -220,17 +212,6 @@ class M3U8Manager:
220
212
 
221
213
  data_rows.append(["Video", available_video, str(FILTER_CUSTOM_RESOLUTION), downloadable_video])
222
214
 
223
- # Codec information
224
- if self.parser.codec is not None:
225
- available_codec_info = (
226
- f"v: {self.parser.codec.video_codec_name} "
227
- f"(b: {self.parser.codec.video_bitrate // 1000}k), "
228
- f"a: {self.parser.codec.audio_codec_name} "
229
- f"(b: {self.parser.codec.audio_bitrate // 1000}k)"
230
- )
231
- set_codec_info = available_codec_info if config_manager.get_bool("M3U8_CONVERSION", "use_codec") else "copy"
232
-
233
- data_rows.append(["Codec", available_codec_info, set_codec_info, set_codec_info])
234
215
 
235
216
  # Subtitle information
236
217
  available_subtitles = self.parser._subtitle.get_all_uris_and_names() or []
@@ -683,11 +664,11 @@ class HLS_Downloader:
683
664
 
684
665
  new_filename = self.path_manager.output_path
685
666
  if missing_ts and use_shortest:
686
- new_filename = new_filename.replace(".mp4", "_failed_sync_ts.mp4")
667
+ new_filename = new_filename.replace(EXTENSION_OUTPUT, f"_failed_sync_ts{EXTENSION_OUTPUT}")
687
668
  elif missing_ts:
688
- new_filename = new_filename.replace(".mp4", "_failed_ts.mp4")
669
+ new_filename = new_filename.replace(EXTENSION_OUTPUT, f"_failed_ts{EXTENSION_OUTPUT}")
689
670
  elif use_shortest:
690
- new_filename = new_filename.replace(".mp4", "_failed_sync.mp4")
671
+ new_filename = new_filename.replace(EXTENSION_OUTPUT, f"_failed_sync{EXTENSION_OUTPUT}")
691
672
 
692
673
  if missing_ts or use_shortest:
693
674
  os.rename(self.path_manager.output_path, new_filename)