StreamingCommunity 3.2.8__py3-none-any.whl → 3.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; consult the registry's advisory page for details.

Files changed (79)
  1. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +2 -1
  2. StreamingCommunity/Api/Player/hdplayer.py +2 -2
  3. StreamingCommunity/Api/Player/sweetpixel.py +5 -8
  4. StreamingCommunity/Api/Site/altadefinizione/__init__.py +2 -2
  5. StreamingCommunity/Api/Site/altadefinizione/film.py +10 -8
  6. StreamingCommunity/Api/Site/altadefinizione/series.py +9 -7
  7. StreamingCommunity/Api/Site/altadefinizione/site.py +1 -1
  8. StreamingCommunity/Api/Site/animeunity/__init__.py +2 -2
  9. StreamingCommunity/Api/Site/animeunity/serie.py +2 -2
  10. StreamingCommunity/Api/Site/animeworld/site.py +3 -5
  11. StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py +8 -10
  12. StreamingCommunity/Api/Site/cb01new/film.py +7 -5
  13. StreamingCommunity/Api/Site/crunchyroll/__init__.py +1 -3
  14. StreamingCommunity/Api/Site/crunchyroll/film.py +9 -7
  15. StreamingCommunity/Api/Site/crunchyroll/series.py +9 -7
  16. StreamingCommunity/Api/Site/crunchyroll/site.py +10 -1
  17. StreamingCommunity/Api/Site/guardaserie/series.py +8 -6
  18. StreamingCommunity/Api/Site/guardaserie/site.py +0 -3
  19. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +1 -2
  20. StreamingCommunity/Api/Site/mediasetinfinity/__init__.py +1 -1
  21. StreamingCommunity/Api/Site/mediasetinfinity/film.py +10 -16
  22. StreamingCommunity/Api/Site/mediasetinfinity/series.py +12 -18
  23. StreamingCommunity/Api/Site/mediasetinfinity/site.py +11 -3
  24. StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +214 -180
  25. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +2 -31
  26. StreamingCommunity/Api/Site/raiplay/__init__.py +1 -1
  27. StreamingCommunity/Api/Site/raiplay/film.py +41 -10
  28. StreamingCommunity/Api/Site/raiplay/series.py +44 -12
  29. StreamingCommunity/Api/Site/raiplay/site.py +4 -1
  30. StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +2 -1
  31. StreamingCommunity/Api/Site/raiplay/util/get_license.py +40 -0
  32. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +0 -1
  33. StreamingCommunity/Api/Site/streamingcommunity/film.py +7 -5
  34. StreamingCommunity/Api/Site/streamingcommunity/series.py +9 -7
  35. StreamingCommunity/Api/Site/streamingcommunity/site.py +4 -2
  36. StreamingCommunity/Api/Site/streamingwatch/film.py +7 -5
  37. StreamingCommunity/Api/Site/streamingwatch/series.py +8 -6
  38. StreamingCommunity/Api/Site/streamingwatch/site.py +3 -1
  39. StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +3 -3
  40. StreamingCommunity/Api/Template/Util/__init__.py +10 -1
  41. StreamingCommunity/Api/Template/Util/manage_ep.py +4 -4
  42. StreamingCommunity/Api/Template/__init__.py +5 -1
  43. StreamingCommunity/Api/Template/site.py +10 -6
  44. StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +5 -12
  45. StreamingCommunity/Lib/Downloader/DASH/decrypt.py +1 -1
  46. StreamingCommunity/Lib/Downloader/DASH/downloader.py +1 -1
  47. StreamingCommunity/Lib/Downloader/DASH/parser.py +1 -1
  48. StreamingCommunity/Lib/Downloader/DASH/segments.py +4 -3
  49. StreamingCommunity/Lib/Downloader/HLS/downloader.py +11 -9
  50. StreamingCommunity/Lib/Downloader/HLS/segments.py +4 -9
  51. StreamingCommunity/Lib/Downloader/MP4/downloader.py +4 -3
  52. StreamingCommunity/Lib/Downloader/TOR/downloader.py +3 -5
  53. StreamingCommunity/Lib/Downloader/__init__.py +9 -1
  54. StreamingCommunity/Lib/FFmpeg/__init__.py +10 -1
  55. StreamingCommunity/Lib/FFmpeg/command.py +4 -6
  56. StreamingCommunity/Lib/FFmpeg/util.py +1 -1
  57. StreamingCommunity/Lib/M3U8/__init__.py +9 -1
  58. StreamingCommunity/Lib/M3U8/decryptor.py +8 -4
  59. StreamingCommunity/Lib/M3U8/estimator.py +0 -6
  60. StreamingCommunity/Lib/M3U8/parser.py +1 -1
  61. StreamingCommunity/Lib/M3U8/url_fixer.py +1 -1
  62. StreamingCommunity/Lib/TMBD/__init__.py +6 -1
  63. StreamingCommunity/TelegramHelp/config.json +1 -5
  64. StreamingCommunity/TelegramHelp/telegram_bot.py +9 -10
  65. StreamingCommunity/Upload/version.py +2 -2
  66. StreamingCommunity/Util/config_json.py +139 -59
  67. StreamingCommunity/Util/http_client.py +201 -0
  68. StreamingCommunity/Util/message.py +1 -1
  69. StreamingCommunity/Util/os.py +5 -5
  70. StreamingCommunity/Util/table.py +3 -3
  71. StreamingCommunity/__init__.py +9 -1
  72. StreamingCommunity/run.py +396 -260
  73. {streamingcommunity-3.2.8.dist-info → streamingcommunity-3.2.9.dist-info}/METADATA +143 -45
  74. streamingcommunity-3.2.9.dist-info/RECORD +113 -0
  75. streamingcommunity-3.2.8.dist-info/RECORD +0 -111
  76. {streamingcommunity-3.2.8.dist-info → streamingcommunity-3.2.9.dist-info}/WHEEL +0 -0
  77. {streamingcommunity-3.2.8.dist-info → streamingcommunity-3.2.9.dist-info}/entry_points.txt +0 -0
  78. {streamingcommunity-3.2.8.dist-info → streamingcommunity-3.2.9.dist-info}/licenses/LICENSE +0 -0
  79. {streamingcommunity-3.2.8.dist-info → streamingcommunity-3.2.9.dist-info}/top_level.txt +0 -0
@@ -18,8 +18,6 @@ from StreamingCommunity.Util.headers import get_userAgent
18
18
 
19
19
  # Variable
20
20
  console = Console()
21
- download_site_data = True
22
- validate_github_config = True
23
21
 
24
22
 
25
23
  class ConfigManager:
@@ -54,8 +52,7 @@ class ConfigManager:
54
52
  self.configSite = {}
55
53
  self.cache = {}
56
54
 
57
- self.use_api = False
58
- self.download_site_data = False
55
+ self.fetch_domain_online = True
59
56
  self.validate_github_config = False
60
57
 
61
58
  console.print(f"[bold cyan]Initializing ConfigManager:[/bold cyan] [green]{self.file_path}[/green]")
@@ -67,7 +64,7 @@ class ConfigManager:
67
64
  """Load the configuration and initialize all settings."""
68
65
  if not os.path.exists(self.file_path):
69
66
  console.print(f"[bold red]WARNING: Configuration file not found:[/bold red] {self.file_path}")
70
- console.print(f"[bold yellow]Attempting to download from reference repository...[/bold yellow]")
67
+ console.print("[bold yellow]Attempting to download from reference repository...[/bold yellow]")
71
68
  self._download_reference_config()
72
69
 
73
70
  # Load the configuration file
@@ -85,11 +82,8 @@ class ConfigManager:
85
82
  else:
86
83
  console.print("[bold yellow]GitHub validation disabled[/bold yellow]")
87
84
 
88
- # Load site data if requested
89
- if self.download_site_data:
90
- self._load_site_data()
91
- else:
92
- console.print("[bold yellow]Site data download disabled[/bold yellow]")
85
+ # Load site data based on fetch_domain_online setting
86
+ self._load_site_data()
93
87
 
94
88
  except json.JSONDecodeError as e:
95
89
  console.print(f"[bold red]Error parsing JSON:[/bold red] {str(e)}")
@@ -119,18 +113,11 @@ class ConfigManager:
119
113
  """Update internal settings from loaded configurations."""
120
114
  default_section = self.config.get('DEFAULT', {})
121
115
 
122
- # Save local values in temporary variables
123
- temp_use_api = default_section.get('use_api', False)
124
- temp_download_site_data = default_section.get('download_site_data', False)
125
- temp_validate_github_config = default_section.get('validate_github_config', False)
126
-
127
- # Update settings with found values (False by default)
128
- self.use_api = temp_use_api
129
- self.download_site_data = temp_download_site_data
130
- self.validate_github_config = temp_validate_github_config
116
+ # Get fetch_domain_online setting (True by default)
117
+ self.fetch_domain_online = default_section.get('fetch_domain_online', True)
118
+ self.validate_github_config = default_section.get('validate_github_config', False)
131
119
 
132
- console.print(f"[bold cyan]API Usage:[/bold cyan] [{'green' if self.use_api else 'yellow'}]{self.use_api}[/{'green' if self.use_api else 'yellow'}]")
133
- console.print(f"[bold cyan]Site data download:[/bold cyan] [{'green' if self.download_site_data else 'yellow'}]{self.download_site_data}[/{'green' if self.download_site_data else 'yellow'}]")
120
+ console.print(f"[bold cyan]Fetch domains online:[/bold cyan] [{'green' if self.fetch_domain_online else 'yellow'}]{self.fetch_domain_online}[/{'green' if self.fetch_domain_online else 'yellow'}]")
134
121
  console.print(f"[bold cyan]GitHub configuration validation:[/bold cyan] [{'green' if self.validate_github_config else 'yellow'}]{self.validate_github_config}[/{'green' if self.validate_github_config else 'yellow'}]")
135
122
 
136
123
  def _download_reference_config(self) -> None:
@@ -159,7 +146,7 @@ class ConfigManager:
159
146
  """Validate the local configuration against the reference one and update missing keys."""
160
147
  try:
161
148
  # Download the reference configuration
162
- console.print(f"[bold cyan]Validating configuration with GitHub...[/bold cyan]")
149
+ console.print("[bold cyan]Validating configuration with GitHub...[/bold cyan]")
163
150
  response = requests.get(self.reference_config_url, timeout=8, headers={'User-Agent': get_userAgent()})
164
151
 
165
152
  if not response.ok:
@@ -242,11 +229,9 @@ class ConfigManager:
242
229
  # Make sure control keys maintain local values
243
230
  merged_section = self._deep_merge_configs(merged[key], value)
244
231
 
245
- # Preserve local values for the three critical settings
246
- if 'use_api' in merged[key]:
247
- merged_section['use_api'] = merged[key]['use_api']
248
- if 'download_site_data' in merged[key]:
249
- merged_section['download_site_data'] = merged[key]['download_site_data']
232
+ # Preserve local values for critical settings
233
+ if 'fetch_domain_online' in merged[key]:
234
+ merged_section['fetch_domain_online'] = merged[key]['fetch_domain_online']
250
235
  if 'validate_github_config' in merged[key]:
251
236
  merged_section['validate_github_config'] = merged[key]['validate_github_config']
252
237
 
@@ -259,28 +244,31 @@ class ConfigManager:
259
244
  return merged
260
245
 
261
246
  def _load_site_data(self) -> None:
262
- """Load site data from API or local file."""
263
- if self.use_api:
264
- self._load_site_data_from_api()
247
+ """Load site data based on fetch_domain_online setting."""
248
+ if self.fetch_domain_online:
249
+ self._load_site_data_online()
265
250
  else:
266
251
  self._load_site_data_from_file()
267
252
 
268
- def _load_site_data_from_api(self) -> None:
269
- """Load site data from GitHub."""
253
+ def _load_site_data_online(self) -> None:
254
+ """Load site data from GitHub and update local domains.json file."""
270
255
  domains_github_url = "https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/.github/.domain/domains.json"
271
256
  headers = {
272
257
  "User-Agent": get_userAgent()
273
258
  }
274
259
 
275
260
  try:
276
- console.print("[bold cyan]Retrieving site data from GitHub:[/bold cyan]")
261
+ console.print("[bold cyan]Fetching domains from GitHub:[/bold cyan]")
277
262
  response = requests.get(domains_github_url, timeout=8, headers=headers)
278
263
 
279
264
  if response.ok:
280
265
  self.configSite = response.json()
281
266
 
267
+ # Determine which file to save to
268
+ self._save_domains_to_appropriate_location()
269
+
282
270
  site_count = len(self.configSite) if isinstance(self.configSite, dict) else 0
283
- console.print(f"[bold green]Site data loaded from GitHub:[/bold green] {site_count} streaming services found.")
271
+ console.print(f"[bold green]Domains loaded from GitHub:[/bold green] {site_count} streaming services found.")
284
272
 
285
273
  else:
286
274
  console.print(f"[bold red]GitHub request failed:[/bold red] HTTP {response.status_code}, {response.text[:100]}")
@@ -294,42 +282,129 @@ class ConfigManager:
294
282
  console.print(f"[bold red]GitHub connection error:[/bold red] {str(e)}")
295
283
  self._handle_site_data_fallback()
296
284
 
285
+ def _save_domains_to_appropriate_location(self) -> None:
286
+ """Save domains to the appropriate location based on existing files."""
287
+ if getattr(sys, 'frozen', False):
288
+ # If the application is frozen (e.g., PyInstaller)
289
+ base_path = os.path.dirname(sys.executable)
290
+ else:
291
+ # Use the current working directory where the script is executed
292
+ base_path = os.getcwd()
293
+
294
+ # Check for GitHub structure first
295
+ github_domains_path = os.path.join(base_path, '.github', '.domain', 'domains.json')
296
+
297
+ try:
298
+ if os.path.exists(github_domains_path):
299
+
300
+ # Update existing GitHub structure file
301
+ with open(github_domains_path, 'w', encoding='utf-8') as f:
302
+ json.dump(self.configSite, f, indent=4, ensure_ascii=False)
303
+ console.print(f"[bold green]Domains updated in GitHub structure:[/bold green] {github_domains_path}")
304
+
305
+ elif not os.path.exists(self.domains_path):
306
+
307
+ # Save to root only if it doesn't exist and GitHub structure doesn't exist
308
+ with open(self.domains_path, 'w', encoding='utf-8') as f:
309
+ json.dump(self.configSite, f, indent=4, ensure_ascii=False)
310
+ console.print(f"[bold green]Domains saved to:[/bold green] {self.domains_path}")
311
+
312
+ else:
313
+
314
+ # Root file exists, don't overwrite it
315
+ console.print(f"[bold yellow]Local domains.json already exists, not overwriting:[/bold yellow] {self.domains_path}")
316
+ console.print("[bold yellow]Tip: Delete the file if you want to recreate it from GitHub[/bold yellow]")
317
+
318
+ except Exception as save_error:
319
+ console.print(f"[bold yellow]Warning: Could not save domains to file:[/bold yellow] {str(save_error)}")
320
+
321
+ # Try to save to root as fallback only if it doesn't exist
322
+ if not os.path.exists(self.domains_path):
323
+ try:
324
+ with open(self.domains_path, 'w', encoding='utf-8') as f:
325
+ json.dump(self.configSite, f, indent=4, ensure_ascii=False)
326
+ console.print(f"[bold green]Domains saved to fallback location:[/bold green] {self.domains_path}")
327
+ except Exception as fallback_error:
328
+ console.print(f"[bold red]Failed to save to fallback location:[/bold red] {str(fallback_error)}")
329
+
297
330
  def _load_site_data_from_file(self) -> None:
298
- """Load site data from local file."""
331
+ """Load site data from local domains.json file."""
299
332
  try:
300
- if os.path.exists(self.domains_path):
301
- console.print(f"[bold cyan]Reading domains from:[/bold cyan] {self.domains_path}")
302
- with open(self.domains_path, 'r') as f:
333
+ # Determine the base path
334
+ if getattr(sys, 'frozen', False):
335
+
336
+ # If the application is frozen (e.g., PyInstaller)
337
+ base_path = os.path.dirname(sys.executable)
338
+ else:
339
+
340
+ # Use the current working directory where the script is executed
341
+ base_path = os.getcwd()
342
+
343
+ # Check for GitHub structure first
344
+ github_domains_path = os.path.join(base_path, '.github', '.domain', 'domains.json')
345
+
346
+ if os.path.exists(github_domains_path):
347
+ console.print(f"[bold cyan]Reading domains from GitHub structure:[/bold cyan] {github_domains_path}")
348
+ with open(github_domains_path, 'r', encoding='utf-8') as f:
349
+ self.configSite = json.load(f)
350
+
351
+ site_count = len(self.configSite) if isinstance(self.configSite, dict) else 0
352
+ console.print(f"[bold green]Domains loaded from GitHub structure:[/bold green] {site_count} streaming services")
353
+
354
+ elif os.path.exists(self.domains_path):
355
+ console.print(f"[bold cyan]Reading domains from root:[/bold cyan] {self.domains_path}")
356
+ with open(self.domains_path, 'r', encoding='utf-8') as f:
303
357
  self.configSite = json.load(f)
304
358
 
305
359
  site_count = len(self.configSite) if isinstance(self.configSite, dict) else 0
306
- console.print(f"[bold green]Site data loaded from file:[/bold green] {site_count} streaming services")
360
+ console.print(f"[bold green]Domains loaded from root file:[/bold green] {site_count} streaming services")
307
361
 
308
362
  else:
309
- error_msg = f"domains.json not found at {self.domains_path} and API usage is disabled"
363
+ error_msg = f"domains.json not found in GitHub structure ({github_domains_path}) or root ({self.domains_path}) and fetch_domain_online is disabled"
310
364
  console.print(f"[bold red]Configuration error:[/bold red] {error_msg}")
311
- self._handle_site_data_fallback()
365
+ console.print("[bold yellow]Tip: Set 'fetch_domain_online' to true to download domains from GitHub[/bold yellow]")
366
+ self.configSite = {}
312
367
 
313
368
  except Exception as e:
314
- console.print(f"[bold red]Domain file error:[/bold red] {str(e)}")
315
- self._handle_site_data_fallback()
369
+ console.print(f"[bold red]Local domain file error:[/bold red] {str(e)}")
370
+ self.configSite = {}
316
371
 
317
372
  def _handle_site_data_fallback(self) -> None:
318
373
  """Handle site data fallback in case of error."""
319
- if self.use_api and os.path.exists(self.domains_path):
320
- console.print("[bold yellow]Attempting fallback to local domains.json file...[/bold yellow]")
321
-
374
+ # Determine the base path
375
+ if getattr(sys, 'frozen', False):
376
+
377
+ # If the application is frozen (e.g., PyInstaller)
378
+ base_path = os.path.dirname(sys.executable)
379
+ else:
380
+ # Use the current working directory where the script is executed
381
+ base_path = os.getcwd()
382
+
383
+ # Check for GitHub structure first
384
+ github_domains_path = os.path.join(base_path, '.github', '.domain', 'domains.json')
385
+
386
+ if os.path.exists(github_domains_path):
387
+ console.print("[bold yellow]Attempting fallback to GitHub structure domains.json file...[/bold yellow]")
322
388
  try:
323
- with open(self.domains_path, 'r') as f:
389
+ with open(github_domains_path, 'r', encoding='utf-8') as f:
324
390
  self.configSite = json.load(f)
325
- console.print("[bold green]Fallback to local data successful[/bold green]")
391
+ console.print("[bold green]Fallback to GitHub structure successful[/bold green]")
392
+ return
326
393
  except Exception as fallback_error:
327
- console.print(f"[bold red]Fallback also failed:[/bold red] {str(fallback_error)}")
328
- self.configSite = {}
329
- else:
330
-
331
- # Initialize with an empty dictionary if there are no alternatives
332
- self.configSite = {}
394
+ console.print(f"[bold red]GitHub structure fallback failed:[/bold red] {str(fallback_error)}")
395
+
396
+ if os.path.exists(self.domains_path):
397
+ console.print("[bold yellow]Attempting fallback to root domains.json file...[/bold yellow]")
398
+ try:
399
+ with open(self.domains_path, 'r', encoding='utf-8') as f:
400
+ self.configSite = json.load(f)
401
+ console.print("[bold green]Fallback to root domains successful[/bold green]")
402
+ return
403
+ except Exception as fallback_error:
404
+ console.print(f"[bold red]Root domains fallback failed:[/bold red] {str(fallback_error)}")
405
+
406
+ console.print("[bold red]No local domains.json file available for fallback[/bold red]")
407
+ self.configSite = {}
333
408
 
334
409
  def download_file(self, url: str, filename: str) -> None:
335
410
  """
@@ -412,23 +487,28 @@ class ConfigManager:
412
487
  Any: Converted value
413
488
  """
414
489
  try:
415
- if data_type == int:
490
+ if data_type is int:
416
491
  return int(value)
417
- elif data_type == float:
492
+
493
+ elif data_type is float:
418
494
  return float(value)
419
- elif data_type == bool:
495
+
496
+ elif data_type is bool:
420
497
  if isinstance(value, str):
421
498
  return value.lower() in ("yes", "true", "t", "1")
422
499
  return bool(value)
423
- elif data_type == list:
500
+
501
+ elif data_type is list:
424
502
  if isinstance(value, list):
425
503
  return value
426
504
  if isinstance(value, str):
427
505
  return [item.strip() for item in value.split(',')]
428
506
  return [value]
429
- elif data_type == dict:
507
+
508
+ elif data_type is dict:
430
509
  if isinstance(value, dict):
431
510
  return value
511
+
432
512
  raise ValueError(f"Cannot convert {type(value).__name__} to dict")
433
513
  else:
434
514
  return value
@@ -0,0 +1,201 @@
1
+ # 09.08.25
2
+ from __future__ import annotations
3
+
4
+ import time
5
+ import random
6
+ from typing import Any, Dict, Optional, Union
7
+
8
+
9
+ # External library
10
+ import httpx
11
+
12
+
13
+ # Logic class
14
+ from StreamingCommunity.Util.config_json import config_manager
15
+ from StreamingCommunity.Util.headers import get_userAgent
16
+
17
+
18
+ # Defaults from config
19
+ def _get_timeout() -> int:
20
+ try:
21
+ return int(config_manager.get_int("REQUESTS", "timeout"))
22
+ except Exception:
23
+ return 20
24
+
25
+
26
+ def _get_max_retry() -> int:
27
+ try:
28
+ return int(config_manager.get_int("REQUESTS", "max_retry"))
29
+ except Exception:
30
+ return 3
31
+
32
+
33
+ def _get_verify() -> bool:
34
+ try:
35
+ return bool(config_manager.get_bool("REQUESTS", "verify"))
36
+ except Exception:
37
+ return True
38
+
39
+
40
+ def _get_proxies() -> Optional[Dict[str, str]]:
41
+ """Return proxies dict if present in config and non-empty, else None."""
42
+ try:
43
+ proxies = config_manager.get_dict("REQUESTS", "proxy")
44
+ if not isinstance(proxies, dict):
45
+ return None
46
+ # Normalize empty strings to None (httpx ignores None)
47
+ cleaned: Dict[str, str] = {}
48
+ for scheme, url in proxies.items():
49
+ if isinstance(url, str) and url.strip():
50
+ cleaned[scheme] = url.strip()
51
+ return cleaned or None
52
+ except Exception:
53
+ return None
54
+
55
+
56
def _default_headers(extra: Optional[Dict[str, str]] = None) -> Dict[str, str]:
    """Build default request headers (random User-Agent), letting *extra* override."""
    # Dict-unpacking keeps the same "later keys win" semantics as dict.update().
    return {"User-Agent": get_userAgent(), **(extra or {})}
61
+
62
+
63
def create_client(
    *,
    headers: Optional[Dict[str, str]] = None,
    cookies: Optional[Dict[str, str]] = None,
    timeout: Optional[Union[int, float]] = None,
    verify: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    http2: bool = False,
    follow_redirects: bool = True,
) -> httpx.Client:
    """
    Factory for a configured httpx.Client.

    Parameters:
        headers: Extra headers merged over the default random User-Agent.
        cookies: Initial cookies for the client.
        timeout: Request timeout in seconds; config value when None.
        verify: TLS verification flag; config value when None.
        proxies: Per-scheme proxy mapping, e.g. {"https": "http://host:8080"}
            (assumed config format — confirm against domains/config docs);
            config value when None.
        http2: Enable HTTP/2 support.
        follow_redirects: Automatically follow 3xx responses.

    Returns:
        A ready-to-use httpx.Client.
    """
    resolved_timeout = timeout if timeout is not None else _get_timeout()
    resolved_verify = _get_verify() if verify is None else verify
    proxy_map = proxies if proxies is not None else _get_proxies()

    # BUGFIX: the mapping was previously passed as `proxy=`, but httpx's
    # `proxy` argument accepts a single proxy URL only and rejects a dict
    # (and the legacy `proxies` dict argument was removed in httpx 0.28).
    # Translate the per-scheme mapping into `mounts`, the supported mechanism.
    mounts = None
    if proxy_map:
        mounts = {
            (scheme if scheme.endswith("://") else f"{scheme}://"):
                httpx.HTTPTransport(proxy=url, verify=resolved_verify, http2=http2)
            for scheme, url in proxy_map.items()
        }

    return httpx.Client(
        headers=_default_headers(headers),
        cookies=cookies,
        timeout=resolved_timeout,
        verify=resolved_verify,
        follow_redirects=follow_redirects,
        http2=http2,
        mounts=mounts,
    )
83
+
84
+
85
def create_async_client(
    *,
    headers: Optional[Dict[str, str]] = None,
    cookies: Optional[Dict[str, str]] = None,
    timeout: Optional[Union[int, float]] = None,
    verify: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    http2: bool = False,
    follow_redirects: bool = True,
) -> httpx.AsyncClient:
    """
    Factory for a configured httpx.AsyncClient.

    Parameters:
        headers: Extra headers merged over the default random User-Agent.
        cookies: Initial cookies for the client.
        timeout: Request timeout in seconds; config value when None.
        verify: TLS verification flag; config value when None.
        proxies: Per-scheme proxy mapping, e.g. {"https": "http://host:8080"}
            (assumed config format — confirm against domains/config docs);
            config value when None.
        http2: Enable HTTP/2 support.
        follow_redirects: Automatically follow 3xx responses.

    Returns:
        A ready-to-use httpx.AsyncClient.
    """
    resolved_timeout = timeout if timeout is not None else _get_timeout()
    resolved_verify = _get_verify() if verify is None else verify
    proxy_map = proxies if proxies is not None else _get_proxies()

    # BUGFIX: the `proxies=` keyword used here was deprecated in httpx 0.26
    # and removed in 0.28, so this call crashes on current httpx. Use
    # `mounts` (per-scheme transports), which also matches create_client.
    mounts = None
    if proxy_map:
        mounts = {
            (scheme if scheme.endswith("://") else f"{scheme}://"):
                httpx.AsyncHTTPTransport(proxy=url, verify=resolved_verify, http2=http2)
            for scheme, url in proxy_map.items()
        }

    return httpx.AsyncClient(
        headers=_default_headers(headers),
        cookies=cookies,
        timeout=resolved_timeout,
        verify=resolved_verify,
        follow_redirects=follow_redirects,
        http2=http2,
        mounts=mounts,
    )
105
+
106
+
107
+ def _sleep_with_backoff(attempt: int, base: float = 1.1, cap: float = 10.0) -> None:
108
+ """Exponential backoff with jitter."""
109
+ delay = min(base * (2 ** attempt), cap)
110
+ # Add small jitter to avoid thundering herd
111
+ delay += random.uniform(0.0, 0.25)
112
+ time.sleep(delay)
113
+
114
+
115
def fetch(
    url: str,
    *,
    method: str = "GET",
    params: Optional[Dict[str, Any]] = None,
    data: Optional[Any] = None,
    json: Optional[Any] = None,
    headers: Optional[Dict[str, str]] = None,
    cookies: Optional[Dict[str, str]] = None,
    timeout: Optional[Union[int, float]] = None,
    verify: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    follow_redirects: bool = True,
    http2: bool = False,
    max_retry: Optional[int] = None,
    return_content: bool = False,
) -> Optional[Union[str, bytes]]:
    """
    Perform an HTTP request with retry and exponential backoff.

    Returns the response body — text, or raw bytes when return_content is
    True — or None once every attempt has failed.
    """
    total_attempts = _get_max_retry() if max_retry is None else max_retry

    http = create_client(
        headers=headers,
        cookies=cookies,
        timeout=timeout,
        verify=verify,
        proxies=proxies,
        http2=http2,
        follow_redirects=follow_redirects,
    )
    with http:
        for attempt in range(total_attempts):
            try:
                response = http.request(method, url, params=params, data=data, json=json)
                response.raise_for_status()
                return response.content if return_content else response.text
            except Exception:
                # Out of attempts: give up and fall through to None.
                if attempt + 1 >= total_attempts:
                    break
                _sleep_with_backoff(attempt)
    return None
157
+
158
+
159
async def async_fetch(
    url: str,
    *,
    method: str = "GET",
    params: Optional[Dict[str, Any]] = None,
    data: Optional[Any] = None,
    json: Optional[Any] = None,
    headers: Optional[Dict[str, str]] = None,
    cookies: Optional[Dict[str, str]] = None,
    timeout: Optional[Union[int, float]] = None,
    verify: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    follow_redirects: bool = True,
    http2: bool = False,
    max_retry: Optional[int] = None,
    return_content: bool = False,
) -> Optional[Union[str, bytes]]:
    """
    Async HTTP request with retry and exponential backoff.

    Returns the response body — text, or raw bytes when return_content is
    True — or None once every attempt has failed.
    """
    import asyncio  # local import: keeps module-level deps unchanged for sync-only callers

    attempts = max_retry if max_retry is not None else _get_max_retry()

    async with create_async_client(
        headers=headers,
        cookies=cookies,
        timeout=timeout,
        verify=verify,
        proxies=proxies,
        http2=http2,
        follow_redirects=follow_redirects,
    ) as client:
        for attempt in range(attempts):
            try:
                resp = await client.request(method, url, params=params, data=data, json=json)
                resp.raise_for_status()
                return resp.content if return_content else resp.text
            except Exception:
                if attempt + 1 >= attempts:
                    break
                # BUGFIX: this previously called the blocking _sleep_with_backoff()
                # (time.sleep), freezing the whole event loop between retries.
                # Await the same capped-exponential + jitter delay instead.
                delay = min(1.1 * (2 ** attempt), 10.0) + random.uniform(0.0, 0.25)
                await asyncio.sleep(delay)
    return None
@@ -14,7 +14,7 @@ from StreamingCommunity.Util.config_json import config_manager
14
14
 
15
15
  # Variable
16
16
  console = Console()
17
- CLEAN = config_manager.get_bool('DEFAULT', 'clean_console')
17
+ CLEAN = config_manager.get_bool('DEFAULT', 'show_message')
18
18
  SHOW = config_manager.get_bool('DEFAULT', 'show_message')
19
19
 
20
20
 
@@ -5,7 +5,6 @@ import os
5
5
  import glob
6
6
  import sys
7
7
  import shutil
8
- import hashlib
9
8
  import logging
10
9
  import platform
11
10
  import inspect
@@ -432,10 +431,11 @@ class OsSummary:
432
431
 
433
432
  if not self.mp4decrypt_path:
434
433
  console.log("[yellow]Warning: mp4decrypt not found")
435
-
436
- console.print(f"[cyan]Path: [red]ffmpeg [bold yellow]'{self.ffmpeg_path}'[/bold yellow][white], [red]ffprobe '[bold yellow]{self.ffprobe_path}'[/bold yellow]")
437
- if self.mp4decrypt_path:
438
- console.print(f"[cyan]Path: [red]mp4decrypt [bold yellow]'{self.mp4decrypt_path}'[/bold yellow]")
434
+
435
+ ffmpeg_str = f"'{self.ffmpeg_path}'" if self.ffmpeg_path else "None"
436
+ ffprobe_str = f"'{self.ffprobe_path}'" if self.ffprobe_path else "None"
437
+ mp4decrypt_str = f"'{self.mp4decrypt_path}'" if self.mp4decrypt_path else "None"
438
+ console.print(f"[cyan]Path: [red]ffmpeg [bold yellow]{ffmpeg_str}[/bold yellow][white], [red]ffprobe [bold yellow]{ffprobe_str}[/bold yellow][white], [red]mp4decrypt [bold yellow]{mp4decrypt_str}[/bold yellow]")
439
439
 
440
440
 
441
441
  os_manager = OsManager()
@@ -107,7 +107,7 @@ class TVShowManager:
107
107
  search_func = getattr(module, 'search')
108
108
  search_func(None)
109
109
 
110
- except Exception as e:
110
+ except Exception:
111
111
  logging.error("Error during search execution")
112
112
 
113
113
  finally:
@@ -142,7 +142,7 @@ class TVShowManager:
142
142
 
143
143
  # Handle pagination and user input
144
144
  if self.slice_end < total_items:
145
- self.console.print(f"\n[green]Press [red]Enter [green]for next page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
145
+ self.console.print("\n[green]Press [red]Enter [green]for next page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
146
146
 
147
147
  if not force_int_input:
148
148
  prompt_msg = ("\n[cyan]Insert media index [yellow](e.g., 1), [red]* [cyan]to download all media, "
@@ -184,7 +184,7 @@ class TVShowManager:
184
184
 
185
185
  else:
186
186
  # Last page handling
187
- self.console.print(f"\n[green]You've reached the end. [red]Enter [green]for first page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
187
+ self.console.print("\n[green]You've reached the end. [red]Enter [green]for first page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
188
188
 
189
189
  if not force_int_input:
190
190
  prompt_msg = ("\n[cyan]Insert media index [yellow](e.g., 1), [red]* [cyan]to download all media, "
@@ -4,4 +4,12 @@ from .run import main
4
4
  from .Lib.Downloader.HLS.downloader import HLS_Downloader
5
5
  from .Lib.Downloader.MP4.downloader import MP4_downloader
6
6
  from .Lib.Downloader.TOR.downloader import TOR_downloader
7
- from .Lib.Downloader.DASH.downloader import DASH_Downloader
7
+ from .Lib.Downloader.DASH.downloader import DASH_Downloader
8
+
9
+ __all__ = [
10
+ "main",
11
+ "HLS_Downloader",
12
+ "MP4_downloader",
13
+ "TOR_downloader",
14
+ "DASH_Downloader"
15
+ ]