StreamingCommunity 2.6.1__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; see the advisory details on the registry page for more information.

Files changed (40)
  1. StreamingCommunity/Api/Player/ddl.py +2 -2
  2. StreamingCommunity/Api/Player/maxstream.py +3 -3
  3. StreamingCommunity/Api/Player/supervideo.py +2 -2
  4. StreamingCommunity/Api/Player/vixcloud.py +16 -18
  5. StreamingCommunity/Api/Site/1337xx/site.py +11 -4
  6. StreamingCommunity/Api/Site/1337xx/title.py +3 -4
  7. StreamingCommunity/Api/Site/animeunity/film_serie.py +5 -4
  8. StreamingCommunity/Api/Site/animeunity/site.py +9 -3
  9. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +8 -9
  10. StreamingCommunity/Api/Site/cb01new/site.py +12 -4
  11. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +10 -4
  12. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +2 -2
  13. StreamingCommunity/Api/Site/guardaserie/site.py +17 -11
  14. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +4 -3
  15. StreamingCommunity/Api/Site/mostraguarda/film.py +3 -3
  16. StreamingCommunity/Api/Site/streamingcommunity/film.py +1 -1
  17. StreamingCommunity/Api/Site/streamingcommunity/series.py +2 -2
  18. StreamingCommunity/Api/Site/streamingcommunity/site.py +11 -4
  19. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +8 -9
  20. StreamingCommunity/Api/Template/Util/get_domain.py +11 -72
  21. StreamingCommunity/Api/Template/config_loader.py +6 -2
  22. StreamingCommunity/Lib/Downloader/HLS/downloader.py +2 -2
  23. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +2 -2
  24. StreamingCommunity/Lib/Downloader/HLS/segments.py +5 -5
  25. StreamingCommunity/Lib/Downloader/MP4/downloader.py +2 -2
  26. StreamingCommunity/Upload/update.py +3 -3
  27. StreamingCommunity/Upload/version.py +1 -1
  28. StreamingCommunity/Util/_jsonConfig.py +198 -98
  29. StreamingCommunity/Util/headers.py +8 -1
  30. StreamingCommunity/run.py +13 -9
  31. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/METADATA +39 -23
  32. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/RECORD +36 -40
  33. StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +0 -53
  34. StreamingCommunity/Api/Site/ilcorsaronero/site.py +0 -64
  35. StreamingCommunity/Api/Site/ilcorsaronero/title.py +0 -42
  36. StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +0 -149
  37. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/LICENSE +0 -0
  38. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/WHEEL +0 -0
  39. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/entry_points.txt +0 -0
  40. {StreamingCommunity-2.6.1.dist-info → StreamingCommunity-2.7.0.dist-info}/top_level.txt +0 -0
@@ -10,7 +10,7 @@ from bs4 import BeautifulSoup
10
10
 
11
11
 
12
12
  # Internal utilities
13
- from StreamingCommunity.Util.headers import get_headers
13
+ from StreamingCommunity.Util.headers import get_userAgent
14
14
  from StreamingCommunity.Util._jsonConfig import config_manager
15
15
  from StreamingCommunity.Api.Player.Helper.Vixcloud.util import Season, EpisodeManager
16
16
 
@@ -20,17 +20,16 @@ max_timeout = config_manager.get_int("REQUESTS", "timeout")
20
20
 
21
21
 
22
22
  class ScrapeSerie:
23
- def __init__(self, site_name: str):
23
+ def __init__(self, url):
24
24
  """
25
25
  Initialize the ScrapeSerie class for scraping TV series information.
26
26
 
27
27
  Args:
28
- site_name (str): Name of the streaming site to scrape from
28
+ - url (str): The URL of the streaming site.
29
29
  """
30
30
  self.is_series = False
31
- self.headers = {'user-agent': get_headers()}
32
- self.base_name = site_name
33
- self.domain = config_manager.get_dict('SITE', self.base_name)['domain']
31
+ self.headers = {'user-agent': get_userAgent()}
32
+ self.url = url
34
33
 
35
34
  def setup(self, media_id: int = None, series_name: str = None):
36
35
  """
@@ -58,7 +57,7 @@ class ScrapeSerie:
58
57
  """
59
58
  try:
60
59
  response = httpx.get(
61
- url=f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}",
60
+ url=f"{self.url}/titles/{self.media_id}-{self.series_name}",
62
61
  headers=self.headers,
63
62
  timeout=max_timeout
64
63
  )
@@ -88,9 +87,9 @@ class ScrapeSerie:
88
87
  """
89
88
  try:
90
89
  response = httpx.get(
91
- url=f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
90
+ url=f'{self.url}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
92
91
  headers={
93
- 'User-Agent': get_headers(),
92
+ 'User-Agent': get_userAgent(),
94
93
  'x-inertia': 'true',
95
94
  'x-inertia-version': self.version,
96
95
  },
@@ -7,29 +7,13 @@ from urllib.parse import urlparse, unquote
7
7
 
8
8
  # External libraries
9
9
  import httpx
10
- from googlesearch import search
11
10
 
12
11
 
13
12
  # Internal utilities
14
13
  from StreamingCommunity.Util.headers import get_headers
15
- from StreamingCommunity.Util.console import console, msg
14
+ from StreamingCommunity.Util.console import console
16
15
  from StreamingCommunity.Util._jsonConfig import config_manager
17
16
 
18
- base_headers = {
19
- 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
20
- 'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
21
- 'dnt': '1',
22
- 'priority': 'u=0, i',
23
- 'referer': '',
24
- 'sec-ch-ua-mobile': '?0',
25
- 'sec-ch-ua-platform': '"Windows"',
26
- 'sec-fetch-dest': 'document',
27
- 'sec-fetch-mode': 'navigate',
28
- 'sec-fetch-site': 'same-origin',
29
- 'sec-fetch-user': '?1',
30
- 'upgrade-insecure-requests': '1',
31
- 'user-agent': ''
32
- }
33
17
 
34
18
 
35
19
  def get_tld(url_str):
@@ -58,10 +42,10 @@ def get_base_domain(url_str):
58
42
 
59
43
  # Check if domain has multiple parts separated by dots
60
44
  parts = domain.split('.')
61
- if len(parts) > 2: # Handle subdomains
62
- return '.'.join(parts[:-1]) # Return everything except TLD
45
+ if len(parts) > 2:
46
+ return '.'.join(parts[:-1])
63
47
 
64
- return parts[0] # Return base domain
48
+ return parts[0]
65
49
 
66
50
  except Exception:
67
51
  return None
@@ -83,9 +67,6 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
83
67
  base_domain = get_base_domain(base_url)
84
68
  url_domain = get_base_domain(url)
85
69
 
86
- base_headers['referer'] = url
87
- base_headers['user-agent'] = get_headers()
88
-
89
70
  if base_domain != url_domain:
90
71
  console.print(f"[red]Domain structure mismatch: {url_domain} != {base_domain}")
91
72
  return False, None
@@ -93,13 +74,13 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
93
74
  # Count dots to ensure we don't have extra subdomains
94
75
  base_dots = base_url.count('.')
95
76
  url_dots = url.count('.')
96
- if url_dots > base_dots + 1: # Allow for one extra dot for TLD change
77
+ if url_dots > base_dots + 1:
97
78
  console.print(f"[red]Too many subdomains in URL")
98
79
  return False, None
99
80
 
100
81
  client = httpx.Client(
101
82
  verify=False,
102
- headers=base_headers,
83
+ headers=get_headers(),
103
84
  timeout=max_timeout
104
85
  )
105
86
 
@@ -142,61 +123,19 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
142
123
  def search_domain(site_name: str, base_url: str, get_first: bool = False):
143
124
  """Search for valid domain matching site name and base URL."""
144
125
  max_timeout = config_manager.get_int("REQUESTS", "timeout")
145
- domain = str(config_manager.get_dict("SITE", site_name)['domain'])
146
126
 
147
- # Test initial URL
148
127
  try:
149
128
  is_correct, redirect_tld = validate_url(base_url, base_url, max_timeout)
150
129
 
151
130
  if is_correct:
152
131
  tld = redirect_tld or get_tld(base_url)
153
- config_manager.config['SITE'][site_name]['domain'] = tld
154
- config_manager.write_config()
132
+ config_manager.configSite[site_name]['domain'] = tld
133
+
155
134
  console.print(f"[green]Successfully validated initial URL")
156
135
  return tld, base_url
157
136
 
158
- except Exception as e:
159
- console.print(f"[red]Error testing initial URL: {str(e)}")
160
-
161
- # Google search phase
162
- base_domain = get_base_domain(base_url)
163
- console.print(f"\n[cyan]Searching for alternate domains for[white]: [yellow]{base_domain}")
164
-
165
- try:
166
- search_results = list(search(base_domain, num_results=20, lang="it"))
167
-
168
- base_urls = set()
169
- for url in search_results:
170
- element_url = get_base_url(url)
171
- if element_url:
172
- base_urls.add(element_url)
137
+ else:
138
+ return None, None
173
139
 
174
- # Filter URLs based on domain matching and subdomain count
175
- filtered_results = [
176
- url for url in base_urls
177
- if get_base_domain(url) == base_domain
178
- and url.count('.') <= base_url.count('.') + 1
179
- ]
180
-
181
- for idx, result_url in enumerate(filtered_results, 1):
182
- console.print(f"\n[cyan]Checking result {idx}/{len(filtered_results)}[white]: [yellow]{result_url}")
183
-
184
- is_valid, new_tld = validate_url(result_url, base_url, max_timeout)
185
- if is_valid:
186
- final_tld = new_tld or get_tld(result_url)
187
-
188
- if get_first or msg.ask(
189
- f"\n[cyan]Update site[white] [red]'{site_name}'[cyan] with domain[white] [red]'{final_tld}'",
190
- choices=["y", "n"],
191
- default="y"
192
- ).lower() == "y":
193
-
194
- config_manager.config['SITE'][site_name]['domain'] = final_tld
195
- config_manager.write_config()
196
- return final_tld, f"{base_url}.{final_tld}"
197
-
198
140
  except Exception as e:
199
- console.print(f"[red]Error during search: {str(e)}")
200
-
201
- console.print("[bold red]No valid URLs found matching the base URL.")
202
- return domain, f"{base_url}.{domain}"
141
+ console.print(f"[red]Error testing initial URL: {str(e)}")
@@ -33,7 +33,11 @@ class SiteConstant:
33
33
 
34
34
  @property
35
35
  def DOMAIN_NOW(self):
36
- return config_manager.get_dict('SITE', self.SITE_NAME)['domain']
36
+ return config_manager.get_site(self.SITE_NAME, 'domain')
37
+
38
+ @property
39
+ def FULL_URL(self):
40
+ return config_manager.get_site(self.SITE_NAME, 'full_url').rstrip('/')
37
41
 
38
42
  @property
39
43
  def SERIES_FOLDER(self):
@@ -59,7 +63,7 @@ class SiteConstant:
59
63
  @property
60
64
  def COOKIE(self):
61
65
  try:
62
- return config_manager.get_dict('SITE', self.SITE_NAME)['extra']
66
+ return config_manager.get_dict('SITE_EXTRA', self.SITE_NAME)
63
67
  except KeyError:
64
68
  return None
65
69
 
@@ -14,7 +14,7 @@ import httpx
14
14
 
15
15
  # Internal utilities
16
16
  from StreamingCommunity.Util._jsonConfig import config_manager
17
- from StreamingCommunity.Util.headers import get_headers
17
+ from StreamingCommunity.Util.headers import get_userAgent
18
18
  from StreamingCommunity.Util.console import console, Panel
19
19
  from StreamingCommunity.Util.os import (
20
20
  compute_sha1_hash,
@@ -55,7 +55,7 @@ TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
55
55
  class HLSClient:
56
56
  """Client for making HTTP requests to HLS endpoints with retry mechanism."""
57
57
  def __init__(self):
58
- self.headers = {'User-Agent': get_headers()}
58
+ self.headers = {'User-Agent': get_userAgent()}
59
59
 
60
60
  def request(self, url: str, return_content: bool = False) -> Optional[httpx.Response]:
61
61
  """
@@ -12,7 +12,7 @@ import httpx
12
12
 
13
13
  # Internal utilities
14
14
  from StreamingCommunity.Util._jsonConfig import config_manager
15
- from StreamingCommunity.Util.headers import get_headers
15
+ from StreamingCommunity.Util.headers import get_userAgent
16
16
  from StreamingCommunity.Util.os import os_manager
17
17
 
18
18
 
@@ -46,7 +46,7 @@ class ProxyManager:
46
46
 
47
47
  try:
48
48
  with httpx.Client(proxies=proxy, verify=False) as client:
49
- response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
49
+ response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_userAgent()})
50
50
 
51
51
  if response.status_code == 200:
52
52
  logging.info(f"Proxy {proxy} is working.")
@@ -22,7 +22,7 @@ from tqdm import tqdm
22
22
  # Internal utilities
23
23
  from StreamingCommunity.Util.color import Colors
24
24
  from StreamingCommunity.Util.console import console
25
- from StreamingCommunity.Util.headers import get_headers, random_headers
25
+ from StreamingCommunity.Util.headers import get_userAgent, random_headers
26
26
  from StreamingCommunity.Util._jsonConfig import config_manager
27
27
  from StreamingCommunity.Util.os import os_manager
28
28
 
@@ -102,7 +102,7 @@ class M3U8_Segments:
102
102
  self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
103
103
 
104
104
  try:
105
- client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
105
+ client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
106
106
  response = httpx.get(url=key_uri, **client_params)
107
107
  response.raise_for_status()
108
108
 
@@ -145,7 +145,7 @@ class M3U8_Segments:
145
145
  def get_info(self) -> None:
146
146
  if self.is_index_url:
147
147
  try:
148
- client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
148
+ client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
149
149
  response = httpx.get(self.url, **client_params)
150
150
  response.raise_for_status()
151
151
 
@@ -186,8 +186,8 @@ class M3U8_Segments:
186
186
 
187
187
  def _get_http_client(self, index: int = None):
188
188
  client_params = {
189
- #'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_headers()},
190
- 'headers': {'User-Agent': get_headers()},
189
+ #'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_userAgent()},
190
+ 'headers': {'User-Agent': get_userAgent()},
191
191
  'timeout': SEGMENT_MAX_TIMEOUT,
192
192
  'follow_redirects': True,
193
193
  'http2': False
@@ -15,7 +15,7 @@ from tqdm import tqdm
15
15
 
16
16
 
17
17
  # Internal utilities
18
- from StreamingCommunity.Util.headers import get_headers
18
+ from StreamingCommunity.Util.headers import get_userAgent
19
19
  from StreamingCommunity.Util.color import Colors
20
20
  from StreamingCommunity.Util.console import console, Panel
21
21
  from StreamingCommunity.Util._jsonConfig import config_manager
@@ -99,7 +99,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
99
99
  if headers_:
100
100
  headers.update(headers_)
101
101
  else:
102
- headers['User-Agent'] = get_headers()
102
+ headers['User-Agent'] = get_userAgent()
103
103
 
104
104
  except Exception as header_err:
105
105
  logging.error(f"Error preparing headers: {header_err}")
@@ -13,7 +13,7 @@ import httpx
13
13
  from .version import __version__, __author__, __title__
14
14
  from StreamingCommunity.Util.console import console
15
15
  from StreamingCommunity.Util._jsonConfig import config_manager
16
- from StreamingCommunity.Util.headers import get_headers
16
+ from StreamingCommunity.Util.headers import get_userAgent
17
17
 
18
18
 
19
19
 
@@ -31,14 +31,14 @@ def update():
31
31
  try:
32
32
  response_reposity = httpx.get(
33
33
  url=f"https://api.github.com/repos/{__author__}/{__title__}",
34
- headers={'user-agent': get_headers()},
34
+ headers={'user-agent': get_userAgent()},
35
35
  timeout=config_manager.get_int("REQUESTS", "timeout"),
36
36
  follow_redirects=True
37
37
  ).json()
38
38
 
39
39
  response_releases = httpx.get(
40
40
  url=f"https://api.github.com/repos/{__author__}/{__title__}/releases",
41
- headers={'user-agent': get_headers()},
41
+ headers={'user-agent': get_userAgent()},
42
42
  timeout=config_manager.get_int("REQUESTS", "timeout"),
43
43
  follow_redirects=True
44
44
  ).json()
@@ -1,5 +1,5 @@
1
1
  __title__ = 'StreamingCommunity'
2
- __version__ = '2.6.0'
2
+ __version__ = '2.7.0'
3
3
  __author__ = 'Arrowar'
4
4
  __description__ = 'A command-line program to download film'
5
5
  __copyright__ = 'Copyright 2024'