StreamingCommunity 2.6.0__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of StreamingCommunity might be problematic.

Files changed (47)
  1. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +26 -2
  2. StreamingCommunity/Api/Player/ddl.py +2 -2
  3. StreamingCommunity/Api/Player/maxstream.py +3 -3
  4. StreamingCommunity/Api/Player/supervideo.py +2 -2
  5. StreamingCommunity/Api/Player/vixcloud.py +16 -18
  6. StreamingCommunity/Api/Site/1337xx/site.py +11 -4
  7. StreamingCommunity/Api/Site/1337xx/title.py +3 -4
  8. StreamingCommunity/Api/Site/animeunity/film_serie.py +5 -4
  9. StreamingCommunity/Api/Site/animeunity/site.py +9 -3
  10. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +8 -9
  11. StreamingCommunity/Api/Site/cb01new/site.py +12 -4
  12. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +10 -4
  13. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +2 -2
  14. StreamingCommunity/Api/Site/guardaserie/site.py +17 -11
  15. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +4 -3
  16. StreamingCommunity/Api/Site/mostraguarda/film.py +11 -6
  17. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +3 -5
  18. StreamingCommunity/Api/Site/streamingcommunity/film.py +1 -1
  19. StreamingCommunity/Api/Site/streamingcommunity/series.py +6 -7
  20. StreamingCommunity/Api/Site/streamingcommunity/site.py +14 -63
  21. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +9 -24
  22. StreamingCommunity/Api/Template/Util/get_domain.py +11 -72
  23. StreamingCommunity/Api/Template/Util/manage_ep.py +28 -23
  24. StreamingCommunity/Api/Template/config_loader.py +6 -2
  25. StreamingCommunity/Lib/Downloader/HLS/downloader.py +2 -2
  26. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +2 -2
  27. StreamingCommunity/Lib/Downloader/HLS/segments.py +5 -5
  28. StreamingCommunity/Lib/Downloader/MP4/downloader.py +2 -2
  29. StreamingCommunity/Upload/update.py +3 -3
  30. StreamingCommunity/Upload/version.py +1 -1
  31. StreamingCommunity/Util/_jsonConfig.py +209 -96
  32. StreamingCommunity/Util/headers.py +8 -1
  33. StreamingCommunity/Util/table.py +6 -6
  34. StreamingCommunity/run.py +13 -9
  35. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/METADATA +39 -24
  36. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/RECORD +40 -47
  37. StreamingCommunity/Api/Site/altadefinizionegratis/__init__.py +0 -76
  38. StreamingCommunity/Api/Site/altadefinizionegratis/film.py +0 -76
  39. StreamingCommunity/Api/Site/altadefinizionegratis/site.py +0 -109
  40. StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +0 -53
  41. StreamingCommunity/Api/Site/ilcorsaronero/site.py +0 -64
  42. StreamingCommunity/Api/Site/ilcorsaronero/title.py +0 -42
  43. StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +0 -149
  44. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/LICENSE +0 -0
  45. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/WHEEL +0 -0
  46. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/entry_points.txt +0 -0
  47. {StreamingCommunity-2.6.0.dist-info → StreamingCommunity-2.7.0.dist-info}/top_level.txt +0 -0

StreamingCommunity/Api/Site/streamingcommunity/film.py

@@ -48,7 +48,7 @@ def download_film(select_title: MediaItem) -> str:
     console.print(f"[yellow]Download: [red]{select_title.name} \n")
 
     # Init class
-    video_source = VideoSource(site_constant.SITE_NAME, False)
+    video_source = VideoSource(site_constant.FULL_URL, False)
     video_source.setup(select_title.id)
 
     # Retrieve scws and if available master playlist

StreamingCommunity/Api/Site/streamingcommunity/series.py

@@ -132,14 +132,13 @@ def download_episode(index_season_selected: int, scrape_serie: ScrapeSerie, vide
         if stopped:
             break
 
-def download_series(select_season: MediaItem, version: str) -> None:
+def download_series(select_season: MediaItem) -> None:
     """
     Download episodes of a TV series based on user selection.
 
     Parameters:
        - select_season (MediaItem): Selected media item (TV series).
        - domain (str): Domain from which to download.
-       - version (str): Version of the site.
     """
     if site_constant.TELEGRAM_BOT:
         bot = get_bot_instance()
@@ -148,11 +147,11 @@ def download_series(select_season: MediaItem, version: str) -> None:
         start_message()
 
     # Init class
-    scrape_serie = ScrapeSerie(site_constant.SITE_NAME)
-    video_source = VideoSource(site_constant.SITE_NAME, True)
+    scrape_serie = ScrapeSerie(site_constant.FULL_URL)
+    video_source = VideoSource(site_constant.FULL_URL, True)
 
     # Setup video source
-    scrape_serie.setup(version, select_season.id, select_season.slug)
+    scrape_serie.setup(select_season.id, select_season.slug)
     video_source.setup(select_season.id)
 
     # Collect information about seasons
@@ -194,11 +193,11 @@ def download_series(select_season: MediaItem, version: str) -> None:
         if len(list_season_select) > 1 or index_season_selected == "*":
 
             # Download all episodes if multiple seasons are selected or if '*' is used
-            download_episode(i_season, scrape_serie, video_source, download_all=True)
+            download_episode(scrape_serie.season_manager.seasonsData.get_season_by_number(i_season-1).number, scrape_serie, video_source, download_all=True)
         else:
 
             # Otherwise, let the user select specific episodes for the single season
-            download_episode(i_season, scrape_serie, video_source, download_all=False)
+            download_episode(scrape_serie.season_manager.seasonsData.get_season_by_number(i_season-1).number, scrape_serie, video_source, download_all=False)
 
     if site_constant.TELEGRAM_BOT:
         bot.send_message(f"Finito di scaricare tutte le serie e episodi", None)
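
Note: the last two changes above replace the raw loop index with a lookup through the season manager. One plausible reading (the Season internals are not shown in this diff) is that the user's selection is a 1-based position while the site's season numbers may be non-contiguous, so the position has to be resolved to the real season number. A self-contained sketch of that assumption, with stand-in classes that only mirror the attribute path used in the diff:

    # Sketch only: SeasonsData/SeasonData are stand-ins; get_season_by_number
    # is assumed to be a positional lookup, which would explain i_season - 1.
    class SeasonData:
        def __init__(self, number: int):
            self.number = number

    class SeasonsData:
        def __init__(self, seasons):
            self.seasons = seasons

        def get_season_by_number(self, index: int) -> SeasonData:
            return self.seasons[index]

    seasons = SeasonsData([SeasonData(1), SeasonData(2), SeasonData(4)])
    i_season = 3                                              # third season the user picked
    print(seasons.get_season_by_number(i_season - 1).number)  # -> 4, the real season number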
@@ -1,19 +1,16 @@
1
1
  # 10.12.23
2
2
 
3
- import json
4
- import logging
5
- import secrets
3
+ import sys
6
4
 
7
5
 
8
6
  # External libraries
9
7
  import httpx
10
- from bs4 import BeautifulSoup
11
8
 
12
9
 
13
10
  # Internal utilities
14
11
  from StreamingCommunity.Util.console import console
15
12
  from StreamingCommunity.Util._jsonConfig import config_manager
16
- from StreamingCommunity.Util.headers import get_headers
13
+ from StreamingCommunity.Util.headers import get_userAgent
17
14
  from StreamingCommunity.Util.table import TVShowManager
18
15
  from StreamingCommunity.TelegramHelp.telegram_bot import get_bot_instance
19
16
 
@@ -32,72 +29,26 @@ max_timeout = config_manager.get_int("REQUESTS", "timeout")
32
29
  disable_searchDomain = config_manager.get_bool("DEFAULT", "disable_searchDomain")
33
30
 
34
31
 
35
- def get_version(domain: str):
32
+ def title_search(title_search: str) -> int:
36
33
  """
37
- Extracts the version from the HTML text of a webpage.
38
-
34
+ Search for titles based on a search query.
35
+
39
36
  Parameters:
40
- - domain (str): The domain of the site.
37
+ - title_search (str): The title to search for.
41
38
 
42
39
  Returns:
43
- str: The version extracted from the webpage.
44
- """
45
- try:
46
- response = httpx.get(
47
- url=f"https://{site_constant.SITE_NAME}.{domain}/",
48
- headers={'User-Agent': get_headers()},
49
- timeout=max_timeout
50
- )
51
- response.raise_for_status()
52
-
53
- # Parse request to site
54
- soup = BeautifulSoup(response.text, "html.parser")
55
-
56
- # Extract version
57
- version = json.loads(soup.find("div", {"id": "app"}).get("data-page"))['version']
58
- #console.print(f"[cyan]Get version [white]=> [red]{version} \n")
59
-
60
- return version
61
-
62
- except Exception as e:
63
- logging.error(f"Error extracting version: {e}")
64
- raise
65
-
66
-
67
- def get_version_and_domain():
68
- """
69
- Retrieve the current version and domain of the site.
70
-
71
- This function performs the following steps:
72
- - Determines the correct domain to use for the site by searching for a specific meta tag.
73
- - Fetches the content of the site to extract the version information.
40
+ int: The number of titles found.
74
41
  """
75
42
  domain_to_use = site_constant
76
43
 
77
44
  if not disable_searchDomain:
78
- domain_to_use, base_url = search_domain(site_constant.SITE_NAME, f"https://{site_constant.SITE_NAME}.{site_constant.DOMAIN_NOW}")
79
-
80
- try:
81
- version = get_version(domain_to_use)
82
- except:
83
- #console.print("[green]Auto generate version ...")
84
- #version = secrets.token_hex(32 // 2)
85
- version = None
86
-
87
- return version, domain_to_use
88
-
45
+ domain_to_use, base_url = search_domain(site_constant.SITE_NAME, site_constant.FULL_URL)
89
46
 
90
- def title_search(title_search: str, domain: str) -> int:
91
- """
92
- Search for titles based on a search query.
93
-
94
- Parameters:
95
- - title_search (str): The title to search for.
96
- - domain (str): The domain to search on.
47
+ if domain_to_use is None or base_url is None:
48
+ console.print("[bold red]❌ Error: Unable to determine valid domain or base URL.[/bold red]")
49
+ console.print("[yellow]The service might be temporarily unavailable or the domain may have changed.[/yellow]")
50
+ sys.exit(1)
97
51
 
98
- Returns:
99
- int: The number of titles found.
100
- """
101
52
  if site_constant.TELEGRAM_BOT:
102
53
  bot = get_bot_instance()
103
54
 
@@ -106,8 +57,8 @@ def title_search(title_search: str, domain: str) -> int:
106
57
 
107
58
  try:
108
59
  response = httpx.get(
109
- url=f"https://{site_constant.SITE_NAME}.{domain}/api/search?q={title_search.replace(' ', '+')}",
110
- headers={'user-agent': get_headers()},
60
+ url=f"{site_constant.FULL_URL}/api/search?q={title_search.replace(' ', '+')}",
61
+ headers={'user-agent': get_userAgent()},
111
62
  timeout=max_timeout
112
63
  )
113
64
  response.raise_for_status()
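
Note: the recurring pattern in this hunk (and in film.py above) is that request URLs are no longer pieced together from SITE_NAME plus a bare domain at each call site; they are built on a single canonical base URL taken from configuration. A small self-contained comparison, where the site name and domain values are placeholders rather than real ones:

    # Old 2.6.0 style: scheme + site name + TLD assembled at each call site
    site_name, domain = "streamingcommunity", "example"
    old_url = f"https://{site_name}.{domain}/api/search?q=dune"

    # New 2.7.0 style: one canonical base URL (site_constant.FULL_URL) from config
    full_url = "https://streamingcommunity.example"
    new_url = f"{full_url}/api/search?q=dune"

    assert old_url == new_url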

StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py

@@ -10,7 +10,7 @@ from bs4 import BeautifulSoup
 
 
 # Internal utilities
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util._jsonConfig import config_manager
 from StreamingCommunity.Api.Player.Helper.Vixcloud.util import Season, EpisodeManager
 
@@ -20,28 +20,25 @@ max_timeout = config_manager.get_int("REQUESTS", "timeout")
 
 
 class ScrapeSerie:
-    def __init__(self, site_name: str):
+    def __init__(self, url):
         """
         Initialize the ScrapeSerie class for scraping TV series information.
 
         Args:
-            site_name (str): Name of the streaming site to scrape from
+            - url (str): The URL of the streaming site.
         """
         self.is_series = False
-        self.headers = {'user-agent': get_headers()}
-        self.base_name = site_name
-        self.domain = config_manager.get_dict('SITE', self.base_name)['domain']
+        self.headers = {'user-agent': get_userAgent()}
+        self.url = url
 
-    def setup(self, version: str = None, media_id: int = None, series_name: str = None):
+    def setup(self, media_id: int = None, series_name: str = None):
         """
         Set up the scraper with specific media details.
 
         Args:
-            version (str, optional): Site version for request headers
             media_id (int, optional): Unique identifier for the media
             series_name (str, optional): Name of the TV series
         """
-        self.version = version
         self.media_id = media_id
 
         # If series name is provided, initialize series-specific managers
@@ -60,7 +57,7 @@ class ScrapeSerie:
         """
         try:
            response = httpx.get(
-                url=f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}",
+                url=f"{self.url}/titles/{self.media_id}-{self.series_name}",
                headers=self.headers,
                timeout=max_timeout
            )
@@ -70,18 +67,6 @@ class ScrapeSerie:
            soup = BeautifulSoup(response.text, "html.parser")
            json_response = json.loads(soup.find("div", {"id": "app"}).get("data-page"))
            self.version = json_response['version']
-
-            """
-            response = httpx.post(
-                url=f'https://{self.base_name}.{self.domain}/api/titles/preview/{self.media_id}',
-                headers={'User-Agent': get_headers()}
-            )
-            response.raise_for_status()
-
-
-            # Extract seasons from JSON response
-            json_response = response.json()
-            """
 
            # Collect info about season
            self.season_manager = Season(json_response.get("props").get("title"))
@@ -102,9 +87,9 @@ class ScrapeSerie:
         """
         try:
            response = httpx.get(
-                url=f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
+                url=f'{self.url}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
                headers={
-                    'User-Agent': get_headers(),
+                    'User-Agent': get_userAgent(),
                    'x-inertia': 'true',
                    'x-inertia-version': self.version,
                },
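
Note: setup() no longer receives a version argument; the Inertia version is instead read from the title page itself, as the hunk at -70,18 shows. Inertia.js applications embed their page state in a data-page attribute, so the value can be recovered with the same BeautifulSoup calls the class already uses. A self-contained illustration with a canned HTML snippet standing in for the real response:

    import json
    from bs4 import BeautifulSoup

    # Canned stand-in for the real title page response.
    html = """<div id="app" data-page='{"version": "abc123", "props": {"title": {}}}'></div>"""

    soup = BeautifulSoup(html, "html.parser")
    payload = json.loads(soup.find("div", {"id": "app"}).get("data-page"))
    print(payload["version"])  # "abc123", later sent back as the x-inertia-version header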

StreamingCommunity/Api/Template/Util/get_domain.py

@@ -7,29 +7,13 @@ from urllib.parse import urlparse, unquote
 
 # External libraries
 import httpx
-from googlesearch import search
 
 
 # Internal utilities
 from StreamingCommunity.Util.headers import get_headers
-from StreamingCommunity.Util.console import console, msg
+from StreamingCommunity.Util.console import console
 from StreamingCommunity.Util._jsonConfig import config_manager
 
-base_headers = {
-    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
-    'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
-    'dnt': '1',
-    'priority': 'u=0, i',
-    'referer': '',
-    'sec-ch-ua-mobile': '?0',
-    'sec-ch-ua-platform': '"Windows"',
-    'sec-fetch-dest': 'document',
-    'sec-fetch-mode': 'navigate',
-    'sec-fetch-site': 'same-origin',
-    'sec-fetch-user': '?1',
-    'upgrade-insecure-requests': '1',
-    'user-agent': ''
-}
 
 
 def get_tld(url_str):
@@ -58,10 +42,10 @@ def get_base_domain(url_str):
 
         # Check if domain has multiple parts separated by dots
         parts = domain.split('.')
-        if len(parts) > 2:  # Handle subdomains
-            return '.'.join(parts[:-1])  # Return everything except TLD
+        if len(parts) > 2:
+            return '.'.join(parts[:-1])
 
-        return parts[0]  # Return base domain
+        return parts[0]
 
     except Exception:
         return None
@@ -83,9 +67,6 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
     base_domain = get_base_domain(base_url)
     url_domain = get_base_domain(url)
 
-    base_headers['referer'] = url
-    base_headers['user-agent'] = get_headers()
-
     if base_domain != url_domain:
         console.print(f"[red]Domain structure mismatch: {url_domain} != {base_domain}")
         return False, None
@@ -93,13 +74,13 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
     # Count dots to ensure we don't have extra subdomains
     base_dots = base_url.count('.')
     url_dots = url.count('.')
-    if url_dots > base_dots + 1:  # Allow for one extra dot for TLD change
+    if url_dots > base_dots + 1:
         console.print(f"[red]Too many subdomains in URL")
         return False, None
 
     client = httpx.Client(
         verify=False,
-        headers=base_headers,
+        headers=get_headers(),
         timeout=max_timeout
     )
@@ -142,61 +123,19 @@ def search_domain(site_name: str, base_url: str, get_first: bool = False):
 def search_domain(site_name: str, base_url: str, get_first: bool = False):
     """Search for valid domain matching site name and base URL."""
     max_timeout = config_manager.get_int("REQUESTS", "timeout")
-    domain = str(config_manager.get_dict("SITE", site_name)['domain'])
 
-    # Test initial URL
     try:
         is_correct, redirect_tld = validate_url(base_url, base_url, max_timeout)
 
         if is_correct:
             tld = redirect_tld or get_tld(base_url)
-            config_manager.config['SITE'][site_name]['domain'] = tld
-            config_manager.write_config()
+            config_manager.configSite[site_name]['domain'] = tld
+
             console.print(f"[green]Successfully validated initial URL")
             return tld, base_url
 
-    except Exception as e:
-        console.print(f"[red]Error testing initial URL: {str(e)}")
-
-    # Google search phase
-    base_domain = get_base_domain(base_url)
-    console.print(f"\n[cyan]Searching for alternate domains for[white]: [yellow]{base_domain}")
-
-    try:
-        search_results = list(search(base_domain, num_results=20, lang="it"))
-
-        base_urls = set()
-        for url in search_results:
-            element_url = get_base_url(url)
-            if element_url:
-                base_urls.add(element_url)
+        else:
+            return None, None
 
-        # Filter URLs based on domain matching and subdomain count
-        filtered_results = [
-            url for url in base_urls
-            if get_base_domain(url) == base_domain
-            and url.count('.') <= base_url.count('.') + 1
-        ]
-
-        for idx, result_url in enumerate(filtered_results, 1):
-            console.print(f"\n[cyan]Checking result {idx}/{len(filtered_results)}[white]: [yellow]{result_url}")
-
-            is_valid, new_tld = validate_url(result_url, base_url, max_timeout)
-            if is_valid:
-                final_tld = new_tld or get_tld(result_url)
-
-                if get_first or msg.ask(
-                    f"\n[cyan]Update site[white] [red]'{site_name}'[cyan] with domain[white] [red]'{final_tld}'",
-                    choices=["y", "n"],
-                    default="y"
-                ).lower() == "y":
-
-                    config_manager.config['SITE'][site_name]['domain'] = final_tld
-                    config_manager.write_config()
-                    return final_tld, f"{base_url}.{final_tld}"
-
     except Exception as e:
-        console.print(f"[red]Error during search: {str(e)}")
-
-    console.print("[bold red]No valid URLs found matching the base URL.")
-    return domain, f"{base_url}.{domain}"
+        console.print(f"[red]Error testing initial URL: {str(e)}")

StreamingCommunity/Api/Template/Util/manage_ep.py

@@ -6,7 +6,7 @@ from typing import List
 
 
 # Internal utilities
-from StreamingCommunity.Util.console import console
+from StreamingCommunity.Util.console import console, msg
 from StreamingCommunity.Util.os import os_manager
 from StreamingCommunity.Util._jsonConfig import config_manager
 from StreamingCommunity.Util.table import TVShowManager
@@ -47,28 +47,33 @@ def manage_selection(cmd_insert: str, max_count: int) -> List[int]:
     Returns:
         list_selection (List[int]): List of selected items.
     """
-    list_selection = []
-    logging.info(f"Command insert: {cmd_insert}, end index: {max_count + 1}")
-
-    # For a single number (e.g., '5')
-    if cmd_insert.isnumeric():
-        list_selection.append(int(cmd_insert))
-
-    # For a range (e.g., '5-12')
-    elif "-" in cmd_insert:
-        start, end = map(str.strip, cmd_insert.split('-'))
-        start = int(start)
-        end = int(end) if end.isnumeric() else max_count
-
-        list_selection = list(range(start, end + 1))
-
-    # For all items ('*')
-    elif cmd_insert == "*":
-        list_selection = list(range(1, max_count + 1))
-
-    else:
-        raise ValueError("Invalid input format")
-
+    while True:
+        list_selection = []
+        logging.info(f"Command insert: {cmd_insert}, end index: {max_count + 1}")
+
+        # For a single number (e.g., '5')
+        if cmd_insert.isnumeric():
+            list_selection.append(int(cmd_insert))
+            break
+
+        # For a range (e.g., '5-12')
+        elif "-" in cmd_insert:
+            try:
+                start, end = map(str.strip, cmd_insert.split('-'))
+                start = int(start)
+                end = int(end) if end.isnumeric() else max_count
+                list_selection = list(range(start, end + 1))
+                break
+            except ValueError:
+                pass
+
+        # For all items ('*')
+        elif cmd_insert == "*":
+            list_selection = list(range(1, max_count + 1))
+            break
+
+        cmd_insert = msg.ask("[red]Invalid input. Please enter a valid command: ")
+
     logging.info(f"List return: {list_selection}")
     return list_selection
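
Note: the practical difference is that invalid input no longer raises ValueError("Invalid input format"); manage_selection keeps re-prompting through msg.ask until the command parses. The accepted grammar is unchanged and can be summarized with a standalone re-implementation (illustration only, not the package function):

    def parse_selection(cmd: str, max_count: int):
        if cmd.isnumeric():                       # single item: '5'
            return [int(cmd)]
        if "-" in cmd:                            # range: '5-12', or open-ended '5-'
            start, end = map(str.strip, cmd.split("-"))
            stop = int(end) if end.isnumeric() else max_count
            return list(range(int(start), stop + 1))
        if cmd == "*":                            # everything
            return list(range(1, max_count + 1))
        return None                               # 2.7.0 re-prompts here instead of raising

    print(parse_selection("5", 10))    # [5]
    print(parse_selection("5-8", 10))  # [5, 6, 7, 8]
    print(parse_selection("5-", 10))   # [5, 6, 7, 8, 9, 10]
    print(parse_selection("*", 3))     # [1, 2, 3]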
 

StreamingCommunity/Api/Template/config_loader.py

@@ -33,7 +33,11 @@ class SiteConstant:
 
     @property
     def DOMAIN_NOW(self):
-        return config_manager.get_dict('SITE', self.SITE_NAME)['domain']
+        return config_manager.get_site(self.SITE_NAME, 'domain')
+
+    @property
+    def FULL_URL(self):
+        return config_manager.get_site(self.SITE_NAME, 'full_url').rstrip('/')
 
     @property
     def SERIES_FOLDER(self):
@@ -59,7 +63,7 @@ class SiteConstant:
     @property
     def COOKIE(self):
         try:
-            return config_manager.get_dict('SITE', self.SITE_NAME)['extra']
+            return config_manager.get_dict('SITE_EXTRA', self.SITE_NAME)
         except KeyError:
             return None
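
Note: the new accessors imply a reshaped configuration: per-site entries gain a full_url field read through config_manager.get_site(), and per-site cookies move out of a nested 'extra' key into a top-level SITE_EXTRA section (consistent with the _jsonConfig.py rewrite, +209 -96, listed above). A hypothetical fragment consistent with these property implementations; every value, and every key except 'SITE', 'domain', 'full_url', and 'SITE_EXTRA', is a placeholder:

    config = {
        "SITE": {
            "streamingcommunity": {
                "domain": "example",                              # placeholder TLD
                "full_url": "https://streamingcommunity.example/",
            },
        },
        "SITE_EXTRA": {
            "ddlstreamitaly": {"cookie_name": "cookie_value"},    # placeholder cookie
        },
    }

    # FULL_URL strips the trailing slash before paths are joined onto it:
    print(config["SITE"]["streamingcommunity"]["full_url"].rstrip("/"))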
 

StreamingCommunity/Lib/Downloader/HLS/downloader.py

@@ -14,7 +14,7 @@ import httpx
 
 # Internal utilities
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util.console import console, Panel
 from StreamingCommunity.Util.os import (
     compute_sha1_hash,
@@ -55,7 +55,7 @@ TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
 class HLSClient:
     """Client for making HTTP requests to HLS endpoints with retry mechanism."""
     def __init__(self):
-        self.headers = {'User-Agent': get_headers()}
+        self.headers = {'User-Agent': get_userAgent()}
 
     def request(self, url: str, return_content: bool = False) -> Optional[httpx.Response]:
         """

StreamingCommunity/Lib/Downloader/HLS/proxyes.py

@@ -12,7 +12,7 @@ import httpx
 
 # Internal utilities
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util.os import os_manager
 
 
@@ -46,7 +46,7 @@ class ProxyManager:
 
        try:
            with httpx.Client(proxies=proxy, verify=False) as client:
-                response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
+                response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_userAgent()})
 
                if response.status_code == 200:
                    logging.info(f"Proxy {proxy} is working.")

StreamingCommunity/Lib/Downloader/HLS/segments.py

@@ -22,7 +22,7 @@ from tqdm import tqdm
 # Internal utilities
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.console import console
-from StreamingCommunity.Util.headers import get_headers, random_headers
+from StreamingCommunity.Util.headers import get_userAgent, random_headers
 from StreamingCommunity.Util._jsonConfig import config_manager
 from StreamingCommunity.Util.os import os_manager
 
@@ -102,7 +102,7 @@ class M3U8_Segments:
         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
 
         try:
-            client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
+            client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
             response = httpx.get(url=key_uri, **client_params)
             response.raise_for_status()
 
@@ -145,7 +145,7 @@ class M3U8_Segments:
     def get_info(self) -> None:
         if self.is_index_url:
            try:
-                client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
+                client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
                response = httpx.get(self.url, **client_params)
                response.raise_for_status()
 
@@ -186,8 +186,8 @@ class M3U8_Segments:
 
     def _get_http_client(self, index: int = None):
         client_params = {
-            #'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_headers()},
-            'headers': {'User-Agent': get_headers()},
+            #'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_userAgent()},
+            'headers': {'User-Agent': get_userAgent()},
             'timeout': SEGMENT_MAX_TIMEOUT,
             'follow_redirects': True,
             'http2': False

StreamingCommunity/Lib/Downloader/MP4/downloader.py

@@ -15,7 +15,7 @@ from tqdm import tqdm
 
 
 # Internal utilities
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.console import console, Panel
 from StreamingCommunity.Util._jsonConfig import config_manager
@@ -99,7 +99,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
         if headers_:
             headers.update(headers_)
         else:
-            headers['User-Agent'] = get_headers()
+            headers['User-Agent'] = get_userAgent()
 
     except Exception as header_err:
         logging.error(f"Error preparing headers: {header_err}")

StreamingCommunity/Upload/update.py

@@ -13,7 +13,7 @@ import httpx
 from .version import __version__, __author__, __title__
 from StreamingCommunity.Util.console import console
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 
 
 
@@ -31,14 +31,14 @@ def update():
    try:
        response_reposity = httpx.get(
            url=f"https://api.github.com/repos/{__author__}/{__title__}",
-            headers={'user-agent': get_headers()},
+            headers={'user-agent': get_userAgent()},
            timeout=config_manager.get_int("REQUESTS", "timeout"),
            follow_redirects=True
        ).json()
 
        response_releases = httpx.get(
            url=f"https://api.github.com/repos/{__author__}/{__title__}/releases",
-            headers={'user-agent': get_headers()},
+            headers={'user-agent': get_userAgent()},
            timeout=config_manager.get_int("REQUESTS", "timeout"),
            follow_redirects=True
        ).json()

StreamingCommunity/Upload/version.py

@@ -1,5 +1,5 @@
 __title__ = 'StreamingCommunity'
-__version__ = '2.6.0'
+__version__ = '2.7.0'
 __author__ = 'Arrowar'
 __description__ = 'A command-line program to download film'
 __copyright__ = 'Copyright 2024'