StreamingCommunity 2.3.0__py3-none-any.whl → 2.4.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- StreamingCommunity/Api/Site/1337xx/site.py +1 -1
- StreamingCommunity/Api/Site/{altadefinizione → altadefinizionegratis}/site.py +1 -1
- StreamingCommunity/Api/Site/animeunity/__init__.py +1 -1
- StreamingCommunity/Api/Site/animeunity/costant.py +3 -3
- StreamingCommunity/Api/Site/animeunity/film_serie.py +2 -2
- StreamingCommunity/Api/Site/animeunity/site.py +3 -3
- StreamingCommunity/Api/Site/cb01new/site.py +1 -1
- StreamingCommunity/Api/Site/ddlstreamitaly/site.py +1 -1
- StreamingCommunity/Api/Site/guardaserie/site.py +1 -1
- StreamingCommunity/Api/Site/ilcorsaronero/site.py +1 -1
- StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +1 -1
- StreamingCommunity/Api/Site/streamingcommunity/series.py +1 -1
- StreamingCommunity/Api/Site/streamingcommunity/site.py +7 -4
- StreamingCommunity/Api/Template/Util/get_domain.py +160 -94
- StreamingCommunity/Api/Template/site.py +1 -1
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +11 -1
- StreamingCommunity/Lib/Downloader/HLS/segments.py +17 -8
- StreamingCommunity/Lib/Downloader/TOR/downloader.py +3 -3
- StreamingCommunity/Lib/M3U8/decryptor.py +1 -0
- StreamingCommunity/Lib/M3U8/estimator.py +2 -2
- StreamingCommunity/Lib/M3U8/url_fixer.py +6 -0
- StreamingCommunity/Lib/TMBD/tmdb.py +1 -1
- StreamingCommunity/Upload/version.py +1 -1
- StreamingCommunity/Util/_jsonConfig.py +43 -19
- StreamingCommunity/Util/ffmpeg_installer.py +31 -14
- StreamingCommunity/Util/headers.py +15 -2
- StreamingCommunity/Util/logger.py +9 -0
- StreamingCommunity/Util/os.py +100 -138
- StreamingCommunity/Util/table.py +6 -6
- StreamingCommunity/run.py +61 -7
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/METADATA +86 -19
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/RECORD +39 -39
- /StreamingCommunity/Api/Site/{altadefinizione → altadefinizionegratis}/__init__.py +0 -0
- /StreamingCommunity/Api/Site/{altadefinizione → altadefinizionegratis}/costant.py +0 -0
- /StreamingCommunity/Api/Site/{altadefinizione → altadefinizionegratis}/film.py +0 -0
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/LICENSE +0 -0
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/WHEEL +0 -0
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/entry_points.txt +0 -0
- {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.4.0.dist-info}/top_level.txt +0 -0
@@ -43,7 +43,7 @@ def title_search(word_to_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     # Construct the full site URL and load the search page
     try:
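Note: the one-line change above recurs in nearly every scraper in this release. In 2.3.0 the probe URL passed to search_domain was a bare f"https://{SITE_NAME}" with no TLD, which can never name a real host; 2.4.0 appends the TLD currently stored in config.json. A minimal sketch of the composition, using hypothetical values (in the package, SITE_NAME is the scraper's folder name and DOMAIN_NOW is read via config_manager):

SITE_NAME = "1337xx"   # hypothetical; the package derives this from the folder name
DOMAIN_NOW = "to"      # hypothetical; the package reads this from config.json ('SITE' section)

probe_url = f"https://{SITE_NAME}.{DOMAIN_NOW}"
print(probe_url)       # https://1337xx.to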
@@ -43,7 +43,7 @@ def title_search(title_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     # Send request to search for title
     client = httpx.Client()
@@ -27,7 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
         string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()
 
     # Search on database
-    len_database = title_search(
+    len_database = title_search(string_to_search)
 
     # Return list of elements
     if get_onylDatabase:
@@ -11,9 +11,9 @@ SITE_NAME = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
 ROOT_PATH = config_manager.get('DEFAULT', 'root_path')
 DOMAIN_NOW = config_manager.get_dict('SITE', SITE_NAME)['domain']
 
-SERIES_FOLDER = os.path.join(ROOT_PATH, config_manager.get('DEFAULT', 'serie_folder_name'))
 MOVIE_FOLDER = os.path.join(ROOT_PATH, config_manager.get('DEFAULT', 'movie_folder_name'))
+ANIME_FOLDER = os.path.join(ROOT_PATH, config_manager.get('DEFAULT', 'anime_folder_name'))
 
 if config_manager.get_bool("DEFAULT", "add_siteName"):
-
-
+    MOVIE_FOLDER = os.path.join(ROOT_PATH, SITE_NAME, config_manager.get('DEFAULT', 'movie_folder_name'))
+    ANIME_FOLDER = os.path.join(ROOT_PATH, config_manager.get('DEFAULT', 'anime_folder_name'))
@@ -23,7 +23,7 @@ from StreamingCommunity.Api.Player.vixcloud import VideoSourceAnime
 
 
 # Variable
-from .costant import SITE_NAME,
+from .costant import SITE_NAME, ANIME_FOLDER, MOVIE_FOLDER
 
 
 
@@ -54,7 +54,7 @@ def download_episode(index_select: int, scrape_serie: ScrapeSerieAnime, video_so
 
     if scrape_serie.is_series:
         mp4_path = os_manager.get_sanitize_path(
-            os.path.join(
+            os.path.join(ANIME_FOLDER, scrape_serie.series_name)
         )
     else:
         mp4_path = os_manager.get_sanitize_path(
@@ -110,7 +110,7 @@ def title_search(title: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://www.{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://www.{SITE_NAME}.{DOMAIN_NOW}")
 
     data = get_token(SITE_NAME, domain_to_use)
 
@@ -128,8 +128,8 @@ def title_search(title: str) -> int:
     }
 
     # Prepare JSON data to be sent in the request
-    json_data =
-        'title': title
+    json_data = {
+        'title': title
     }
 
     # Send a POST request to the API endpoint for live search
@@ -42,7 +42,7 @@ def title_search(word_to_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     response = httpx.get(
         url=f"https://{SITE_NAME}.{domain_to_use}/?s={word_to_search}",
@@ -46,7 +46,7 @@ def title_search(word_to_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     # Send request to search for titles
     try:
@@ -43,7 +43,7 @@ def title_search(word_to_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     # Send request to search for titles
     try:
@@ -38,7 +38,7 @@ async def title_search(word_to_search: str) -> int:
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
     # Create scraper and collect result
     print("\n")
@@ -54,7 +54,7 @@ class IlCorsaroNeroScraper:
             return response.text
 
         except Exception as e:
-            logging.error(f"Error fetching {url}: {e}")
+            logging.error(f"Error fetching from {url}: {e}")
             return None
 
     def parse_torrents(self, html: str) -> List[Dict[str, str]]:
@@ -199,7 +199,7 @@ def display_episodes_list(scrape_serie) -> str:
     # Run the table and handle user input
     last_command = table_show_manager.run()
 
-    if last_command == "q":
+    if last_command == "q" or last_command == "quit":
        console.print("\n[red]Quit [white]...")
        sys.exit(0)
 
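Note: this q/quit check is duplicated verbatim in two more hunks below. A membership test is the conventional shorthand for the shipped chained comparison (a sketch of the equivalent form, not what the release ships):

last_command = "quit"   # e.g. the value returned by table_show_manager.run()

# Shipped:    if last_command == "q" or last_command == "quit":
# Equivalent:
if last_command in ("q", "quit"):
    print("Quit ...")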
@@ -1,6 +1,5 @@
 # 10.12.23
 
-import sys
 import json
 import logging
 import secrets
@@ -81,10 +80,14 @@ def get_version_and_domain():
     domain_to_use = DOMAIN_NOW
 
     if not disable_searchDomain:
-        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
+        domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}.{DOMAIN_NOW}")
 
-
-
+    try:
+        version = get_version(domain_to_use)
+    except:
+        console.print("[green]Auto generate version ...")
+        version = secrets.token_hex(32 // 2)
+
     return version, domain_to_use
 
 
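Note: when get_version fails, the new code substitutes a random stand-in. secrets.token_hex(n) returns 2*n hex characters, so token_hex(32 // 2) always yields a 32-character string:

import secrets

version = secrets.token_hex(32 // 2)   # 16 random bytes -> 32 hex characters
print(version, len(version))           # e.g. '9f1c...', always 32

The shipped except: is bare, so any failure inside get_version, network errors included, falls through to the generated token.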
@@ -1,6 +1,8 @@
 # 18.06.24
 
-
+import ssl
+import time
+from urllib.parse import urlparse, unquote
 
 
 # External libraries
@@ -14,124 +16,188 @@ from StreamingCommunity.Util.console import console, msg
 from StreamingCommunity.Util._jsonConfig import config_manager
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+base_headers = {
+    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
+    'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
+    'dnt': '1',
+    'priority': 'u=0, i',
+    'referer': '',
+    'sec-ch-ua-mobile': '?0',
+    'sec-ch-ua-platform': '"Windows"',
+    'sec-fetch-dest': 'document',
+    'sec-fetch-mode': 'navigate',
+    'sec-fetch-site': 'same-origin',
+    'sec-fetch-user': '?1',
+    'upgrade-insecure-requests': '1',
+    'user-agent': ''
+}
+
+
+def get_tld(url_str):
+    """Extract the TLD (Top-Level Domain) from the URL."""
+    try:
+        url_str = unquote(url_str)
+        parsed = urlparse(url_str)
+        domain = parsed.netloc.lower()
+
+        if domain.startswith('www.'):
+            domain = domain[4:]
+        parts = domain.split('.')
+
+        return parts[-1] if len(parts) >= 2 else None
 
-
-
-        console.print(f"[red]Check {check_num} failed: Access forbidden (403)")
-        return False
-    if response.status_code >= 400:
-        console.print(f"[red]Check {check_num} failed: HTTP {response.status_code}")
-        return False
-    console.print(f"[green]Check {check_num} passed: HTTP {response.status_code}")
-    return True
+    except Exception:
+        return None
 
+def get_base_domain(url_str):
+    """Extract base domain without protocol, www and path."""
     try:
+        parsed = urlparse(url_str)
+        domain = parsed.netloc.lower()
+        if domain.startswith('www.'):
+            domain = domain[4:]
+
+        # Check if domain has multiple parts separated by dots
+        parts = domain.split('.')
+        if len(parts) > 2:  # Handle subdomains
+            return '.'.join(parts[:-1])  # Return everything except TLD
 
-    #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return parts[0]  # Return base domain
+
+    except Exception:
+        return None
+
+def get_base_url(url_str):
+    """Extract base URL including protocol and domain, removing path and query parameters."""
+    try:
+        parsed = urlparse(url_str)
+        return f"{parsed.scheme}://{parsed.netloc}"
+
+    except Exception:
+        return None
+
+def validate_url(url, base_url, max_timeout, max_retries=3, sleep=3):
+    """Validate if URL is accessible and matches expected base domain."""
+    console.print(f"\n[cyan]Starting validation for URL[white]: [yellow]{url}")
+
+    # Verify URL structure matches base_url structure
+    base_domain = get_base_domain(base_url)
+    url_domain = get_base_domain(url)
+
+    base_headers['referer'] = url
+    base_headers['user-agent'] = get_headers()
+
+    if base_domain != url_domain:
+        console.print(f"[red]Domain structure mismatch: {url_domain} != {base_domain}")
+        return False, None
+
+    # Count dots to ensure we don't have extra subdomains
+    base_dots = base_url.count('.')
+    url_dots = url.count('.')
+    if url_dots > base_dots + 1:  # Allow for one extra dot for TLD change
+        console.print(f"[red]Too many subdomains in URL")
+        return False, None
+
+    client = httpx.Client(
+        verify=False,
+        headers=base_headers,
+        timeout=max_timeout
+    )
+
+    for retry in range(max_retries):
+        try:
+            time.sleep(sleep)
 
-
-
-
+            # Initial check without redirects
+            response = client.get(url, follow_redirects=False)
+            if response.status_code == 403:
+                console.print(f"[red]Check failed (403) - Attempt {retry + 1}/{max_retries}")
+                continue
+
+            if response.status_code >= 400:
+                console.print(f"[red]Check failed: HTTP {response.status_code}")
+                return False, None
+
+            # Follow redirects and verify final domain
+            final_response = client.get(url, follow_redirects=True)
+            final_domain = get_base_domain(str(final_response.url))
+            console.print(f"[cyan]Redirect url: [red]{final_response.url}")
 
-
-
-
-            console.print(f"[red]Domain mismatch: Final domain does not match expected base URL")
-            console.print(f"Expected: [yellow]{expected_base}")
-            return False
+            if final_domain != base_domain:
+                console.print(f"[red]Final domain mismatch: {final_domain} != {base_domain}")
+                return False, None
 
-
-
+            new_tld = get_tld(str(final_response.url))
+            if new_tld != get_tld(url):
+                return True, new_tld
+
+            return True, None
 
-
-
-
+        except (httpx.RequestError, ssl.SSLError) as e:
+            console.print(f"[red]Connection error: {str(e)}")
+            time.sleep(sleep)
+            continue
+
+    return False, None
 
 def search_domain(site_name: str, base_url: str, get_first: bool = False):
-    """
-    Search for valid domain matching site name and base URL.
-    """
+    """Search for valid domain matching site name and base URL."""
     max_timeout = config_manager.get_int("REQUESTS", "timeout")
     domain = str(config_manager.get_dict("SITE", site_name)['domain'])
-    test_url = f"{base_url}.{domain}"
-
-    console.print(f"\n[cyan]Testing initial URL[white]: [yellow]{test_url}")
 
+    # Test initial URL
     try:
-
-
-
+        is_correct, redirect_tld = validate_url(base_url, base_url, max_timeout)
+
+        if is_correct:
+            tld = redirect_tld or get_tld(base_url)
             config_manager.config['SITE'][site_name]['domain'] = tld
             config_manager.write_config()
             console.print(f"[green]Successfully validated initial URL")
-            return tld,
+            return tld, base_url
 
     except Exception as e:
         console.print(f"[red]Error testing initial URL: {str(e)}")
 
     # Google search phase
-
-    console.print(f"\n[cyan]
-    search_results = list(search(query, num_results=15, lang="it"))
+    base_domain = get_base_domain(base_url)
+    console.print(f"\n[cyan]Searching for alternate domains for[white]: [yellow]{base_domain}")
 
-
-
+    try:
+        search_results = list(search(base_domain, num_results=20, lang="it"))
+        base_urls = set()
+
+        for url in search_results:
+            base_url = get_base_url(url)
+            if base_url:
+                base_urls.add(base_url)
 
-
-
-
+        # Filter URLs based on domain matching and subdomain count
+        filtered_results = [
+            url for url in base_urls
+            if get_base_domain(url) == base_domain
+            and url.count('.') <= base_url.count('.') + 1
+        ]
+
+        for idx, result_url in enumerate(filtered_results, 1):
+            console.print(f"\n[cyan]Checking result {idx}/{len(filtered_results)}[white]: [yellow]{result_url}")
 
-
-
-
-
-
-
-
-
-
+            is_valid, new_tld = validate_url(result_url, base_url, max_timeout)
+            if is_valid:
+                final_tld = new_tld or get_tld(result_url)
+
+                if get_first or msg.ask(
+                    f"\n[cyan]Update site[white] [red]'{site_name}'[cyan] with domain[white] [red]'{final_tld}'",
+                    choices=["y", "n"],
+                    default="y"
+                ).lower() == "y":
+
+                    config_manager.config['SITE'][site_name]['domain'] = final_tld
+                    config_manager.write_config()
+                    return final_tld, f"{base_url}.{final_tld}"
+
+    except Exception as e:
+        console.print(f"[red]Error during search: {str(e)}")
 
     console.print("[bold red]No valid URLs found matching the base URL.")
     return domain, f"{base_url}.{domain}"
@@ -74,7 +74,7 @@ def get_select_title(table_show_manager, media_search_manager):
     table_show_manager.clear()
 
     # Handle user's quit command
-    if last_command == "q":
+    if last_command == "q" or last_command == "quit":
        console.print("\n[red]Quit [white]...")
        sys.exit(0)
 
@@ -11,7 +11,7 @@ import httpx
 
 # Internal utilities
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.console import console, Panel
+from StreamingCommunity.Util.console import console, Panel
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.os import (
     compute_sha1_hash,
@@ -284,6 +284,7 @@ class ContentExtractor:
 
         print("")
 
+
 class DownloadTracker:
     def __init__(self, path_manager: PathManager):
         """
@@ -815,6 +816,9 @@ class HLS_Downloader:
         else:
             console.log("[red]Error: m3u8_index is None")
 
+        # Reset
+        self._reset()
+
     def _clean(self, out_path: str) -> None:
         """
         Cleans up temporary files and folders after downloading and processing.
@@ -953,3 +957,9 @@ class HLS_Downloader:
 
         # Clean up temporary files and directories
         self._clean(self.content_joiner.converted_out_path)
+
+    def _reset(self):
+        global list_MissingTs, m3u8_url_fixer
+
+        m3u8_url_fixer.reset_playlist()
+        list_MissingTs = []
@@ -131,6 +131,7 @@ class M3U8_Segments:
         # Convert the content of the response to hexadecimal and then to bytes
         hex_content = binascii.hexlify(response.content).decode('utf-8')
         byte_content = bytes.fromhex(hex_content)
+        logging.info(f"URI: Hex content: {hex_content}, Byte content: {byte_content}")
 
         #console.print(f"[cyan]Find key: [red]{hex_content}")
         return byte_content
@@ -160,6 +161,7 @@ class M3U8_Segments:
 
         iv = m3u8_parser.keys.get('iv')
         method = m3u8_parser.keys.get('method')
+        logging.info(f"M3U8_Decryption - IV: {iv}, method: {method}")
 
         # Create a decryption object with the key and set the method
         self.decryption = M3U8_Decryption(key, iv, method)
@@ -308,7 +310,9 @@ class M3U8_Segments:
 
             except Exception as e:
                 logging.error(f"Decryption failed for segment {index}: {str(e)}")
-
+                self.interrupt_flag.set()  # Interrupt the download process
+                self.stop_event.set()  # Trigger the stopping event for all threads
+                break  # Stop the current task immediately
 
             # Update progress and queue
             self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
@@ -537,10 +541,14 @@ class M3U8_Segments:
         progress_bar.close()
 
         # Final verification
-
-
-
-
+        try:
+            final_completion = (len(self.downloaded_segments) / total_segments) * 100
+            if final_completion < 99.9:  # Less than 99.9% complete
+                missing = set(range(total_segments)) - self.downloaded_segments
+                raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
+
+        except:
+            pass
 
         # Verify output file
         if not os.path.exists(self.tmp_file_path):
@@ -550,15 +558,16 @@ class M3U8_Segments:
         if file_size == 0:
             raise Exception("Output file is empty")
 
-        # Display additional
-        if self.
+        # Display additional info when there is missing stream file
+        if self.info_nFailed > 0:
 
             # Get expected time
             ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
             ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
             console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
 
-
+        if self.info_nRetry >= len(self.segments) * 0.3:
+            console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")
 
         # Info to return
         return {'type': type, 'nFailed': self.info_nFailed}
@@ -139,9 +139,8 @@ class TOR_downloader:
 
         # Formatta e stampa le informazioni
         console.print("\n[bold green]🔗 Dettagli Torrent Aggiunto:[/bold green]")
-        console.print(f"[yellow]
+        console.print(f"[yellow]Name:[/yellow] {torrent_info.get('name', torrent_name)}")
         console.print(f"[yellow]Hash:[/yellow] {torrent_info['hash']}")
-        console.print(f"[yellow]Dimensione:[/yellow] {internet_manager.format_file_size(torrent_info.get('size'))}")
         print()
 
         # Salva l'hash per usi successivi e il path
@@ -288,7 +287,8 @@ class TOR_downloader:
                 raise
 
             # Delete the torrent data
-
+            time.sleep(5)
+            self.qb.delete_permanently(self.qb.torrents()[-1]['hash'])
             return True
 
         except Exception as e:
@@ -199,7 +199,7 @@ class M3U8_Ts_Estimator:
 
         if TQDM_USE_LARGE_BAR:
             speed_data = self.get_average_speed()
-            logging.debug(f"Speed data for progress bar: {speed_data}")
+            #logging.debug(f"Speed data for progress bar: {speed_data}")
 
             if len(speed_data) >= 2:
                 average_internet_speed = speed_data[0]
@@ -223,7 +223,7 @@ class M3U8_Ts_Estimator:
             )
 
             progress_counter.set_postfix_str(progress_str)
-            logging.debug(f"Updated progress bar: {progress_str}")
+            #logging.debug(f"Updated progress bar: {progress_str}")
 
         except Exception as e:
             logging.error(f"Error updating progress bar: {str(e)}")
@@ -75,7 +75,7 @@ def get_select_title(table_show_manager, generic_obj):
     table_show_manager.clear()
 
     # Handle user's quit command
-    if last_command == "q":
+    if last_command == "q" or last_command == "quit":
        Console.print("\n[red]Quit [white]...")
        sys.exit(0)
 