StreamingCommunity 2.9.7__py3-none-any.whl → 2.9.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of StreamingCommunity has been flagged as potentially problematic.
- StreamingCommunity/Api/Player/ddl.py +2 -10
- StreamingCommunity/Api/Player/mediapolisvod.py +64 -0
- StreamingCommunity/Api/Player/sweetpixel.py +3 -3
- StreamingCommunity/Api/Player/vixcloud.py +4 -9
- StreamingCommunity/Api/Site/1337xx/__init__.py +1 -1
- StreamingCommunity/Api/Site/altadefinizione/__init__.py +23 -7
- StreamingCommunity/Api/Site/altadefinizione/film.py +0 -1
- StreamingCommunity/Api/Site/altadefinizione/series.py +66 -70
- StreamingCommunity/Api/Site/altadefinizione/site.py +2 -1
- StreamingCommunity/Api/Site/altadefinizione/util/ScrapeSerie.py +37 -2
- StreamingCommunity/Api/Site/animeunity/__init__.py +29 -10
- StreamingCommunity/Api/Site/animeunity/film.py +40 -0
- StreamingCommunity/Api/Site/animeunity/serie.py +153 -0
- StreamingCommunity/Api/Site/animeunity/site.py +1 -2
- StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +15 -0
- StreamingCommunity/Api/Site/animeworld/__init__.py +25 -12
- StreamingCommunity/Api/Site/animeworld/film.py +63 -0
- StreamingCommunity/Api/Site/animeworld/serie.py +25 -22
- StreamingCommunity/Api/Site/animeworld/site.py +2 -1
- StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py +32 -5
- StreamingCommunity/Api/Site/cb01new/__init__.py +1 -1
- StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +1 -1
- StreamingCommunity/Api/Site/ddlstreamitaly/series.py +31 -32
- StreamingCommunity/Api/Site/ddlstreamitaly/site.py +2 -2
- StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +30 -2
- StreamingCommunity/Api/Site/guardaserie/__init__.py +21 -7
- StreamingCommunity/Api/Site/guardaserie/series.py +55 -53
- StreamingCommunity/Api/Site/guardaserie/site.py +3 -2
- StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +27 -1
- StreamingCommunity/Api/Site/raiplay/__init__.py +93 -0
- StreamingCommunity/Api/Site/raiplay/film.py +65 -0
- StreamingCommunity/Api/Site/raiplay/series.py +162 -0
- StreamingCommunity/Api/Site/raiplay/site.py +166 -0
- StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +127 -0
- StreamingCommunity/Api/Site/streamingcommunity/__init__.py +29 -22
- StreamingCommunity/Api/Site/streamingcommunity/film.py +1 -2
- StreamingCommunity/Api/Site/streamingcommunity/series.py +76 -90
- StreamingCommunity/Api/Site/streamingcommunity/site.py +1 -3
- StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +41 -15
- StreamingCommunity/Api/Template/site.py +2 -2
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +1 -1
- StreamingCommunity/Lib/Downloader/HLS/segments.py +2 -3
- StreamingCommunity/Lib/Downloader/MP4/downloader.py +2 -1
- StreamingCommunity/Lib/FFmpeg/util.py +47 -17
- StreamingCommunity/Lib/M3U8/estimator.py +50 -21
- StreamingCommunity/Lib/M3U8/parser.py +26 -6
- StreamingCommunity/Upload/update.py +22 -3
- StreamingCommunity/Upload/version.py +1 -1
- StreamingCommunity/Util/config_json.py +425 -274
- StreamingCommunity/Util/table.py +4 -2
- StreamingCommunity/run.py +1 -1
- {streamingcommunity-2.9.7.dist-info → streamingcommunity-2.9.9.dist-info}/METADATA +1 -1
- streamingcommunity-2.9.9.dist-info/RECORD +91 -0
- {streamingcommunity-2.9.7.dist-info → streamingcommunity-2.9.9.dist-info}/WHEEL +1 -1
- StreamingCommunity/Api/Site/animeunity/film_serie.py +0 -181
- StreamingCommunity/Api/Site/mostraguarda/__init__.py +0 -73
- StreamingCommunity/Api/Site/mostraguarda/film.py +0 -93
- streamingcommunity-2.9.7.dist-info/RECORD +0 -85
- {streamingcommunity-2.9.7.dist-info → streamingcommunity-2.9.9.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-2.9.7.dist-info → streamingcommunity-2.9.9.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-2.9.7.dist-info → streamingcommunity-2.9.9.dist-info}/top_level.txt +0 -0
@@ -20,31 +20,21 @@ max_timeout = config_manager.get_int("REQUESTS", "timeout")
 
 
 class GetSerieInfo:
-    def __init__(self, url):
+    def __init__(self, url, media_id: int = None, series_name: str = None):
         """
         Initialize the GetSerieInfo class for scraping TV series information.
 
         Args:
            - url (str): The URL of the streaming site.
+           - media_id (int, optional): Unique identifier for the media
+           - series_name (str, optional): Name of the TV series
         """
         self.is_series = False
         self.headers = {'user-agent': get_userAgent()}
         self.url = url
-
-        # Initialize the SeasonManager
-        self.seasons_manager = SeasonManager()
-
-    def setup(self, media_id: int = None, series_name: str = None):
-        """
-        Set up the scraper with specific media details.
-
-        Args:
-            media_id (int, optional): Unique identifier for the media
-            series_name (str, optional): Name of the TV series
-        """
         self.media_id = media_id
+        self.seasons_manager = SeasonManager()
 
-        # If series name is provided, initialize series-specific properties
         if series_name is not None:
             self.is_series = True
             self.series_name = series_name
@@ -127,4 +117,40 @@ class GetSerieInfo:
 
         except Exception as e:
             logging.error(f"Error collecting episodes for season {number_season}: {e}")
-            raise
+            raise
+
+    # ------------- FOR GUI -------------
+    def getNumberSeason(self) -> int:
+        """
+        Get the total number of seasons available for the series.
+        """
+        if not self.seasons_manager.seasons:
+            self.collect_info_title()
+
+        return len(self.seasons_manager.seasons)
+
+    def getEpisodeSeasons(self, season_number: int) -> list:
+        """
+        Get all episodes for a specific season.
+        """
+        season = self.seasons_manager.get_season_by_number(season_number)
+
+        if not season:
+            logging.error(f"Season {season_number} not found")
+            return []
+
+        if not season.episodes.episodes:
+            self.collect_info_season(season_number)
+
+        return season.episodes.episodes
+
+    def selectEpisode(self, season_number: int, episode_index: int) -> dict:
+        """
+        Get information for a specific episode in a specific season.
+        """
+        episodes = self.getEpisodeSeasons(season_number)
+        if not episodes or episode_index < 0 or episode_index >= len(episodes):
+            logging.error(f"Episode index {episode_index} is out of range for season {season_number}")
+            return None
+
+        return episodes[episode_index]
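The three helpers above (getNumberSeason, getEpisodeSeasons, selectEpisode) give GUI callers a lazy, index-based way to walk a series without driving the interactive CLI flow. A minimal usage sketch, assuming this hunk belongs to one of the per-site util/ScrapeSerie.py modules; the import path, URL and media_id below are placeholders, not values taken from the package:

    # Hypothetical usage; module path, URL and media_id are illustrative only.
    from StreamingCommunity.Api.Site.streamingcommunity.util.ScrapeSerie import GetSerieInfo

    scraper = GetSerieInfo("https://example.org/titles/1234-some-show",
                           media_id=1234, series_name="Some Show")

    # Season and episode data are collected lazily on first access.
    # Assumes 1-based season numbering.
    for season_number in range(1, scraper.getNumberSeason() + 1):
        episodes = scraper.getEpisodeSeasons(season_number)
        print(f"Season {season_number}: {len(episodes)} episodes")

    # Returns None when the index is out of range for that season.
    episode = scraper.selectEpisode(season_number=1, episode_index=0)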
@@ -10,7 +10,7 @@ from rich.console import Console
 # Variable
 console = Console()
 available_colors = ['red', 'magenta', 'yellow', 'cyan', 'green', 'blue', 'white']
-column_to_hide = ['Slug', 'Sub_ita', 'Last_air_date', 'Seasons_count', 'Url']
+column_to_hide = ['Slug', 'Sub_ita', 'Last_air_date', 'Seasons_count', 'Url', 'Image', 'Path_id']
 
 
 def get_select_title(table_show_manager, media_search_manager):
@@ -81,4 +81,4 @@ def get_select_title(table_show_manager, media_search_manager):
 
     else:
         console.print("\n[red]Wrong index")
-        sys.exit(0)
+        sys.exit(0)
@@ -514,7 +514,7 @@ class HLS_Downloader:
         for item in self.download_manager.missing_segments:
             if int(item['nFailed']) >= 1:
                 missing_ts = True
-                missing_info += f"[red]TS Failed: {item['nFailed']} {item['type']} tracks[/red]
+                missing_info += f"[red]TS Failed: {item['nFailed']} {item['type']} tracks[/red]"
 
         file_size = internet_manager.format_file_size(os.path.getsize(self.path_manager.output_path))
         duration = print_duration_table(self.path_manager.output_path, description=False, return_string=True)
@@ -41,10 +41,9 @@ REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
 DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
 MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
-MAX_INTERRUPT_COUNT = 3
 SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
 TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
-
+MAX_INTERRUPT_COUNT = 3
 
 # Variable
 console = Console()
@@ -160,7 +159,7 @@ class M3U8_Segments:
         if self.is_index_url:
             try:
                 client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
-                response = httpx.get(self.url, **client_params)
+                response = httpx.get(self.url, **client_params, follow_redirects=True)
                 response.raise_for_status()
 
                 self.parse_data(response.text)
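Redirect handling is off by default in httpx, so an index playlist served behind a 301/302 would previously come back as the redirect response instead of the manifest. A standalone sketch of the option enabled here; the URL is a placeholder:

    import httpx

    # follow_redirects=True makes httpx transparently follow 3xx responses.
    response = httpx.get(
        "https://example.com/master.m3u8",          # placeholder URL
        headers={"User-Agent": "Mozilla/5.0"},
        timeout=15,
        follow_redirects=True,
    )
    response.raise_for_status()
    print(response.text[:120])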
@@ -21,7 +21,7 @@ from rich.panel import Panel
 from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.config_json import config_manager
-from StreamingCommunity.Util.os import internet_manager
+from StreamingCommunity.Util.os import internet_manager, os_manager
 from StreamingCommunity.TelegramHelp.telegram_bot import get_bot_instance
 
 
@@ -80,6 +80,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
         bot = get_bot_instance()
         console.log("####")
 
+    path = os_manager.get_sanitize_path(path)
     if os.path.exists(path):
         console.log("[red]Output file already exists.")
         if TELEGRAM_BOT:
@@ -138,26 +138,53 @@ def get_ffprobe_info(file_path):
 
     Returns:
         dict: A dictionary containing the format name and a list of codec names.
+            Returns None if file does not exist or ffprobe crashes.
     """
-
-
-
-            stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True
-        )
-        output = result.stdout
-        info = json.loads(output)
+    if not os.path.exists(file_path):
+        logging.error(f"File not found: {file_path}")
+        return None
 
-
-
+    try:
+        # Use subprocess.Popen instead of run to better handle crashes
+        cmd = [get_ffprobe_path(), '-v', 'error', '-show_format', '-show_streams', '-print_format', 'json', file_path]
+        logging.info(f"FFmpeg command: {cmd}")
 
-
-
-
-
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) as proc:
+            stdout, stderr = proc.communicate()
+
+            if proc.returncode != 0:
+                logging.error(f"FFprobe failed with return code {proc.returncode} for file {file_path}")
+                if stderr:
+                    logging.error(f"FFprobe stderr: {stderr}")
+                return {
+                    'format_name': None,
+                    'codec_names': []
+                }
+
+            # Make sure we have valid JSON before parsing
+            if not stdout or not stdout.strip():
+                logging.warning(f"FFprobe returned empty output for file {file_path}")
+                return {
+                    'format_name': None,
+                    'codec_names': []
+                }
+
+            info = json.loads(stdout)
+
+            format_name = info['format']['format_name'] if 'format' in info else None
+            codec_names = [stream['codec_name'] for stream in info['streams']] if 'streams' in info else []
+
+            return {
+                'format_name': format_name,
+                'codec_names': codec_names
+            }
 
     except Exception as e:
-        logging.error(f"Failed to
-        return
+        logging.error(f"Failed to get ffprobe info for file {file_path}: {e}")
+        return {
+            'format_name': None,
+            'codec_names': []
+        }
 
 
 def is_png_format_or_codec(file_info):
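With this rewrite get_ffprobe_info has a three-way contract: None when the file is missing, a dict with format_name=None and an empty codec list when ffprobe fails or returns no JSON, and a populated dict on success. A short sketch of a caller honouring that contract; the file name is made up:

    # Illustrative caller; "video.mp4" is a placeholder path.
    file_info = get_ffprobe_info("video.mp4")

    if file_info is None:
        print("File does not exist")
    elif file_info["format_name"] is None and not file_info["codec_names"]:
        print("ffprobe could not analyse the file")
    else:
        print(f"Container: {file_info['format_name']}, codecs: {file_info['codec_names']}")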
@@ -173,8 +200,11 @@ def is_png_format_or_codec(file_info):
     if not file_info:
         return False
 
-    #
-
+    # Handle None values in format_name gracefully
+    format_name = file_info.get('format_name')
+    codec_names = file_info.get('codec_names', [])
+
+    return format_name == 'png_pipe' or 'png' in codec_names
 
 
 def need_to_force_to_ts(file_path):
@@ -31,16 +31,20 @@ class M3U8_Ts_Estimator:
         self.segments_instance = segments_instance
         self.lock = threading.Lock()
         self.speed = {"upload": "N/A", "download": "N/A"}
+        self._running = True
 
         if get_use_large_bar():
             logging.debug("USE_LARGE_BAR is True, starting speed capture thread")
             self.speed_thread = threading.Thread(target=self.capture_speed)
             self.speed_thread.daemon = True
             self.speed_thread.start()
-
         else:
             logging.debug("USE_LARGE_BAR is False, speed capture thread not started")
 
+    def __del__(self):
+        """Ensure thread is properly stopped when the object is destroyed."""
+        self._running = False
+
     def add_ts_file(self, size: int):
         """Add a file size to the list of file sizes."""
         if size <= 0:
@@ -50,32 +54,44 @@ class M3U8_Ts_Estimator:
         self.ts_file_sizes.append(size)
 
     def capture_speed(self, interval: float = 1.5):
-        """Capture the internet speed periodically."""
+        """Capture the internet speed periodically with improved efficiency."""
         last_upload, last_download = 0, 0
         speed_buffer = deque(maxlen=3)
 
-        while
+        while self._running:
             try:
+                # Get IO counters only once per loop to reduce function calls
                 io_counters = psutil.net_io_counters()
                 if not io_counters:
                     raise ValueError("No IO counters available")
 
                 current_upload, current_download = io_counters.bytes_sent, io_counters.bytes_recv
+
                 if last_upload and last_download:
                     upload_speed = (current_upload - last_upload) / interval
                     download_speed = (current_download - last_download) / interval
-                    speed_buffer.append(max(0, download_speed))
 
+                    # Only update buffer when we have valid data
+                    if download_speed > 0:
+                        speed_buffer.append(download_speed)
+
+                    # Use a more efficient approach for thread synchronization
+                    avg_speed = sum(speed_buffer) / len(speed_buffer) if speed_buffer else 0
+                    formatted_upload = internet_manager.format_transfer_speed(max(0, upload_speed))
+                    formatted_download = internet_manager.format_transfer_speed(avg_speed)
+
+                    # Minimize lock time by preparing data outside the lock
                     with self.lock:
                         self.speed = {
-                            "upload":
-                            "download":
+                            "upload": formatted_upload,
+                            "download": formatted_download
                         }
-                        logging.debug(f"Updated speeds - Upload: {self.speed['upload']}, Download: {self.speed['download']}")
 
                 last_upload, last_download = current_upload, current_download
+
             except Exception as e:
-
+                if self._running:  # Only log if we're still supposed to be running
+                    logging.error(f"Error in speed capture: {str(e)}")
                 self.speed = {"upload": "N/A", "download": "N/A"}
 
             time.sleep(interval)
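The _running flag added to the estimator is a standard cooperative-stop pattern for a polling daemon thread: the loop re-checks the flag on every iteration instead of running forever. A self-contained sketch of the same idea, independent of the estimator class:

    import threading
    import time

    class Poller:
        def __init__(self):
            self._running = True                  # cooperative stop flag
            self._thread = threading.Thread(target=self._loop, daemon=True)
            self._thread.start()

        def _loop(self, interval: float = 1.5):
            while self._running:                  # checked once per iteration
                # ... sample counters and update shared state under a lock ...
                time.sleep(interval)

        def stop(self):
            self._running = False                 # loop exits after the current sleep

    poller = Poller()
    time.sleep(3)
    poller.stop()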
@@ -88,6 +104,10 @@ class M3U8_Ts_Estimator:
             str: The mean size of the files in a human-readable format.
         """
         try:
+            # Only do calculations if we have data
+            if not self.ts_file_sizes:
+                return "0 B"
+
             total_size = sum(self.ts_file_sizes)
             mean_size = total_size / len(self.ts_file_sizes)
             return internet_manager.format_file_size(mean_size)
@@ -101,31 +121,40 @@ class M3U8_Ts_Estimator:
         self.add_ts_file(total_downloaded * self.total_segments)
 
         file_total_size = self.calculate_total_size()
+        if file_total_size == "Error":
+            return
+
         number_file_total_size = file_total_size.split(' ')[0]
         units_file_total_size = file_total_size.split(' ')[1]
 
+        # Reduce lock contention by acquiring data with minimal synchronization
+        retry_count = 0
+        if self.segments_instance:
+            with self.segments_instance.active_retries_lock:
+                retry_count = self.segments_instance.active_retries
+
         if get_use_large_bar():
-
+            # Get speed data outside of any locks
+            speed_data = ["N/A", ""]
+            with self.lock:
+                download_speed = self.speed['download']
 
-            if
-
-
-            else
-
-            average_internet_unit = ""
+            if download_speed != "N/A":
+                speed_data = download_speed.split(" ")
+
+            average_internet_speed = speed_data[0] if len(speed_data) >= 1 else "N/A"
+            average_internet_unit = speed_data[1] if len(speed_data) >= 2 else ""
 
-            retry_count = self.segments_instance.active_retries if self.segments_instance else 0
             progress_str = (
                 f"{Colors.GREEN}{number_file_total_size} {Colors.RED}{units_file_total_size}"
-                f"{Colors.WHITE}, {Colors.CYAN}{average_internet_speed} {Colors.RED}{average_internet_unit}"
-                f"{Colors.WHITE}, {Colors.GREEN}CRR {Colors.RED}{retry_count} "
+                f"{Colors.WHITE}, {Colors.CYAN}{average_internet_speed} {Colors.RED}{average_internet_unit} "
+                #f"{Colors.WHITE}, {Colors.GREEN}CRR {Colors.RED}{retry_count} "
             )
 
         else:
-            retry_count = self.segments_instance.active_retries if self.segments_instance else 0
             progress_str = (
-                f"{Colors.GREEN}{number_file_total_size} {Colors.RED}{units_file_total_size}"
-                f"{Colors.WHITE}, {Colors.GREEN}CRR {Colors.RED}{retry_count} "
+                f"{Colors.GREEN}{number_file_total_size} {Colors.RED}{units_file_total_size} "
+                #f"{Colors.WHITE}, {Colors.GREEN}CRR {Colors.RED}{retry_count} "
            )
 
         progress_counter.set_postfix_str(progress_str)
@@ -1,6 +1,6 @@
 # 20.04.25
 
-import
+import re
 import logging
 
 
@@ -418,18 +418,38 @@ class M3U8_Parser:
             - uri (str): The URI containing video information.
 
         Returns:
-
+            tuple: The video resolution (width, height) if found, otherwise (0, 0).
         """
-
         # Log
         logging.info(f"Try extract resolution from: {uri}")
-
+
+        # First try: Check for known resolutions
        for resolution in RESOLUTIONS:
            if "http" in str(uri):
                if str(resolution[1]) in uri:
                    return resolution
-
-        #
+
+        # Pattern to match common resolution formats like 854x480, 1280x720, etc.
+        resolution_patterns = [
+            r'(\d+)x(\d+)',     # Match format: 854x480
+            r'(\d+)p',          # Match format: 480p, 720p, etc.
+            r'_(\d+)x(\d+)'     # Match format: _854x480
+        ]
+
+        for pattern in resolution_patterns:
+            matches = re.findall(pattern, uri)
+            if matches:
+                if len(matches[0]) == 2:  # Format like 854x480
+                    width, height = int(matches[0][0]), int(matches[0][1])
+                    return (width, height)
+
+                elif len(matches[0]) == 1:  # Format like 480p
+                    height = int(matches[0])
+
+                    # Estimate width based on common aspect ratios (16:9)
+                    width = int(height * 16 / 9)
+                    return (width, height)
+
         logging.warning("No resolution found with custom parsing.")
         return (0, 0)
 
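The fallback added here pulls a resolution out of the variant URI itself when none of the known RESOLUTIONS entries match. A standalone sketch of that regex approach using made-up URIs (this is not the parser's own helper); treating an "NNNp" label as 16:9 is only an estimate:

    import re

    uris = [
        "https://example.com/hls/video_1280x720.m3u8",    # placeholder
        "https://example.com/hls/video_480p/index.m3u8",  # placeholder
    ]

    for uri in uris:
        match = re.search(r'(\d+)x(\d+)', uri)            # e.g. 1280x720
        if match:
            width, height = int(match.group(1)), int(match.group(2))
        else:
            p_match = re.search(r'(\d+)p', uri)           # e.g. 480p
            if p_match:
                height = int(p_match.group(1))
                width = int(height * 16 / 9)              # assume 16:9
            else:
                width, height = 0, 0
        print(uri, "->", (width, height))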
@@ -43,6 +43,13 @@ def update():
             timeout=config_manager.get_int("REQUESTS", "timeout"),
             follow_redirects=True
         ).json()
+
+        response_commits = httpx.get(
+            url=f"https://api.github.com/repos/{__author__}/{__title__}/commits",
+            headers={'user-agent': get_userAgent()},
+            timeout=config_manager.get_int("REQUESTS", "timeout"),
+            follow_redirects=True
+        ).json()
 
     except Exception as e:
         console.print(f"[red]Error accessing GitHub API: {e}")
@@ -66,11 +73,23 @@ def update():
     else:
         percentual_stars = 0
 
-    #
-
+    # Get the current version (installed version)
+    current_version = __version__
+
+    # Get commit details
+    latest_commit = response_commits[0] if response_commits else None
+    if latest_commit:
+        latest_commit_message = latest_commit.get('commit', {}).get('message', 'No commit message')
+    else:
+        latest_commit_message = 'No commit history available'
+
+    console.print(f"\n[cyan]Current installed version: [yellow]{current_version}")
+    console.print(f"[cyan]Last commit: [yellow]{latest_commit_message}")
+
+    if str(current_version).replace('v', '') != str(last_version).replace('v', ''):
         console.print(f"\n[cyan]New version available: [yellow]{last_version}")
 
     console.print(f"\n[red]{__title__} has been downloaded [yellow]{total_download_count} [red]times, but only [yellow]{percentual_stars}% [red]of users have starred it.\n\
 [cyan]Help the repository grow today by leaving a [yellow]star [cyan]and [yellow]sharing [cyan]it with others online!")
 
-    time.sleep(3)
+    time.sleep(3)
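update() now makes a second GitHub API call so it can show the latest commit message alongside the installed version. A standalone sketch of that lookup, with hard-coded placeholders standing in for __author__ and __title__:

    import httpx

    owner, repo = "OWNER", "REPO"                # placeholders for __author__ / __title__

    commits = httpx.get(
        f"https://api.github.com/repos/{owner}/{repo}/commits",
        headers={"user-agent": "Mozilla/5.0"},
        timeout=15,
        follow_redirects=True,
    ).json()

    latest = commits[0] if commits else None
    message = (latest.get("commit", {}).get("message", "No commit message")
               if latest else "No commit history available")
    print(f"Last commit: {message}")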