StreamingCommunity 3.3.3__py3-none-any.whl → 3.3.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- StreamingCommunity/Api/Site/altadefinizione/__init__.py +17 -18
- StreamingCommunity/Api/Site/altadefinizione/series.py +4 -0
- StreamingCommunity/Api/Site/animeunity/__init__.py +14 -15
- StreamingCommunity/Api/Site/animeunity/serie.py +1 -1
- StreamingCommunity/Api/Site/animeworld/__init__.py +15 -13
- StreamingCommunity/Api/Site/animeworld/serie.py +1 -1
- StreamingCommunity/Api/Site/crunchyroll/__init__.py +16 -17
- StreamingCommunity/Api/Site/crunchyroll/series.py +6 -1
- StreamingCommunity/Api/Site/guardaserie/__init__.py +17 -19
- StreamingCommunity/Api/Site/guardaserie/series.py +4 -0
- StreamingCommunity/Api/Site/guardaserie/site.py +2 -7
- StreamingCommunity/Api/Site/mediasetinfinity/__init__.py +15 -15
- StreamingCommunity/Api/Site/mediasetinfinity/series.py +4 -0
- StreamingCommunity/Api/Site/mediasetinfinity/site.py +12 -2
- StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +67 -98
- StreamingCommunity/Api/Site/raiplay/__init__.py +15 -15
- StreamingCommunity/Api/Site/raiplay/series.py +5 -1
- StreamingCommunity/Api/Site/streamingcommunity/__init__.py +16 -14
- StreamingCommunity/Api/Site/streamingwatch/__init__.py +12 -12
- StreamingCommunity/Api/Site/streamingwatch/series.py +4 -0
- StreamingCommunity/Api/Template/Class/SearchType.py +0 -1
- StreamingCommunity/Api/Template/Util/manage_ep.py +1 -11
- StreamingCommunity/Api/Template/site.py +2 -3
- StreamingCommunity/Lib/Downloader/DASH/decrypt.py +4 -1
- StreamingCommunity/Lib/Downloader/DASH/downloader.py +55 -17
- StreamingCommunity/Lib/Downloader/DASH/segments.py +73 -17
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +282 -152
- StreamingCommunity/Lib/Downloader/HLS/segments.py +1 -5
- StreamingCommunity/Lib/FFmpeg/capture.py +1 -1
- StreamingCommunity/Lib/FFmpeg/command.py +6 -6
- StreamingCommunity/Lib/FFmpeg/util.py +11 -30
- StreamingCommunity/Lib/M3U8/estimator.py +27 -13
- StreamingCommunity/Upload/update.py +2 -2
- StreamingCommunity/Upload/version.py +1 -1
- StreamingCommunity/Util/installer/__init__.py +11 -0
- StreamingCommunity/Util/installer/device_install.py +1 -1
- StreamingCommunity/Util/os.py +2 -6
- StreamingCommunity/Util/table.py +40 -8
- StreamingCommunity/run.py +15 -8
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/METADATA +38 -51
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/RECORD +45 -44
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/WHEEL +0 -0
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-3.3.3.dist-info → streamingcommunity-3.3.6.dist-info}/top_level.txt +0 -0

StreamingCommunity/Lib/Downloader/DASH/downloader.py

@@ -7,6 +7,7 @@ import shutil
 # External libraries
 from rich.console import Console
 from rich.panel import Panel
+from rich.table import Table


 # Internal utilities
@@ -64,23 +65,59 @@ class DASH_Downloader:
         self.parser = MPDParser(self.mpd_url)
         self.parser.parse(custom_headers)

- [17 lines removed (old 67-83); content not shown in this diff view]
+        def calculate_column_widths():
+            """Calculate optimal column widths based on content."""
+            data_rows = []
+
+            # Video info
+            selected_video, list_available_resolution, filter_custom_resolution, downloadable_video = self.parser.select_video(FILTER_CUSTOM_REOLUTION)
+            self.selected_video = selected_video
+
+            available_video = ', '.join(list_available_resolution) if list_available_resolution else "Nothing"
+            set_video = str(filter_custom_resolution) if filter_custom_resolution else "Nothing"
+            downloadable_video_str = str(downloadable_video) if downloadable_video else "Nothing"
+
+            data_rows.append(["Video", available_video, set_video, downloadable_video_str])
+
+            # Audio info
+            selected_audio, list_available_audio_langs, filter_custom_audio, downloadable_audio = self.parser.select_audio(DOWNLOAD_SPECIFIC_AUDIO)
+            self.selected_audio = selected_audio
+
+            available_audio = ', '.join(list_available_audio_langs) if list_available_audio_langs else "Nothing"
+            set_audio = str(filter_custom_audio) if filter_custom_audio else "Nothing"
+            downloadable_audio_str = str(downloadable_audio) if downloadable_audio else "Nothing"
+
+            data_rows.append(["Audio", available_audio, set_audio, downloadable_audio_str])
+
+            # Calculate max width for each column
+            headers = ["Type", "Available", "Set", "Downloadable"]
+            max_widths = [len(header) for header in headers]
+
+            for row in data_rows:
+                for i, cell in enumerate(row):
+                    max_widths[i] = max(max_widths[i], len(str(cell)))
+
+            # Add some padding
+            max_widths = [w + 2 for w in max_widths]
+
+            return data_rows, max_widths
+
+        data_rows, column_widths = calculate_column_widths()
+
+        # Create table with dynamic widths
+        table = Table(show_header=True, header_style="bold cyan", border_style="blue")
+        table.add_column("Type", style="cyan bold", width=column_widths[0])
+        table.add_column("Available", style="green", width=column_widths[1])
+        table.add_column("Set", style="red", width=column_widths[2])
+        table.add_column("Downloadable", style="yellow", width=column_widths[3])
+
+        # Add all rows to the table
+        for row in data_rows:
+            table.add_row(*row)
+
+        console.print("[cyan]You can safely stop the download with [bold]Ctrl+c[bold] [cyan]")
+        console.print(table)
+        console.print("")

     def get_representation_by_type(self, typ):
         if typ == "video":
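
For readers skimming the diff: the block added above measures every cell before building the rich table, so column widths track the longest resolution or language string instead of being fixed. A minimal standalone sketch of that pattern, with made-up sample rows (not data from the package):

    from rich.console import Console
    from rich.table import Table

    # Made-up sample rows in the same [Type, Available, Set, Downloadable] shape
    headers = ["Type", "Available", "Set", "Downloadable"]
    rows = [
        ["Video", "1080p, 720p, 480p", "1080p", "1080p"],
        ["Audio", "ita, eng", "ita", "ita"],
    ]

    # Column width = longest cell (or header) in that column, plus 2 spaces of padding
    widths = [
        max(len(headers[i]), *(len(str(r[i])) for r in rows)) + 2
        for i in range(len(headers))
    ]

    table = Table(show_header=True, header_style="bold cyan", border_style="blue")
    for header, width in zip(headers, widths):
        table.add_column(header, width=width)
    for r in rows:
        table.add_row(*r)

    Console().print(table)
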
@@ -212,6 +249,7 @@ class DASH_Downloader:
             f"[cyan]Output: [bold]{os.path.abspath(output_file)}[/bold]"
         )

+        print("")
         console.print(Panel(
             panel_content,
             title=f"{os.path.basename(output_file.replace('.mp4', ''))}",

StreamingCommunity/Lib/Downloader/DASH/segments.py

@@ -2,6 +2,7 @@

 import os
 import asyncio
+import time


 # External libraries
@@ -33,6 +34,15 @@ class MPD_Segments:
         self.pssh = pssh
         self.download_interrupted = False
         self.info_nFailed = 0
+
+        # OTHER INFO
+        self.downloaded_segments = set()
+        self.info_maxRetry = 0
+        self.info_nRetry = 0
+
+        # Progress
+        self._last_progress_update = 0
+        self._progress_update_interval = 0.5

     def get_concat_path(self, output_dir: str = None):
         """
@@ -61,7 +71,7 @@ class MPD_Segments:
             "pssh": self.pssh
         }

-    async def download_segments(self, output_dir: str = None, concurrent_downloads: int =
+    async def download_segments(self, output_dir: str = None, concurrent_downloads: int = None, description: str = "DASH"):
         """
         Download and concatenate all segments (including init) asynchronously and in order.
         """
@@ -75,12 +85,15 @@ class MPD_Segments:

         # Determine stream type (video/audio) for progress bar
         stream_type = rep.get('type', description)
+        if concurrent_downloads is None:
+            concurrent_downloads = self._get_worker_count(stream_type)
+
         progress_bar = tqdm(
             total=len(segment_urls) + 1,
             desc=f"Downloading {rep_id}",
             bar_format=self._get_bar_format(stream_type),
-            mininterval=0
-            maxinterval=
+            mininterval=1.0,
+            maxinterval=2.5,
         )

         # Define semaphore for concurrent downloads
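
For context, replacing mininterval=0 with mininterval=1.0 and maxinterval=2.5 asks tqdm to redraw the bar about once a second rather than on every segment, which cuts terminal overhead when thousands of small updates arrive. A tiny illustrative sketch with a dummy loop (not package code):

    import time
    from tqdm import tqdm

    # Render at most about once per second (mininterval) and refresh the display
    # no less often than roughly every 2.5 seconds (maxinterval)
    bar = tqdm(total=200, desc="Downloading", mininterval=1.0, maxinterval=2.5)
    for _ in range(200):
        time.sleep(0.01)   # stand-in for downloading one segment
        bar.update(1)      # every segment is counted; drawing is throttled
    bar.close()
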
@@ -94,9 +107,14 @@ class MPD_Segments:
         self.info_nFailed = 0
         self.download_interrupted = False
         self.info_nRetry = 0
+        self.info_maxRetry = 0

         try:
- [line removed (old 99); content not shown in this diff view]
+            timeout_config = httpx.Timeout(SEGMENT_MAX_TIMEOUT, connect=10.0)
+            limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)
+
+            async with httpx.AsyncClient(timeout=timeout_config, limits=limits) as client:
+
                 # Download init segment
                 await self._download_init_segment(client, init_url, concat_path, estimator, progress_bar)

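
The new setup creates a single pooled httpx.AsyncClient with an explicit connect timeout and connection-pool limits, reused for every segment request. A minimal sketch of the same configuration, with placeholder timeout values and URL:

    import asyncio
    import httpx

    async def fetch_all(urls):
        # One pooled client, reused for every request
        timeout = httpx.Timeout(15.0, connect=10.0)   # placeholder total timeout
        limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)

        async with httpx.AsyncClient(timeout=timeout, limits=limits) as client:
            responses = await asyncio.gather(*(client.get(u) for u in urls))
            return [r.content for r in responses]

    # asyncio.run(fetch_all(["https://example.com/init.m4s"]))  # placeholder URL
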
@@ -145,12 +163,21 @@ class MPD_Segments:
             progress_bar.update(1)

             # Update progress bar with estimated info
- [line removed (old 148); content not shown in this diff view]
+            self._throttled_progress_update(len(response.content), estimator, progress_bar)

         except Exception as e:
             progress_bar.close()
             raise RuntimeError(f"Error downloading init segment: {e}")

+    def _throttled_progress_update(self, content_size: int, estimator, progress_bar):
+        """
+        Throttled progress update to reduce CPU usage.
+        """
+        current_time = time.time()
+        if current_time - self._last_progress_update > self._progress_update_interval:
+            estimator.update_progress_bar(content_size, progress_bar)
+            self._last_progress_update = current_time
+
     async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
         """
         Download a batch of segments and update results.
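
The _throttled_progress_update helper added above always counts downloaded bytes but refreshes the estimator display at most every 0.5 s. A rough standalone sketch of the idea (the class below is illustrative, not the package's estimator):

    import time

    class ThrottledCounter:
        """Counts every byte, but only prints a status line every `interval` seconds."""

        def __init__(self, interval: float = 0.5):
            self.total_bytes = 0
            self._interval = interval
            self._last_update = 0.0

        def add_bytes(self, n: int) -> None:
            self.total_bytes += n                 # always accounted for
            now = time.time()
            if now - self._last_update > self._interval:
                print(f"downloaded {self.total_bytes / 1_000_000:.1f} MB")
                self._last_update = now
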
@@ -158,16 +185,28 @@ class MPD_Segments:
         async def download_single(url, idx):
             async with semaphore:
                 headers = {'User-Agent': get_userAgent()}
+
                 for attempt in range(max_retry):
+                    if self.download_interrupted:
+                        return idx, b'', attempt
+
                     try:
- [line removed (old 163); content not shown in this diff view]
+                        timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 3)
+                        resp = await client.get(url, headers=headers, follow_redirects=True, timeout=timeout)

                         if resp.status_code == 200:
                             return idx, resp.content, attempt
                         else:
- [line removed (old 168); content not shown in this diff view]
+                            if attempt < 2:
+                                sleep_time = 0.5 + attempt * 0.5
+                            else:
+                                sleep_time = min(2.0, 1.1 * (2 ** attempt))
+                            await asyncio.sleep(sleep_time)
+
                     except Exception:
- [line removed (old 170); content not shown in this diff view]
+                        sleep_time = min(2.0, 1.1 * (2 ** attempt))
+                        await asyncio.sleep(sleep_time)
+
                 return idx, b'', max_retry

         # Initial download attempt
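
The retry path above now backs off between attempts: roughly linear waits for the first two retries, then exponential growth capped at 2 seconds. Assuming the snippet reflects the released code, the schedule works out as follows:

    def retry_sleep(attempt: int) -> float:
        # attempt 0 -> 0.5 s, attempt 1 -> 1.0 s, then capped exponential backoff
        if attempt < 2:
            return 0.5 + attempt * 0.5
        return min(2.0, 1.1 * (2 ** attempt))

    # [retry_sleep(a) for a in range(5)] == [0.5, 1.0, 2.0, 2.0, 2.0]
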
@@ -177,18 +216,23 @@ class MPD_Segments:
             try:
                 idx, data, nretry = await coro
                 results[idx] = data
+
                 if data and len(data) > 0:
                     self.downloaded_segments.add(idx)
                 else:
                     self.info_nFailed += 1
+
+                if nretry > self.info_maxRetry:
+                    self.info_maxRetry = nretry
                 self.info_nRetry += nretry
+
                 progress_bar.update(1)

                 # Update estimator with segment size
                 estimator.add_ts_file(len(data))

                 # Update progress bar with estimated info
- [line removed (old 191); content not shown in this diff view]
+                self._throttled_progress_update(len(data), estimator, progress_bar)

             except KeyboardInterrupt:
                 self.download_interrupted = True
@@ -197,9 +241,9 @@ class MPD_Segments:

     async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
         """
-        Retry failed segments up to
+        Retry failed segments up to 3 times.
         """
-        max_global_retries =
+        max_global_retries = 3
         global_retry_count = 0

         while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
@@ -208,21 +252,27 @@ class MPD_Segments:
                 break

             print(f"[yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")
+
             async def download_single(url, idx):
                 async with semaphore:
                     headers = {'User-Agent': get_userAgent()}

                     for attempt in range(max_retry):
+                        if self.download_interrupted:
+                            return idx, b'', attempt
+
                         try:
- [line removed (old 217); content not shown in this diff view]
+                            timeout = min(SEGMENT_MAX_TIMEOUT, 15 + attempt * 5)
+                            resp = await client.get(url, headers=headers, timeout=timeout)

                             if resp.status_code == 200:
                                 return idx, resp.content, attempt
                             else:
-                                await asyncio.sleep(1.
+                                await asyncio.sleep(1.5 * (2 ** attempt))

                         except Exception:
-                            await asyncio.sleep(1.
+                            await asyncio.sleep(1.5 * (2 ** attempt))
+
                     return idx, b'', max_retry

             retry_tasks = [download_single(segment_urls[i], i) for i in failed_indices]
@@ -239,15 +289,19 @@ class MPD_Segments:
                     else:
                         nFailed_this_round += 1

+                    if nretry > self.info_maxRetry:
+                        self.info_maxRetry = nretry
                     self.info_nRetry += nretry
+
                     progress_bar.update(0)  # No progress bar increment, already counted
                     estimator.add_ts_file(len(data))
- [line removed (old 245); content not shown in this diff view]
+                    self._throttled_progress_update(len(data), estimator, progress_bar)

                 except KeyboardInterrupt:
                     self.download_interrupted = True
                     print("\n[red]Download interrupted by user (Ctrl+C).")
                     break
+
             self.info_nFailed = nFailed_this_round
             global_retry_count += 1

@@ -278,7 +332,7 @@ class MPD_Segments:
         base_workers = {
             'video': DEFAULT_VIDEO_WORKERS,
             'audio': DEFAULT_AUDIO_WORKERS
-        }.get(stream_type.lower(),
+        }.get(stream_type.lower(), 2)
         return base_workers

     def _generate_results(self, stream_type: str) -> dict:
@@ -317,6 +371,7 @@ class MPD_Segments:
         if getattr(self, 'info_nFailed', 0) > 0:
             self._display_error_summary()

+        # Clear memory
         self.buffer = {}
         self.expected_index = 0

@@ -324,10 +379,11 @@ class MPD_Segments:
         """
         Generate final error report.
         """
+        total_segments = len(self.selected_representation.get('segment_urls', []))
         print(f"\n[cyan]Retry Summary: "
               f"[white]Max retries: [green]{getattr(self, 'info_maxRetry', 0)} "
               f"[white]Total retries: [green]{getattr(self, 'info_nRetry', 0)} "
               f"[white]Failed segments: [red]{getattr(self, 'info_nFailed', 0)}")

-        if getattr(self, 'info_nRetry', 0) >
+        if getattr(self, 'info_nRetry', 0) > total_segments * 0.3:
             print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")