StreamingCommunity 3.2.0__py3-none-any.whl → 3.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic. Click here for more details.
- StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +4 -0
- StreamingCommunity/Api/Player/hdplayer.py +2 -2
- StreamingCommunity/Api/Player/mixdrop.py +1 -1
- StreamingCommunity/Api/Player/vixcloud.py +4 -5
- StreamingCommunity/Api/Site/animeunity/__init__.py +2 -2
- StreamingCommunity/Api/Site/crunchyroll/__init__.py +103 -0
- StreamingCommunity/Api/Site/crunchyroll/film.py +83 -0
- StreamingCommunity/Api/Site/crunchyroll/series.py +182 -0
- StreamingCommunity/Api/Site/crunchyroll/site.py +113 -0
- StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +218 -0
- StreamingCommunity/Api/Site/crunchyroll/util/get_license.py +227 -0
- StreamingCommunity/Api/Site/guardaserie/site.py +1 -2
- StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +9 -8
- StreamingCommunity/Api/Site/mediasetinfinity/__init__.py +96 -0
- StreamingCommunity/Api/Site/mediasetinfinity/film.py +76 -0
- StreamingCommunity/Api/Site/mediasetinfinity/series.py +177 -0
- StreamingCommunity/Api/Site/mediasetinfinity/site.py +112 -0
- StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +259 -0
- StreamingCommunity/Api/Site/mediasetinfinity/util/fix_mpd.py +64 -0
- StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +217 -0
- StreamingCommunity/Api/Site/streamingcommunity/__init__.py +6 -17
- StreamingCommunity/Api/Site/streamingcommunity/film.py +2 -2
- StreamingCommunity/Api/Site/streamingcommunity/series.py +9 -9
- StreamingCommunity/Api/Site/streamingcommunity/site.py +3 -4
- StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +3 -6
- StreamingCommunity/Api/Site/streamingwatch/__init__.py +6 -14
- StreamingCommunity/Api/Site/streamingwatch/film.py +2 -2
- StreamingCommunity/Api/Site/streamingwatch/series.py +9 -9
- StreamingCommunity/Api/Site/streamingwatch/site.py +5 -7
- StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +2 -2
- StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +131 -0
- StreamingCommunity/Lib/Downloader/DASH/decrypt.py +79 -0
- StreamingCommunity/Lib/Downloader/DASH/downloader.py +220 -0
- StreamingCommunity/Lib/Downloader/DASH/parser.py +249 -0
- StreamingCommunity/Lib/Downloader/DASH/segments.py +332 -0
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +1 -14
- StreamingCommunity/Lib/Downloader/HLS/segments.py +3 -3
- StreamingCommunity/Lib/Downloader/MP4/downloader.py +0 -5
- StreamingCommunity/Lib/FFmpeg/capture.py +3 -3
- StreamingCommunity/Lib/FFmpeg/command.py +1 -1
- StreamingCommunity/TelegramHelp/config.json +3 -5
- StreamingCommunity/Upload/version.py +2 -2
- StreamingCommunity/Util/os.py +21 -0
- StreamingCommunity/run.py +1 -1
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/METADATA +4 -2
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/RECORD +50 -36
- StreamingCommunity/Api/Site/1337xx/__init__.py +0 -72
- StreamingCommunity/Api/Site/1337xx/site.py +0 -82
- StreamingCommunity/Api/Site/1337xx/title.py +0 -61
- StreamingCommunity/Lib/Proxies/proxy.py +0 -72
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/WHEEL +0 -0
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-3.2.0.dist-info → streamingcommunity-3.2.5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
# 25.07.25
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import asyncio
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# External libraries
|
|
8
|
+
import httpx
|
|
9
|
+
from tqdm import tqdm
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Internal utilities
|
|
13
|
+
from StreamingCommunity.Util.headers import get_userAgent
|
|
14
|
+
from StreamingCommunity.Lib.M3U8.estimator import M3U8_Ts_Estimator
|
|
15
|
+
from StreamingCommunity.Util.config_json import config_manager
|
|
16
|
+
from StreamingCommunity.Util.color import Colors
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Config — read once from the shared JSON config at import time
REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')                       # per-segment HTTP retry budget
SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")          # httpx client timeout (seconds)
DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workers')  # parallel workers for video streams
DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')  # parallel workers for audio streams
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class MPD_Segments:
    """
    Downloads all segments of a single DASH (MPD) representation and
    concatenates them, in order, into one still-encrypted ``.m4s`` file.

    The optional ``pssh`` value is only carried through to the results so a
    later DRM step can request the decryption keys.
    """

    def __init__(self, tmp_folder: str, representation: dict, pssh: str = None):
        """
        Initialize MPD_Segments with temp folder, representation, and optional pssh.

        Args:
            tmp_folder: Default directory for the concatenated output file.
            representation: Parsed representation dict; must provide 'id' and
                'segment_urls', may provide 'init_url' and 'type'.
            pssh: Optional PSSH data forwarded untouched in the results.
        """
        self.tmp_folder = tmp_folder
        self.selected_representation = representation
        self.pssh = pssh
        self.download_interrupted = False

        # Download bookkeeping; reset again at the start of every run.
        self.downloaded_segments = set()
        self.info_nFailed = 0
        self.info_nRetry = 0
        self.info_maxRetry = 0

    def get_concat_path(self, output_dir: str = None):
        """Return the output path `<dir>/<rep_id>_encrypted.m4s` for this representation."""
        rep_id = self.selected_representation['id']
        return os.path.join(output_dir or self.tmp_folder, f"{rep_id}_encrypted.m4s")

    def download_streams(self, output_dir: str = None):
        """
        Synchronous wrapper for download_segments, compatible with legacy calls.

        Returns:
            dict with 'concat_path', 'representation_id' and 'pssh'.
        """
        concat_path = self.get_concat_path(output_dir)

        # Run async download in sync mode
        try:
            asyncio.run(self.download_segments(output_dir=output_dir))

        except KeyboardInterrupt:
            self.download_interrupted = True
            print("\n[red]Download interrupted by user (Ctrl+C).")

        return {
            "concat_path": concat_path,
            "representation_id": self.selected_representation['id'],
            "pssh": self.pssh
        }

    async def download_segments(self, output_dir: str = None, concurrent_downloads: int = 8, description: str = "DASH"):
        """
        Download and concatenate all segments (including init) asynchronously and in order.

        Args:
            output_dir: Destination directory (defaults to self.tmp_folder).
            concurrent_downloads: Max simultaneous segment requests.
            description: Fallback progress-bar label when the representation
                has no 'type' key.

        Returns:
            dict from _generate_results: {'type', 'nFailed', 'stopped'}.

        Raises:
            RuntimeError: from _verify_download_completion when too many
                segments are missing and the run was not interrupted.
        """
        rep = self.selected_representation
        rep_id = rep['id']
        segment_urls = rep['segment_urls']
        init_url = rep.get('init_url')

        os.makedirs(output_dir or self.tmp_folder, exist_ok=True)

        # Fix: reuse the single source of truth for the output path
        # (was duplicated inline, risking divergence from get_concat_path).
        concat_path = self.get_concat_path(output_dir)

        # Determine stream type (video/audio) for progress bar
        stream_type = rep.get('type', description)
        progress_bar = tqdm(
            total=len(segment_urls) + 1,
            desc=f"Downloading {rep_id}",
            bar_format=self._get_bar_format(stream_type),
            mininterval=0.6,
            maxinterval=1.0
        )

        # Define semaphore for concurrent downloads
        semaphore = asyncio.Semaphore(concurrent_downloads)

        # Initialize estimator (feeds size/speed info into the bar postfix)
        estimator = M3U8_Ts_Estimator(total_segments=len(segment_urls) + 1)

        # Results are kept in segment order; slot i belongs to segment i.
        results = [None] * len(segment_urls)
        self.downloaded_segments = set()
        self.info_nFailed = 0
        self.info_nRetry = 0
        self.info_maxRetry = 0
        self.download_interrupted = False

        try:
            async with httpx.AsyncClient(timeout=SEGMENT_MAX_TIMEOUT) as client:
                # Download init segment (also creates/truncates the output file)
                await self._download_init_segment(client, init_url, concat_path, estimator, progress_bar)

                # Download all segments (first batch)
                await self._download_segments_batch(
                    client, segment_urls, results, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
                )

                # Retry failed segments
                await self._retry_failed_segments(
                    client, segment_urls, results, semaphore, REQUEST_MAX_RETRY, estimator, progress_bar
                )

                # Write all results to file (in order, after the init data)
                self._write_results_to_file(concat_path, results)

        except KeyboardInterrupt:
            self.download_interrupted = True
            print("\n[red]Download interrupted by user (Ctrl+C).")

        finally:
            self._cleanup_resources(None, progress_bar)

        self._verify_download_completion()
        return self._generate_results(stream_type)

    async def _fetch_segment(self, client, semaphore, url: str, idx: int, max_retry: int):
        """
        Fetch one segment with exponential backoff.

        Fix: this helper was previously duplicated verbatim as an inner
        `download_single` in both the first pass and the retry pass.

        Returns:
            (idx, content, attempts_used); content is b'' after max_retry failures.
        """
        async with semaphore:
            headers = {'User-Agent': get_userAgent()}
            for attempt in range(max_retry):
                try:
                    resp = await client.get(url, headers=headers)
                    if resp.status_code == 200:
                        return idx, resp.content, attempt
                    await asyncio.sleep(1.1 * (2 ** attempt))
                except Exception:
                    await asyncio.sleep(1.1 * (2 ** attempt))
        return idx, b'', max_retry

    async def _download_init_segment(self, client, init_url, concat_path, estimator, progress_bar):
        """
        Download the init segment, creating/truncating the output file.

        Raises:
            RuntimeError: when the init request fails — a bad init segment
                would make the whole concatenated file unplayable.
        """
        if not init_url:
            # No init segment: just create/truncate an empty output file.
            open(concat_path, 'wb').close()
            return

        try:
            headers = {'User-Agent': get_userAgent()}
            response = await client.get(init_url, headers=headers)

            # Fix: the original silently wrote an empty file on a non-200
            # response and still advanced the progress bar; fail fast instead.
            if response.status_code != 200:
                raise RuntimeError(f"HTTP {response.status_code} for init segment")

            with open(concat_path, 'wb') as outfile:
                outfile.write(response.content)

            # Update estimator with init segment size
            estimator.add_ts_file(len(response.content))
            progress_bar.update(1)

            # Update progress bar with estimated info
            estimator.update_progress_bar(len(response.content), progress_bar)

        except Exception as e:
            progress_bar.close()
            raise RuntimeError(f"Error downloading init segment: {e}") from e

    async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
        """
        First download pass: fetch every media segment and record it in `results`.
        """
        tasks = [self._fetch_segment(client, semaphore, url, i, max_retry) for i, url in enumerate(segment_urls)]

        for coro in asyncio.as_completed(tasks):
            try:
                idx, data, nretry = await coro
                results[idx] = data

                if data:
                    self.downloaded_segments.add(idx)
                else:
                    self.info_nFailed += 1

                self.info_nRetry += nretry
                # Fix: info_maxRetry is printed by _display_error_summary but
                # was never tracked anywhere.
                self.info_maxRetry = max(getattr(self, 'info_maxRetry', 0), nretry)
                progress_bar.update(1)

                # Update estimator / progress bar postfix with segment size
                estimator.add_ts_file(len(data))
                estimator.update_progress_bar(len(data), progress_bar)

            except KeyboardInterrupt:
                self.download_interrupted = True
                print("\n[red]Download interrupted by user (Ctrl+C).")
                break

    async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
        """
        Retry failed segments for up to 5 global rounds (or until interrupted).
        """
        max_global_retries = 5
        global_retry_count = 0

        while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
            failed_indices = [i for i, data in enumerate(results) if not data]
            if not failed_indices:
                break

            print(f"[yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")

            retry_tasks = [self._fetch_segment(client, semaphore, segment_urls[i], i, max_retry) for i in failed_indices]

            # Failures counted for this round only; carried into info_nFailed below.
            nFailed_this_round = 0
            for coro in asyncio.as_completed(retry_tasks):
                try:
                    idx, data, nretry = await coro

                    if data:
                        results[idx] = data
                        self.downloaded_segments.add(idx)
                    else:
                        nFailed_this_round += 1

                    self.info_nRetry += nretry
                    self.info_maxRetry = max(getattr(self, 'info_maxRetry', 0), nretry)

                    # The bar total was already consumed in the first pass, so
                    # only the estimator/postfix is refreshed here (the old
                    # `progress_bar.update(0)` no-op was dropped).
                    estimator.add_ts_file(len(data))
                    estimator.update_progress_bar(len(data), progress_bar)

                except KeyboardInterrupt:
                    self.download_interrupted = True
                    print("\n[red]Download interrupted by user (Ctrl+C).")
                    break

            self.info_nFailed = nFailed_this_round
            global_retry_count += 1

    def _write_results_to_file(self, concat_path, results):
        """
        Append every downloaded segment, in order, to the output file.
        Empty/missing slots are skipped (reported by _verify_download_completion).
        """
        with open(concat_path, 'ab') as outfile:
            for data in results:
                if data:
                    outfile.write(data)

    def _get_bar_format(self, description: str) -> str:
        """
        Build the colored tqdm bar_format string for this stream.
        """
        return (
            f"{Colors.YELLOW}[MPD] ({Colors.CYAN}{description}{Colors.WHITE}): "
            f"{Colors.RED}{{percentage:.2f}}% "
            f"{Colors.MAGENTA}{{bar}} "
            f"{Colors.YELLOW}{{elapsed}}{Colors.WHITE} < {Colors.CYAN}{{remaining}}{Colors.WHITE}{{postfix}}{Colors.WHITE}"
        )

    def _get_worker_count(self, stream_type: str) -> int:
        """
        Optimal parallel workers for 'video'/'audio' streams; 1 for anything else.
        """
        return {
            'video': DEFAULT_VIDEO_WORKERS,
            'audio': DEFAULT_AUDIO_WORKERS
        }.get(stream_type.lower(), 1)

    def _generate_results(self, stream_type: str) -> dict:
        """
        Package final download results.
        """
        return {
            'type': stream_type,
            'nFailed': getattr(self, 'info_nFailed', 0),
            'stopped': getattr(self, 'download_interrupted', False)
        }

    def _verify_download_completion(self) -> None:
        """
        Raise RuntimeError when less than 99.9% of segments were downloaded.
        Skipped when the run was interrupted or there were no segments.
        """
        total = len(self.selected_representation['segment_urls'])
        completed = getattr(self, 'downloaded_segments', set())

        # User interruption is not an integrity failure.
        if self.download_interrupted:
            return

        if total == 0:
            return

        if len(completed) / total < 0.999:
            missing = sorted(set(range(total)) - completed)
            raise RuntimeError(f"Download incomplete ({len(completed)/total:.1%}). Missing segments: {missing}")

    def _cleanup_resources(self, writer_thread, progress_bar: "tqdm") -> None:
        """
        Close the progress bar and print a failure summary when needed.
        `writer_thread` is unused; kept for signature compatibility.
        (Note: the tqdm annotation is quoted so the class can be defined
        without eagerly resolving the tqdm name.)
        """
        progress_bar.close()
        if getattr(self, 'info_nFailed', 0) > 0:
            self._display_error_summary()

        # Legacy fields, kept for compatibility with external inspection.
        self.buffer = {}
        self.expected_index = 0

    def _display_error_summary(self) -> None:
        """
        Print retry/failure statistics and warn on an unusually high retry rate.
        """
        print(f"\n[cyan]Retry Summary: "
              f"[white]Max retries: [green]{getattr(self, 'info_maxRetry', 0)} "
              f"[white]Total retries: [green]{getattr(self, 'info_nRetry', 0)} "
              f"[white]Failed segments: [red]{getattr(self, 'info_nFailed', 0)}")

        if getattr(self, 'info_nRetry', 0) > len(self.selected_representation['segment_urls']) * 0.3:
            print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
|
@@ -41,7 +41,6 @@ MERGE_AUDIO = config_manager.get_bool('M3U8_DOWNLOAD', 'merge_audio')
|
|
|
41
41
|
MERGE_SUBTITLE = config_manager.get_bool('M3U8_DOWNLOAD', 'merge_subs')
|
|
42
42
|
CLEANUP_TMP = config_manager.get_bool('M3U8_DOWNLOAD', 'cleanup_tmp_folder')
|
|
43
43
|
FILTER_CUSTOM_REOLUTION = str(config_manager.get('M3U8_PARSER', 'force_resolution')).strip().lower()
|
|
44
|
-
GET_ONLY_LINK = config_manager.get_bool('M3U8_PARSER', 'get_only_link')
|
|
45
44
|
RETRY_LIMIT = config_manager.get_int('REQUESTS', 'max_retry')
|
|
46
45
|
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
|
|
47
46
|
TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
|
|
@@ -445,19 +444,7 @@ class HLS_Downloader:
|
|
|
445
444
|
if TELEGRAM_BOT:
|
|
446
445
|
bot.send_message(f"Contenuto già scaricato!", None)
|
|
447
446
|
return response
|
|
448
|
-
|
|
449
|
-
if GET_ONLY_LINK:
|
|
450
|
-
console.print(f"URL: [bold red]{self.m3u8_url}[/bold red]")
|
|
451
|
-
return {
|
|
452
|
-
'path': None,
|
|
453
|
-
'url': self.m3u8_url,
|
|
454
|
-
'is_master': getattr(self.m3u8_manager, 'is_master', None),
|
|
455
|
-
'msg': None,
|
|
456
|
-
'error': None,
|
|
457
|
-
'stopped': True
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
|
|
447
|
+
|
|
461
448
|
self.path_manager.setup_directories()
|
|
462
449
|
|
|
463
450
|
# Parse M3U8 and determine if it's a master playlist
|
|
@@ -35,11 +35,11 @@ from ...M3U8 import (
|
|
|
35
35
|
)
|
|
36
36
|
|
|
37
37
|
# Config
|
|
38
|
-
TQDM_DELAY_WORKER =
|
|
38
|
+
TQDM_DELAY_WORKER = 0.01
|
|
39
39
|
REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
|
|
40
40
|
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
|
|
41
|
-
DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', '
|
|
42
|
-
DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', '
|
|
41
|
+
DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workers')
|
|
42
|
+
DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
|
|
43
43
|
MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
|
|
44
44
|
SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
|
|
45
45
|
TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
|
|
@@ -31,7 +31,6 @@ from ...FFmpeg import print_duration_table
|
|
|
31
31
|
|
|
32
32
|
# Config
|
|
33
33
|
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
|
|
34
|
-
GET_ONLY_LINK = config_manager.get_bool('M3U8_PARSER', 'get_only_link')
|
|
35
34
|
REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
|
|
36
35
|
TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
|
|
37
36
|
|
|
@@ -87,10 +86,6 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
|
|
|
87
86
|
bot.send_message(f"Contenuto già scaricato!", None)
|
|
88
87
|
return None, False
|
|
89
88
|
|
|
90
|
-
if GET_ONLY_LINK:
|
|
91
|
-
console.print(f"[bold red]URL: {url}[/bold red]")
|
|
92
|
-
return path, True
|
|
93
|
-
|
|
94
89
|
if not (url.lower().startswith('http://') or url.lower().startswith('https://')):
|
|
95
90
|
logging.error(f"Invalid URL: {url}")
|
|
96
91
|
console.print(f"[bold red]Invalid URL: {url}[/bold red]")
|
|
@@ -30,14 +30,14 @@ def capture_output(process: subprocess.Popen, description: str) -> None:
|
|
|
30
30
|
try:
|
|
31
31
|
max_length = 0
|
|
32
32
|
|
|
33
|
-
for line in iter(process.stdout.readline, ''):
|
|
33
|
+
for line in iter(process.stdout.readline, ''):
|
|
34
34
|
try:
|
|
35
35
|
line = line.strip()
|
|
36
|
+
logging.info(f"CAPTURE ffmpeg line: {line}")
|
|
37
|
+
|
|
36
38
|
if not line:
|
|
37
39
|
continue
|
|
38
40
|
|
|
39
|
-
logging.info(f"CAPTURE ffmpeg line: {line}")
|
|
40
|
-
|
|
41
41
|
# Check if termination is requested
|
|
42
42
|
if terminate_flag.is_set():
|
|
43
43
|
break
|
|
@@ -251,8 +251,8 @@ def join_audios(video_path: str, audio_tracks: List[Dict[str, str]], out_path: s
|
|
|
251
251
|
# Run join
|
|
252
252
|
if DEBUG_MODE:
|
|
253
253
|
subprocess.run(ffmpeg_cmd, check=True)
|
|
254
|
+
|
|
254
255
|
else:
|
|
255
|
-
|
|
256
256
|
if get_use_large_bar():
|
|
257
257
|
capture_ffmpeg_real_time(ffmpeg_cmd, "[cyan]Join audio")
|
|
258
258
|
print()
|
|
@@ -25,9 +25,8 @@
|
|
|
25
25
|
"pass": "adminadmin"
|
|
26
26
|
},
|
|
27
27
|
"M3U8_DOWNLOAD": {
|
|
28
|
-
"
|
|
29
|
-
"
|
|
30
|
-
"default_audio_workser": 12,
|
|
28
|
+
"default_video_workers": 12,
|
|
29
|
+
"default_audio_workers": 12,
|
|
31
30
|
"segment_timeout": 8,
|
|
32
31
|
"download_audio": true,
|
|
33
32
|
"merge_audio": true,
|
|
@@ -51,8 +50,7 @@
|
|
|
51
50
|
"default_preset": "ultrafast"
|
|
52
51
|
},
|
|
53
52
|
"M3U8_PARSER": {
|
|
54
|
-
"force_resolution": "Best"
|
|
55
|
-
"get_only_link": false
|
|
53
|
+
"force_resolution": "Best"
|
|
56
54
|
},
|
|
57
55
|
"REQUESTS": {
|
|
58
56
|
"verify": false,
|
StreamingCommunity/Util/os.py
CHANGED
|
@@ -14,6 +14,7 @@ import contextlib
|
|
|
14
14
|
import importlib.metadata
|
|
15
15
|
import socket
|
|
16
16
|
|
|
17
|
+
|
|
17
18
|
# External library
|
|
18
19
|
from unidecode import unidecode
|
|
19
20
|
from rich.console import Console
|
|
@@ -512,3 +513,23 @@ def get_ffmpeg_path():
|
|
|
512
513
|
def get_ffprobe_path():
|
|
513
514
|
"""Returns the path of FFprobe."""
|
|
514
515
|
return os_summary.ffprobe_path
|
|
516
|
+
|
|
517
|
+
def get_wvd_path():
    """
    Locate a Widevine device file (*wvd) in the platform's 'binary' folder.

    Returns:
        Full path of the first matching file, or None when the folder does
        not exist or contains no such file.
    """
    os_name = platform.system().lower()
    home_dir = os.path.expanduser('~')

    # Resolve the per-platform location of the 'binary' folder.
    if os_name == 'windows':
        candidate_dir = os.path.join(os.path.splitdrive(home_dir)[0] + os.path.sep, 'binary')
    elif os_name == 'darwin':
        candidate_dir = os.path.join(home_dir, 'Applications', 'binary')
    else:
        candidate_dir = os.path.join(home_dir, '.local', 'bin', 'binary')

    if not os.path.exists(candidate_dir):
        return None

    # NOTE: matches any name *ending* in 'wvd' (not just the '.wvd'
    # extension), mirroring the established behaviour.
    for entry in os.listdir(candidate_dir):
        if entry.lower().endswith('wvd'):
            return os.path.join(candidate_dir, entry)

    return None
|
StreamingCommunity/run.py
CHANGED
|
@@ -229,7 +229,7 @@ def main(script_id = 0):
|
|
|
229
229
|
|
|
230
230
|
if not internet_manager.check_dns_resolve(hostname_list):
|
|
231
231
|
print()
|
|
232
|
-
console.print("[red]
|
|
232
|
+
console.print("[red] ERROR: DNS configuration is required!")
|
|
233
233
|
console.print("[red]The program cannot function correctly without proper DNS settings.")
|
|
234
234
|
console.print("[yellow]Please configure one of these DNS servers:")
|
|
235
235
|
console.print("[blue]• Cloudflare (1.1.1.1) 'https://developers.cloudflare.com/1.1.1.1/setup/windows/'")
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: StreamingCommunity
|
|
3
|
-
Version: 3.2.
|
|
3
|
+
Version: 3.2.5
|
|
4
4
|
Home-page: https://github.com/Lovi-0/StreamingCommunity
|
|
5
5
|
Author: Lovi-0
|
|
6
6
|
Project-URL: Bug Reports, https://github.com/Lovi-0/StreamingCommunity/issues
|
|
@@ -25,7 +25,7 @@ Requires-Dist: pycryptodomex
|
|
|
25
25
|
Requires-Dist: ua-generator
|
|
26
26
|
Requires-Dist: qbittorrent-api
|
|
27
27
|
Requires-Dist: pyTelegramBotAPI
|
|
28
|
-
Requires-Dist:
|
|
28
|
+
Requires-Dist: pywidevine
|
|
29
29
|
Dynamic: author
|
|
30
30
|
Dynamic: description
|
|
31
31
|
Dynamic: description-content-type
|
|
@@ -839,3 +839,5 @@ API non ufficiale per accedere ai contenuti del sito italiano StreamingCommunity
|
|
|
839
839
|
# Disclaimer
|
|
840
840
|
|
|
841
841
|
This software is provided "as is", without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose, and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages, or other liability, whether in an action of contract, tort, or otherwise, arising from, out of, or in connection with the software or the use or other dealings in the software.
|
|
842
|
+
|
|
843
|
+
> **Note:** DASH downloads require a valid L3 CDM (Content Decryption Module) to proceed. This project does not provide, include, or facilitate obtaining any CDM. Users are responsible for ensuring compliance with all applicable laws and requirements regarding DRM and decryption modules.
|