StreamingCommunity 3.3.9__py3-none-any.whl → 3.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- StreamingCommunity/Api/Player/hdplayer.py +0 -5
- StreamingCommunity/Api/Player/mediapolisvod.py +4 -13
- StreamingCommunity/Api/Player/supervideo.py +3 -8
- StreamingCommunity/Api/Player/sweetpixel.py +1 -9
- StreamingCommunity/Api/Player/vixcloud.py +5 -16
- StreamingCommunity/Api/Site/altadefinizione/film.py +4 -16
- StreamingCommunity/Api/Site/altadefinizione/series.py +3 -12
- StreamingCommunity/Api/Site/altadefinizione/site.py +2 -9
- StreamingCommunity/Api/Site/altadefinizione/util/ScrapeSerie.py +2 -7
- StreamingCommunity/Api/Site/animeunity/site.py +9 -24
- StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +11 -27
- StreamingCommunity/Api/Site/animeworld/film.py +4 -2
- StreamingCommunity/Api/Site/animeworld/site.py +3 -11
- StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py +1 -4
- StreamingCommunity/Api/Site/crunchyroll/film.py +4 -5
- StreamingCommunity/Api/Site/crunchyroll/series.py +5 -17
- StreamingCommunity/Api/Site/crunchyroll/site.py +4 -13
- StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +5 -27
- StreamingCommunity/Api/Site/crunchyroll/util/get_license.py +11 -26
- StreamingCommunity/Api/Site/guardaserie/series.py +3 -14
- StreamingCommunity/Api/Site/guardaserie/site.py +4 -12
- StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +3 -10
- StreamingCommunity/Api/Site/mediasetinfinity/film.py +11 -12
- StreamingCommunity/Api/Site/mediasetinfinity/series.py +4 -15
- StreamingCommunity/Api/Site/mediasetinfinity/site.py +16 -32
- StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +39 -50
- StreamingCommunity/Api/Site/mediasetinfinity/util/fix_mpd.py +3 -3
- StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +7 -25
- StreamingCommunity/Api/Site/raiplay/film.py +6 -8
- StreamingCommunity/Api/Site/raiplay/series.py +5 -20
- StreamingCommunity/Api/Site/raiplay/site.py +45 -47
- StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +91 -55
- StreamingCommunity/Api/Site/raiplay/util/get_license.py +3 -12
- StreamingCommunity/Api/Site/streamingcommunity/film.py +5 -16
- StreamingCommunity/Api/Site/streamingcommunity/series.py +5 -10
- StreamingCommunity/Api/Site/streamingcommunity/site.py +3 -22
- StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +11 -27
- StreamingCommunity/Api/Site/streamingwatch/__init__.py +1 -0
- StreamingCommunity/Api/Site/streamingwatch/film.py +4 -2
- StreamingCommunity/Api/Site/streamingwatch/series.py +4 -14
- StreamingCommunity/Api/Site/streamingwatch/site.py +4 -18
- StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +0 -3
- StreamingCommunity/Api/Template/Util/__init__.py +4 -2
- StreamingCommunity/Api/Template/Util/manage_ep.py +66 -0
- StreamingCommunity/Api/Template/config_loader.py +0 -7
- StreamingCommunity/Lib/Downloader/DASH/decrypt.py +54 -1
- StreamingCommunity/Lib/Downloader/DASH/downloader.py +186 -70
- StreamingCommunity/Lib/Downloader/DASH/parser.py +2 -3
- StreamingCommunity/Lib/Downloader/DASH/segments.py +109 -68
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +100 -82
- StreamingCommunity/Lib/Downloader/HLS/segments.py +40 -28
- StreamingCommunity/Lib/Downloader/MP4/downloader.py +16 -4
- StreamingCommunity/Lib/FFmpeg/capture.py +37 -5
- StreamingCommunity/Lib/FFmpeg/command.py +32 -90
- StreamingCommunity/Lib/M3U8/estimator.py +47 -1
- StreamingCommunity/Lib/TMBD/tmdb.py +2 -4
- StreamingCommunity/TelegramHelp/config.json +0 -1
- StreamingCommunity/Upload/update.py +19 -6
- StreamingCommunity/Upload/version.py +1 -1
- StreamingCommunity/Util/config_json.py +28 -21
- StreamingCommunity/Util/http_client.py +28 -0
- StreamingCommunity/Util/os.py +16 -6
- StreamingCommunity/Util/table.py +50 -8
- {streamingcommunity-3.3.9.dist-info → streamingcommunity-3.4.2.dist-info}/METADATA +1 -3
- streamingcommunity-3.4.2.dist-info/RECORD +111 -0
- streamingcommunity-3.3.9.dist-info/RECORD +0 -111
- {streamingcommunity-3.3.9.dist-info → streamingcommunity-3.4.2.dist-info}/WHEEL +0 -0
- {streamingcommunity-3.3.9.dist-info → streamingcommunity-3.4.2.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-3.3.9.dist-info → streamingcommunity-3.4.2.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-3.3.9.dist-info → streamingcommunity-3.4.2.dist-info}/top_level.txt +0 -0
StreamingCommunity/Lib/Downloader/DASH/segments.py

@@ -3,6 +3,7 @@
 import os
 import asyncio
 import time
+from typing import Dict, Optional
 
 
 # External libraries
@@ -24,6 +25,7 @@ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_w
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
 SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
 LIMIT_SEGMENT = config_manager.get_int('M3U8_DOWNLOAD', 'limit_segment')
+ENABLE_RETRY = config_manager.get_bool('M3U8_DOWNLOAD', 'enable_retry')
 
 
 # Variable
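The new `ENABLE_RETRY` flag is read from the same `M3U8_DOWNLOAD` section as the other tunables above. A minimal sketch of how a JSON-backed boolean setting like this can be read, using a stand-in `ConfigManager` and hypothetical key values rather than the package's real config loader:

```python
import json

# Hypothetical config snippet mirroring the keys referenced in this diff;
# the real project reads these from its own config file.
RAW_CONFIG = json.loads("""
{
    "M3U8_DOWNLOAD": {
        "limit_segment": 0,
        "segment_timeout": 8,
        "enable_retry": true
    }
}
""")


class ConfigManager:
    """Stand-in for a JSON-backed config reader (illustrative only)."""

    def __init__(self, data: dict):
        self._data = data

    def get_int(self, section: str, key: str) -> int:
        return int(self._data[section][key])

    def get_bool(self, section: str, key: str) -> bool:
        return bool(self._data[section][key])


config_manager = ConfigManager(RAW_CONFIG)
LIMIT_SEGMENT = config_manager.get_int("M3U8_DOWNLOAD", "limit_segment")
ENABLE_RETRY = config_manager.get_bool("M3U8_DOWNLOAD", "enable_retry")
print(LIMIT_SEGMENT, ENABLE_RETRY)  # 0 True
```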
@@ -50,7 +52,8 @@ class MPD_Segments:
             self.limit_segments = LIMIT_SEGMENT if LIMIT_SEGMENT > 0 else None
         else:
             self.limit_segments = limit_segments
-
+
+        self.enable_retry = ENABLE_RETRY
         self.download_interrupted = False
         self.info_nFailed = 0
 
@@ -63,9 +66,12 @@ class MPD_Segments:
         self._last_progress_update = 0
         self._progress_update_interval = 0.1
 
-        # Segment tracking
-        self.
+        # Segment tracking - store only metadata, not content
+        self.segment_status = {} # {idx: {'downloaded': bool, 'size': int}}
         self.segments_lock = asyncio.Lock()
+
+        # Estimator for progress tracking
+        self.estimator: Optional[M3U8_Ts_Estimator] = None
 
     def get_concat_path(self, output_dir: str = None):
         """
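In the new constructor, `segment_status` holds only per-segment metadata (a downloaded flag and a byte count) while the payload goes straight to disk, so memory use no longer grows with stream length. A rough, self-contained sketch of that bookkeeping pattern (simplified; the real class also guards the dict with an `asyncio.Lock`):

```python
from typing import Dict


class SegmentTracker:
    """Tracks per-segment metadata only; payload bytes go straight to disk."""

    def __init__(self) -> None:
        # {idx: {'downloaded': bool, 'size': int}} -- same shape as segment_status
        self.segment_status: Dict[int, dict] = {}
        self.downloaded = set()

    def mark_ok(self, idx: int, size: int) -> None:
        self.segment_status[idx] = {"downloaded": True, "size": size}
        self.downloaded.add(idx)

    def mark_failed(self, idx: int) -> None:
        self.segment_status[idx] = {"downloaded": False, "size": 0}

    def total_bytes(self) -> int:
        return sum(s["size"] for s in self.segment_status.values())


tracker = SegmentTracker()
tracker.mark_ok(0, 512_000)
tracker.mark_failed(1)
print(tracker.total_bytes(), sorted(tracker.downloaded))  # 512000 [0]
```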
@@ -114,7 +120,7 @@ class MPD_Segments:
 
     async def download_segments(self, output_dir: str = None, concurrent_downloads: int = None, description: str = "DASH"):
         """
-        Download
+        Download segments to temporary files, then concatenate them in order.
 
         Parameters:
             - output_dir (str): Output directory for segments
@@ -148,9 +154,9 @@ class MPD_Segments:
         semaphore = asyncio.Semaphore(concurrent_downloads)
 
         # Initialize estimator
-        estimator = M3U8_Ts_Estimator(total_segments=len(segment_urls) + 1)
+        self.estimator = M3U8_Ts_Estimator(total_segments=len(segment_urls) + 1)
 
-        self.
+        self.segment_status = {}
         self.downloaded_segments = set()
         self.info_nFailed = 0
         self.download_interrupted = False
@@ -164,20 +170,21 @@ class MPD_Segments:
             async with httpx.AsyncClient(timeout=timeout_config, limits=limits) as client:
 
                 # Download init segment
-                await self._download_init_segment(client, init_url, concat_path,
+                await self._download_init_segment(client, init_url, concat_path, progress_bar)
 
-                # Download all segments
+                # Download all segments to temp files
                 await self._download_segments_batch(
-                    client, segment_urls, temp_dir, semaphore, REQUEST_MAX_RETRY,
+                    client, segment_urls, temp_dir, semaphore, REQUEST_MAX_RETRY, progress_bar
                 )
 
-                # Retry failed segments
-
-
-
+                # Retry failed segments only if enabled
+                if self.enable_retry:
+                    await self._retry_failed_segments(
+                        client, segment_urls, temp_dir, semaphore, REQUEST_MAX_RETRY, progress_bar
+                    )
 
-                # Concatenate all
-                await self.
+                # Concatenate all segments IN ORDER
+                await self._concatenate_segments_in_order(temp_dir, concat_path, len(segment_urls))
 
         except KeyboardInterrupt:
             self.download_interrupted = True
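Taken together, the reworked `download_segments` flow is: fetch the init segment, download every media segment to a `seg_NNNNNN.tmp` file, optionally run a retry pass when `enable_retry` is set, then concatenate whatever arrived in index order. A condensed sketch of that control flow with a fake `fetch` standing in for the `httpx` calls (all names here are illustrative, not the project's helpers):

```python
import asyncio
import os
import tempfile

ENABLE_RETRY = True  # mirrors the new enable_retry flag


async def fetch(idx: int, attempt: int) -> bytes:
    """Placeholder for the real HTTP request; segment 2 fails on its first try."""
    await asyncio.sleep(0)
    if idx == 2 and attempt == 0:
        raise RuntimeError("transient error")
    return f"segment {idx}\n".encode()


async def download_batch(indices, temp_dir, done, attempt=0):
    async def one(idx):
        try:
            data = await fetch(idx, attempt)
        except RuntimeError:
            return  # left for the retry pass
        with open(os.path.join(temp_dir, f"seg_{idx:06d}.tmp"), "wb") as f:
            f.write(data)  # written straight to disk, never kept in memory
        done.add(idx)

    await asyncio.gather(*(one(i) for i in indices))


async def main():
    total, done = 4, set()
    with tempfile.TemporaryDirectory() as temp_dir:
        await download_batch(range(total), temp_dir, done)             # first pass
        if ENABLE_RETRY and len(done) < total:                         # optional retry pass
            await download_batch(set(range(total)) - done, temp_dir, done, attempt=1)

        # Concatenate in index order, skipping anything still missing
        with open(os.path.join(temp_dir, "output.m4s"), "wb") as outfile:
            for idx in range(total):
                path = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
                if idx in done and os.path.exists(path):
                    with open(path, "rb") as infile:
                        outfile.write(infile.read())
        print(f"downloaded {len(done)}/{total} segments")


asyncio.run(main())
```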
@@ -189,7 +196,7 @@ class MPD_Segments:
         self._verify_download_completion()
         return self._generate_results(stream_type)
 
-    async def _download_init_segment(self, client, init_url, concat_path,
+    async def _download_init_segment(self, client, init_url, concat_path, progress_bar):
         """
         Download the init segment and update progress/estimator.
         """
@@ -205,49 +212,56 @@ class MPD_Segments:
             with open(concat_path, 'wb') as outfile:
                 if response.status_code == 200:
                     outfile.write(response.content)
-                    estimator
+                    if self.estimator:
+                        self.estimator.add_ts_file(len(response.content))
 
             progress_bar.update(1)
-            self.
+            if self.estimator:
+                self._throttled_progress_update(len(response.content), progress_bar)
 
         except Exception as e:
             progress_bar.close()
             raise RuntimeError(f"Error downloading init segment: {e}")
 
-    def _throttled_progress_update(self, content_size: int,
+    def _throttled_progress_update(self, content_size: int, progress_bar):
         """
         Throttled progress update to reduce CPU usage.
         """
         current_time = time.time()
         if current_time - self._last_progress_update > self._progress_update_interval:
-            estimator
+            if self.estimator:
+                self.estimator.update_progress_bar(content_size, progress_bar)
             self._last_progress_update = current_time
 
-    async def _download_segments_batch(self, client, segment_urls, temp_dir, semaphore, max_retry,
+    async def _download_segments_batch(self, client, segment_urls, temp_dir, semaphore, max_retry, progress_bar):
         """
-        Download
+        Download segments to temporary files - write immediately to disk, not memory.
         """
         async def download_single(url, idx):
            async with semaphore:
                headers = {'User-Agent': get_userAgent()}
+                temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
 
                for attempt in range(max_retry):
                    if self.download_interrupted:
-                        return idx, False, attempt
+                        return idx, False, attempt, 0
 
                    try:
                        timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 3)
                        resp = await client.get(url, headers=headers, follow_redirects=True, timeout=timeout)
 
-                        # Write to temp file
+                        # Write directly to temp file
                        if resp.status_code == 200:
-
+                            content_size = len(resp.content)
+                            with open(temp_file, 'wb') as f:
+                                f.write(resp.content)
+
+                            # Update status
                            async with self.segments_lock:
-
-
-                                self.segment_files[idx] = temp_file
+                                self.segment_status[idx] = {'downloaded': True, 'size': content_size}
+                                self.downloaded_segments.add(idx)
 
-                            return idx, True, attempt,
+                            return idx, True, attempt, content_size
                        else:
                            if attempt < 2:
                                sleep_time = 0.5 + attempt * 0.5
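Each worker in `_download_segments_batch` now acquires the semaphore, grows its timeout per attempt, writes the response straight to its temp file, and reports an `(idx, success, attempts, size)` tuple that the caller drains with `asyncio.as_completed`. A self-contained sketch of that worker pattern, again with a fake fetch in place of `client.get`:

```python
import asyncio
import os
import random
import tempfile

MAX_RETRY = 3


async def fake_fetch(url: str, timeout: float) -> bytes:
    """Stand-in for the HTTP call; occasionally raises to exercise the backoff."""
    await asyncio.sleep(0.01)
    if random.random() < 0.3:
        raise TimeoutError(f"timed out after {timeout}s")
    return url.encode()


async def download_single(url: str, idx: int, temp_dir: str, semaphore: asyncio.Semaphore):
    temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
    async with semaphore:
        for attempt in range(MAX_RETRY):
            try:
                timeout = min(30, 10 + attempt * 3)        # grow the timeout per attempt
                data = await fake_fetch(url, timeout)
                with open(temp_file, "wb") as f:           # straight to disk
                    f.write(data)
                return idx, True, attempt, len(data)
            except TimeoutError:
                await asyncio.sleep(min(2.0, 0.1 * (2 ** attempt)))  # backoff before retrying
    return idx, False, MAX_RETRY, 0


async def main():
    urls = [f"https://example.invalid/seg/{i}.m4s" for i in range(8)]
    semaphore = asyncio.Semaphore(4)                       # bounded concurrency
    with tempfile.TemporaryDirectory() as temp_dir:
        tasks = [download_single(u, i, temp_dir, semaphore) for i, u in enumerate(urls)]
        for coro in asyncio.as_completed(tasks):
            idx, ok, attempts, size = await coro
            print(f"segment {idx}: ok={ok} attempt={attempts} bytes={size}")


asyncio.run(main())
```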
@@ -258,19 +272,21 @@ class MPD_Segments:
                    except Exception:
                        sleep_time = min(2.0, 1.1 * (2 ** attempt))
                        await asyncio.sleep(sleep_time)
+
+                # Mark as failed
+                async with self.segments_lock:
+                    self.segment_status[idx] = {'downloaded': False, 'size': 0}
 
                return idx, False, max_retry, 0
 
-        #
+        # Download all segments concurrently
        tasks = [download_single(url, i) for i, url in enumerate(segment_urls)]
 
        for coro in asyncio.as_completed(tasks):
            try:
                idx, success, nretry, size = await coro
 
-                if success:
-                    self.downloaded_segments.add(idx)
-                else:
+                if not success:
                    self.info_nFailed += 1
 
                if nretry > self.info_maxRetry:
@@ -278,15 +294,16 @@ class MPD_Segments:
                self.info_nRetry += nretry
 
                progress_bar.update(1)
-                estimator
-
+                if self.estimator:
+                    self.estimator.add_ts_file(size)
+                    self._throttled_progress_update(size, progress_bar)
 
            except KeyboardInterrupt:
                self.download_interrupted = True
-                print("\n[red]Download interrupted by user (Ctrl+C).")
+                console.print("\n[red]Download interrupted by user (Ctrl+C).")
                break
 
-    async def _retry_failed_segments(self, client, segment_urls, temp_dir, semaphore, max_retry,
+    async def _retry_failed_segments(self, client, segment_urls, temp_dir, semaphore, max_retry, progress_bar):
        """
        Retry failed segments up to 3 times.
        """
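Estimator updates are now funneled through `_throttled_progress_update`, which refreshes at most once per 0.1 s so thousands of small segments do not hammer the progress bar. The idea in isolation, with a plain `print` standing in for the estimator/progress-bar refresh:

```python
import time


class ThrottledProgress:
    """Rate-limits an expensive progress refresh (here: a print) to one per interval."""

    def __init__(self, interval: float = 0.1):
        self._interval = interval
        self._last_update = 0.0
        self.bytes_done = 0

    def add(self, n_bytes: int) -> None:
        self.bytes_done += n_bytes
        now = time.time()
        if now - self._last_update > self._interval:
            print(f"downloaded ~{self.bytes_done / 1024:.1f} KiB")  # the "expensive" refresh
            self._last_update = now


progress = ThrottledProgress(interval=0.1)
for _ in range(1000):
    progress.add(4096)          # most of these calls skip the refresh entirely
    time.sleep(0.001)
```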
@@ -301,6 +318,7 @@ class MPD_Segments:
        async def download_single(url, idx):
            async with semaphore:
                headers = {'User-Agent': get_userAgent()}
+                temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
 
                for attempt in range(max_retry):
                    if self.download_interrupted:
@@ -310,15 +328,17 @@ class MPD_Segments:
                        timeout = min(SEGMENT_MAX_TIMEOUT, 15 + attempt * 5)
                        resp = await client.get(url, headers=headers, timeout=timeout)
 
-                        # Write to temp file
+                        # Write directly to temp file
                        if resp.status_code == 200:
-
+                            content_size = len(resp.content)
+                            with open(temp_file, 'wb') as f:
+                                f.write(resp.content)
+
                            async with self.segments_lock:
-
-
-                                self.segment_files[idx] = temp_file
+                                self.segment_status[idx] = {'downloaded': True, 'size': content_size}
+                                self.downloaded_segments.add(idx)
 
-                            return idx, True, attempt,
+                            return idx, True, attempt, content_size
                        else:
                            await asyncio.sleep(1.5 * (2 ** attempt))
 
@@ -334,9 +354,7 @@ class MPD_Segments:
            try:
                idx, success, nretry, size = await coro
 
-                if success:
-                    self.downloaded_segments.add(idx)
-                else:
+                if not success:
                    nFailed_this_round += 1
 
                if nretry > self.info_maxRetry:
@@ -344,8 +362,9 @@ class MPD_Segments:
                self.info_nRetry += nretry
 
                progress_bar.update(0)
-                estimator
-
+                if self.estimator:
+                    self.estimator.add_ts_file(size)
+                    self._throttled_progress_update(size, progress_bar)
 
            except KeyboardInterrupt:
                self.download_interrupted = True
@@ -355,20 +374,24 @@ class MPD_Segments:
            self.info_nFailed = nFailed_this_round
            global_retry_count += 1
 
-    async def
+    async def _concatenate_segments_in_order(self, temp_dir, concat_path, total_segments):
        """
-        Concatenate all segment files
-        Skip missing segments and continue with available ones.
+        Concatenate all segment files IN ORDER to the final output file.
        """
-        successful_segments = 0
        with open(concat_path, 'ab') as outfile:
            for idx in range(total_segments):
-
-
-
-
-
-
+                temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
+
+                # Only concatenate successfully downloaded segments
+                if idx in self.downloaded_segments and os.path.exists(temp_file):
+                    with open(temp_file, 'rb') as infile:
+
+                        # Read and write in chunks to avoid memory issues
+                        while True:
+                            chunk = infile.read(8192)  # 8KB chunks
+                            if not chunk:
+                                break
+                            outfile.write(chunk)
 
    def _get_bar_format(self, description: str) -> str:
        """
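`_concatenate_segments_in_order` walks the indices from 0 to N, appends only segments that actually downloaded, and copies each temp file in 8 KB chunks so no segment has to fit in memory. A standalone version of the same routine with a small demo; the missing-segment behavior mirrors the hunk above:

```python
import os
import tempfile


def concatenate_in_order(temp_dir: str, concat_path: str, total_segments: int,
                         downloaded: set, chunk_size: int = 8192) -> None:
    """Append available segments to concat_path in index order, chunk by chunk."""
    with open(concat_path, "ab") as outfile:
        for idx in range(total_segments):
            temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
            if idx in downloaded and os.path.exists(temp_file):
                with open(temp_file, "rb") as infile:
                    while chunk := infile.read(chunk_size):
                        outfile.write(chunk)


with tempfile.TemporaryDirectory() as d:
    for i in (0, 2):                                    # segment 1 deliberately missing
        with open(os.path.join(d, f"seg_{i:06d}.tmp"), "wb") as f:
            f.write(f"part {i}\n".encode())
    out = os.path.join(d, "video.m4s")
    concatenate_in_order(d, out, total_segments=3, downloaded={0, 2})
    with open(out, "rb") as f:
        print(f.read())                                 # b'part 0\npart 2\n'
```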
@@ -435,7 +458,8 @@ class MPD_Segments:
        # Delete temp segment files
        if temp_dir and os.path.exists(temp_dir):
            try:
-                for
+                for idx in range(len(self.selected_representation.get('segment_urls', []))):
+                    temp_file = os.path.join(temp_dir, f"seg_{idx:06d}.tmp")
                    if os.path.exists(temp_file):
                        os.remove(temp_file)
                os.rmdir(temp_dir)
@@ -447,7 +471,7 @@ class MPD_Segments:
            self._display_error_summary()
 
        # Clear memory
-        self.
+        self.segment_status = {}
 
    def _display_error_summary(self) -> None:
        """
@@ -455,11 +479,28 @@ class MPD_Segments:
        """
        total_segments = len(self.selected_representation.get('segment_urls', []))
        failed_indices = [i for i in range(total_segments) if i not in self.downloaded_segments]
-
-
-
-
-
-
-
-
+
+        console.print(f" [cyan]Max retries: [red]{getattr(self, 'info_maxRetry', 0)} [white]| "
+                      f"[cyan]Total retries: [red]{getattr(self, 'info_nRetry', 0)} [white]| "
+                      f"[cyan]Failed segments: [red]{getattr(self, 'info_nFailed', 0)} [white]| "
+                      f"[cyan]Failed indices: [red]{failed_indices}")
+
+    def get_progress_data(self) -> Dict:
+        """Returns current download progress data for API."""
+        if not self.estimator:
+            return None
+
+        total = self.get_segments_count()
+        downloaded = len(self.downloaded_segments)
+        percentage = (downloaded / total * 100) if total > 0 else 0
+        stats = self.estimator.get_stats(downloaded, total)
+
+        return {
+            'total_segments': total,
+            'downloaded_segments': downloaded,
+            'failed_segments': self.info_nFailed,
+            'current_speed': stats['download_speed'],
+            'estimated_size': stats['estimated_total_size'],
+            'percentage': round(percentage, 2),
+            'eta_seconds': stats['eta_seconds']
+        }
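The new `get_progress_data` returns a plain dict (counts, speed, estimated size, percentage, ETA) that an API or UI layer can poll while the download runs. A hedged sketch of a polling caller; the downloader here is a mock that only reproduces the dict shape from the hunk above, not the real `MPD_Segments`/estimator wiring:

```python
import time
from typing import Dict, Optional


class FakeDownloader:
    """Mock exposing the same get_progress_data() shape as the diff introduces."""

    def __init__(self, total: int):
        self.total = total
        self.done = 0

    def step(self) -> None:
        # Pretend 7 segments finish per poll interval.
        self.done = min(self.total, self.done + 7)

    def get_progress_data(self) -> Optional[Dict]:
        percentage = self.done / self.total * 100 if self.total else 0
        return {
            "total_segments": self.total,
            "downloaded_segments": self.done,
            "failed_segments": 0,
            "current_speed": "1.2 MB/s",          # placeholder values
            "estimated_size": "35 MB",
            "percentage": round(percentage, 2),
            "eta_seconds": max(0, (self.total - self.done) // 7),
        }


downloader = FakeDownloader(total=42)
while True:
    downloader.step()
    data = downloader.get_progress_data()
    print(f"{data['percentage']:5.1f}%  eta {data['eta_seconds']}s")
    if data["downloaded_segments"] >= data["total_segments"]:
        break
    time.sleep(0.05)
```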