StreamingCommunity 3.2.7-py3-none-any.whl → 3.2.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- StreamingCommunity/Lib/Downloader/HLS/segments.py +258 -144
- StreamingCommunity/Upload/version.py +1 -1
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/METADATA +1 -1
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/RECORD +8 -8
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/WHEEL +0 -0
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-3.2.7.dist-info → streamingcommunity-3.2.8.dist-info}/top_level.txt +0 -0
--- a/StreamingCommunity/Lib/Downloader/HLS/segments.py
+++ b/StreamingCommunity/Lib/Downloader/HLS/segments.py
@@ -2,10 +2,15 @@
 
 import os
 import sys
-import
+import time
+import queue
+import signal
 import logging
 import binascii
+import threading
+from queue import PriorityQueue
 from urllib.parse import urljoin, urlparse
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import Dict
 
 
@@ -37,7 +42,8 @@ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_w
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
 MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
 SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
-
+TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
+MAX_INTERRUPT_COUNT = 3
 
 # Variable
 console = Console()
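The new MAX_INTERRUPT_COUNT constant backs the escalating Ctrl+C handling added further down in setup_interrupt_handler: the first SIGINT requests a graceful stop, and repeated presses force an immediate exit. A minimal standalone sketch of that pattern (the flag names below are illustrative stand-ins, not the attributes used by M3U8_Segments):

import signal
import threading

MAX_INTERRUPT_COUNT = 3            # same threshold the diff introduces
graceful_stop = threading.Event()  # illustrative flag, not the class attribute
interrupt_count = 0
interrupt_lock = threading.Lock()

def interrupt_handler(signum, frame):
    global interrupt_count
    with interrupt_lock:
        interrupt_count += 1
        remaining = MAX_INTERRUPT_COUNT - interrupt_count
    if remaining <= 0:
        raise SystemExit("Force stop")                     # repeated Ctrl+C exits immediately
    print(f"Stopping gracefully... (Ctrl+C {remaining}x to force)")
    graceful_stop.set()                                    # workers poll this and wind down

# SIGINT handlers can only be installed from the main thread
if threading.current_thread() is threading.main_thread():
    signal.signal(signal.SIGINT, interrupt_handler)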
@@ -56,18 +62,38 @@ class M3U8_Segments:
         self.url = url
         self.tmp_folder = tmp_folder
         self.is_index_url = is_index_url
+        self.expected_real_time = None
         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
         os.makedirs(self.tmp_folder, exist_ok=True)
 
         # Util class
         self.decryption: M3U8_Decryption = None
+        self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
         self.class_url_fixer = M3U8_UrlFix(url)
-
-        #
+
+        # Sync
+        self.queue = PriorityQueue()
+        self.buffer = {}
+        self.expected_index = 0
+
+        self.stop_event = threading.Event()
         self.downloaded_segments = set()
+        self.base_timeout = 0.5
+        self.current_timeout = 3.0
+
+        # Stopping
+        self.interrupt_flag = threading.Event()
         self.download_interrupted = False
-        self.
+        self.interrupt_count = 0
+        self.force_stop = False
+        self.interrupt_lock = threading.Lock()
+
+        # OTHER INFO
+        self.info_maxRetry = 0
         self.info_nRetry = 0
+        self.info_nFailed = 0
+        self.active_retries = 0
+        self.active_retries_lock = threading.Lock()
 
     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
         """
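The synchronisation attributes added to __init__ (queue, buffer, expected_index, stop_event) set up a producer/consumer hand-off: download workers put (index, payload) pairs on the queue, and a single writer thread drains it, writing segments strictly in ascending order and parking out-of-order arrivals in the buffer. A minimal sketch of that reassembly logic, with illustrative names rather than the class's own methods:

import io
import queue

def write_in_order(q: queue.Queue, out, total: int) -> None:
    """Drain (index, payload) pairs and write them strictly in ascending index order."""
    buffer = {}      # out-of-order segments parked until their turn
    expected = 0
    written = 0
    while written < total:
        index, payload = q.get()
        if index == expected:
            out.write(payload)
            expected += 1
            written += 1
            while expected in buffer:          # flush anything that is now contiguous
                out.write(buffer.pop(expected))
                expected += 1
                written += 1
        else:
            buffer[index] = payload

q = queue.Queue()
for pair in [(1, b"B"), (0, b"A"), (2, b"C")]:   # segments arriving out of order
    q.put(pair)
sink = io.BytesIO()
write_in_order(q, sink, total=3)
assert sink.getvalue() == b"ABC"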
@@ -119,10 +145,16 @@ class M3U8_Segments:
             if "http" not in seg else seg
             for seg in m3u8_parser.segments
         ]
+        self.class_ts_estimator.total_segments = len(self.segments)
 
     def get_info(self) -> None:
         """
         Retrieves M3U8 playlist information from the given URL.
+
+        If the URL is an index URL, this method:
+        - Sends an HTTP GET request to fetch the M3U8 playlist.
+        - Parses the M3U8 content using `parse_data`.
+        - Saves the playlist to a temporary folder.
         """
         if self.is_index_url:
             try:
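The expanded get_info docstring spells out the index-URL path: fetch the playlist over HTTP, parse it, and save a copy to the temporary folder. A rough standalone illustration of that flow follows; fetch_playlist and its naive line-based parse are stand-ins for the package's M3U8_Parser, not its real API:

import httpx

def fetch_playlist(index_url: str, tmp_path: str) -> list:
    """Fetch an M3U8 playlist, save a copy, and return the segment URIs it lists."""
    response = httpx.get(index_url, timeout=15, follow_redirects=True)
    response.raise_for_status()
    text = response.text

    with open(tmp_path, "w", encoding="utf-8") as fh:
        fh.write(text)                       # keep a copy in the temp folder

    # naive extraction: every non-comment, non-empty line is treated as a segment URI
    return [line.strip() for line in text.splitlines()
            if line.strip() and not line.startswith("#")]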
@@ -137,156 +169,241 @@ class M3U8_Segments:
         except Exception as e:
             raise RuntimeError(f"M3U8 info retrieval failed: {e}")
 
-    def
+    def setup_interrupt_handler(self):
         """
-
+        Set up a signal handler for graceful interruption.
         """
-
-
-
-
-
-
-
+        def interrupt_handler(signum, frame):
+            with self.interrupt_lock:
+                self.interrupt_count += 1
+                if self.interrupt_count >= MAX_INTERRUPT_COUNT:
+                    self.force_stop = True
+
+            if self.force_stop:
+                console.print("\n[red]Force stop triggered! Exiting immediately.")
 
-
-
-
-
-
-
-        progress_bar = tqdm(
-            total=len(self.segments),
-            unit='s',
-            ascii='░▒█',
-            bar_format=self._get_bar_format(description),
-            mininterval=0.6,
-            maxinterval=1.0,
-            file=sys.stdout
-        )
+            else:
+                if not self.interrupt_flag.is_set():
+                    remaining = MAX_INTERRUPT_COUNT - self.interrupt_count
+                    console.print(f"\n[red]- Stopping gracefully... (Ctrl+C {remaining}x to force)")
+                    self.download_interrupted = True
 
-
-
-        semaphore = asyncio.Semaphore(self._get_worker_count(type))
-
-        results = [None] * len(self.segments)
-
-        try:
-            async with httpx.AsyncClient(timeout=SEGMENT_MAX_TIMEOUT) as client:
+                    if remaining == 1:
+                        self.interrupt_flag.set()
 
-
-
-
-
-
+
+        if threading.current_thread() is threading.main_thread():
+            signal.signal(signal.SIGINT, interrupt_handler)
+        else:
+            print("Signal handler must be set in the main thread")
+
+    def _get_http_client(self):
+        client_params = {
+            'headers': {'User-Agent': get_userAgent()},
+            'timeout': SEGMENT_MAX_TIMEOUT,
+            'follow_redirects': True,
+            'http2': False,
+            'verify': REQUEST_VERIFY
+        }
+        return httpx.Client(**client_params)
+
+    def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.1) -> None:
+        """
+        Downloads a TS segment and adds it to the segment queue with retry logic.
 
-
-
-
-
-
+        Parameters:
+            - ts_url (str): The URL of the TS segment.
+            - index (int): The index of the segment.
+            - progress_bar (tqdm): Progress counter for tracking download progress.
+            - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
+        """
+        for attempt in range(REQUEST_MAX_RETRY):
+            if self.interrupt_flag.is_set():
+                return
+
+            try:
+                with self._get_http_client() as client:
+                    response = client.get(ts_url)
+
+                    # Validate response and content
+                    response.raise_for_status()
+                    segment_content = response.content
+                    content_size = len(segment_content)
+
+                    # Decrypt if needed and verify decrypted content
+                    if self.decryption is not None:
+                        try:
+                            segment_content = self.decryption.decrypt(segment_content)
+
+                        except Exception as e:
+                            logging.error(f"Decryption failed for segment {index}: {str(e)}")
+                            self.interrupt_flag.set()   # Interrupt the download process
+                            self.stop_event.set()       # Trigger the stopping event for all threads
+                            break                       # Stop the current task immediately
+
+                    self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
+                    self.queue.put((index, segment_content))
+                    self.downloaded_segments.add(index)
+                    progress_bar.update(1)
+                    return
 
-
-
+            except Exception as e:
+                logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
+
+                if attempt > self.info_maxRetry:
+                    self.info_maxRetry = ( attempt + 1 )
+                self.info_nRetry += 1
+
+                if attempt + 1 == REQUEST_MAX_RETRY:
+                    console.log(f"[red]Final retry failed for segment: {index}")
+                    self.queue.put((index, None))   # Marker for failed segment
+                    progress_bar.update(1)
+                    self.info_nFailed += 1
+                    return
+
+                with self.active_retries_lock:
+                    self.active_retries += 1
+
+                sleep_time = backoff_factor * (2 ** attempt)
+                logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
+                time.sleep(sleep_time)
+
+                with self.active_retries_lock:
+                    self.active_retries -= 1
 
-
-
-
+    def write_segments_to_file(self):
+        """
+        Writes segments to file with additional verification.
+        """
+        with open(self.tmp_file_path, 'wb') as f:
+            while not self.stop_event.is_set() or not self.queue.empty():
+                if self.interrupt_flag.is_set():
+                    break
+
+                try:
+                    index, segment_content = self.queue.get(timeout=self.current_timeout)
 
-
-
+                    # Successful queue retrieval: reduce timeout
+                    self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
 
-
-
+                    # Handle failed segments
+                    if segment_content is None:
+                        if index == self.expected_index:
+                            self.expected_index += 1
+                        continue
 
-
+                    # Write segment if it's the next expected one
+                    if index == self.expected_index:
+                        f.write(segment_content)
+                        f.flush()
+                        self.expected_index += 1
 
-
-
-
-        """
-        async def download_single(url, idx):
-            async with semaphore:
-                for attempt in range(max_retry):
-                    try:
-                        resp = await client.get(url, headers={'User-Agent': get_userAgent()})
+                        # Write any buffered segments that are now in order
+                        while self.expected_index in self.buffer:
+                            next_segment = self.buffer.pop(self.expected_index)
 
-
-
+                            if next_segment is not None:
+                                f.write(next_segment)
+                                f.flush()
 
-
-                            content = self.decryption.decrypt(content)
-                            return idx, content, attempt
-
-                        await asyncio.sleep(1.1 * (2 ** attempt))
-                        logging.info(f"Segment {idx} failed with status {resp.status_code}. Retrying...")
+                            self.expected_index += 1
 
-
-
-                        logging.info(f"Segment {idx} download failed: {sys.exc_info()[1]}. Retrying...")
+                    else:
+                        self.buffer[index] = segment_content
 
-
+                except queue.Empty:
+                    self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
+                    time.sleep(0.05)
 
-
-
-        for coro in asyncio.as_completed(tasks):
-            try:
-                idx, data, nretry = await coro
-                results[idx] = data
+                    if self.stop_event.is_set():
+                        break
 
-
-
-
-
+                except Exception as e:
+                    logging.error(f"Error writing segment {index}: {str(e)}")
+
+    def download_streams(self, description: str, type: str):
+        """
+        Downloads all TS segments in parallel and writes them to a file.
 
-
-
+        Parameters:
+            - description: Description to insert on tqdm bar
+            - type (str): Type of download: 'video' or 'audio'
+        """
+        if TELEGRAM_BOT:
 
-
-
+            # Used for the screen
+            console.log("####")
+
+        self.get_info()
+        self.setup_interrupt_handler()
 
-
-
-
+        progress_bar = tqdm(
+            total=len(self.segments),
+            unit='s',
+            ascii='░▒█',
+            bar_format=self._get_bar_format(description),
+            mininterval=0.6,
+            maxinterval=1.0,
+            file=sys.stdout,    # Using file=sys.stdout to force in-place updates because sys.stderr may not support carriage returns in this environment.
+        )
 
-
-
-
-
-        max_global_retries = 5
-        global_retry_count = 0
+        try:
+            writer_thread = threading.Thread(target=self.write_segments_to_file)
+            writer_thread.daemon = True
+            writer_thread.start()
 
-
-
-                not self.download_interrupted):
+            # Configure workers and delay
+            max_workers = self._get_worker_count(type)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            # Download segments with completion verification
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = []
+                for index, segment_url in enumerate(self.segments):
+
+                    # Check for interrupt before submitting each task
+                    if self.interrupt_flag.is_set():
+                        break
+
+                    time.sleep(TQDM_DELAY_WORKER)
+                    futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))
+
+                # Wait for futures with interrupt handling
+                for future in as_completed(futures):
+                    if self.interrupt_flag.is_set():
+                        break
+                    try:
+                        future.result()
+                    except Exception as e:
+                        logging.error(f"Error in download thread: {str(e)}")
+
+            # Interrupt handling for missing segments
+            if not self.interrupt_flag.is_set():
+                total_segments = len(self.segments)
+                completed_segments = len(self.downloaded_segments)
+
+                if completed_segments < total_segments:
+                    missing_segments = set(range(total_segments)) - self.downloaded_segments
+                    logging.warning(f"Missing segments: {sorted(missing_segments)}")
+
+                    # Retry missing segments with interrupt check
+                    for index in missing_segments:
+                        if self.interrupt_flag.is_set():
+                            break
 
-
-
-
-
-
-
-
-
-                        f.flush()
+                        try:
+                            self.download_segment(self.segments[index], index, progress_bar)
+
+                        except Exception as e:
+                            logging.error(f"Failed to retry segment {index}: {str(e)}")
+
+        finally:
+            self._cleanup_resources(writer_thread, progress_bar)
 
+            if not self.interrupt_flag.is_set():
+                self._verify_download_completion()
+
+            return self._generate_results(type)
+
     def _get_bar_format(self, description: str) -> str:
         """
         Generate platform-appropriate progress bar format.
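The rewritten download path above replaces the old asyncio client with a ThreadPoolExecutor: each worker fetches one segment with httpx, retries with exponential backoff, and reports a failure marker instead of raising. A compressed sketch of that shape, assuming a local retry budget and timeout (REQUEST_MAX_RETRY_LOCAL and the 15-second timeout are placeholders, not the package's configuration):

import logging
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import httpx

REQUEST_MAX_RETRY_LOCAL = 3   # placeholder retry budget
BACKOFF_FACTOR = 1.1          # matches the default backoff_factor in the diff

def fetch_segment(url: str):
    """Download one segment, retrying with exponential backoff; returns None on final failure."""
    for attempt in range(REQUEST_MAX_RETRY_LOCAL):
        try:
            with httpx.Client(timeout=15, follow_redirects=True) as client:
                response = client.get(url)
                response.raise_for_status()
                return response.content
        except Exception as exc:
            logging.info("attempt %d for %s failed: %s", attempt + 1, url, exc)
            if attempt + 1 == REQUEST_MAX_RETRY_LOCAL:
                return None                                  # caller records a failed segment
            time.sleep(BACKOFF_FACTOR * (2 ** attempt))      # exponential backoff

def download_all(urls, max_workers=4):
    """Map segment index to downloaded bytes (or None) using a bounded worker pool."""
    results = {}
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(fetch_segment, url): i for i, url in enumerate(urls)}
        for future in as_completed(futures):
            results[futures[future]] = future.result()
    return results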
@@ -310,9 +427,7 @@ class M3U8_Segments:
         return base_workers
 
     def _generate_results(self, stream_type: str) -> Dict:
-        """
-        Package final download results.
-        """
+        """Package final download results."""
         return {
             'type': stream_type,
             'nFailed': self.info_nFailed,
@@ -320,31 +435,30 @@ class M3U8_Segments:
         }
 
     def _verify_download_completion(self) -> None:
-        """
-        Validate final download integrity.
-        """
+        """Validate final download integrity."""
         total = len(self.segments)
         if len(self.downloaded_segments) / total < 0.999:
             missing = sorted(set(range(total)) - self.downloaded_segments)
             raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
 
-    def _cleanup_resources(self, progress_bar: tqdm) -> None:
-        """
-
-
+    def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
+        """Ensure resource cleanup and final reporting."""
+        self.stop_event.set()
+        writer_thread.join(timeout=30)
         progress_bar.close()
-
+
         if self.info_nFailed > 0:
             self._display_error_summary()
 
+        self.buffer = {}
+        self.expected_index = 0
+
     def _display_error_summary(self) -> None:
-        """
-        Generate final error report.
-        """
+        """Generate final error report."""
         console.print(f"\n[cyan]Retry Summary: "
                       f"[white]Max retries: [green]{self.info_maxRetry} "
                       f"[white]Total retries: [green]{self.info_nRetry} "
                       f"[white]Failed segments: [red]{self.info_nFailed}")
 
         if self.info_nRetry > len(self.segments) * 0.3:
-            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
+            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
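The reworked cleanup now has a writer thread to retire as well: the diff sets stop_event, joins the writer with a bounded timeout, closes the progress bar, and a separate check requires roughly 99.9% of segments before the download counts as complete. A small sketch of that shutdown ordering, with stand-in arguments rather than the class's attributes:

import threading

def shutdown(stop_event: threading.Event, writer: threading.Thread,
             downloaded: set, total: int) -> None:
    # 1) ask the writer loop to drain and exit, then wait for it (bounded)
    stop_event.set()
    writer.join(timeout=30)

    # 2) completeness check mirroring the ~99.9% threshold in the diff
    if total and len(downloaded) / total < 0.999:
        missing = sorted(set(range(total)) - downloaded)
        raise RuntimeError(f"Download incomplete, missing segments: {missing}")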
--- a/streamingcommunity-3.2.7.dist-info/RECORD
+++ b/streamingcommunity-3.2.8.dist-info/RECORD
@@ -74,7 +74,7 @@ StreamingCommunity/Lib/Downloader/DASH/downloader.py,sha256=nuYZLM_Np0TKZ7wc7hYL
 StreamingCommunity/Lib/Downloader/DASH/parser.py,sha256=ZW6oErH9i8Nrp6yPc8usiRBv9ftkfVYh46wEg8MOl6o,9835
 StreamingCommunity/Lib/Downloader/DASH/segments.py,sha256=ZV9FYibbS8E722MHVxl16N6rN1tPioZTNqO4jWBpGGo,12672
 StreamingCommunity/Lib/Downloader/HLS/downloader.py,sha256=yzusDF32uSR_MZqdrNoJ27nR2VXRvflckTxiw04JoNk,21189
-StreamingCommunity/Lib/Downloader/HLS/segments.py,sha256=
+StreamingCommunity/Lib/Downloader/HLS/segments.py,sha256=8Ze3s2YNglBonXKngDIs89LH48b_IZy7vx4h138uydQ,17982
 StreamingCommunity/Lib/Downloader/MP4/downloader.py,sha256=OBObY930wrOG0IUlDRROOfgU_u6uYHvckED4nnPjXzs,7367
 StreamingCommunity/Lib/Downloader/TOR/downloader.py,sha256=CrRGdLGI_45AnhtTZm8r7KO7uGmU9k6pywy-qO18LG8,19242
 StreamingCommunity/Lib/FFmpeg/__init__.py,sha256=6PBsZdE1jrD2EKOVyx3JEHnyDZzVeKlPkH5T0zyfOgU,130
@@ -93,7 +93,7 @@ StreamingCommunity/TelegramHelp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 StreamingCommunity/TelegramHelp/config.json,sha256=4Tnram-K5wNK0QjWY9R4VpiHfGnMbahvDEdJ2VBWOoc,1460
 StreamingCommunity/TelegramHelp/telegram_bot.py,sha256=zCqj7xBofh9FYfEYl55mgT945jqtKo7qJhn-SMLvAvA,26455
 StreamingCommunity/Upload/update.py,sha256=ZGQHcnTLAfXla_PqvykDeg2-WKOYfX9zX2I3KrdKHpc,3814
-StreamingCommunity/Upload/version.py,sha256=
+StreamingCommunity/Upload/version.py,sha256=NaOdgJwSHkakkbsYisKWKKqEMAjCYxhIHttZMn77p_c,171
 StreamingCommunity/Util/bento4_installer.py,sha256=P5ipziMCvezxan8GUh9vm8B1LXGyHusFVDf842LSwis,6966
 StreamingCommunity/Util/color.py,sha256=NvD0Eni-25oOOkY-szCEoc0lGvzQxyL7xhM0RE4EvUM,458
 StreamingCommunity/Util/config_json.py,sha256=4sn-vvrYybZMOlT-blx3bM9njxQRQS_HwSPfqQ0qZ94,24226
@@ -103,9 +103,9 @@ StreamingCommunity/Util/logger.py,sha256=9kGD6GmWj2pM8ADpJc85o7jm8DD0c5Aguqnq-9k
 StreamingCommunity/Util/message.py,sha256=81vPmsGBusovIhheIO4Ec6p7BYvMj1wE_CthtRyp6OM,1333
 StreamingCommunity/Util/os.py,sha256=vLNRGWsQYTSUGdpj19fR_n0i-6bhZYeJh5IZpahdyKM,16832
 StreamingCommunity/Util/table.py,sha256=Nw5PlsvfEIOQZWy5VhsU5OK3heuBXGwsqmLl0k8yQzc,9813
-streamingcommunity-3.2.
-streamingcommunity-3.2.
-streamingcommunity-3.2.
-streamingcommunity-3.2.
-streamingcommunity-3.2.
-streamingcommunity-3.2.
+streamingcommunity-3.2.8.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+streamingcommunity-3.2.8.dist-info/METADATA,sha256=9f38qTwHxcDyf-v7jtyZOL7orWpp5x9IZkRogepZAwU,25312
+streamingcommunity-3.2.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+streamingcommunity-3.2.8.dist-info/entry_points.txt,sha256=Qph9XYfDC8n4LfDLOSl6gJGlkb9eFb5f-JOr_Wb_5rk,67
+streamingcommunity-3.2.8.dist-info/top_level.txt,sha256=YsOcxKP-WOhWpIWgBlh0coll9XUx7aqmRPT7kmt3fH0,19
+streamingcommunity-3.2.8.dist-info/RECORD,,