StreamingCommunity 3.2.5__py3-none-any.whl → 3.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; see the registry's advisory for this release for more details.

Files changed (36)
  1. StreamingCommunity/Api/Site/altadefinizione/film.py +2 -2
  2. StreamingCommunity/Api/Site/altadefinizione/series.py +1 -1
  3. StreamingCommunity/Api/Site/animeunity/serie.py +1 -1
  4. StreamingCommunity/Api/Site/animeworld/film.py +1 -1
  5. StreamingCommunity/Api/Site/animeworld/serie.py +1 -2
  6. StreamingCommunity/Api/Site/cb01new/film.py +1 -1
  7. StreamingCommunity/Api/Site/crunchyroll/film.py +3 -4
  8. StreamingCommunity/Api/Site/crunchyroll/series.py +10 -6
  9. StreamingCommunity/Api/Site/crunchyroll/util/ScrapeSerie.py +20 -0
  10. StreamingCommunity/Api/Site/guardaserie/series.py +1 -2
  11. StreamingCommunity/Api/Site/mediasetinfinity/film.py +12 -3
  12. StreamingCommunity/Api/Site/mediasetinfinity/series.py +14 -6
  13. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +1 -4
  14. StreamingCommunity/Api/Site/raiplay/film.py +2 -2
  15. StreamingCommunity/Api/Site/raiplay/series.py +2 -1
  16. StreamingCommunity/Api/Site/streamingcommunity/film.py +1 -1
  17. StreamingCommunity/Api/Site/streamingcommunity/series.py +2 -2
  18. StreamingCommunity/Api/Site/streamingwatch/film.py +1 -1
  19. StreamingCommunity/Lib/Downloader/DASH/downloader.py +13 -15
  20. StreamingCommunity/Lib/Downloader/DASH/parser.py +1 -1
  21. StreamingCommunity/Lib/Downloader/HLS/downloader.py +9 -16
  22. StreamingCommunity/Lib/Downloader/HLS/segments.py +143 -260
  23. StreamingCommunity/TelegramHelp/config.json +0 -2
  24. StreamingCommunity/Upload/version.py +1 -1
  25. StreamingCommunity/Util/bento4_installer.py +191 -0
  26. StreamingCommunity/Util/config_json.py +1 -1
  27. StreamingCommunity/Util/headers.py +0 -3
  28. StreamingCommunity/Util/os.py +15 -46
  29. StreamingCommunity/__init__.py +2 -1
  30. StreamingCommunity/run.py +11 -10
  31. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/METADATA +4 -8
  32. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/RECORD +36 -35
  33. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/WHEEL +0 -0
  34. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/entry_points.txt +0 -0
  35. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/licenses/LICENSE +0 -0
  36. {streamingcommunity-3.2.5.dist-info → streamingcommunity-3.2.7.dist-info}/top_level.txt +0 -0
@@ -2,15 +2,10 @@
2
2
 
3
3
  import os
4
4
  import sys
5
- import time
6
- import queue
7
- import signal
5
+ import asyncio
8
6
  import logging
9
7
  import binascii
10
- import threading
11
- from queue import PriorityQueue
12
8
  from urllib.parse import urljoin, urlparse
13
- from concurrent.futures import ThreadPoolExecutor, as_completed
14
9
  from typing import Dict
15
10
 
16
11
 
@@ -42,8 +37,7 @@ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_w
42
37
  DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
43
38
  MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
44
39
  SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
45
- TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
46
- MAX_INTERRUPT_COUNT = 3
40
+
47
41
 
48
42
  # Variable
49
43
  console = Console()
@@ -62,38 +56,18 @@ class M3U8_Segments:
62
56
  self.url = url
63
57
  self.tmp_folder = tmp_folder
64
58
  self.is_index_url = is_index_url
65
- self.expected_real_time = None
66
59
  self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
67
60
  os.makedirs(self.tmp_folder, exist_ok=True)
68
61
 
69
62
  # Util class
70
63
  self.decryption: M3U8_Decryption = None
71
- self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
72
64
  self.class_url_fixer = M3U8_UrlFix(url)
73
-
74
- # Sync
75
- self.queue = PriorityQueue()
76
- self.buffer = {}
77
- self.expected_index = 0
78
-
79
- self.stop_event = threading.Event()
65
+
66
+ # Download tracking
80
67
  self.downloaded_segments = set()
81
- self.base_timeout = 0.5
82
- self.current_timeout = 3.0
83
-
84
- # Stopping
85
- self.interrupt_flag = threading.Event()
86
68
  self.download_interrupted = False
87
- self.interrupt_count = 0
88
- self.force_stop = False
89
- self.interrupt_lock = threading.Lock()
90
-
91
- # OTHER INFO
92
- self.info_maxRetry = 0
93
- self.info_nRetry = 0
94
69
  self.info_nFailed = 0
95
- self.active_retries = 0
96
- self.active_retries_lock = threading.Lock()
70
+ self.info_nRetry = 0
97
71
 
98
72
  def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
99
73
  """
@@ -145,16 +119,10 @@ class M3U8_Segments:
145
119
  if "http" not in seg else seg
146
120
  for seg in m3u8_parser.segments
147
121
  ]
148
- self.class_ts_estimator.total_segments = len(self.segments)
149
122
 
150
123
  def get_info(self) -> None:
151
124
  """
152
125
  Retrieves M3U8 playlist information from the given URL.
153
-
154
- If the URL is an index URL, this method:
155
- - Sends an HTTP GET request to fetch the M3U8 playlist.
156
- - Parses the M3U8 content using `parse_data`.
157
- - Saves the playlist to a temporary folder.
158
126
  """
159
127
  if self.is_index_url:
160
128
  try:
@@ -169,241 +137,156 @@ class M3U8_Segments:
169
137
  except Exception as e:
170
138
  raise RuntimeError(f"M3U8 info retrieval failed: {e}")
171
139
 
172
- def setup_interrupt_handler(self):
140
+ def download_streams(self, description: str, type: str):
173
141
  """
174
- Set up a signal handler for graceful interruption.
142
+ Synchronous wrapper for async download.
175
143
  """
176
- def interrupt_handler(signum, frame):
177
- with self.interrupt_lock:
178
- self.interrupt_count += 1
179
- if self.interrupt_count >= MAX_INTERRUPT_COUNT:
180
- self.force_stop = True
181
-
182
- if self.force_stop:
183
- console.print("\n[red]Force stop triggered! Exiting immediately.")
184
-
185
- else:
186
- if not self.interrupt_flag.is_set():
187
- remaining = MAX_INTERRUPT_COUNT - self.interrupt_count
188
- console.print(f"\n[red]- Stopping gracefully... (Ctrl+C {remaining}x to force)")
189
- self.download_interrupted = True
190
-
191
- if remaining == 1:
192
- self.interrupt_flag.set()
144
+ try:
145
+ return asyncio.run(self.download_segments(description=description, type=type))
146
+
147
+ except KeyboardInterrupt:
148
+ self.download_interrupted = True
149
+ console.print("\n[red]Download interrupted by user (Ctrl+C).")
150
+ return self._generate_results(type)
193
151
 
194
-
195
- if threading.current_thread() is threading.main_thread():
196
- signal.signal(signal.SIGINT, interrupt_handler)
197
- else:
198
- print("Signal handler must be set in the main thread")
199
-
200
- def _get_http_client(self):
201
- client_params = {
202
- 'headers': {'User-Agent': get_userAgent()},
203
- 'timeout': SEGMENT_MAX_TIMEOUT,
204
- 'follow_redirects': True,
205
- 'http2': False,
206
- 'verify': REQUEST_VERIFY
207
- }
208
- return httpx.Client(**client_params)
209
-
210
- def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.1) -> None:
152
+ async def download_segments(self, description: str, type: str, concurrent_downloads: int = 8):
211
153
  """
212
- Downloads a TS segment and adds it to the segment queue with retry logic.
213
-
214
- Parameters:
215
- - ts_url (str): The URL of the TS segment.
216
- - index (int): The index of the segment.
217
- - progress_bar (tqdm): Progress counter for tracking download progress.
218
- - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
154
+ Download segments asynchronously.
219
155
  """
220
- for attempt in range(REQUEST_MAX_RETRY):
221
- if self.interrupt_flag.is_set():
222
- return
223
-
224
- try:
225
- with self._get_http_client() as client:
226
- response = client.get(ts_url)
156
+ self.get_info()
227
157
 
228
- # Validate response and content
229
- response.raise_for_status()
230
- segment_content = response.content
231
- content_size = len(segment_content)
232
-
233
- # Decrypt if needed and verify decrypted content
234
- if self.decryption is not None:
235
- try:
236
- segment_content = self.decryption.decrypt(segment_content)
237
-
238
- except Exception as e:
239
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
240
- self.interrupt_flag.set() # Interrupt the download process
241
- self.stop_event.set() # Trigger the stopping event for all threads
242
- break # Stop the current task immediately
243
-
244
- self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
245
- self.queue.put((index, segment_content))
246
- self.downloaded_segments.add(index)
247
- progress_bar.update(1)
248
- return
158
+ progress_bar = tqdm(
159
+ total=len(self.segments),
160
+ unit='s',
161
+ ascii='░▒█',
162
+ bar_format=self._get_bar_format(description),
163
+ mininterval=0.6,
164
+ maxinterval=1.0,
165
+ file=sys.stdout
166
+ )
249
167
 
250
- except Exception as e:
251
- logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
252
-
253
- if attempt > self.info_maxRetry:
254
- self.info_maxRetry = ( attempt + 1 )
255
- self.info_nRetry += 1
256
-
257
- if attempt + 1 == REQUEST_MAX_RETRY:
258
- console.log(f"[red]Final retry failed for segment: {index}")
259
- self.queue.put((index, None)) # Marker for failed segment
260
- progress_bar.update(1)
261
- self.info_nFailed += 1
262
- return
263
-
264
- with self.active_retries_lock:
265
- self.active_retries += 1
266
-
267
- sleep_time = backoff_factor * (2 ** attempt)
268
- logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
269
- time.sleep(sleep_time)
270
-
271
- with self.active_retries_lock:
272
- self.active_retries -= 1
168
+ # Initialize estimator
169
+ estimator = M3U8_Ts_Estimator(total_segments=len(self.segments))
170
+ semaphore = asyncio.Semaphore(self._get_worker_count(type))
171
+
172
+ results = [None] * len(self.segments)
173
+
174
+ try:
175
+ async with httpx.AsyncClient(timeout=SEGMENT_MAX_TIMEOUT) as client:
273
176
 
274
- def write_segments_to_file(self):
275
- """
276
- Writes segments to file with additional verification.
277
- """
278
- with open(self.tmp_file_path, 'wb') as f:
279
- while not self.stop_event.is_set() or not self.queue.empty():
280
- if self.interrupt_flag.is_set():
281
- break
282
-
283
- try:
284
- index, segment_content = self.queue.get(timeout=self.current_timeout)
177
+ # Download all segments (first batch)
178
+ await self._download_segments_batch(
179
+ client, self.segments, results, semaphore,
180
+ REQUEST_MAX_RETRY, estimator, progress_bar
181
+ )
285
182
 
286
- # Successful queue retrieval: reduce timeout
287
- self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
183
+ # Retry failed segments
184
+ await self._retry_failed_segments(
185
+ client, self.segments, results, semaphore,
186
+ REQUEST_MAX_RETRY, estimator, progress_bar
187
+ )
288
188
 
289
- # Handle failed segments
290
- if segment_content is None:
291
- if index == self.expected_index:
292
- self.expected_index += 1
293
- continue
189
+ # Write results
190
+ self._write_results_to_file(results)
294
191
 
295
- # Write segment if it's the next expected one
296
- if index == self.expected_index:
297
- f.write(segment_content)
298
- f.flush()
299
- self.expected_index += 1
192
+ except Exception as e:
193
+ logging.error(f"Download error: {e}")
194
+ raise
300
195
 
301
- # Write any buffered segments that are now in order
302
- while self.expected_index in self.buffer:
303
- next_segment = self.buffer.pop(self.expected_index)
196
+ finally:
197
+ self._cleanup_resources(progress_bar)
304
198
 
305
- if next_segment is not None:
306
- f.write(next_segment)
307
- f.flush()
199
+ if not self.download_interrupted:
200
+ self._verify_download_completion()
308
201
 
309
- self.expected_index += 1
310
-
311
- else:
312
- self.buffer[index] = segment_content
202
+ return self._generate_results(type)
313
203
 
314
- except queue.Empty:
315
- self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
316
- time.sleep(0.05)
204
+ async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
205
+ """
206
+ Download a batch of segments with retry logic.
207
+ """
208
+ async def download_single(url, idx):
209
+ async with semaphore:
210
+ for attempt in range(max_retry):
211
+ try:
212
+ resp = await client.get(url, headers={'User-Agent': get_userAgent()})
317
213
 
318
- if self.stop_event.is_set():
319
- break
214
+ if resp.status_code == 200:
215
+ content = resp.content
320
216
 
321
- except Exception as e:
322
- logging.error(f"Error writing segment {index}: {str(e)}")
323
-
324
- def download_streams(self, description: str, type: str):
325
- """
326
- Downloads all TS segments in parallel and writes them to a file.
217
+ if self.decryption:
218
+ content = self.decryption.decrypt(content)
219
+ return idx, content, attempt
220
+
221
+ await asyncio.sleep(1.1 * (2 ** attempt))
222
+ logging.info(f"Segment {idx} failed with status {resp.status_code}. Retrying...")
223
+
224
+ except Exception:
225
+ await asyncio.sleep(1.1 * (2 ** attempt))
226
+ logging.info(f"Segment {idx} download failed: {sys.exc_info()[1]}. Retrying...")
327
227
 
328
- Parameters:
329
- - description: Description to insert on tqdm bar
330
- - type (str): Type of download: 'video' or 'audio'
331
- """
332
- if TELEGRAM_BOT:
228
+ return idx, b'', max_retry
333
229
 
334
- # Viene usato per lo screen
335
- console.log("####")
336
-
337
- self.get_info()
338
- self.setup_interrupt_handler()
230
+ tasks = [download_single(url, i) for i, url in enumerate(segment_urls)]
231
+
232
+ for coro in asyncio.as_completed(tasks):
233
+ try:
234
+ idx, data, nretry = await coro
235
+ results[idx] = data
339
236
 
340
- progress_bar = tqdm(
341
- total=len(self.segments),
342
- unit='s',
343
- ascii='░▒█',
344
- bar_format=self._get_bar_format(description),
345
- mininterval=0.6,
346
- maxinterval=1.0,
347
- file=sys.stdout, # Using file=sys.stdout to force in-place updates because sys.stderr may not support carriage returns in this environment.
348
- )
237
+ if data:
238
+ self.downloaded_segments.add(idx)
239
+ estimator.add_ts_file(len(data))
240
+ estimator.update_progress_bar(len(data), progress_bar)
349
241
 
350
- try:
351
- writer_thread = threading.Thread(target=self.write_segments_to_file)
352
- writer_thread.daemon = True
353
- writer_thread.start()
242
+ else:
243
+ self.info_nFailed += 1
354
244
 
355
- # Configure workers and delay
356
- max_workers = self._get_worker_count(type)
357
-
358
- # Download segments with completion verification
359
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
360
- futures = []
361
- for index, segment_url in enumerate(self.segments):
362
-
363
- # Check for interrupt before submitting each task
364
- if self.interrupt_flag.is_set():
365
- break
366
-
367
- time.sleep(TQDM_DELAY_WORKER)
368
- futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))
369
-
370
- # Wait for futures with interrupt handling
371
- for future in as_completed(futures):
372
- if self.interrupt_flag.is_set():
373
- break
374
- try:
375
- future.result()
376
- except Exception as e:
377
- logging.error(f"Error in download thread: {str(e)}")
378
-
379
- # Interrupt handling for missing segments
380
- if not self.interrupt_flag.is_set():
381
- total_segments = len(self.segments)
382
- completed_segments = len(self.downloaded_segments)
383
-
384
- if completed_segments < total_segments:
385
- missing_segments = set(range(total_segments)) - self.downloaded_segments
386
- logging.warning(f"Missing segments: {sorted(missing_segments)}")
387
-
388
- # Retry missing segments with interrupt check
389
- for index in missing_segments:
390
- if self.interrupt_flag.is_set():
391
- break
245
+ self.info_nRetry += nretry
246
+ progress_bar.update(1)
392
247
 
393
- try:
394
- self.download_segment(self.segments[index], index, progress_bar)
395
-
396
- except Exception as e:
397
- logging.error(f"Failed to retry segment {index}: {str(e)}")
248
+ except KeyboardInterrupt:
249
+ self.download_interrupted = True
250
+ break
398
251
 
399
- finally:
400
- self._cleanup_resources(writer_thread, progress_bar)
252
+ async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
253
+ """
254
+ Retry failed segments with exponential backoff.
255
+ """
256
+ max_global_retries = 5
257
+ global_retry_count = 0
401
258
 
402
- if not self.interrupt_flag.is_set():
403
- self._verify_download_completion()
259
+ while (self.info_nFailed > 0 and
260
+ global_retry_count < max_global_retries and
261
+ not self.download_interrupted):
262
+
263
+ failed_indices = [i for i, data in enumerate(results) if not data]
264
+ if not failed_indices:
265
+ break
266
+
267
+ logging.info(f"[yellow]Retrying {len(failed_indices)} failed segments...")
268
+
269
+ retry_tasks = [
270
+ self._download_segments_batch(
271
+ client, [segment_urls[i]], [results[i]],
272
+ semaphore, max_retry, estimator, progress_bar
273
+ )
274
+ for i in failed_indices
275
+ ]
276
+
277
+ await asyncio.gather(*retry_tasks)
278
+ global_retry_count += 1
279
+
280
+ def _write_results_to_file(self, results):
281
+ """
282
+ Write downloaded segments to file.
283
+ """
284
+ with open(self.tmp_file_path, 'wb') as f:
285
+ for data in results:
286
+ if data:
287
+ f.write(data)
288
+ f.flush()
404
289
 
405
- return self._generate_results(type)
406
-
407
290
  def _get_bar_format(self, description: str) -> str:
408
291
  """
409
292
  Generate platform-appropriate progress bar format.
@@ -427,7 +310,9 @@ class M3U8_Segments:
427
310
  return base_workers
428
311
 
429
312
  def _generate_results(self, stream_type: str) -> Dict:
430
- """Package final download results."""
313
+ """
314
+ Package final download results.
315
+ """
431
316
  return {
432
317
  'type': stream_type,
433
318
  'nFailed': self.info_nFailed,
@@ -435,33 +320,31 @@ class M3U8_Segments:
435
320
  }
436
321
 
437
322
  def _verify_download_completion(self) -> None:
438
- """Validate final download integrity."""
323
+ """
324
+ Validate final download integrity.
325
+ """
439
326
  total = len(self.segments)
440
327
  if len(self.downloaded_segments) / total < 0.999:
441
328
  missing = sorted(set(range(total)) - self.downloaded_segments)
442
329
  raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
443
330
 
444
- def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
445
- """Ensure resource cleanup and final reporting."""
446
- self.stop_event.set()
447
- writer_thread.join(timeout=30)
331
+ def _cleanup_resources(self, progress_bar: tqdm) -> None:
332
+ """
333
+ Ensure resource cleanup and final reporting.
334
+ """
448
335
  progress_bar.close()
449
-
450
- #if self.download_interrupted:
451
- # console.print("\n[red]Download terminated by user")
452
336
 
453
337
  if self.info_nFailed > 0:
454
338
  self._display_error_summary()
455
339
 
456
- self.buffer = {}
457
- self.expected_index = 0
458
-
459
340
  def _display_error_summary(self) -> None:
460
- """Generate final error report."""
341
+ """
342
+ Generate final error report.
343
+ """
461
344
  console.print(f"\n[cyan]Retry Summary: "
462
345
  f"[white]Max retries: [green]{self.info_maxRetry} "
463
346
  f"[white]Total retries: [green]{self.info_nRetry} "
464
347
  f"[white]Failed segments: [red]{self.info_nFailed}")
465
348
 
466
349
  if self.info_nRetry > len(self.segments) * 0.3:
467
- console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
350
+ console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
@@ -28,8 +28,6 @@
28
28
  "default_video_workers": 12,
29
29
  "default_audio_workers": 12,
30
30
  "segment_timeout": 8,
31
- "download_audio": true,
32
- "merge_audio": true,
33
31
  "specific_list_audio": [
34
32
  "ita"
35
33
  ],
@@ -1,5 +1,5 @@
1
1
  __title__ = 'StreamingCommunity'
2
- __version__ = '3.2.5'
2
+ __version__ = '3.2.7'
3
3
  __author__ = 'Arrowar'
4
4
  __description__ = 'A command-line program to download film'
5
5
  __copyright__ = 'Copyright 2025'