StreamingCommunity 3.3.5-py3-none-any.whl → 3.3.6-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of StreamingCommunity might be problematic.

Files changed (44)
  1. StreamingCommunity/Api/Site/altadefinizione/__init__.py +17 -18
  2. StreamingCommunity/Api/Site/altadefinizione/series.py +4 -0
  3. StreamingCommunity/Api/Site/animeunity/__init__.py +14 -15
  4. StreamingCommunity/Api/Site/animeunity/serie.py +1 -1
  5. StreamingCommunity/Api/Site/animeworld/__init__.py +15 -13
  6. StreamingCommunity/Api/Site/animeworld/serie.py +1 -1
  7. StreamingCommunity/Api/Site/crunchyroll/__init__.py +16 -17
  8. StreamingCommunity/Api/Site/crunchyroll/series.py +6 -1
  9. StreamingCommunity/Api/Site/guardaserie/__init__.py +17 -19
  10. StreamingCommunity/Api/Site/guardaserie/series.py +4 -0
  11. StreamingCommunity/Api/Site/guardaserie/site.py +2 -7
  12. StreamingCommunity/Api/Site/mediasetinfinity/__init__.py +15 -15
  13. StreamingCommunity/Api/Site/mediasetinfinity/series.py +4 -0
  14. StreamingCommunity/Api/Site/mediasetinfinity/site.py +12 -2
  15. StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py +67 -98
  16. StreamingCommunity/Api/Site/raiplay/__init__.py +15 -15
  17. StreamingCommunity/Api/Site/raiplay/series.py +5 -1
  18. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +16 -14
  19. StreamingCommunity/Api/Site/streamingwatch/__init__.py +12 -12
  20. StreamingCommunity/Api/Site/streamingwatch/series.py +4 -0
  21. StreamingCommunity/Api/Template/Class/SearchType.py +0 -1
  22. StreamingCommunity/Api/Template/Util/manage_ep.py +1 -11
  23. StreamingCommunity/Api/Template/site.py +2 -3
  24. StreamingCommunity/Lib/Downloader/DASH/downloader.py +55 -17
  25. StreamingCommunity/Lib/Downloader/DASH/segments.py +73 -17
  26. StreamingCommunity/Lib/Downloader/HLS/downloader.py +282 -152
  27. StreamingCommunity/Lib/Downloader/HLS/segments.py +1 -5
  28. StreamingCommunity/Lib/FFmpeg/capture.py +1 -1
  29. StreamingCommunity/Lib/FFmpeg/command.py +6 -6
  30. StreamingCommunity/Lib/FFmpeg/util.py +11 -30
  31. StreamingCommunity/Lib/M3U8/estimator.py +27 -13
  32. StreamingCommunity/Upload/update.py +2 -2
  33. StreamingCommunity/Upload/version.py +1 -1
  34. StreamingCommunity/Util/installer/__init__.py +11 -0
  35. StreamingCommunity/Util/installer/device_install.py +1 -1
  36. StreamingCommunity/Util/os.py +2 -6
  37. StreamingCommunity/Util/table.py +40 -8
  38. StreamingCommunity/run.py +15 -8
  39. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/METADATA +38 -51
  40. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/RECORD +44 -43
  41. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/WHEEL +0 -0
  42. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/entry_points.txt +0 -0
  43. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/licenses/LICENSE +0 -0
  44. {streamingcommunity-3.3.5.dist-info → streamingcommunity-3.3.6.dist-info}/top_level.txt +0 -0
The hunk-level diff below covers StreamingCommunity/Lib/Downloader/DASH/segments.py (+73 -17):

@@ -2,6 +2,7 @@
 
 import os
 import asyncio
+import time
 
 
 # External libraries
@@ -33,6 +34,15 @@ class MPD_Segments:
         self.pssh = pssh
         self.download_interrupted = False
         self.info_nFailed = 0
+
+        # OTHER INFO
+        self.downloaded_segments = set()
+        self.info_maxRetry = 0
+        self.info_nRetry = 0
+
+        # Progress
+        self._last_progress_update = 0
+        self._progress_update_interval = 0.5
 
     def get_concat_path(self, output_dir: str = None):
         """
@@ -61,7 +71,7 @@ class MPD_Segments:
             "pssh": self.pssh
         }
 
-    async def download_segments(self, output_dir: str = None, concurrent_downloads: int = 8, description: str = "DASH"):
+    async def download_segments(self, output_dir: str = None, concurrent_downloads: int = None, description: str = "DASH"):
         """
         Download and concatenate all segments (including init) asynchronously and in order.
         """
@@ -75,12 +85,15 @@
 
         # Determine stream type (video/audio) for progress bar
         stream_type = rep.get('type', description)
+        if concurrent_downloads is None:
+            concurrent_downloads = self._get_worker_count(stream_type)
+
         progress_bar = tqdm(
             total=len(segment_urls) + 1,
             desc=f"Downloading {rep_id}",
             bar_format=self._get_bar_format(stream_type),
-            mininterval=0.6,
-            maxinterval=1.0
+            mininterval=1.0,
+            maxinterval=2.5,
         )
 
         # Define semaphore for concurrent downloads
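Two tweaks land in this hunk: the worker count is now resolved per stream type when the caller passes None (instead of a hard-coded 8), and tqdm redraws less often (mininterval 0.6 → 1.0 s, maxinterval 1.0 → 2.5 s). The hunk ends just before the semaphore definition; the standard asyncio pattern it refers to, sketched in isolation (names, URLs, and delays here are illustrative, not the project's code):

```python
import asyncio

async def download_all(urls: list[str], concurrent_downloads: int = 4) -> list[bytes]:
    # The semaphore caps how many downloads may run at once
    semaphore = asyncio.Semaphore(concurrent_downloads)

    async def download_one(url: str) -> bytes:
        async with semaphore:          # waits while `concurrent_downloads` tasks hold it
            await asyncio.sleep(0.1)   # stand-in for the real HTTP request
            return url.encode()

    return await asyncio.gather(*(download_one(u) for u in urls))

# asyncio.run(download_all([f"https://example.com/seg{i}.m4s" for i in range(10)]))
```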
@@ -94,9 +107,14 @@
         self.info_nFailed = 0
         self.download_interrupted = False
         self.info_nRetry = 0
+        self.info_maxRetry = 0
 
         try:
-            async with httpx.AsyncClient(timeout=SEGMENT_MAX_TIMEOUT) as client:
+            timeout_config = httpx.Timeout(SEGMENT_MAX_TIMEOUT, connect=10.0)
+            limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)
+
+            async with httpx.AsyncClient(timeout=timeout_config, limits=limits) as client:
+
                 # Download init segment
                 await self._download_init_segment(client, init_url, concat_path, estimator, progress_bar)
 
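The client construction now separates the connect timeout from the overall per-request budget and bounds the connection pool, so a stalled handshake fails after 10 s instead of consuming the whole segment timeout. The same httpx configuration in isolation (the SEGMENT_MAX_TIMEOUT value is an assumption; the real one comes from the project's config):

```python
import asyncio
import httpx

SEGMENT_MAX_TIMEOUT = 30.0  # assumed value for illustration

async def fetch(url: str) -> bytes:
    # 10 s to establish a connection; SEGMENT_MAX_TIMEOUT for everything else
    timeout_config = httpx.Timeout(SEGMENT_MAX_TIMEOUT, connect=10.0)

    # Keep up to 20 idle connections warm, never more than 100 open in total
    limits = httpx.Limits(max_keepalive_connections=20, max_connections=100)

    async with httpx.AsyncClient(timeout=timeout_config, limits=limits) as client:
        resp = await client.get(url)
        resp.raise_for_status()
        return resp.content

# asyncio.run(fetch("https://example.com/init.mp4"))
```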
@@ -145,12 +163,21 @@
             progress_bar.update(1)
 
             # Update progress bar with estimated info
-            estimator.update_progress_bar(len(response.content), progress_bar)
+            self._throttled_progress_update(len(response.content), estimator, progress_bar)
 
         except Exception as e:
             progress_bar.close()
             raise RuntimeError(f"Error downloading init segment: {e}")
 
+    def _throttled_progress_update(self, content_size: int, estimator, progress_bar):
+        """
+        Throttled progress update to reduce CPU usage.
+        """
+        current_time = time.time()
+        if current_time - self._last_progress_update > self._progress_update_interval:
+            estimator.update_progress_bar(content_size, progress_bar)
+            self._last_progress_update = current_time
+
     async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
         """
         Download a batch of segments and update results.
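The new helper routes every estimator.update_progress_bar() call site through a time gate: the speed/ETA recomputation runs at most once per _progress_update_interval (0.5 s), while progress_bar.update(1) still counts every segment. The pattern in isolation, with illustrative names:

```python
import time

class Throttle:
    """Run a costly callback at most once per `interval` seconds."""

    def __init__(self, interval: float = 0.5):
        self.interval = interval
        self._last = 0.0

    def maybe_run(self, callback, *args) -> bool:
        # Only fire if enough wall-clock time has passed since the last run
        now = time.time()
        if now - self._last > self.interval:
            callback(*args)
            self._last = now
            return True
        return False

# throttle = Throttle(0.5)
# throttle.maybe_run(estimator.update_progress_bar, len(data), progress_bar)
```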
@@ -158,16 +185,28 @@
         async def download_single(url, idx):
             async with semaphore:
                 headers = {'User-Agent': get_userAgent()}
+
                 for attempt in range(max_retry):
+                    if self.download_interrupted:
+                        return idx, b'', attempt
+
                     try:
-                        resp = await client.get(url, headers=headers, follow_redirects=True)
+                        timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 3)
+                        resp = await client.get(url, headers=headers, follow_redirects=True, timeout=timeout)
 
                         if resp.status_code == 200:
                             return idx, resp.content, attempt
                         else:
-                            await asyncio.sleep(1.1 * (2 ** attempt))
+                            if attempt < 2:
+                                sleep_time = 0.5 + attempt * 0.5
+                            else:
+                                sleep_time = min(2.0, 1.1 * (2 ** attempt))
+                            await asyncio.sleep(sleep_time)
+
                     except Exception:
-                        await asyncio.sleep(1.1 * (2 ** attempt))
+                        sleep_time = min(2.0, 1.1 * (2 ** attempt))
+                        await asyncio.sleep(sleep_time)
+
                 return idx, b'', max_retry
 
         # Initial download attempt
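This hunk reshapes the wait schedule: the old pure exponential backoff (1.1 · 2^attempt) reached about 17.6 s by attempt 4, while the new one uses short linear waits on the first two attempts and caps everything after that at 2.0 s. Each attempt also gets its own growing per-request timeout, min(SEGMENT_MAX_TIMEOUT, 10 + attempt · 3). The new sleep schedule, reproduced from the diff:

```python
def sleep_time(attempt: int) -> float:
    # Attempts 0-1: short linear waits
    if attempt < 2:
        return 0.5 + attempt * 0.5
    # Attempt 2 onward: exponential backoff capped at 2 s
    return min(2.0, 1.1 * (2 ** attempt))

print([sleep_time(a) for a in range(5)])  # [0.5, 1.0, 2.0, 2.0, 2.0]
```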
@@ -177,18 +216,23 @@
             try:
                 idx, data, nretry = await coro
                 results[idx] = data
+
                 if data and len(data) > 0:
                     self.downloaded_segments.add(idx)
                 else:
                     self.info_nFailed += 1
+
+                if nretry > self.info_maxRetry:
+                    self.info_maxRetry = nretry
                 self.info_nRetry += nretry
+
                 progress_bar.update(1)
 
                 # Update estimator with segment size
                 estimator.add_ts_file(len(data))
 
                 # Update progress bar with estimated info
-                estimator.update_progress_bar(len(data), progress_bar)
+                self._throttled_progress_update(len(data), estimator, progress_bar)
 
             except KeyboardInterrupt:
                 self.download_interrupted = True
@@ -197,9 +241,9 @@
 
     async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
         """
-        Retry failed segments up to 5 times.
+        Retry failed segments up to 3 times.
         """
-        max_global_retries = 5
+        max_global_retries = 3
         global_retry_count = 0
 
         while self.info_nFailed > 0 and global_retry_count < max_global_retries and not self.download_interrupted:
@@ -208,21 +252,27 @@
                 break
 
             print(f"[yellow]Retrying {len(failed_indices)} failed segments (attempt {global_retry_count+1}/{max_global_retries})...")
+
             async def download_single(url, idx):
                 async with semaphore:
                     headers = {'User-Agent': get_userAgent()}
 
                     for attempt in range(max_retry):
+                        if self.download_interrupted:
+                            return idx, b'', attempt
+
                         try:
-                            resp = await client.get(url, headers=headers)
+                            timeout = min(SEGMENT_MAX_TIMEOUT, 15 + attempt * 5)
+                            resp = await client.get(url, headers=headers, timeout=timeout)
 
                             if resp.status_code == 200:
                                 return idx, resp.content, attempt
                             else:
-                                await asyncio.sleep(1.1 * (2 ** attempt))
+                                await asyncio.sleep(1.5 * (2 ** attempt))
 
                         except Exception:
-                            await asyncio.sleep(1.1 * (2 ** attempt))
+                            await asyncio.sleep(1.5 * (2 ** attempt))
+
                     return idx, b'', max_retry
 
             retry_tasks = [download_single(segment_urls[i], i) for i in failed_indices]
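Compared with the first pass, this retry pass is deliberately more patient: per-attempt timeouts start higher and grow faster (15 + attempt · 5 versus 10 + attempt · 3), and the backoff base rises from 1.1 to 1.5 with no 2-second cap. Both timeout schedules side by side, assuming SEGMENT_MAX_TIMEOUT = 30 for illustration:

```python
SEGMENT_MAX_TIMEOUT = 30  # assumed cap for illustration

def first_pass_timeout(attempt: int) -> int:
    return min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 3)

def retry_pass_timeout(attempt: int) -> int:
    return min(SEGMENT_MAX_TIMEOUT, 15 + attempt * 5)

print([first_pass_timeout(a) for a in range(4)])  # [10, 13, 16, 19]
print([retry_pass_timeout(a) for a in range(4)])  # [15, 20, 25, 30]
```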
@@ -239,15 +289,19 @@
                     else:
                         nFailed_this_round += 1
 
+                    if nretry > self.info_maxRetry:
+                        self.info_maxRetry = nretry
                     self.info_nRetry += nretry
+
                     progress_bar.update(0)  # No progress bar increment, already counted
                     estimator.add_ts_file(len(data))
-                    estimator.update_progress_bar(len(data), progress_bar)
+                    self._throttled_progress_update(len(data), estimator, progress_bar)
 
                 except KeyboardInterrupt:
                     self.download_interrupted = True
                     print("\n[red]Download interrupted by user (Ctrl+C).")
                     break
+
             self.info_nFailed = nFailed_this_round
             global_retry_count += 1
 
@@ -278,7 +332,7 @@
         base_workers = {
             'video': DEFAULT_VIDEO_WORKERS,
             'audio': DEFAULT_AUDIO_WORKERS
-        }.get(stream_type.lower(), 1)
+        }.get(stream_type.lower(), 2)
        return base_workers
 
     def _generate_results(self, stream_type: str) -> dict:
@@ -317,6 +371,7 @@
         if getattr(self, 'info_nFailed', 0) > 0:
             self._display_error_summary()
 
+        # Clear memory
         self.buffer = {}
         self.expected_index = 0
 
@@ -324,10 +379,11 @@
         """
         Generate final error report.
         """
+        total_segments = len(self.selected_representation.get('segment_urls', []))
         print(f"\n[cyan]Retry Summary: "
               f"[white]Max retries: [green]{getattr(self, 'info_maxRetry', 0)} "
               f"[white]Total retries: [green]{getattr(self, 'info_nRetry', 0)} "
               f"[white]Failed segments: [red]{getattr(self, 'info_nFailed', 0)}")
 
-        if getattr(self, 'info_nRetry', 0) > len(self.selected_representation['segment_urls']) * 0.3:
+        if getattr(self, 'info_nRetry', 0) > total_segments * 0.3:
             print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
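Precomputing total_segments with .get('segment_urls', []) also makes the summary safe when the selected representation carries no segment list: it falls back to 0 instead of raising KeyError. For scale, the 0.3 threshold means a 500-segment stream warns only once cumulative retries exceed 150; a worked check with hypothetical numbers:

```python
total_segments = 500   # hypothetical stream size
info_nRetry = 151      # cumulative retries across all segments

# 500 * 0.3 = 150.0, so 151 retries trips the warning
if info_nRetry > total_segments * 0.3:
    print("Warning: High retry count detected. Consider reducing worker count in config.")
```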