StreamingCommunity 3.3.1__py3-none-any.whl → 3.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic; see the registry's advisory page for more details.

Files changed (30):
  1. StreamingCommunity/Api/Site/mediasetinfinity/util/get_license.py +28 -1
  2. StreamingCommunity/Api/Site/raiplay/site.py +6 -4
  3. StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py +6 -2
  4. StreamingCommunity/Api/Site/streamingcommunity/site.py +0 -3
  5. StreamingCommunity/Api/Site/streamingwatch/site.py +0 -3
  6. StreamingCommunity/Lib/Downloader/DASH/cdm_helpher.py +1 -18
  7. StreamingCommunity/Lib/Downloader/DASH/downloader.py +18 -14
  8. StreamingCommunity/Lib/Downloader/HLS/downloader.py +22 -10
  9. StreamingCommunity/Lib/Downloader/HLS/segments.py +126 -72
  10. StreamingCommunity/Lib/M3U8/decryptor.py +0 -14
  11. StreamingCommunity/Lib/M3U8/estimator.py +44 -34
  12. StreamingCommunity/Lib/TMBD/tmdb.py +0 -12
  13. StreamingCommunity/Upload/update.py +1 -1
  14. StreamingCommunity/Upload/version.py +1 -1
  15. StreamingCommunity/Util/{bento4_installer.py → installer/bento4_install.py} +56 -44
  16. StreamingCommunity/Util/installer/binary_paths.py +83 -0
  17. StreamingCommunity/Util/installer/device_install.py +133 -0
  18. StreamingCommunity/Util/{ffmpeg_installer.py → installer/ffmpeg_install.py} +100 -138
  19. StreamingCommunity/Util/logger.py +3 -8
  20. StreamingCommunity/Util/os.py +34 -150
  21. StreamingCommunity/run.py +2 -3
  22. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/METADATA +295 -532
  23. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/RECORD +27 -28
  24. StreamingCommunity/Api/Player/ddl.py +0 -82
  25. StreamingCommunity/Api/Player/maxstream.py +0 -141
  26. StreamingCommunity/Api/Player/mixdrop.py +0 -146
  27. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/WHEEL +0 -0
  28. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/entry_points.txt +0 -0
  29. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/licenses/LICENSE +0 -0
  30. {streamingcommunity-3.3.1.dist-info → streamingcommunity-3.3.3.dist-info}/top_level.txt +0 -0
@@ -73,13 +73,15 @@ class M3U8_Segments:
73
73
  self.class_url_fixer = M3U8_UrlFix(url)
74
74
 
75
75
  # Sync
76
- self.queue = PriorityQueue()
76
+ self.queue = PriorityQueue(maxsize=20)
77
77
  self.buffer = {}
78
78
  self.expected_index = 0
79
+ self.write_buffer = bytearray()
80
+ self.write_batch_size = 50
79
81
 
80
82
  self.stop_event = threading.Event()
81
83
  self.downloaded_segments = set()
82
- self.base_timeout = 0.5
84
+ self.base_timeout = 1.0
83
85
  self.current_timeout = 3.0
84
86
 
85
87
  # Stopping
@@ -89,6 +91,10 @@ class M3U8_Segments:
89
91
  self.force_stop = False
90
92
  self.interrupt_lock = threading.Lock()
91
93
 
94
+ # HTTP Client
95
+ self._client = None
96
+ self._client_lock = threading.Lock()
97
+
92
98
  # OTHER INFO
93
99
  self.info_maxRetry = 0
94
100
  self.info_nRetry = 0
@@ -96,6 +102,9 @@ class M3U8_Segments:
96
102
  self.active_retries = 0
97
103
  self.active_retries_lock = threading.Lock()
98
104
 
105
+ self._last_progress_update = 0
106
+ self._progress_update_interval = 0.5
107
+
99
108
  def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
100
109
  """
101
110
  Fetches the encryption key from the M3U8 playlist.
@@ -151,11 +160,6 @@ class M3U8_Segments:
151
160
  def get_info(self) -> None:
152
161
  """
153
162
  Retrieves M3U8 playlist information from the given URL.
154
-
155
- If the URL is an index URL, this method:
156
- - Sends an HTTP GET request to fetch the M3U8 playlist.
157
- - Parses the M3U8 content using `parse_data`.
158
- - Saves the playlist to a temporary folder.
159
163
  """
160
164
  if self.is_index_url:
161
165
  try:
@@ -182,6 +186,7 @@ class M3U8_Segments:
182
186
 
183
187
  if self.force_stop:
184
188
  console.print("\n[red]Force stop triggered! Exiting immediately.")
189
+ self._cleanup_client()
185
190
 
186
191
  else:
187
192
  if not self.interrupt_flag.is_set():
@@ -191,7 +196,6 @@ class M3U8_Segments:
191
196
 
192
197
  if remaining == 1:
193
198
  self.interrupt_flag.set()
194
-
195
199
 
196
200
  if threading.current_thread() is threading.main_thread():
197
201
  signal.signal(signal.SIGINT, interrupt_handler)
@@ -199,72 +203,119 @@ class M3U8_Segments:
199
203
  print("Signal handler must be set in the main thread")
200
204
 
201
205
  def _get_http_client(self):
202
- return create_client(headers={'User-Agent': get_userAgent()}, follow_redirects=True)
206
+ """
207
+ Get a reusable HTTP client using the centralized factory.
208
+ Uses optimized settings for segment downloading.
209
+ """
210
+ if self._client is None:
211
+ with self._client_lock:
212
+ self._client = create_client(
213
+ timeout=SEGMENT_MAX_TIMEOUT
214
+ )
215
+
216
+ return self._client
217
+
218
+ def _cleanup_client(self):
219
+ """Pulizia client"""
220
+ if self._client:
221
+ try:
222
+ self._client.close()
223
+ except Exception:
224
+ pass
225
+ self._client = None
203
226
 
204
- def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.1) -> None:
227
+ def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.02) -> None:
205
228
  """
206
- Downloads a TS segment and adds it to the segment queue with retry logic.
229
+ Downloads a TS segment
207
230
 
208
231
  Parameters:
209
232
  - ts_url (str): The URL of the TS segment.
210
233
  - index (int): The index of the segment.
211
234
  - progress_bar (tqdm): Progress counter for tracking download progress.
212
- - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
235
+ - backoff_factor (float): Backoff factor.
213
236
  """
214
237
  for attempt in range(REQUEST_MAX_RETRY):
215
238
  if self.interrupt_flag.is_set():
216
239
  return
217
240
 
218
241
  try:
219
- with self._get_http_client() as client:
220
- response = client.get(ts_url)
221
-
222
- # Validate response and content
223
- response.raise_for_status()
224
- segment_content = response.content
225
- content_size = len(segment_content)
226
-
227
- # Decrypt if needed and verify decrypted content
228
- if self.decryption is not None:
229
- try:
230
- segment_content = self.decryption.decrypt(segment_content)
231
-
232
- except Exception as e:
233
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
234
- self.interrupt_flag.set() # Interrupt the download process
235
- self.stop_event.set() # Trigger the stopping event for all threads
236
- break # Stop the current task immediately
242
+ client = self._get_http_client()
243
+ timeout = min(SEGMENT_MAX_TIMEOUT, 10 + attempt * 5)
244
+
245
+ # Make request
246
+ response = client.get(ts_url, timeout=timeout, headers={"User-Agent": get_userAgent()})
247
+ response.raise_for_status()
248
+ segment_content = response.content
249
+ content_size = len(segment_content)
250
+
251
+ # Decrypt if needed
252
+ if self.decryption is not None:
253
+ try:
254
+ segment_content = self.decryption.decrypt(segment_content)
255
+ except Exception as e:
256
+ logging.error(f"Decryption failed for segment {index}: {str(e)}")
257
+
258
+ if attempt + 1 == REQUEST_MAX_RETRY:
259
+ self.interrupt_flag.set()
260
+ self.stop_event.set()
261
+
262
+ raise e
237
263
 
264
+ current_time = time.time()
265
+ if current_time - self._last_progress_update > self._progress_update_interval:
238
266
  self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
239
- self.queue.put((index, segment_content))
267
+ self._last_progress_update = current_time
268
+
269
+ try:
270
+ self.queue.put((index, segment_content), timeout=0.05)
240
271
  self.downloaded_segments.add(index)
241
272
  progress_bar.update(1)
242
273
  return
274
+
275
+ except queue.Full:
276
+ time.sleep(0.02)
277
+
278
+ try:
279
+ self.queue.put((index, segment_content), timeout=0.1)
280
+ self.downloaded_segments.add(index)
281
+ progress_bar.update(1)
282
+ return
283
+
284
+ except queue.Full:
285
+ self.queue.put((index, segment_content))
286
+ self.downloaded_segments.add(index)
287
+ progress_bar.update(1)
288
+ return
243
289
 
244
290
  except Exception as e:
245
- logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
291
+ error_msg = str(e)
292
+
293
+ if attempt == 0:
294
+ logging.warning(f"Segment {index} failed on first attempt: {error_msg}")
246
295
 
247
296
  if attempt > self.info_maxRetry:
248
- self.info_maxRetry = ( attempt + 1 )
297
+ self.info_maxRetry = attempt + 1
249
298
  self.info_nRetry += 1
250
299
 
251
300
  if attempt + 1 == REQUEST_MAX_RETRY:
252
301
  console.print(f"[red]Final retry failed for segment: {index}")
253
- self.queue.put((index, None)) # Marker for failed segment
302
+
303
+ try:
304
+ self.queue.put((index, None), timeout=0.1)
305
+ except queue.Full:
306
+ time.sleep(0.02)
307
+ self.queue.put((index, None))
308
+
254
309
  progress_bar.update(1)
255
310
  self.info_nFailed += 1
256
311
  return
257
312
 
258
- with self.active_retries_lock:
259
- self.active_retries += 1
313
+ if attempt < 2:
314
+ sleep_time = 0.5 + attempt * 0.5
315
+ else:
316
+ sleep_time = min(3.0, backoff_factor ** attempt)
260
317
 
261
- #sleep_time = backoff_factor * (2 ** attempt)
262
- sleep_time = backoff_factor * (attempt + 1)
263
- logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
264
318
  time.sleep(sleep_time)
265
-
266
- with self.active_retries_lock:
267
- self.active_retries -= 1
268
319
 
269
320
  def write_segments_to_file(self):
270
321
  """
@@ -325,8 +376,6 @@ class M3U8_Segments:
325
376
  - type (str): Type of download: 'video' or 'audio'
326
377
  """
327
378
  if TELEGRAM_BOT:
328
-
329
- # Viene usato per lo screen
330
379
  console.log("####")
331
380
 
332
381
  self.get_info()
@@ -337,59 +386,64 @@ class M3U8_Segments:
337
386
  unit='s',
338
387
  ascii='░▒█',
339
388
  bar_format=self._get_bar_format(description),
340
- mininterval=0.6,
341
- maxinterval=1.0,
342
- file=sys.stdout, # Using file=sys.stdout to force in-place updates because sys.stderr may not support carriage returns in this environment.
389
+ mininterval=2.0,
390
+ maxinterval=5.0,
391
+ file=sys.stdout,
343
392
  )
344
393
 
345
394
  try:
346
395
  writer_thread = threading.Thread(target=self.write_segments_to_file)
347
396
  writer_thread.daemon = True
348
397
  writer_thread.start()
349
-
350
- # Configure workers and delay
351
398
  max_workers = self._get_worker_count(type)
352
399
 
353
- # Download segments with completion verification
354
400
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
355
401
  futures = []
402
+
356
403
  for index, segment_url in enumerate(self.segments):
357
-
358
- # Check for interrupt before submitting each task
359
404
  if self.interrupt_flag.is_set():
360
405
  break
361
406
 
362
- time.sleep(TQDM_DELAY_WORKER)
407
+ # Delay every 200 submissions to reduce CPU usage
408
+ if index % 200 == 0 and index > 0:
409
+ time.sleep(TQDM_DELAY_WORKER)
410
+
363
411
  futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))
364
412
 
365
- # Wait for futures with interrupt handling
413
+ # Process completed futures
366
414
  for future in as_completed(futures):
367
415
  if self.interrupt_flag.is_set():
368
416
  break
369
417
  try:
370
- future.result()
418
+ future.result(timeout=1.0)
371
419
  except Exception as e:
372
420
  logging.error(f"Error in download thread: {str(e)}")
373
421
 
374
- # Interrupt handling for missing segments
422
+ # Retry missing segments if necessary
375
423
  if not self.interrupt_flag.is_set():
376
424
  total_segments = len(self.segments)
377
425
  completed_segments = len(self.downloaded_segments)
378
426
 
379
427
  if completed_segments < total_segments:
380
428
  missing_segments = set(range(total_segments)) - self.downloaded_segments
381
- logging.warning(f"Missing segments: {sorted(missing_segments)}")
382
-
383
- # Retry missing segments with interrupt check
384
- for index in missing_segments:
385
- if self.interrupt_flag.is_set():
386
- break
429
+ logging.warning(f"Missing {len(missing_segments)} segments")
387
430
 
388
- try:
389
- self.download_segment(self.segments[index], index, progress_bar)
390
-
391
- except Exception as e:
392
- logging.error(f"Failed to retry segment {index}: {str(e)}")
431
+ # Retry missing segments with interrupt check
432
+ retry_workers = min(2, len(missing_segments))
433
+ if retry_workers > 0:
434
+ retry_futures = []
435
+ for index in missing_segments:
436
+ if self.interrupt_flag.is_set():
437
+ break
438
+ retry_futures.append(executor.submit(self.download_segment, self.segments[index], index, progress_bar))
439
+
440
+ for future in as_completed(retry_futures):
441
+ if self.interrupt_flag.is_set():
442
+ break
443
+ try:
444
+ future.result(timeout=2.0)
445
+ except Exception as e:
446
+ logging.error(f"Failed to retry segment: {str(e)}")
393
447
 
394
448
  finally:
395
449
  self._cleanup_resources(writer_thread, progress_bar)
@@ -412,14 +466,12 @@ class M3U8_Segments:
412
466
 
413
467
  def _get_worker_count(self, stream_type: str) -> int:
414
468
  """
415
- Calculate optimal parallel workers based on stream type and infrastructure.
469
+ Return parallel workers based on stream type and infrastructure.
416
470
  """
417
- base_workers = {
471
+ return {
418
472
  'video': DEFAULT_VIDEO_WORKERS,
419
473
  'audio': DEFAULT_AUDIO_WORKERS
420
474
  }.get(stream_type.lower(), 1)
421
-
422
- return base_workers
423
475
 
424
476
  def _generate_results(self, stream_type: str) -> Dict:
425
477
  """Package final download results."""
@@ -441,11 +493,13 @@ class M3U8_Segments:
441
493
  self.stop_event.set()
442
494
  writer_thread.join(timeout=30)
443
495
  progress_bar.close()
496
+ self._cleanup_client()
444
497
 
445
498
  if self.info_nFailed > 0:
446
499
  self._display_error_summary()
447
500
 
448
501
  self.buffer = {}
502
+ self.write_buffer.clear()
449
503
  self.expected_index = 0
450
504
 
451
505
  def _display_error_summary(self) -> None:
@@ -456,4 +510,4 @@ class M3U8_Segments:
456
510
  f"[white]Failed segments: [red]{self.info_nFailed}")
457
511
 
458
512
  if self.info_nRetry > len(self.segments) * 0.3:
459
- console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
513
+ console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
@@ -78,18 +78,4 @@ class M3U8_Decryption:
78
78
  else:
79
79
  raise ValueError("Invalid or unsupported method")
80
80
 
81
- """
82
- end = time.perf_counter_ns()
83
-
84
- # Calculate the elapsed time with high precision
85
- elapsed_nanoseconds = end - start
86
- elapsed_milliseconds = elapsed_nanoseconds / 1_000_000
87
- elapsed_seconds = elapsed_nanoseconds / 1_000_000_000
88
-
89
- # Log performance metrics
90
- logging.info("[Crypto Decryption Performance]")
91
- logging.info(f"Method: {self.method}")
92
- logging.info(f"Decryption Time: {elapsed_milliseconds:.4f} ms ({elapsed_seconds:.6f} s)")
93
- logging.info(f"Decrypted Content Length: {len(decrypted_content)} bytes")
94
- """
95
81
  return decrypted_content
@@ -30,7 +30,6 @@ class M3U8_Ts_Estimator:
30
30
  self.lock = threading.Lock()
31
31
  self.speed = {"upload": "N/A", "download": "N/A"}
32
32
  self._running = True
33
-
34
33
  self.speed_thread = threading.Thread(target=self.capture_speed)
35
34
  self.speed_thread.daemon = True
36
35
  self.speed_thread.start()
@@ -47,14 +46,15 @@ class M3U8_Ts_Estimator:
47
46
 
48
47
  self.ts_file_sizes.append(size)
49
48
 
50
- def capture_speed(self, interval: float = 1.5):
51
- """Capture the internet speed periodically with improved efficiency."""
49
+ def capture_speed(self, interval: float = 3.0):
50
+ """Capture the internet speed periodically."""
52
51
  last_upload, last_download = 0, 0
53
52
  speed_buffer = deque(maxlen=3)
53
+ error_count = 0
54
+ max_errors = 5
54
55
 
55
56
  while self._running:
56
57
  try:
57
- # Get IO counters only once per loop to reduce function calls
58
58
  io_counters = psutil.net_io_counters()
59
59
  if not io_counters:
60
60
  raise ValueError("No IO counters available")
@@ -65,28 +65,39 @@ class M3U8_Ts_Estimator:
65
65
  upload_speed = (current_upload - last_upload) / interval
66
66
  download_speed = (current_download - last_download) / interval
67
67
 
68
- # Only update buffer when we have valid data
69
- if download_speed > 0:
68
+ if download_speed > 1024:
70
69
  speed_buffer.append(download_speed)
71
-
72
- # Use a more efficient approach for thread synchronization
73
- avg_speed = sum(speed_buffer) / len(speed_buffer) if speed_buffer else 0
74
- formatted_upload = internet_manager.format_transfer_speed(max(0, upload_speed))
75
- formatted_download = internet_manager.format_transfer_speed(avg_speed)
76
-
77
- # Minimize lock time by preparing data outside the lock
78
- with self.lock:
79
- self.speed = {
80
- "upload": formatted_upload,
81
- "download": formatted_download
82
- }
70
+
71
+ if speed_buffer:
72
+ avg_speed = sum(speed_buffer) / len(speed_buffer)
73
+
74
+ try:
75
+ formatted_upload = internet_manager.format_transfer_speed(max(0, upload_speed))
76
+ formatted_download = internet_manager.format_transfer_speed(avg_speed)
77
+
78
+ # Lock minimale
79
+ with self.lock:
80
+ self.speed = {
81
+ "upload": formatted_upload,
82
+ "download": formatted_download
83
+ }
84
+
85
+ except ImportError:
86
+ with self.lock:
87
+ self.speed = {"upload": "N/A", "download": "N/A"}
83
88
 
84
89
  last_upload, last_download = current_upload, current_download
90
+ error_count = 0
85
91
 
86
92
  except Exception as e:
87
- if self._running: # Only log if we're still supposed to be running
88
- logging.error(f"Error in speed capture: {str(e)}")
89
- self.speed = {"upload": "N/A", "download": "N/A"}
93
+ error_count += 1
94
+ if error_count <= max_errors and self._running:
95
+ logging.debug(f"Speed capture error: {str(e)}")
96
+
97
+ if error_count > max_errors:
98
+ with self.lock:
99
+ self.speed = {"upload": "N/A", "download": "N/A"}
100
+ interval = 10.0
90
101
 
91
102
  time.sleep(interval)
92
103
 
@@ -98,7 +109,6 @@ class M3U8_Ts_Estimator:
98
109
  str: The mean size of the files in a human-readable format.
99
110
  """
100
111
  try:
101
- # Only do calculations if we have data
102
112
  if not self.ts_file_sizes:
103
113
  return "0 B"
104
114
 
@@ -111,6 +121,7 @@ class M3U8_Ts_Estimator:
111
121
  return "Error"
112
122
 
113
123
  def update_progress_bar(self, total_downloaded: int, progress_counter: tqdm) -> None:
124
+ """Update progress bar"""
114
125
  try:
115
126
  self.add_ts_file(total_downloaded * self.total_segments)
116
127
 
@@ -118,27 +129,26 @@ class M3U8_Ts_Estimator:
118
129
  if file_total_size == "Error":
119
130
  return
120
131
 
121
- number_file_total_size = file_total_size.split(' ')[0]
122
- units_file_total_size = file_total_size.split(' ')[1]
123
-
124
- # Get speed data outside of any locks
125
- speed_data = ["N/A", ""]
132
+ number_file_total_size, units_file_total_size = file_total_size.split(' ', 1)
133
+
126
134
  with self.lock:
127
135
  download_speed = self.speed['download']
128
136
 
129
- if download_speed != "N/A":
130
- speed_data = download_speed.split(" ")
131
-
132
- average_internet_speed = speed_data[0] if len(speed_data) >= 1 else "N/A"
133
- average_internet_unit = speed_data[1] if len(speed_data) >= 2 else ""
137
+ if download_speed != "N/A" and ' ' in download_speed:
138
+ average_internet_speed, average_internet_unit = download_speed.split(' ', 1)
139
+ else:
140
+ average_internet_speed, average_internet_unit = "N/A", ""
134
141
 
135
142
  progress_str = (
136
143
  f"{Colors.GREEN}{number_file_total_size} {Colors.RED}{units_file_total_size}"
137
144
  f"{Colors.WHITE}, {Colors.CYAN}{average_internet_speed} {Colors.RED}{average_internet_unit} "
138
- #f"{Colors.WHITE}, {Colors.GREEN}CRR {Colors.RED}{retry_count} "
139
145
  )
140
146
 
141
147
  progress_counter.set_postfix_str(progress_str)
142
148
 
143
149
  except Exception as e:
144
- logging.error(f"Error updating progress bar: {str(e)}")
150
+ logging.error(f"Error updating progress bar: {str(e)}")
151
+
152
+ def stop(self):
153
+ """Stop speed monitoring thread."""
154
+ self._running = False
@@ -1,7 +1,6 @@
1
1
  # 24.08.24
2
2
 
3
3
  import sys
4
- from typing import Dict
5
4
 
6
5
 
7
6
  # External libraries
@@ -95,7 +94,6 @@ class TheMovieDB:
95
94
  """
96
95
  self.api_key = api_key
97
96
  self.base_url = "https://api.themoviedb.org/3"
98
- #self.genres = self._fetch_genres()
99
97
  self._cached_trending_tv = None
100
98
  self._cached_trending_movies = None
101
99
 
@@ -120,16 +118,6 @@ class TheMovieDB:
120
118
 
121
119
  return response.json()
122
120
 
123
- def _fetch_genres(self) -> Dict[int, str]:
124
- """
125
- Fetch and return the genre names from TheMovieDB.
126
-
127
- Returns:
128
- Dict[int, str]: A dictionary mapping genre IDs to genre names.
129
- """
130
- genres = self._make_request("genre/movie/list")
131
- return {genre['id']: genre['name'] for genre in genres.get('genres', [])}
132
-
133
121
  def _display_top_5(self, category: str, data, name_key='title'):
134
122
  """
135
123
  Display top 5 most popular items in a single line with colors.
@@ -98,4 +98,4 @@ def update():
98
98
  console.print(f"\n[red]{__title__} has been downloaded [yellow]{total_download_count} [red]times, but only [yellow]{percentual_stars}% [red]of users have starred it.\n\
99
99
  [cyan]Help the repository grow today by leaving a [yellow]star [cyan]and [yellow]sharing [cyan]it with others online!")
100
100
 
101
- time.sleep(4)
101
+ time.sleep(2.5)
@@ -1,5 +1,5 @@
1
1
  __title__ = 'StreamingCommunity'
2
- __version__ = '3.3.1'
2
+ __version__ = '3.3.3'
3
3
  __author__ = 'Arrowar'
4
4
  __description__ = 'A command-line program to download film'
5
5
  __copyright__ = 'Copyright 2025'