StreamingCommunity 3.2.7__py3-none-any.whl → 3.2.8__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of StreamingCommunity might be problematic.

StreamingCommunity/Lib/Downloader/HLS/segments.py

@@ -2,10 +2,15 @@
 
 import os
 import sys
-import asyncio
+import time
+import queue
+import signal
 import logging
 import binascii
+import threading
+from queue import PriorityQueue
 from urllib.parse import urljoin, urlparse
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import Dict
 
 
@@ -37,7 +42,8 @@ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_w
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workers')
 MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
 SEGMENT_MAX_TIMEOUT = config_manager.get_int("M3U8_DOWNLOAD", "segment_timeout")
-
+TELEGRAM_BOT = config_manager.get_bool('DEFAULT', 'telegram_bot')
+MAX_INTERRUPT_COUNT = 3
 
 # Variable
 console = Console()
@@ -56,18 +62,38 @@ class M3U8_Segments:
         self.url = url
         self.tmp_folder = tmp_folder
         self.is_index_url = is_index_url
+        self.expected_real_time = None
         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
         os.makedirs(self.tmp_folder, exist_ok=True)
 
         # Util class
         self.decryption: M3U8_Decryption = None
+        self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
         self.class_url_fixer = M3U8_UrlFix(url)
-
-        # Download tracking
+
+        # Sync
+        self.queue = PriorityQueue()
+        self.buffer = {}
+        self.expected_index = 0
+
+        self.stop_event = threading.Event()
         self.downloaded_segments = set()
+        self.base_timeout = 0.5
+        self.current_timeout = 3.0
+
+        # Stopping
+        self.interrupt_flag = threading.Event()
         self.download_interrupted = False
-        self.info_nFailed = 0
+        self.interrupt_count = 0
+        self.force_stop = False
+        self.interrupt_lock = threading.Lock()
+
+        # OTHER INFO
+        self.info_maxRetry = 0
         self.info_nRetry = 0
+        self.info_nFailed = 0
+        self.active_retries = 0
+        self.active_retries_lock = threading.Lock()
 
     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
         """
@@ -119,10 +145,16 @@ class M3U8_Segments:
             if "http" not in seg else seg
             for seg in m3u8_parser.segments
         ]
+        self.class_ts_estimator.total_segments = len(self.segments)
 
     def get_info(self) -> None:
         """
         Retrieves M3U8 playlist information from the given URL.
+
+        If the URL is an index URL, this method:
+        - Sends an HTTP GET request to fetch the M3U8 playlist.
+        - Parses the M3U8 content using `parse_data`.
+        - Saves the playlist to a temporary folder.
         """
         if self.is_index_url:
             try:
@@ -137,156 +169,241 @@ class M3U8_Segments:
             except Exception as e:
                 raise RuntimeError(f"M3U8 info retrieval failed: {e}")
 
-    def download_streams(self, description: str, type: str):
+    def setup_interrupt_handler(self):
         """
-        Synchronous wrapper for async download.
+        Set up a signal handler for graceful interruption.
         """
-        try:
-            return asyncio.run(self.download_segments(description=description, type=type))
-
-        except KeyboardInterrupt:
-            self.download_interrupted = True
-            console.print("\n[red]Download interrupted by user (Ctrl+C).")
-            return self._generate_results(type)
+        def interrupt_handler(signum, frame):
+            with self.interrupt_lock:
+                self.interrupt_count += 1
+                if self.interrupt_count >= MAX_INTERRUPT_COUNT:
+                    self.force_stop = True
+
+            if self.force_stop:
+                console.print("\n[red]Force stop triggered! Exiting immediately.")
 
-    async def download_segments(self, description: str, type: str, concurrent_downloads: int = 8):
-        """
-        Download segments asynchronously.
-        """
-        self.get_info()
-
-        progress_bar = tqdm(
-            total=len(self.segments),
-            unit='s',
-            ascii='░▒█',
-            bar_format=self._get_bar_format(description),
-            mininterval=0.6,
-            maxinterval=1.0,
-            file=sys.stdout
-        )
+            else:
+                if not self.interrupt_flag.is_set():
+                    remaining = MAX_INTERRUPT_COUNT - self.interrupt_count
+                    console.print(f"\n[red]- Stopping gracefully... (Ctrl+C {remaining}x to force)")
+                    self.download_interrupted = True
 
-        # Initialize estimator
-        estimator = M3U8_Ts_Estimator(total_segments=len(self.segments))
-        semaphore = asyncio.Semaphore(self._get_worker_count(type))
-
-        results = [None] * len(self.segments)
-
-        try:
-            async with httpx.AsyncClient(timeout=SEGMENT_MAX_TIMEOUT) as client:
+                    if remaining == 1:
+                        self.interrupt_flag.set()
 
-                # Download all segments (first batch)
-                await self._download_segments_batch(
-                    client, self.segments, results, semaphore,
-                    REQUEST_MAX_RETRY, estimator, progress_bar
-                )
+
+        if threading.current_thread() is threading.main_thread():
+            signal.signal(signal.SIGINT, interrupt_handler)
+        else:
+            print("Signal handler must be set in the main thread")
+
+    def _get_http_client(self):
+        client_params = {
+            'headers': {'User-Agent': get_userAgent()},
+            'timeout': SEGMENT_MAX_TIMEOUT,
+            'follow_redirects': True,
+            'http2': False,
+            'verify': REQUEST_VERIFY
+        }
+        return httpx.Client(**client_params)
+
+    def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.1) -> None:
+        """
+        Downloads a TS segment and adds it to the segment queue with retry logic.
 
-                # Retry failed segments
-                await self._retry_failed_segments(
-                    client, self.segments, results, semaphore,
-                    REQUEST_MAX_RETRY, estimator, progress_bar
-                )
+        Parameters:
+            - ts_url (str): The URL of the TS segment.
+            - index (int): The index of the segment.
+            - progress_bar (tqdm): Progress counter for tracking download progress.
+            - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
+        """
+        for attempt in range(REQUEST_MAX_RETRY):
+            if self.interrupt_flag.is_set():
+                return
+
+            try:
+                with self._get_http_client() as client:
+                    response = client.get(ts_url)
+
+                    # Validate response and content
+                    response.raise_for_status()
+                    segment_content = response.content
+                    content_size = len(segment_content)
+
+                    # Decrypt if needed and verify decrypted content
+                    if self.decryption is not None:
+                        try:
+                            segment_content = self.decryption.decrypt(segment_content)
+
+                        except Exception as e:
+                            logging.error(f"Decryption failed for segment {index}: {str(e)}")
+                            self.interrupt_flag.set() # Interrupt the download process
+                            self.stop_event.set() # Trigger the stopping event for all threads
+                            break # Stop the current task immediately
+
+                    self.class_ts_estimator.update_progress_bar(content_size, progress_bar)
+                    self.queue.put((index, segment_content))
+                    self.downloaded_segments.add(index)
+                    progress_bar.update(1)
+                    return
 
-            # Write results
-            self._write_results_to_file(results)
+            except Exception as e:
+                logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
+
+                if attempt > self.info_maxRetry:
+                    self.info_maxRetry = ( attempt + 1 )
+                self.info_nRetry += 1
+
+                if attempt + 1 == REQUEST_MAX_RETRY:
+                    console.log(f"[red]Final retry failed for segment: {index}")
+                    self.queue.put((index, None)) # Marker for failed segment
+                    progress_bar.update(1)
+                    self.info_nFailed += 1
+                    return
+
+                with self.active_retries_lock:
+                    self.active_retries += 1
+
+                sleep_time = backoff_factor * (2 ** attempt)
+                logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
+                time.sleep(sleep_time)
+
+                with self.active_retries_lock:
+                    self.active_retries -= 1
 
-        except Exception as e:
-            logging.error(f"Download error: {e}")
-            raise
+    def write_segments_to_file(self):
+        """
+        Writes segments to file with additional verification.
+        """
+        with open(self.tmp_file_path, 'wb') as f:
+            while not self.stop_event.is_set() or not self.queue.empty():
+                if self.interrupt_flag.is_set():
+                    break
+
+                try:
+                    index, segment_content = self.queue.get(timeout=self.current_timeout)
 
-        finally:
-            self._cleanup_resources(progress_bar)
+                    # Successful queue retrieval: reduce timeout
+                    self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
 
-        if not self.download_interrupted:
-            self._verify_download_completion()
+                    # Handle failed segments
+                    if segment_content is None:
+                        if index == self.expected_index:
+                            self.expected_index += 1
+                        continue
 
-        return self._generate_results(type)
+                    # Write segment if it's the next expected one
+                    if index == self.expected_index:
+                        f.write(segment_content)
+                        f.flush()
+                        self.expected_index += 1
 
-    async def _download_segments_batch(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
-        """
-        Download a batch of segments with retry logic.
-        """
-        async def download_single(url, idx):
-            async with semaphore:
-                for attempt in range(max_retry):
-                    try:
-                        resp = await client.get(url, headers={'User-Agent': get_userAgent()})
+                        # Write any buffered segments that are now in order
+                        while self.expected_index in self.buffer:
+                            next_segment = self.buffer.pop(self.expected_index)
 
-                        if resp.status_code == 200:
-                            content = resp.content
+                            if next_segment is not None:
+                                f.write(next_segment)
+                                f.flush()
 
-                            if self.decryption:
-                                content = self.decryption.decrypt(content)
-                            return idx, content, attempt
-
-                        await asyncio.sleep(1.1 * (2 ** attempt))
-                        logging.info(f"Segment {idx} failed with status {resp.status_code}. Retrying...")
+                            self.expected_index += 1
 
-                    except Exception:
-                        await asyncio.sleep(1.1 * (2 ** attempt))
-                        logging.info(f"Segment {idx} download failed: {sys.exc_info()[1]}. Retrying...")
+                    else:
+                        self.buffer[index] = segment_content
 
-            return idx, b'', max_retry
+                except queue.Empty:
+                    self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
+                    time.sleep(0.05)
 
-        tasks = [download_single(url, i) for i, url in enumerate(segment_urls)]
-
-        for coro in asyncio.as_completed(tasks):
-            try:
-                idx, data, nretry = await coro
-                results[idx] = data
+                    if self.stop_event.is_set():
+                        break
 
-                if data:
-                    self.downloaded_segments.add(idx)
-                    estimator.add_ts_file(len(data))
-                    estimator.update_progress_bar(len(data), progress_bar)
+                except Exception as e:
+                    logging.error(f"Error writing segment {index}: {str(e)}")
+
+    def download_streams(self, description: str, type: str):
+        """
+        Downloads all TS segments in parallel and writes them to a file.
 
-                else:
-                    self.info_nFailed += 1
+        Parameters:
+            - description: Description to insert on tqdm bar
+            - type (str): Type of download: 'video' or 'audio'
+        """
+        if TELEGRAM_BOT:
 
-                self.info_nRetry += nretry
-                progress_bar.update(1)
+            # Viene usato per lo screen
+            console.log("####")
+
+        self.get_info()
+        self.setup_interrupt_handler()
 
-            except KeyboardInterrupt:
-                self.download_interrupted = True
-                break
+        progress_bar = tqdm(
+            total=len(self.segments),
+            unit='s',
+            ascii='░▒█',
+            bar_format=self._get_bar_format(description),
+            mininterval=0.6,
+            maxinterval=1.0,
+            file=sys.stdout, # Using file=sys.stdout to force in-place updates because sys.stderr may not support carriage returns in this environment.
+        )
 
-    async def _retry_failed_segments(self, client, segment_urls, results, semaphore, max_retry, estimator, progress_bar):
-        """
-        Retry failed segments with exponential backoff.
-        """
-        max_global_retries = 5
-        global_retry_count = 0
+        try:
+            writer_thread = threading.Thread(target=self.write_segments_to_file)
+            writer_thread.daemon = True
+            writer_thread.start()
 
-        while (self.info_nFailed > 0 and
-               global_retry_count < max_global_retries and
-               not self.download_interrupted):
+            # Configure workers and delay
+            max_workers = self._get_worker_count(type)
 
-            failed_indices = [i for i, data in enumerate(results) if not data]
-            if not failed_indices:
-                break
-
-            logging.info(f"[yellow]Retrying {len(failed_indices)} failed segments...")
-
-            retry_tasks = [
-                self._download_segments_batch(
-                    client, [segment_urls[i]], [results[i]],
-                    semaphore, max_retry, estimator, progress_bar
-                )
-                for i in failed_indices
-            ]
-
-            await asyncio.gather(*retry_tasks)
-            global_retry_count += 1
+            # Download segments with completion verification
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = []
+                for index, segment_url in enumerate(self.segments):
+
+                    # Check for interrupt before submitting each task
+                    if self.interrupt_flag.is_set():
+                        break
+
+                    time.sleep(TQDM_DELAY_WORKER)
+                    futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))
+
+                # Wait for futures with interrupt handling
+                for future in as_completed(futures):
+                    if self.interrupt_flag.is_set():
+                        break
+                    try:
+                        future.result()
+                    except Exception as e:
+                        logging.error(f"Error in download thread: {str(e)}")
+
+                # Interrupt handling for missing segments
+                if not self.interrupt_flag.is_set():
+                    total_segments = len(self.segments)
+                    completed_segments = len(self.downloaded_segments)
+
+                    if completed_segments < total_segments:
+                        missing_segments = set(range(total_segments)) - self.downloaded_segments
+                        logging.warning(f"Missing segments: {sorted(missing_segments)}")
+
+                        # Retry missing segments with interrupt check
+                        for index in missing_segments:
+                            if self.interrupt_flag.is_set():
+                                break
 
-    def _write_results_to_file(self, results):
-        """
-        Write downloaded segments to file.
-        """
-        with open(self.tmp_file_path, 'wb') as f:
-            for data in results:
-                if data:
-                    f.write(data)
-                    f.flush()
+                            try:
+                                self.download_segment(self.segments[index], index, progress_bar)
+
+                            except Exception as e:
+                                logging.error(f"Failed to retry segment {index}: {str(e)}")
+
+        finally:
+            self._cleanup_resources(writer_thread, progress_bar)
 
+        if not self.interrupt_flag.is_set():
+            self._verify_download_completion()
+
+        return self._generate_results(type)
+
     def _get_bar_format(self, description: str) -> str:
         """
         Generate platform-appropriate progress bar format.
@@ -310,9 +427,7 @@
         return base_workers
 
     def _generate_results(self, stream_type: str) -> Dict:
-        """
-        Package final download results.
-        """
+        """Package final download results."""
         return {
             'type': stream_type,
             'nFailed': self.info_nFailed,
@@ -320,31 +435,30 @@
         }
 
     def _verify_download_completion(self) -> None:
-        """
-        Validate final download integrity.
-        """
+        """Validate final download integrity."""
         total = len(self.segments)
         if len(self.downloaded_segments) / total < 0.999:
             missing = sorted(set(range(total)) - self.downloaded_segments)
             raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
 
-    def _cleanup_resources(self, progress_bar: tqdm) -> None:
-        """
-        Ensure resource cleanup and final reporting.
-        """
+    def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
+        """Ensure resource cleanup and final reporting."""
+        self.stop_event.set()
+        writer_thread.join(timeout=30)
         progress_bar.close()
-
+
         if self.info_nFailed > 0:
             self._display_error_summary()
 
+        self.buffer = {}
+        self.expected_index = 0
+
     def _display_error_summary(self) -> None:
-        """
-        Generate final error report.
-        """
+        """Generate final error report."""
         console.print(f"\n[cyan]Retry Summary: "
                       f"[white]Max retries: [green]{self.info_maxRetry} "
                       f"[white]Total retries: [green]{self.info_nRetry} "
                       f"[white]Failed segments: [red]{self.info_nFailed}")
 
         if self.info_nRetry > len(self.segments) * 0.3:
-            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
+            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")

StreamingCommunity/Upload/version.py

@@ -1,5 +1,5 @@
 __title__ = 'StreamingCommunity'
-__version__ = '3.2.7'
+__version__ = '3.2.8'
 __author__ = 'Arrowar'
 __description__ = 'A command-line program to download film'
 __copyright__ = 'Copyright 2025'

streamingcommunity-3.2.8.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: StreamingCommunity
-Version: 3.2.7
+Version: 3.2.8
 Home-page: https://github.com/Lovi-0/StreamingCommunity
 Author: Lovi-0
 Project-URL: Bug Reports, https://github.com/Lovi-0/StreamingCommunity/issues

streamingcommunity-3.2.8.dist-info/RECORD

@@ -74,7 +74,7 @@ StreamingCommunity/Lib/Downloader/DASH/downloader.py,sha256=nuYZLM_Np0TKZ7wc7hYL
 StreamingCommunity/Lib/Downloader/DASH/parser.py,sha256=ZW6oErH9i8Nrp6yPc8usiRBv9ftkfVYh46wEg8MOl6o,9835
 StreamingCommunity/Lib/Downloader/DASH/segments.py,sha256=ZV9FYibbS8E722MHVxl16N6rN1tPioZTNqO4jWBpGGo,12672
 StreamingCommunity/Lib/Downloader/HLS/downloader.py,sha256=yzusDF32uSR_MZqdrNoJ27nR2VXRvflckTxiw04JoNk,21189
-StreamingCommunity/Lib/Downloader/HLS/segments.py,sha256=0uEjyG8vZLKSEkZGmkheeZXqWCyF8-UbuFI1tEil5JM,12257
+StreamingCommunity/Lib/Downloader/HLS/segments.py,sha256=8Ze3s2YNglBonXKngDIs89LH48b_IZy7vx4h138uydQ,17982
 StreamingCommunity/Lib/Downloader/MP4/downloader.py,sha256=OBObY930wrOG0IUlDRROOfgU_u6uYHvckED4nnPjXzs,7367
 StreamingCommunity/Lib/Downloader/TOR/downloader.py,sha256=CrRGdLGI_45AnhtTZm8r7KO7uGmU9k6pywy-qO18LG8,19242
 StreamingCommunity/Lib/FFmpeg/__init__.py,sha256=6PBsZdE1jrD2EKOVyx3JEHnyDZzVeKlPkH5T0zyfOgU,130
@@ -93,7 +93,7 @@ StreamingCommunity/TelegramHelp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 StreamingCommunity/TelegramHelp/config.json,sha256=4Tnram-K5wNK0QjWY9R4VpiHfGnMbahvDEdJ2VBWOoc,1460
 StreamingCommunity/TelegramHelp/telegram_bot.py,sha256=zCqj7xBofh9FYfEYl55mgT945jqtKo7qJhn-SMLvAvA,26455
 StreamingCommunity/Upload/update.py,sha256=ZGQHcnTLAfXla_PqvykDeg2-WKOYfX9zX2I3KrdKHpc,3814
-StreamingCommunity/Upload/version.py,sha256=xHCI5wpbmfATQoDKu6smmB7Jq_R3elpw8qGq48nmaA8,171
+StreamingCommunity/Upload/version.py,sha256=NaOdgJwSHkakkbsYisKWKKqEMAjCYxhIHttZMn77p_c,171
 StreamingCommunity/Util/bento4_installer.py,sha256=P5ipziMCvezxan8GUh9vm8B1LXGyHusFVDf842LSwis,6966
 StreamingCommunity/Util/color.py,sha256=NvD0Eni-25oOOkY-szCEoc0lGvzQxyL7xhM0RE4EvUM,458
 StreamingCommunity/Util/config_json.py,sha256=4sn-vvrYybZMOlT-blx3bM9njxQRQS_HwSPfqQ0qZ94,24226
@@ -103,9 +103,9 @@ StreamingCommunity/Util/logger.py,sha256=9kGD6GmWj2pM8ADpJc85o7jm8DD0c5Aguqnq-9k
 StreamingCommunity/Util/message.py,sha256=81vPmsGBusovIhheIO4Ec6p7BYvMj1wE_CthtRyp6OM,1333
 StreamingCommunity/Util/os.py,sha256=vLNRGWsQYTSUGdpj19fR_n0i-6bhZYeJh5IZpahdyKM,16832
 StreamingCommunity/Util/table.py,sha256=Nw5PlsvfEIOQZWy5VhsU5OK3heuBXGwsqmLl0k8yQzc,9813
-streamingcommunity-3.2.7.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-streamingcommunity-3.2.7.dist-info/METADATA,sha256=pCGVpbT5rIjdWH1qxZcIUxdNWvIAWd78bA0KIJ0hk4U,25312
-streamingcommunity-3.2.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-streamingcommunity-3.2.7.dist-info/entry_points.txt,sha256=Qph9XYfDC8n4LfDLOSl6gJGlkb9eFb5f-JOr_Wb_5rk,67
-streamingcommunity-3.2.7.dist-info/top_level.txt,sha256=YsOcxKP-WOhWpIWgBlh0coll9XUx7aqmRPT7kmt3fH0,19
-streamingcommunity-3.2.7.dist-info/RECORD,,
+streamingcommunity-3.2.8.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+streamingcommunity-3.2.8.dist-info/METADATA,sha256=9f38qTwHxcDyf-v7jtyZOL7orWpp5x9IZkRogepZAwU,25312
+streamingcommunity-3.2.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+streamingcommunity-3.2.8.dist-info/entry_points.txt,sha256=Qph9XYfDC8n4LfDLOSl6gJGlkb9eFb5f-JOr_Wb_5rk,67
+streamingcommunity-3.2.8.dist-info/top_level.txt,sha256=YsOcxKP-WOhWpIWgBlh0coll9XUx7aqmRPT7kmt3fH0,19
+streamingcommunity-3.2.8.dist-info/RECORD,,