StreamingCommunity-2.4.0-py3-none-any.whl → StreamingCommunity-2.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- {StreamingCommunity-2.4.0.dist-info → StreamingCommunity-2.5.0.dist-info}/METADATA +9 -6
- StreamingCommunity-2.5.0.dist-info/RECORD +8 -0
- StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py +0 -143
- StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +0 -136
- StreamingCommunity/Api/Player/ddl.py +0 -89
- StreamingCommunity/Api/Player/maxstream.py +0 -151
- StreamingCommunity/Api/Player/supervideo.py +0 -194
- StreamingCommunity/Api/Player/vixcloud.py +0 -273
- StreamingCommunity/Api/Site/1337xx/__init__.py +0 -51
- StreamingCommunity/Api/Site/1337xx/costant.py +0 -15
- StreamingCommunity/Api/Site/1337xx/site.py +0 -89
- StreamingCommunity/Api/Site/1337xx/title.py +0 -66
- StreamingCommunity/Api/Site/altadefinizionegratis/__init__.py +0 -51
- StreamingCommunity/Api/Site/altadefinizionegratis/costant.py +0 -19
- StreamingCommunity/Api/Site/altadefinizionegratis/film.py +0 -74
- StreamingCommunity/Api/Site/altadefinizionegratis/site.py +0 -95
- StreamingCommunity/Api/Site/animeunity/__init__.py +0 -51
- StreamingCommunity/Api/Site/animeunity/costant.py +0 -19
- StreamingCommunity/Api/Site/animeunity/film_serie.py +0 -135
- StreamingCommunity/Api/Site/animeunity/site.py +0 -175
- StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +0 -97
- StreamingCommunity/Api/Site/cb01new/__init__.py +0 -52
- StreamingCommunity/Api/Site/cb01new/costant.py +0 -19
- StreamingCommunity/Api/Site/cb01new/film.py +0 -73
- StreamingCommunity/Api/Site/cb01new/site.py +0 -83
- StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +0 -56
- StreamingCommunity/Api/Site/ddlstreamitaly/costant.py +0 -20
- StreamingCommunity/Api/Site/ddlstreamitaly/series.py +0 -146
- StreamingCommunity/Api/Site/ddlstreamitaly/site.py +0 -99
- StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +0 -85
- StreamingCommunity/Api/Site/guardaserie/__init__.py +0 -51
- StreamingCommunity/Api/Site/guardaserie/costant.py +0 -19
- StreamingCommunity/Api/Site/guardaserie/series.py +0 -198
- StreamingCommunity/Api/Site/guardaserie/site.py +0 -90
- StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +0 -110
- StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +0 -52
- StreamingCommunity/Api/Site/ilcorsaronero/costant.py +0 -19
- StreamingCommunity/Api/Site/ilcorsaronero/site.py +0 -72
- StreamingCommunity/Api/Site/ilcorsaronero/title.py +0 -46
- StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +0 -149
- StreamingCommunity/Api/Site/mostraguarda/__init__.py +0 -49
- StreamingCommunity/Api/Site/mostraguarda/costant.py +0 -19
- StreamingCommunity/Api/Site/mostraguarda/film.py +0 -101
- StreamingCommunity/Api/Site/streamingcommunity/__init__.py +0 -56
- StreamingCommunity/Api/Site/streamingcommunity/costant.py +0 -19
- StreamingCommunity/Api/Site/streamingcommunity/film.py +0 -75
- StreamingCommunity/Api/Site/streamingcommunity/series.py +0 -206
- StreamingCommunity/Api/Site/streamingcommunity/site.py +0 -142
- StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +0 -123
- StreamingCommunity/Api/Template/Class/SearchType.py +0 -101
- StreamingCommunity/Api/Template/Util/__init__.py +0 -5
- StreamingCommunity/Api/Template/Util/get_domain.py +0 -203
- StreamingCommunity/Api/Template/Util/manage_ep.py +0 -179
- StreamingCommunity/Api/Template/Util/recall_search.py +0 -37
- StreamingCommunity/Api/Template/__init__.py +0 -3
- StreamingCommunity/Api/Template/site.py +0 -87
- StreamingCommunity/Lib/Downloader/HLS/downloader.py +0 -965
- StreamingCommunity/Lib/Downloader/HLS/proxyes.py +0 -110
- StreamingCommunity/Lib/Downloader/HLS/segments.py +0 -573
- StreamingCommunity/Lib/Downloader/MP4/downloader.py +0 -155
- StreamingCommunity/Lib/Downloader/TOR/downloader.py +0 -296
- StreamingCommunity/Lib/Downloader/__init__.py +0 -5
- StreamingCommunity/Lib/FFmpeg/__init__.py +0 -4
- StreamingCommunity/Lib/FFmpeg/capture.py +0 -170
- StreamingCommunity/Lib/FFmpeg/command.py +0 -296
- StreamingCommunity/Lib/FFmpeg/util.py +0 -249
- StreamingCommunity/Lib/M3U8/__init__.py +0 -6
- StreamingCommunity/Lib/M3U8/decryptor.py +0 -165
- StreamingCommunity/Lib/M3U8/estimator.py +0 -229
- StreamingCommunity/Lib/M3U8/parser.py +0 -666
- StreamingCommunity/Lib/M3U8/url_fixer.py +0 -58
- StreamingCommunity/Lib/TMBD/__init__.py +0 -2
- StreamingCommunity/Lib/TMBD/obj_tmbd.py +0 -39
- StreamingCommunity/Lib/TMBD/tmdb.py +0 -346
- StreamingCommunity/Upload/update.py +0 -67
- StreamingCommunity/Upload/version.py +0 -5
- StreamingCommunity/Util/_jsonConfig.py +0 -228
- StreamingCommunity/Util/call_stack.py +0 -42
- StreamingCommunity/Util/color.py +0 -20
- StreamingCommunity/Util/console.py +0 -12
- StreamingCommunity/Util/ffmpeg_installer.py +0 -368
- StreamingCommunity/Util/headers.py +0 -160
- StreamingCommunity/Util/logger.py +0 -62
- StreamingCommunity/Util/message.py +0 -64
- StreamingCommunity/Util/os.py +0 -507
- StreamingCommunity/Util/table.py +0 -229
- StreamingCommunity-2.4.0.dist-info/RECORD +0 -92
- {StreamingCommunity-2.4.0.dist-info → StreamingCommunity-2.5.0.dist-info}/LICENSE +0 -0
- {StreamingCommunity-2.4.0.dist-info → StreamingCommunity-2.5.0.dist-info}/WHEEL +0 -0
- {StreamingCommunity-2.4.0.dist-info → StreamingCommunity-2.5.0.dist-info}/entry_points.txt +0 -0
- {StreamingCommunity-2.4.0.dist-info → StreamingCommunity-2.5.0.dist-info}/top_level.txt +0 -0
StreamingCommunity/Lib/Downloader/HLS/proxyes.py
@@ -1,110 +0,0 @@
-# 09.06.24
-
-import os
-import sys
-import logging
-from concurrent.futures import ThreadPoolExecutor
-
-
-# External libraries
-import httpx
-
-
-# Internal utilities
-from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.headers import get_headers
-from StreamingCommunity.Util.os import os_manager
-
-
-class ProxyManager:
-    def __init__(self, proxy_list=None, url=None):
-        """
-        Initialize ProxyManager with a list of proxies and timeout.
-
-        Parameters:
-            - proxy_list: List of proxy strings
-            - timeout: Timeout for proxy requests
-        """
-        self.proxy_list = proxy_list or []
-        self.verified_proxies = []
-        self.timeout = config_manager.get_float('REQUESTS', 'timeout')
-        self.url = url
-
-    def _check_proxy(self, proxy):
-        """
-        Check if a single proxy is working by making a request to Google.
-
-        Parameters:
-            - proxy: Proxy string to be checked
-
-        Returns:
-            - Proxy string if working, None otherwise
-        """
-        protocol = proxy.split(":")[0].lower()
-        protocol = f'{protocol}://'
-        proxy = {protocol: proxy, "https://": proxy}
-
-        try:
-            with httpx.Client(proxies=proxy, verify=False) as client:
-                response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
-
-                if response.status_code == 200:
-                    logging.info(f"Proxy {proxy} is working.")
-                    return proxy
-
-        except Exception as e:
-            logging.error(f"Test proxy {proxy} failed: {e}")
-            return None
-
-    def verify_proxies(self):
-        """
-        Verify all proxies in the list and store the working ones.
-        """
-        logging.info("Starting proxy verification...")
-        with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
-            self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
-
-        self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
-        logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
-
-    def get_verified_proxies(self):
-        """
-        Get validate proxies.
-        """
-
-        if len(self.verified_proxies) > 0:
-            return self.verified_proxies
-
-        else:
-            logging.error("Cant find valid proxy.")
-            sys.exit(0)
-
-
-def main_test_proxy(url_test):
-
-    path_file_proxt_list = "list_proxy.txt"
-
-    if os_manager.check_file(path_file_proxt_list):
-
-        # Read file
-        with open(path_file_proxt_list, 'r') as file:
-            ip_addresses = file.readlines()
-
-        # Formatt ip
-        ip_addresses = [ip.strip() for ip in ip_addresses]
-        formatted_ips = [f"http://{ip}" for ip in ip_addresses]
-
-        # Get list of proxy from config.json
-        proxy_list = formatted_ips
-
-        # Verify proxy
-        manager = ProxyManager(proxy_list, url_test)
-        manager.verify_proxies()
-
-        # Write valid ip in txt file
-        with open(path_file_proxt_list, 'w') as file:
-            for ip in ip_addresses:
-                file.write(f"{ip}\n")
-
-        # Return valid proxy
-        return manager.get_verified_proxies()
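Note on the removed module above: proxyes.py implemented a simple fan-out check, probing every proxy from list_proxy.txt concurrently and keeping only the ones that answered. What follows is a minimal standalone sketch of that pattern, not part of the package: the proxy addresses and test URL are placeholders, and note that the `proxies=` keyword used in the removed code belongs to older httpx releases, while newer ones spell it `proxy=`.

from concurrent.futures import ThreadPoolExecutor

import httpx


def check_proxy(proxy: str, url: str = "https://httpbin.org/ip", timeout: float = 10.0):
    # Return the proxy string if a GET through it succeeds, otherwise None.
    # verify=False mirrors the removed code's behavior.
    try:
        with httpx.Client(proxy=proxy, verify=False, timeout=timeout) as client:
            if client.get(url).status_code == 200:
                return proxy
    except Exception:
        return None


proxies = ["http://127.0.0.1:8080", "http://127.0.0.1:8081"]  # placeholder list
with ThreadPoolExecutor(max_workers=8) as pool:
    working = [p for p in pool.map(check_proxy, proxies) if p]
print(f"{len(working)} of {len(proxies)} proxies are working")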
StreamingCommunity/Lib/Downloader/HLS/segments.py
@@ -1,573 +0,0 @@
-# 18.04.24
-
-import os
-import sys
-import time
-import queue
-import signal
-import logging
-import binascii
-import threading
-
-from queue import PriorityQueue
-from urllib.parse import urljoin, urlparse
-from concurrent.futures import ThreadPoolExecutor, as_completed
-
-
-# External libraries
-import httpx
-from tqdm import tqdm
-
-
-# Internal utilities
-from StreamingCommunity.Util.console import console
-from StreamingCommunity.Util.headers import get_headers, random_headers
-from StreamingCommunity.Util.color import Colors
-from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Util.os import os_manager
-from StreamingCommunity.Util.call_stack import get_call_stack
-
-
-# Logic class
-from ...M3U8 import (
-    M3U8_Decryption,
-    M3U8_Ts_Estimator,
-    M3U8_Parser,
-    M3U8_UrlFix
-)
-from ...FFmpeg.util import print_duration_table, format_duration
-from .proxyes import main_test_proxy
-
-# Config
-TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
-
-REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
-REQUEST_VERIFY = False
-
-THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
-PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
-PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
-
-DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
-DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
-
-
-
-# Variable
-max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-
-class M3U8_Segments:
-    def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
-        """
-        Initializes the M3U8_Segments object.
-
-        Parameters:
-            - url (str): The URL of the M3U8 playlist.
-            - tmp_folder (str): The temporary folder to store downloaded segments.
-            - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
-        """
-        self.url = url
-        self.tmp_folder = tmp_folder
-        self.is_index_url = is_index_url
-        self.expected_real_time = None
-        self.max_timeout = max_timeout
-
-        self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
-        os.makedirs(self.tmp_folder, exist_ok=True)
-
-        # Util class
-        self.decryption: M3U8_Decryption = None
-        self.class_ts_estimator = M3U8_Ts_Estimator(0)
-        self.class_url_fixer = M3U8_UrlFix(url)
-
-        # Sync
-        self.queue = PriorityQueue()
-        self.stop_event = threading.Event()
-        self.downloaded_segments = set()
-        self.base_timeout = 1.0
-        self.current_timeout = 5.0
-
-        # Stopping
-        self.interrupt_flag = threading.Event()
-        self.download_interrupted = False
-
-        # OTHER INFO
-        self.info_maxRetry = 0
-        self.info_nRetry = 0
-        self.info_nFailed = 0
-
-    def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
-        """
-        Retrieves the encryption key from the M3U8 playlist.
-
-        Parameters:
-            - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
-
-        Returns:
-            bytes: The encryption key in bytes.
-        """
-
-        # Construct the full URL of the key
-        key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
-        parsed_url = urlparse(key_uri)
-        self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-        logging.info(f"Uri key: {key_uri}")
-
-        # Make request to get porxy
-        try:
-            response = httpx.get(
-                url=key_uri,
-                headers={'User-Agent': get_headers()},
-                timeout=max_timeout
-            )
-            response.raise_for_status()
-
-        except Exception as e:
-            raise Exception(f"Failed to fetch key from {key_uri}: {e}")
-
-        # Convert the content of the response to hexadecimal and then to bytes
-        hex_content = binascii.hexlify(response.content).decode('utf-8')
-        byte_content = bytes.fromhex(hex_content)
-        logging.info(f"URI: Hex content: {hex_content}, Byte content: {byte_content}")
-
-        #console.print(f"[cyan]Find key: [red]{hex_content}")
-        return byte_content
-
-    def parse_data(self, m3u8_content: str) -> None:
-        """
-        Parses the M3U8 content to extract segment information.
-
-        Parameters:
-            - m3u8_content (str): The content of the M3U8 file.
-        """
-        m3u8_parser = M3U8_Parser()
-        m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
-
-        self.expected_real_time = m3u8_parser.get_duration(return_string=False)
-        self.expected_real_time_s = m3u8_parser.duration
-
-        # Check if there is an encryption key in the playlis
-        if m3u8_parser.keys is not None:
-            try:
-
-                # Extract byte from the key
-                key = self.__get_key__(m3u8_parser)
-
-            except Exception as e:
-                raise Exception(f"Failed to retrieve encryption key {e}.")
-
-            iv = m3u8_parser.keys.get('iv')
-            method = m3u8_parser.keys.get('method')
-            logging.info(f"M3U8_Decryption - IV: {iv}, method: {method}")
-
-            # Create a decryption object with the key and set the method
-            self.decryption = M3U8_Decryption(key, iv, method)
-
-        # Store the segment information parsed from the playlist
-        self.segments = m3u8_parser.segments
-
-        # Fix URL if it is incomplete (missing 'http')
-        for i in range(len(self.segments)):
-            segment_url = self.segments[i]
-
-            if "http" not in segment_url:
-                self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
-                logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
-
-        # Update segments for estimator
-        self.class_ts_estimator.total_segments = len(self.segments)
-        logging.info(f"Segmnets to download: [{len(self.segments)}]")
-
-        # Proxy
-        if THERE_IS_PROXY_LIST:
-            console.log("[red]Start validation proxy.")
-            self.valid_proxy = main_test_proxy(self.segments[0])
-            console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
-
-            if len(self.valid_proxy) == 0:
-                sys.exit(0)
-
-    def get_info(self) -> None:
-        """
-        Makes a request to the index M3U8 file to get information about segments.
-        """
-        if self.is_index_url:
-
-            try:
-
-                # Send a GET request to retrieve the index M3U8 file
-                response = httpx.get(
-                    self.url,
-                    headers={'User-Agent': get_headers()},
-                    timeout=max_timeout,
-                    follow_redirects=True
-                )
-                response.raise_for_status()
-
-                # Save the M3U8 file to the temporary folder
-                path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
-                open(path_m3u8_file, "w+").write(response.text)
-
-                # Parse the text from the M3U8 index file
-                self.parse_data(response.text)
-
-            except Exception as e:
-                print(f"Error during M3U8 index request: {e}")
-
-        else:
-            # Parser data of content of index pass in input to class
-            self.parse_data(self.url)
-
-    def setup_interrupt_handler(self):
-        """
-        Set up a signal handler for graceful interruption.
-        """
-        def interrupt_handler(signum, frame):
-            if not self.interrupt_flag.is_set():
-                console.log("\n[red] Stopping download gracefully...")
-                self.interrupt_flag.set()
-                self.download_interrupted = True
-                self.stop_event.set()
-
-        if threading.current_thread() is threading.main_thread():
-            signal.signal(signal.SIGINT, interrupt_handler)
-        else:
-            print("Signal handler must be set in the main thread")
-
-    def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
-        """
-        Downloads a TS segment and adds it to the segment queue with retry logic.
-
-        Parameters:
-            - ts_url (str): The URL of the TS segment.
-            - index (int): The index of the segment.
-            - progress_bar (tqdm): Progress counter for tracking download progress.
-            - retries (int): The number of times to retry on failure (default is 3).
-            - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
-        """
-        for attempt in range(REQUEST_MAX_RETRY):
-            if self.interrupt_flag.is_set():
-                return
-
-            try:
-                start_time = time.time()
-
-                # Make request to get content
-                if THERE_IS_PROXY_LIST:
-
-                    # Get proxy from list
-                    proxy = self.valid_proxy[index % len(self.valid_proxy)]
-                    logging.info(f"Use proxy: {proxy}")
-
-                    with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
-                        if 'key_base_url' in self.__dict__:
-                            response = client.get(
-                                url=ts_url,
-                                headers=random_headers(self.key_base_url),
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-
-                        else:
-                            response = client.get(
-                                url=ts_url,
-                                headers={'User-Agent': get_headers()},
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-
-                else:
-                    with httpx.Client(verify=REQUEST_VERIFY) as client_2:
-                        if 'key_base_url' in self.__dict__:
-                            response = client_2.get(
-                                url=ts_url,
-                                headers=random_headers(self.key_base_url),
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-
-                        else:
-                            response = client_2.get(
-                                url=ts_url,
-                                headers={'User-Agent': get_headers()},
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-
-                # Validate response and content
-                response.raise_for_status()
-                segment_content = response.content
-                content_size = len(segment_content)
-                duration = time.time() - start_time
-
-                # Decrypt if needed and verify decrypted content
-                if self.decryption is not None:
-                    try:
-                        segment_content = self.decryption.decrypt(segment_content)
-
-                    except Exception as e:
-                        logging.error(f"Decryption failed for segment {index}: {str(e)}")
-                        self.interrupt_flag.set()  # Interrupt the download process
-                        self.stop_event.set()  # Trigger the stopping event for all threads
-                        break  # Stop the current task immediately
-
-                # Update progress and queue
-                self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
-
-                # Add the segment to the queue
-                self.queue.put((index, segment_content))
-
-                # Track successfully downloaded segments
-                self.downloaded_segments.add(index)
-                progress_bar.update(1)
-
-                # Break out of the loop on success
-                return
-
-            except Exception as e:
-                logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
-
-                # Update stat variable class
-                if attempt > self.info_maxRetry:
-                    self.info_maxRetry = ( attempt + 1 )
-                self.info_nRetry += 1
-
-                if attempt + 1 == REQUEST_MAX_RETRY:
-                    console.log(f"[red]Final retry failed for segment: {index}")
-                    self.queue.put((index, None))  # Marker for failed segment
-                    progress_bar.update(1)
-                    self.info_nFailed += 1
-
-                    #break
-
-                sleep_time = backoff_factor * (2 ** attempt)
-                logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
-                time.sleep(sleep_time)
-
-    def write_segments_to_file(self):
-        """
-        Writes segments to file with additional verification.
-        """
-        buffer = {}
-        expected_index = 0
-        segments_written = set()
-
-        with open(self.tmp_file_path, 'wb') as f:
-            while not self.stop_event.is_set() or not self.queue.empty():
-                if self.interrupt_flag.is_set():
-                    break
-
-                try:
-                    index, segment_content = self.queue.get(timeout=self.current_timeout)
-
-                    # Successful queue retrieval: reduce timeout
-                    self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
-
-                    # Handle failed segments
-                    if segment_content is None:
-                        if index == expected_index:
-                            expected_index += 1
-                        continue
-
-                    # Write segment if it's the next expected one
-                    if index == expected_index:
-                        f.write(segment_content)
-                        segments_written.add(index)
-                        f.flush()
-                        expected_index += 1
-
-                        # Write any buffered segments that are now in order
-                        while expected_index in buffer:
-                            next_segment = buffer.pop(expected_index)
-
-                            if next_segment is not None:
-                                f.write(next_segment)
-                                segments_written.add(expected_index)
-                                f.flush()
-
-                            expected_index += 1
-
-                    else:
-                        buffer[index] = segment_content
-
-                except queue.Empty:
-                    self.current_timeout = min(self.max_timeout, self.current_timeout * 1.25)
-
-                    if self.stop_event.is_set():
-                        break
-
-                except Exception as e:
-                    logging.error(f"Error writing segment {index}: {str(e)}")
-
-    def download_streams(self, description: str, type: str):
-        """
-        Downloads all TS segments in parallel and writes them to a file.
-
-        Parameters:
-            - description: Description to insert on tqdm bar
-            - type (str): Type of download: 'video' or 'audio'
-        """
-        self.setup_interrupt_handler()
-
-        # Get config site from prev stack
-        frames = get_call_stack()
-        logging.info(f"Extract info from: {frames}")
-        config_site = str(frames[-4]['folder_base'])
-        logging.info(f"Use frame: {frames[-1]}")
-
-        # Workers to use for downloading
-        TQDM_MAX_WORKER = 0
-
-        # Select audio workers from folder of frames stack prev call.
-        try:
-            VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
-        except:
-            #VIDEO_WORKERS = os.cpu_count()
-            VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
-
-        try:
-            AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
-        except:
-            #AUDIO_WORKERS = os.cpu_count()
-            AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
-
-        # Differnt workers for audio and video
-        if "video" in str(type):
-            TQDM_MAX_WORKER = VIDEO_WORKERS
-
-        if "audio" in str(type):
-            TQDM_MAX_WORKER = AUDIO_WORKERS
-
-        #console.print(f"[cyan]Video workers[white]: [green]{VIDEO_WORKERS} [white]| [cyan]Audio workers[white]: [green]{AUDIO_WORKERS}")
-
-        # Custom bar for mobile and pc
-        if TQDM_USE_LARGE_BAR:
-            bar_format = (
-                f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
-                f"{Colors.RED}{{percentage:.2f}}% "
-                f"{Colors.MAGENTA}{{bar}} "
-                f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
-                f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-            )
-        else:
-            bar_format = (
-                f"{Colors.YELLOW}Proc{Colors.WHITE}: "
-                f"{Colors.RED}{{percentage:.2f}}% "
-                f"{Colors.WHITE}| "
-                f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-            )
-
-        # Create progress bar
-        progress_bar = tqdm(
-            total=len(self.segments),
-            unit='s',
-            ascii='░▒█',
-            bar_format=bar_format,
-            mininterval=0.05
-        )
-
-        try:
-
-            # Start writer thread
-            writer_thread = threading.Thread(target=self.write_segments_to_file)
-            writer_thread.daemon = True
-            writer_thread.start()
-
-            # Configure workers and delay
-            max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
-            delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
-
-            # Download segments with completion verification
-            with ThreadPoolExecutor(max_workers=max_workers) as executor:
-                futures = []
-                for index, segment_url in enumerate(self.segments):
-                    # Check for interrupt before submitting each task
-                    if self.interrupt_flag.is_set():
-                        break
-
-                    time.sleep(delay)
-                    futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
-
-                # Wait for futures with interrupt handling
-                for future in as_completed(futures):
-                    if self.interrupt_flag.is_set():
-                        break
-                    try:
-                        future.result()
-                    except Exception as e:
-                        logging.error(f"Error in download thread: {str(e)}")
-
-                # Interrupt handling for missing segments
-                if not self.interrupt_flag.is_set():
-                    total_segments = len(self.segments)
-                    completed_segments = len(self.downloaded_segments)
-
-                    if completed_segments < total_segments:
-                        missing_segments = set(range(total_segments)) - self.downloaded_segments
-                        logging.warning(f"Missing segments: {sorted(missing_segments)}")
-
-                        # Retry missing segments with interrupt check
-                        for index in missing_segments:
-                            if self.interrupt_flag.is_set():
-                                break
-
-                            try:
-                                self.make_requests_stream(self.segments[index], index, progress_bar)
-
-                            except Exception as e:
-                                logging.error(f"Failed to retry segment {index}: {str(e)}")
-
-        except Exception as e:
-            logging.error(f"Download failed: {str(e)}")
-            raise
-
-        finally:
-
-            # Clean up resources
-            self.stop_event.set()
-            writer_thread.join(timeout=30)
-            progress_bar.close()
-
-        # Check if download was interrupted
-        if self.download_interrupted:
-            console.log("[red] Download was manually stopped.")
-
-        # Clean up
-        self.stop_event.set()
-        writer_thread.join(timeout=30)
-        progress_bar.close()
-
-        # Final verification
-        try:
-            final_completion = (len(self.downloaded_segments) / total_segments) * 100
-            if final_completion < 99.9:  # Less than 99.9% complete
-                missing = set(range(total_segments)) - self.downloaded_segments
-                raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
-
-        except:
-            pass
-
-        # Verify output file
-        if not os.path.exists(self.tmp_file_path):
-            raise Exception("Output file missing")
-
-        file_size = os.path.getsize(self.tmp_file_path)
-        if file_size == 0:
-            raise Exception("Output file is empty")
-
-        # Display additional info when there is missing stream file
-        if self.info_nFailed > 0:
-
-            # Get expected time
-            ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
-            ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
-            console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
-
-            if self.info_nRetry >= len(self.segments) * 0.3:
-                console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")
-
-        # Info to return
-        return {'type': type, 'nFailed': self.info_nFailed}
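Note on the removed writer above: write_segments_to_file drains a PriorityQueue that the download threads fill in completion order, writing segment index N only once every lower index has been written, parking early arrivals in a dict buffer, and treating a None payload as a permanently failed segment to skip. Below is a minimal single-threaded sketch of that reordering logic, not the package's API: the byte payloads are made up, and a plain FIFO queue stands in for the PriorityQueue to force out-of-order arrival.

from queue import Queue

q = Queue()
for item in [(2, b"CC"), (0, b"AA"), (3, None), (1, b"BB")]:  # arrival order; None = failed segment
    q.put(item)

buffer = {}       # early arrivals, keyed by segment index
expected = 0      # next index the output still needs
out = bytearray()

while not q.empty():
    index, content = q.get()
    if index != expected:
        buffer[index] = content          # out of order: park it
        continue
    if content is not None:
        out += content                   # in order: write it
    expected += 1
    while expected in buffer:            # drain anything now unblocked
        nxt = buffer.pop(expected)
        if nxt is not None:
            out += nxt
        expected += 1

assert bytes(out) == b"AABBCC"           # failed segment 3 was skipped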