quasarr 1.3.5__py3-none-any.whl → 1.20.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- quasarr/__init__.py +157 -56
- quasarr/api/__init__.py +141 -36
- quasarr/api/arr/__init__.py +197 -78
- quasarr/api/captcha/__init__.py +897 -42
- quasarr/api/config/__init__.py +23 -0
- quasarr/api/sponsors_helper/__init__.py +84 -22
- quasarr/api/statistics/__init__.py +196 -0
- quasarr/downloads/__init__.py +237 -434
- quasarr/downloads/linkcrypters/al.py +237 -0
- quasarr/downloads/linkcrypters/filecrypt.py +178 -31
- quasarr/downloads/linkcrypters/hide.py +123 -0
- quasarr/downloads/packages/__init__.py +461 -0
- quasarr/downloads/sources/al.py +697 -0
- quasarr/downloads/sources/by.py +106 -0
- quasarr/downloads/sources/dd.py +6 -78
- quasarr/downloads/sources/dj.py +7 -0
- quasarr/downloads/sources/dt.py +1 -1
- quasarr/downloads/sources/dw.py +2 -2
- quasarr/downloads/sources/he.py +112 -0
- quasarr/downloads/sources/mb.py +47 -0
- quasarr/downloads/sources/nk.py +51 -0
- quasarr/downloads/sources/nx.py +36 -81
- quasarr/downloads/sources/sf.py +27 -4
- quasarr/downloads/sources/sj.py +7 -0
- quasarr/downloads/sources/sl.py +90 -0
- quasarr/downloads/sources/wd.py +110 -0
- quasarr/providers/cloudflare.py +204 -0
- quasarr/providers/html_images.py +20 -0
- quasarr/providers/html_templates.py +210 -108
- quasarr/providers/imdb_metadata.py +15 -2
- quasarr/providers/myjd_api.py +36 -5
- quasarr/providers/notifications.py +30 -5
- quasarr/providers/obfuscated.py +35 -0
- quasarr/providers/sessions/__init__.py +0 -0
- quasarr/providers/sessions/al.py +286 -0
- quasarr/providers/sessions/dd.py +78 -0
- quasarr/providers/sessions/nx.py +76 -0
- quasarr/providers/shared_state.py +368 -23
- quasarr/providers/statistics.py +154 -0
- quasarr/providers/version.py +60 -1
- quasarr/search/__init__.py +112 -36
- quasarr/search/sources/al.py +448 -0
- quasarr/search/sources/by.py +203 -0
- quasarr/search/sources/dd.py +17 -6
- quasarr/search/sources/dj.py +213 -0
- quasarr/search/sources/dt.py +37 -7
- quasarr/search/sources/dw.py +27 -47
- quasarr/search/sources/fx.py +27 -29
- quasarr/search/sources/he.py +196 -0
- quasarr/search/sources/mb.py +195 -0
- quasarr/search/sources/nk.py +188 -0
- quasarr/search/sources/nx.py +22 -6
- quasarr/search/sources/sf.py +143 -151
- quasarr/search/sources/sj.py +213 -0
- quasarr/search/sources/sl.py +246 -0
- quasarr/search/sources/wd.py +208 -0
- quasarr/storage/config.py +20 -4
- quasarr/storage/setup.py +224 -56
- quasarr-1.20.4.dist-info/METADATA +304 -0
- quasarr-1.20.4.dist-info/RECORD +72 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/WHEEL +1 -1
- quasarr/providers/tvmaze_metadata.py +0 -23
- quasarr-1.3.5.dist-info/METADATA +0 -174
- quasarr-1.3.5.dist-info/RECORD +0 -43
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/entry_points.txt +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/licenses/LICENSE +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/top_level.txt +0 -0
quasarr/downloads/linkcrypters/filecrypt.py

@@ -14,32 +14,40 @@ import requests
 from Cryptodome.Cipher import AES
 from bs4 import BeautifulSoup
 
-from quasarr.providers.
+from quasarr.providers.cloudflare import is_cloudflare_challenge, ensure_session_cf_bypassed
+from quasarr.providers.log import info, debug
 
 
 class CNL:
     def __init__(self, crypted_data):
+        debug("Initializing CNL with crypted_data.")
         self.crypted_data = crypted_data
 
     def jk_eval(self, f_def):
+        debug("Evaluating JavaScript key function.")
         js_code = f"""
         {f_def}
         f();
         """
 
         result = dukpy.evaljs(js_code).strip()
-
+        debug("JavaScript evaluation complete.")
         return result
 
     def aes_decrypt(self, data, key):
+        debug("Starting AES decrypt.")
         try:
             encrypted_data = base64.b64decode(data)
+            debug("Base64 decode for AES decrypt successful.")
         except Exception as e:
+            debug("Base64 decode for AES decrypt failed.")
             raise ValueError("Failed to decode base64 data") from e
 
         try:
             key_bytes = bytes.fromhex(key)
+            debug("Key successfully converted from hex.")
         except Exception as e:
+            debug("Failed converting key from hex.")
             raise ValueError("Failed to convert key to bytes") from e
 
         iv = key_bytes
@@ -47,26 +55,33 @@ class CNL:
 
         try:
             decrypted_data = cipher.decrypt(encrypted_data)
+            debug("AES decrypt operation successful.")
         except ValueError as e:
+            debug("AES decrypt operation failed.")
             raise ValueError("Decryption failed") from e
 
         try:
-
+            decoded = decrypted_data.decode('utf-8').replace('\x00', '').replace('\x08', '')
+            debug("Decoded AES output successfully.")
+            return decoded
         except UnicodeDecodeError as e:
+            debug("Failed decoding decrypted AES output.")
             raise ValueError("Failed to decode decrypted data") from e
 
     def decrypt(self):
+        debug("Starting Click'N'Load decrypt sequence.")
         crypted = self.crypted_data[2]
         jk = "function f(){ return \'" + self.crypted_data[1] + "';}"
         key = self.jk_eval(jk)
         uncrypted = self.aes_decrypt(crypted, key)
         urls = [result for result in uncrypted.split("\r\n") if len(result) > 0]
-
+        debug(f"Extracted {len(urls)} URLs from CNL decrypt.")
         return urls
 
 
 class DLC:
     def __init__(self, shared_state, dlc_file):
+        debug("Initializing DLC decrypt handler.")
         self.shared_state = shared_state
         self.data = dlc_file
         self.KEY = b"cb99b5cbc24db398"
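For orientation, the scheme the CNL class undoes is Click'n'Load: the page ships a tiny JavaScript function (the `jk` value) that returns a hex-encoded AES key, plus a base64 `crypted` blob encrypted with AES-CBC using that key as both key and IV; the plaintext is a CRLF-separated URL list padded with NUL/backspace bytes. A minimal standalone sketch of that scheme, with a made-up key and a regex shortcut in place of the dukpy evaluation the class uses:

import base64
import re

from Cryptodome.Cipher import AES


def cnl_decrypt(crypted_b64, jk_source):
    # For the common literal form `function f(){ return '<hex>';}` a regex is
    # enough; the class above runs the JS through dukpy instead, which also
    # covers non-literal variants of f().
    key_hex = re.search(r"return\s*'([0-9a-fA-F]+)'", jk_source).group(1)
    key = bytes.fromhex(key_hex)
    cipher = AES.new(key, AES.MODE_CBC, key)  # CNL reuses the key as the IV
    plain = cipher.decrypt(base64.b64decode(crypted_b64))
    text = plain.decode('utf-8').replace('\x00', '').replace('\x08', '')
    return [line for line in text.split("\r\n") if line]


# Round-trip demo with an illustrative key (not real Filecrypt data):
demo_key = "31323334353637383930313233343536"
plaintext = b"https://example.com/a\r\nhttps://example.com/b"
padded = plaintext + b"\x00" * (-len(plaintext) % 16)
payload = base64.b64encode(
    AES.new(bytes.fromhex(demo_key), AES.MODE_CBC, bytes.fromhex(demo_key)).encrypt(padded)).decode()
print(cnl_decrypt(payload, "function f(){ return '" + demo_key + "';}"))
# ['https://example.com/a', 'https://example.com/b']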
@@ -74,6 +89,7 @@ class DLC:
         self.API_URL = "http://service.jdownloader.org/dlcrypt/service.php?srcType=dlc&destType=pylo&data="
 
     def parse_packages(self, start_node):
+        debug("Parsing DLC packages from XML.")
         return [
             (
                 base64.b64decode(node.getAttribute("name")).decode("utf-8"),
@@ -83,41 +99,51 @@ class DLC:
         ]
 
     def parse_links(self, start_node):
+        debug("Parsing DLC links in package.")
         return [
             base64.b64decode(node.getElementsByTagName("url")[0].firstChild.data).decode("utf-8")
             for node in start_node.getElementsByTagName("file")
         ]
 
     def decrypt(self):
+        debug("Starting DLC decrypt flow.")
         if not isinstance(self.data, bytes):
+            debug("DLC data type invalid.")
             raise TypeError("data must be bytes.")
 
         all_urls = []
 
         try:
+            debug("Preparing DLC data buffer.")
             data = self.data.strip()
-
             data += b"=" * (-len(data) % 4)
 
             dlc_key = data[-88:].decode("utf-8")
             dlc_data = base64.b64decode(data[:-88])
+            debug("DLC base64 decode successful.")
 
             headers = {'User-Agent': self.shared_state.values["user_agent"]}
 
+            debug("Requesting DLC decryption service.")
             dlc_content = requests.get(self.API_URL + dlc_key, headers=headers, timeout=10).content.decode("utf-8")
 
             rc = base64.b64decode(re.search(r"<rc>(.+)</rc>", dlc_content, re.S).group(1))[:16]
+            debug("Received DLC RC block.")
 
             cipher = AES.new(self.KEY, AES.MODE_CBC, self.IV)
             key = iv = cipher.decrypt(rc)
+            debug("Decrypted DLC key material.")
 
             cipher = AES.new(key, AES.MODE_CBC, iv)
             xml_data = base64.b64decode(cipher.decrypt(dlc_data)).decode("utf-8")
+            debug("Final DLC decrypt successful.")
 
             root = xml.dom.minidom.parseString(xml_data).documentElement
             content_node = root.getElementsByTagName("content")[0]
+            debug("Parsed DLC XML content.")
 
             packages = self.parse_packages(content_node)
+            debug(f"Found {len(packages)} DLC packages.")
 
             for package in packages:
                 urls = package[1]
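One idiom in the hunk above deserves a note: `data += b"=" * (-len(data) % 4)` restores stripped base64 padding before the container is split into the trailing 88-character service key and the payload. `-len(data) % 4` is exactly the number of `=` characters needed to round the length up to a multiple of four:

import base64

# -len(raw) % 4 pads base64 text up to the next multiple of four bytes.
for raw in (b"YQ", b"YWI", b"YWJj"):
    padded = raw + b"=" * (-len(raw) % 4)
    print(raw, "->", padded, "->", base64.b64decode(padded))
# b'YQ'   -> b'YQ=='  -> b'a'
# b'YWI'  -> b'YWI='  -> b'ab'
# b'YWJj' -> b'YWJj'  -> b'abc'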
@@ -127,48 +153,83 @@ class DLC:
             info("DLC Error: " + str(e))
             return None
 
+        debug(f"DLC decrypt yielded {len(all_urls)} URLs.")
         return all_urls
 
 
 def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
     info("Attempting to decrypt Filecrypt link: " + url)
+    debug("Initializing Filecrypt session & headers.")
     session = requests.Session()
-
     headers = {'User-Agent': shared_state.values["user_agent"]}
 
+    debug("Ensuring Cloudflare bypass is ready.")
+    session, headers, output = ensure_session_cf_bypassed(info, shared_state, session, url, headers)
+    if not session or not output:
+        debug("Cloudflare bypass failed.")
+        return False
+
+    soup = BeautifulSoup(output.text, 'html.parser')
+    debug("Parsed initial Filecrypt HTML.")
+
     password_field = None
-
-
-
-
-
-
+    try:
+        debug("Attempting password field auto-detection.")
+        input_elem = soup.find('input', attrs={'type': 'password'})
+        if not input_elem:
+            input_elem = soup.find('input', placeholder=lambda v: v and 'password' in v.lower())
+        if not input_elem:
+            input_elem = soup.find('input',
+                                   attrs={'name': lambda v: v and ('pass' in v.lower() or 'password' in v.lower())})
+        if input_elem and input_elem.has_attr('name'):
+            password_field = input_elem['name']
             info("Password field name identified: " + password_field)
-
-
-
+            debug(f"Password field detected: {password_field}")
+    except Exception as e:
+        info(f"Password-field detection error: {e}")
+        debug("Password-field detection error raised.")
 
     if password and password_field:
         info("Using Password: " + password)
-
-
-
-
-
+        debug("Submitting password via POST.")
+        post_headers = {'User-Agent': shared_state.values["user_agent"],
+                        'Content-Type': 'application/x-www-form-urlencoded'}
+        data = {password_field: password}
+        try:
+            output = session.post(output.url, data=data, headers=post_headers, timeout=30)
+            debug("Password POST request successful.")
+        except requests.RequestException as e:
+            info(f"POSTing password failed: {e}")
+            debug("Password POST request failed.")
+            return False
+
+        if output.status_code == 403 or is_cloudflare_challenge(output.text):
+            info("Encountered Cloudflare after password POST. Re-running FlareSolverr...")
+            debug("Cloudflare reappeared after password submit, retrying bypass.")
+            session, headers, output = ensure_session_cf_bypassed(info, shared_state, session, output.url, headers)
+            if not session or not output:
+                debug("Cloudflare bypass failed after password POST.")
+                return False
 
     url = output.url
     soup = BeautifulSoup(output.text, 'html.parser')
+    debug("Re-parsed HTML after password submit or initial load.")
+
     if bool(soup.find_all("input", {"id": "p4assw0rt"})):
         info(f"Password was wrong or missing. Could not get links for {title}")
+        debug("Incorrect password detected via p4assw0rt.")
         return False
 
     no_captcha_present = bool(soup.find("form", {"class": "cnlform"}))
     if no_captcha_present:
         info("No CAPTCHA present. Skipping token!")
+        debug("Detected no CAPTCHA (CNL direct form).")
     else:
         circle_captcha = bool(soup.find_all("div", {"class": "circle_captcha"}))
+        debug(f"Circle captcha present: {circle_captcha}")
         i = 0
         while circle_captcha and i < 3:
+            debug(f"Submitting fake circle captcha click attempt {i+1}.")
            random_x = str(random.randint(100, 200))
            random_y = str(random.randint(100, 200))
            output = session.post(url, data="buttonx.x=" + random_x + "&buttonx.y=" + random_y,
@@ -177,40 +238,56 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
             url = output.url
             soup = BeautifulSoup(output.text, 'html.parser')
             circle_captcha = bool(soup.find_all("div", {"class": "circle_captcha"}))
+            i += 1
+            debug(f"Circle captcha still present: {circle_captcha}")
 
+        debug("Submitting final CAPTCHA token.")
         output = session.post(url, data="cap_token=" + token, headers={'User-Agent': shared_state.values["user_agent"],
                                                                        'Content-Type': 'application/x-www-form-urlencoded'})
         url = output.url
 
     if "/404.html" in url:
         info("Filecrypt returned 404 - current IP is likely banned or the link is offline.")
+        debug("Detected Filecrypt 404 page.")
 
     soup = BeautifulSoup(output.text, 'html.parser')
+    debug("Parsed post-captcha response HTML.")
 
     solved = bool(soup.find_all("div", {"class": "container"}))
     if not solved:
         info("Token rejected by Filecrypt! Try another CAPTCHA to proceed...")
+        debug("Token rejected; no 'container' div found.")
         return False
     else:
+        debug("CAPTCHA token accepted by Filecrypt.")
+
         season_number = ""
         episode_number = ""
         episode_in_title = re.findall(r'.*\.s(\d{1,3})e(\d{1,3})\..*', title, re.IGNORECASE)
         season_in_title = re.findall(r'.*\.s(\d{1,3})\..*', title, re.IGNORECASE)
+        debug("Attempting episode/season number parsing from title.")
+
         if episode_in_title:
             try:
                 season_number = str(int(episode_in_title[0][0]))
                 episode_number = str(int(episode_in_title[0][1]))
+                debug(f"Detected S{season_number}E{episode_number} from title.")
             except:
+                debug("Failed parsing S/E numbers from title.")
                 pass
         elif season_in_title:
             try:
                 season_number = str(int(season_in_title[0]))
+                debug(f"Detected season {season_number} from title.")
             except:
+                debug("Failed parsing season number from title.")
                 pass
 
         season = ""
         episode = ""
         tv_show_selector = soup.find("div", {"class": "dlpart"})
+        debug(f"TV show selector found: {bool(tv_show_selector)}")
+
         if tv_show_selector:
 
             season = "season="
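The two title regexes introduced above are easiest to read with a worked example (the release names are made up):

import re

episode_pattern = r'.*\.s(\d{1,3})e(\d{1,3})\..*'  # matches ".S01E02."
season_pattern = r'.*\.s(\d{1,3})\..*'             # matches ".S03."

print(re.findall(episode_pattern, "Show.Name.S01E02.1080p.WEB.x264-GRP", re.IGNORECASE))
# [('01', '02')] -> season_number "1", episode_number "2" after int() normalization
print(re.findall(season_pattern, "Show.Name.S03.COMPLETE.1080p.WEB.x264-GRP", re.IGNORECASE))
# ['03'] -> season_number "3"; movie titles match neither pattern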
@@ -220,41 +297,53 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
             try:
                 if season_selection:
                     season += str(season_number)
+                    debug(f"Assigned season parameter: {season}")
             except:
+                debug("Failed assigning season parameter.")
                 pass
 
             episode_selection = soup.find("div", {"id": "selbox_episode"})
             try:
                 if episode_selection:
                     episode += str(episode_number)
+                    debug(f"Assigned episode parameter: {episode}")
             except:
+                debug("Failed assigning episode parameter.")
                 pass
 
         if episode_number and not episode:
             info(f"Missing select for episode number {episode_number}! Expect undesired links in the output.")
+            debug("Episode number present but no episode selector container found.")
 
         links = []
 
         mirrors = []
         mirrors_available = soup.select("a[href*=mirror]")
+        debug(f"Mirrors available: {len(mirrors_available)}")
+
         if not mirror and mirrors_available:
             for mirror in mirrors_available:
                 try:
                     mirror_query = mirror.get("href").split("?")[1]
                     base_url = url.split("?")[0] if "mirror" in url else url
                     mirrors.append(f"{base_url}?{mirror_query}")
+                    debug(f"Discovered mirror: {mirrors[-1]}")
                 except IndexError:
+                    debug("Mirror parsing failed due to missing '?'.")
                     continue
         else:
             mirrors = [url]
+            debug("Using direct URL as only mirror.")
 
         for mirror in mirrors:
             if not len(mirrors) == 1:
+                debug(f"Loading mirror: {mirror}")
                 output = session.get(mirror, headers=headers)
                 url = output.url
                 soup = BeautifulSoup(output.text, 'html.parser')
 
             try:
+                debug("Attempting Click'n'Load decrypt.")
                 crypted_payload = soup.find("form", {"class": "cnlform"}).get('onsubmit')
                 crypted_data = re.findall(r"'(.*?)'", crypted_payload)
                 if not title:
@@ -265,7 +354,9 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
                     crypted_data[2],
                     title
                 ]
+
                 if episode and season:
+                    debug("Applying episode/season filtering to CNL.")
                     domain = urlparse(url).netloc
                     filtered_cnl_secret = soup.find("input", {"name": "hidden_cnl_id"}).attrs["value"]
                     filtered_cnl_link = f"https://{domain}/_CNL/{filtered_cnl_secret}.html?{season}&{episode}"
@@ -274,6 +365,7 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
                     if filtered_cnl_result.status_code == 200:
                         filtered_cnl_data = json.loads(filtered_cnl_result.text)
                         if filtered_cnl_data["success"]:
+                            debug("Season/Episode filter applied successfully.")
                             crypted_data = [
                                 crypted_data[0],
                                 filtered_cnl_data["data"][0],
@@ -282,16 +374,71 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=None):
                             ]
                 links.extend(CNL(crypted_data).decrypt())
             except:
+                debug("CNL decrypt failed; trying DLC fallback.")
+                if "The owner of this folder has deactivated all hosts in this container in their settings." in soup.text:
+                    info(f"Mirror deactivated by the owner: {mirror}")
+                    debug("Mirror deactivated detected in page text.")
+                    continue
+
                 info("Click'n'Load not found! Falling back to DLC...")
-
-
-
-
-
-
-
-
-
-
+                try:
+                    debug("Attempting DLC fallback.")
+                    crypted_payload = soup.find("button", {"class": "dlcdownload"}).get("onclick")
+                    crypted_data = re.findall(r"'(.*?)'", crypted_payload)
+                    dlc_secret = crypted_data[0]
+                    domain = urlparse(url).netloc
+                    if episode and season:
+                        dlc_link = f"https://{domain}/DLC/{dlc_secret}.dlc?{episode}&{season}"
+                    else:
+                        dlc_link = f"https://{domain}/DLC/{dlc_secret}.dlc"
+                    dlc_file = session.get(dlc_link, headers=headers).content
+                    links.extend(DLC(shared_state, dlc_file).decrypt())
+                except:
+                    debug("DLC fallback failed, trying button fallback.")
+                    info("DLC not found! Falling back to first available download Button...")
+
+                    base_url = urlparse(url).netloc
+                    phpsessid = session.cookies.get('PHPSESSID')
+                    if not phpsessid:
+                        info("PHPSESSID cookie not found! Cannot proceed with download links extraction.")
+                        debug("Missing PHPSESSID cookie.")
+                        return False
+
+                    results = []
+                    debug("Parsing fallback buttons for download links.")
+
+                    for button in soup.find_all('button'):
+                        data_attrs = [v for k, v in button.attrs.items() if k.startswith('data-') and k != 'data-i18n']
+                        if not data_attrs:
+                            continue
+
+                        link_id = data_attrs[0]
+                        row = button.find_parent('tr')
+                        mirror_tag = row.find('a', class_='external_link') if row else None
+                        mirror_name = mirror_tag.get_text(strip=True) if mirror_tag else 'unknown'
+                        full_url = f"http://{base_url}/Link/{link_id}.html"
+                        results.append((full_url, mirror_name))
+
+                    sorted_results = sorted(results, key=lambda x: 0 if 'rapidgator' in x[1].lower() else 1)
+                    debug(f"Found {len(sorted_results)} fallback link candidates.")
+
+                    for result_url, mirror in sorted_results:
+                        info("You must solve circlecaptcha separately!")
+                        debug(f'Session "{phpsessid}" for {result_url} will not live long. Submit new CAPTCHA quickly!')
+                        return {
+                            "status": "replaced",
+                            "replace_url": result_url,
+                            "mirror": mirror,
+                            "session": phpsessid
+                        }
+
+    if not links:
+        info("No links found in Filecrypt response!")
+        debug("Extraction completed but yielded no links.")
+        return False
 
-
+    debug(f"Returning success with {len(links)} extracted links.")
+    return {
+        "status": "success",
+        "links": links
+    }
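Taken together, the hunks above give `get_filecrypt_links` three result shapes: `False` on any hard failure, a `"replaced"` dict when only the bare download button could be extracted (the caller must then solve the circle CAPTCHA itself before the short-lived PHPSESSID expires), and a `"success"` dict carrying the decrypted links. A hedged caller sketch; `handle_filecrypt_result` is a hypothetical helper, not part of the package:

from quasarr.providers.log import info


def handle_filecrypt_result(result, title):
    # Dispatch on the three shapes returned by get_filecrypt_links.
    if not result:
        info(f"Filecrypt decryption failed for {title}; a fresh CAPTCHA or link is needed.")
        return []
    if result["status"] == "replaced":
        # Only the plain download button survived: solve the circle CAPTCHA
        # against result["replace_url"] while result["session"] is still valid.
        info(f"Circle CAPTCHA required for {result['replace_url']} (mirror: {result['mirror']})")
        return []
    return result["links"]  # "status" == "success"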
quasarr/downloads/linkcrypters/hide.py

@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+# Quasarr
+# Project by https://github.com/rix1337
+
+import re
+from typing import List, Dict, Any
+
+import requests
+
+from quasarr.providers.log import info, debug
+from quasarr.providers.statistics import StatsHelper
+
+
+def unhide_links(shared_state, url):
+    try:
+        links = []
+
+        match = re.search(r"container/([a-z0-9\-]+)", url)
+        if not match:
+            info(f"Invalid hide.cx URL: {url}")
+            return []
+
+        container_id = match.group(1)
+        info(f"Fetching hide.cx container with ID: {container_id}")
+
+        headers = {'User-Agent': shared_state.values["user_agent"]}
+
+        container_url = f"https://api.hide.cx/containers/{container_id}"
+        response = requests.get(container_url, headers=headers)
+        data = response.json()
+
+        for link in data.get("links", []):
+            link_id = link.get("id")
+            if not link_id:
+                continue
+
+            debug(f"Fetching hide.cx link with ID: {link_id}")
+            link_url = f"https://api.hide.cx/containers/{container_id}/links/{link_id}"
+            link_data = requests.get(link_url, headers=headers).json()
+
+            final_url = link_data.get("url")
+            if final_url and final_url not in links:
+                links.append(final_url)
+
+        success = bool(links)
+        if success:
+            StatsHelper(shared_state).increment_captcha_decryptions_automatic()
+        else:
+            StatsHelper(shared_state).increment_failed_decryptions_automatic()
+
+        return links
+    except Exception as e:
+        info(f"Error fetching hide.cx links: {e}")
+        StatsHelper(shared_state).increment_failed_decryptions_automatic()
+        return []
+
+
+def decrypt_links_if_hide(shared_state: Any, items: List[List[str]]) -> Dict[str, Any]:
+    """
+    Resolve redirects and decrypt hide.cx links from a list of item lists.
+
+    Each item list must include:
+      - index 0: the URL to resolve
+      - any additional metadata at subsequent indices (ignored here)
+
+    :param shared_state: State object required by unhide_links function
+    :param items: List of lists, where each inner list has the URL at index 0
+    :return: Dict with 'status' and 'results' (flat list of decrypted link URLs)
+    """
+    if not items:
+        info("No items provided to decrypt.")
+        return {"status": "error", "results": []}
+
+    session = requests.Session()
+    session.max_redirects = 5
+
+    hide_urls: List[str] = []
+    for item in items:
+        original_url = item[0]
+        if not original_url:
+            debug(f"Skipping item without URL: {item}")
+            continue
+
+        try:
+            # Try HEAD first, fallback to GET
+            try:
+                resp = session.head(original_url, allow_redirects=True, timeout=10)
+            except requests.RequestException:
+                resp = session.get(original_url, allow_redirects=True, timeout=10)
+
+            final_url = resp.url
+            if "hide.cx" in final_url:
+                debug(f"Identified hide.cx link: {final_url}")
+                hide_urls.append(final_url)
+            else:
+                debug(f"Not a hide.cx link (skipped): {final_url}")
+
+        except requests.RequestException as e:
+            info(f"Error resolving URL {original_url}: {e}")
+            continue
+
+    if not hide_urls:
+        debug(f"No hide.cx links found among {len(items)} items.")
+        return {"status": "none", "results": []}
+
+    info(f"Found {len(hide_urls)} hide.cx URLs; decrypting...")
+    decrypted_links: List[str] = []
+    for url in hide_urls:
+        try:
+            links = unhide_links(shared_state, url)
+            if not links:
+                debug(f"No links decrypted for {url}")
+                continue
+            decrypted_links.extend(links)
+        except Exception as e:
+            info(f"Failed to decrypt {url}: {e}")
+            continue
+
+    if not decrypted_links:
+        info(f"Could not decrypt any links from hide.cx URLs.")
+        return {"status": "error", "results": []}
+
+    return {"status": "success", "results": decrypted_links}
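A usage sketch for the new module; `shared_state` comes from Quasarr's runtime (see quasarr/providers/shared_state.py in the file list above), and the redirect URLs are placeholders:

from quasarr.downloads.linkcrypters.hide import decrypt_links_if_hide

# Each item carries the candidate URL at index 0; extra metadata is ignored.
items = [
    ["https://redirect.example/abc123", "Some.Release.Title-GRP"],
    ["https://redirect.example/def456", "Other.Release.Title-GRP"],
]

outcome = decrypt_links_if_hide(shared_state, items)
if outcome["status"] == "success":
    print(f"Decrypted {len(outcome['results'])} hoster links")
elif outcome["status"] == "none":
    print("No hide.cx links among the items")
else:  # "error"
    print("hide.cx decryption failed or nothing was decryptable")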