quasarr-2.4.8-py3-none-any.whl → quasarr-2.4.10-py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of quasarr might be problematic.
- quasarr/__init__.py +134 -70
- quasarr/api/__init__.py +40 -31
- quasarr/api/arr/__init__.py +116 -108
- quasarr/api/captcha/__init__.py +262 -137
- quasarr/api/config/__init__.py +76 -46
- quasarr/api/packages/__init__.py +138 -102
- quasarr/api/sponsors_helper/__init__.py +29 -16
- quasarr/api/statistics/__init__.py +19 -19
- quasarr/downloads/__init__.py +165 -72
- quasarr/downloads/linkcrypters/al.py +35 -18
- quasarr/downloads/linkcrypters/filecrypt.py +107 -52
- quasarr/downloads/linkcrypters/hide.py +5 -6
- quasarr/downloads/packages/__init__.py +342 -177
- quasarr/downloads/sources/al.py +191 -100
- quasarr/downloads/sources/by.py +31 -13
- quasarr/downloads/sources/dd.py +27 -14
- quasarr/downloads/sources/dj.py +1 -3
- quasarr/downloads/sources/dl.py +126 -71
- quasarr/downloads/sources/dt.py +11 -5
- quasarr/downloads/sources/dw.py +28 -14
- quasarr/downloads/sources/he.py +32 -24
- quasarr/downloads/sources/mb.py +19 -9
- quasarr/downloads/sources/nk.py +14 -10
- quasarr/downloads/sources/nx.py +8 -18
- quasarr/downloads/sources/sf.py +45 -20
- quasarr/downloads/sources/sj.py +1 -3
- quasarr/downloads/sources/sl.py +9 -5
- quasarr/downloads/sources/wd.py +32 -12
- quasarr/downloads/sources/wx.py +35 -21
- quasarr/providers/auth.py +42 -37
- quasarr/providers/cloudflare.py +28 -30
- quasarr/providers/hostname_issues.py +2 -1
- quasarr/providers/html_images.py +2 -2
- quasarr/providers/html_templates.py +22 -14
- quasarr/providers/imdb_metadata.py +149 -80
- quasarr/providers/jd_cache.py +131 -39
- quasarr/providers/log.py +1 -1
- quasarr/providers/myjd_api.py +260 -196
- quasarr/providers/notifications.py +53 -41
- quasarr/providers/obfuscated.py +9 -4
- quasarr/providers/sessions/al.py +71 -55
- quasarr/providers/sessions/dd.py +21 -14
- quasarr/providers/sessions/dl.py +30 -19
- quasarr/providers/sessions/nx.py +23 -14
- quasarr/providers/shared_state.py +292 -141
- quasarr/providers/statistics.py +75 -43
- quasarr/providers/utils.py +33 -27
- quasarr/providers/version.py +45 -14
- quasarr/providers/web_server.py +10 -5
- quasarr/search/__init__.py +30 -18
- quasarr/search/sources/al.py +124 -73
- quasarr/search/sources/by.py +110 -59
- quasarr/search/sources/dd.py +57 -35
- quasarr/search/sources/dj.py +69 -48
- quasarr/search/sources/dl.py +159 -100
- quasarr/search/sources/dt.py +110 -74
- quasarr/search/sources/dw.py +121 -61
- quasarr/search/sources/fx.py +108 -62
- quasarr/search/sources/he.py +78 -49
- quasarr/search/sources/mb.py +96 -48
- quasarr/search/sources/nk.py +80 -50
- quasarr/search/sources/nx.py +91 -62
- quasarr/search/sources/sf.py +171 -106
- quasarr/search/sources/sj.py +69 -48
- quasarr/search/sources/sl.py +115 -71
- quasarr/search/sources/wd.py +67 -44
- quasarr/search/sources/wx.py +188 -123
- quasarr/storage/config.py +65 -52
- quasarr/storage/setup.py +238 -140
- quasarr/storage/sqlite_database.py +10 -4
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/METADATA +4 -3
- quasarr-2.4.10.dist-info/RECORD +81 -0
- quasarr-2.4.8.dist-info/RECORD +0 -81
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/WHEEL +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/entry_points.txt +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/licenses/LICENSE +0 -0
quasarr/downloads/linkcrypters/filecrypt.py

@@ -11,11 +11,14 @@ from urllib.parse import urlparse

 import dukpy
 import requests
-from Cryptodome.Cipher import AES
 from bs4 import BeautifulSoup
+from Cryptodome.Cipher import AES

-from quasarr.providers.cloudflare import
-
+from quasarr.providers.cloudflare import (
+    ensure_session_cf_bypassed,
+    is_cloudflare_challenge,
+)
+from quasarr.providers.log import debug, info


 class CNL:
@@ -61,7 +64,9 @@ class CNL:
             raise ValueError("Decryption failed") from e

         try:
-            decoded =
+            decoded = (
+                decrypted_data.decode("utf-8").replace("\x00", "").replace("\x08", "")
+            )
             debug("Decoded AES output successfully.")
             return decoded
         except UnicodeDecodeError as e:
@@ -71,7 +76,7 @@
     def decrypt(self):
         debug("Starting Click'N'Load decrypt sequence.")
         crypted = self.crypted_data[2]
-        jk = "function f(){ return
+        jk = "function f(){ return '" + self.crypted_data[1] + "';}"
         key = self.jk_eval(jk)
         uncrypted = self.aes_decrypt(crypted, key)
         urls = [result for result in uncrypted.split("\r\n") if len(result) > 0]
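The decrypt sequence above follows the usual Click'N'Load handshake: the `jk` JavaScript snippet yields the AES key, which then decrypts the base64 payload. A minimal standalone sketch, assuming the common CNL convention that `jk` returns a hex-encoded key that doubles as the IV (the helper below is illustrative, not quasarr's actual `jk_eval`):

```python
import base64
import binascii

import dukpy
from Cryptodome.Cipher import AES


def cnl_decrypt(jk_js: str, crypted_b64: str) -> list:
    """Sketch of the CNL steps mirrored above; jk_js is the "function f(){...}" string."""
    hex_key = dukpy.evaljs(jk_js + " f()")     # evaluate the key function via dukpy
    key = binascii.unhexlify(hex_key)          # assumed: hex string -> 16 raw key bytes
    cipher = AES.new(key, AES.MODE_CBC, key)   # assumed: key reused as IV (CNL convention)
    plain = cipher.decrypt(base64.b64decode(crypted_b64))
    decoded = plain.decode("utf-8").replace("\x00", "").replace("\x08", "")
    return [line for line in decoded.split("\r\n") if line]
```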
@@ -93,7 +98,7 @@ class DLC:
         return [
             (
                 base64.b64decode(node.getAttribute("name")).decode("utf-8"),
-                self.parse_links(node)
+                self.parse_links(node),
             )
             for node in start_node.getElementsByTagName("package")
         ]
@@ -101,7 +106,9 @@
     def parse_links(self, start_node):
         debug("Parsing DLC links in package.")
         return [
-            base64.b64decode(
+            base64.b64decode(
+                node.getElementsByTagName("url")[0].firstChild.data
+            ).decode("utf-8")
             for node in start_node.getElementsByTagName("file")
         ]

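Both helpers walk a decrypted DLC container, which is plain XML with base64-encoded package names and file URLs. A toy, self-contained illustration of that structure (the sample XML is invented):

```python
import base64
from xml.dom.minidom import parseString

# Invented sample matching the <package><file><url> layout parsed above
xml = (
    '<content><package name="{}"><file><url>{}</url></file></package></content>'
).format(
    base64.b64encode(b"Example.Pack").decode(),
    base64.b64encode(b"https://example.com/file.rar").decode(),
)

start_node = parseString(xml).documentElement
for node in start_node.getElementsByTagName("package"):
    name = base64.b64decode(node.getAttribute("name")).decode("utf-8")
    links = [
        base64.b64decode(n.getElementsByTagName("url")[0].firstChild.data).decode("utf-8")
        for n in node.getElementsByTagName("file")
    ]
    print(name, links)  # Example.Pack ['https://example.com/file.rar']
```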
@@ -122,12 +129,16 @@
         dlc_data = base64.b64decode(data[:-88])
         debug("DLC base64 decode successful.")

-        headers = {
+        headers = {"User-Agent": self.shared_state.values["user_agent"]}

         debug("Requesting DLC decryption service.")
-        dlc_content = requests.get(
+        dlc_content = requests.get(
+            self.API_URL + dlc_key, headers=headers, timeout=10
+        ).content.decode("utf-8")

-        rc = base64.b64decode(
+        rc = base64.b64decode(
+            re.search(r"<rc>(.+)</rc>", dlc_content, re.S).group(1)
+        )[:16]
         debug("Received DLC RC block.")

         cipher = AES.new(self.KEY, AES.MODE_CBC, self.IV)
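This hunk only reflows the existing DLC handling: strip the trailing 88-character service key, submit it to the decryption service, and pull the 16-byte `<rc>` block from the response. A hedged sketch of that flow in isolation; `api_url`, `KEY` and `IV` stand in for the constants on the `DLC` class, and the final key-derivation step follows the conventional DLC scheme rather than anything shown in this diff:

```python
import base64
import re

import requests
from Cryptodome.Cipher import AES

KEY = IV = b"\x00" * 16  # placeholders - the real constants live on the DLC class


def dlc_to_xml(data: str, api_url: str, user_agent: str) -> str:
    dlc_key = data[-88:]                       # trailing 88 chars = service key
    dlc_data = base64.b64decode(data[:-88])    # rest = encrypted container body
    headers = {"User-Agent": user_agent}
    dlc_content = requests.get(api_url + dlc_key, headers=headers, timeout=10).text
    rc = base64.b64decode(re.search(r"<rc>(.+)</rc>", dlc_content, re.S).group(1))[:16]
    # Assumed convention: rc decrypts to the container key, which also serves as IV
    key = iv = AES.new(KEY, AES.MODE_CBC, IV).decrypt(rc)
    xml = base64.b64decode(AES.new(key, AES.MODE_CBC, iv).decrypt(dlc_data))
    return xml.decode("utf-8")
```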
@@ -161,28 +172,37 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
     info("Attempting to decrypt Filecrypt link: " + url)
     debug("Initializing Filecrypt session & headers.")
     session = requests.Session()
-    headers = {
+    headers = {"User-Agent": shared_state.values["user_agent"]}

     debug("Ensuring Cloudflare bypass is ready.")
-    session, headers, output = ensure_session_cf_bypassed(
+    session, headers, output = ensure_session_cf_bypassed(
+        info, shared_state, session, url, headers
+    )
     if not session or not output:
         debug("Cloudflare bypass failed.")
         return False

-    soup = BeautifulSoup(output.text,
+    soup = BeautifulSoup(output.text, "html.parser")
     debug("Parsed initial Filecrypt HTML.")

     password_field = None
     try:
         debug("Attempting password field auto-detection.")
-        input_elem = soup.find(
+        input_elem = soup.find("input", attrs={"type": "password"})
         if not input_elem:
-            input_elem = soup.find(
+            input_elem = soup.find(
+                "input", placeholder=lambda v: v and "password" in v.lower()
+            )
         if not input_elem:
-            input_elem = soup.find(
-
-
-
+            input_elem = soup.find(
+                "input",
+                attrs={
+                    "name": lambda v: v
+                    and ("pass" in v.lower() or "password" in v.lower())
+                },
+            )
+        if input_elem and input_elem.has_attr("name"):
+            password_field = input_elem["name"]
         info("Password field name identified: " + password_field)
         debug(f"Password field detected: {password_field}")
     except Exception as e:
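The new auto-detection tries three increasingly loose selectors for the password input: an explicit `type="password"` field, then a placeholder mentioning "password", then any input whose name contains "pass". A self-contained check of that heuristic against a made-up snippet (real Filecrypt markup will differ):

```python
from bs4 import BeautifulSoup

html = '<form><input name="folder_pass" placeholder="Enter password"></form>'  # invented sample
soup = BeautifulSoup(html, "html.parser")

input_elem = soup.find("input", attrs={"type": "password"})
if not input_elem:
    input_elem = soup.find("input", placeholder=lambda v: v and "password" in v.lower())
if not input_elem:
    input_elem = soup.find(
        "input",
        attrs={"name": lambda v: v and ("pass" in v.lower() or "password" in v.lower())},
    )

password_field = input_elem["name"] if input_elem and input_elem.has_attr("name") else None
print(password_field)  # -> folder_pass (matched via the placeholder fallback)
```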
@@ -192,11 +212,15 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
     if password and password_field:
         info("Using Password: " + password)
         debug("Submitting password via POST.")
-        post_headers = {
-
+        post_headers = {
+            "User-Agent": shared_state.values["user_agent"],
+            "Content-Type": "application/x-www-form-urlencoded",
+        }
         data = {password_field: password}
         try:
-            output = session.post(
+            output = session.post(
+                output.url, data=data, headers=post_headers, timeout=30
+            )
             debug("Password POST request successful.")
         except requests.RequestException as e:
             info(f"POSTing password failed: {e}")
@@ -204,15 +228,19 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
             return False

     if output.status_code == 403 or is_cloudflare_challenge(output.text):
-        info(
+        info(
+            "Encountered Cloudflare after password POST. Re-running FlareSolverr..."
+        )
         debug("Cloudflare reappeared after password submit, retrying bypass.")
-        session, headers, output = ensure_session_cf_bypassed(
+        session, headers, output = ensure_session_cf_bypassed(
+            info, shared_state, session, output.url, headers
+        )
         if not session or not output:
             debug("Cloudflare bypass failed after password POST.")
             return False

     url = output.url
-    soup = BeautifulSoup(output.text,
+    soup = BeautifulSoup(output.text, "html.parser")
     debug("Re-parsed HTML after password submit or initial load.")

     if bool(soup.find_all("input", {"id": "p4assw0rt"})):
@@ -232,25 +260,38 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
         debug(f"Submitting fake circle captcha click attempt {i + 1}.")
         random_x = str(random.randint(100, 200))
         random_y = str(random.randint(100, 200))
-        output = session.post(
-
-
+        output = session.post(
+            url,
+            data="buttonx.x=" + random_x + "&buttonx.y=" + random_y,
+            headers={
+                "User-Agent": shared_state.values["user_agent"],
+                "Content-Type": "application/x-www-form-urlencoded",
+            },
+        )
         url = output.url
-        soup = BeautifulSoup(output.text,
+        soup = BeautifulSoup(output.text, "html.parser")
         circle_captcha = bool(soup.find_all("div", {"class": "circle_captcha"}))
         i += 1
         debug(f"Circle captcha still present: {circle_captcha}")

     debug("Submitting final CAPTCHA token.")
-    output = session.post(
-
+    output = session.post(
+        url,
+        data="cap_token=" + token,
+        headers={
+            "User-Agent": shared_state.values["user_agent"],
+            "Content-Type": "application/x-www-form-urlencoded",
+        },
+    )
     url = output.url

     if "/404.html" in url:
-        info(
+        info(
+            "Filecrypt returned 404 - current IP is likely banned or the link is offline."
+        )
         debug("Detected Filecrypt 404 page.")

-    soup = BeautifulSoup(output.text,
+    soup = BeautifulSoup(output.text, "html.parser")
     debug("Parsed post-captcha response HTML.")

     solved = bool(soup.find_all("div", {"class": "container"}))
@@ -263,8 +304,10 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N

     season_number = ""
     episode_number = ""
-    episode_in_title = re.findall(
-
+    episode_in_title = re.findall(
+        r".*\.s(\d{1,3})e(\d{1,3})\..*", title, re.IGNORECASE
+    )
+    season_in_title = re.findall(r".*\.s(\d{1,3})\..*", title, re.IGNORECASE)
     debug("Attempting episode/season number parsing from title.")

     if episode_in_title:
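The reflowed patterns capture season and episode numbers from a release title. A quick check against invented titles shows what each one returns:

```python
import re

ep_title = "Some.Show.S01E05.German.1080p.WEB.x264-GROUP"    # invented examples
season_title = "Some.Show.S01.German.1080p.WEB.x264-GROUP"

print(re.findall(r".*\.s(\d{1,3})e(\d{1,3})\..*", ep_title, re.IGNORECASE))  # [('01', '05')]
print(re.findall(r".*\.s(\d{1,3})\..*", season_title, re.IGNORECASE))        # ['01']
```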
@@ -289,7 +332,6 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
     debug(f"TV show selector found: {bool(tv_show_selector)}")

     if tv_show_selector:
-
         season = "season="
         episode = "episode="

@@ -312,7 +354,9 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
                 pass

         if episode_number and not episode:
-            info(
+            info(
+                f"Missing select for episode number {episode_number}! Expect undesired links in the output."
+            )
             debug("Episode number present but no episode selector container found.")

     links = []
@@ -340,11 +384,13 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
         debug(f"Loading mirror: {mirror}")
         output = session.get(mirror, headers=headers)
         url = output.url
-        soup = BeautifulSoup(output.text,
+        soup = BeautifulSoup(output.text, "html.parser")

         try:
             debug("Attempting Click'n'Load decrypt.")
-            crypted_payload = soup.find("form", {"class": "cnlform"}).get(
+            crypted_payload = soup.find("form", {"class": "cnlform"}).get(
+                "onsubmit"
+            )
             crypted_data = re.findall(r"'(.*?)'", crypted_payload)
             if not title:
                 title = crypted_data[3]
@@ -352,16 +398,19 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
                 crypted_data[0],
                 crypted_data[1],
                 crypted_data[2],
-                title
+                title,
             ]

             if episode and season:
                 debug("Applying episode/season filtering to CNL.")
                 domain = urlparse(url).netloc
-                filtered_cnl_secret = soup.find(
+                filtered_cnl_secret = soup.find(
+                    "input", {"name": "hidden_cnl_id"}
+                ).attrs["value"]
                 filtered_cnl_link = f"https://{domain}/_CNL/{filtered_cnl_secret}.html?{season}&{episode}"
-                filtered_cnl_result = session.post(
-
+                filtered_cnl_result = session.post(
+                    filtered_cnl_link, headers=headers
+                )
                 if filtered_cnl_result.status_code == 200:
                     filtered_cnl_data = json.loads(filtered_cnl_result.text)
                     if filtered_cnl_data["success"]:
@@ -370,12 +419,15 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
                             crypted_data[0],
                             filtered_cnl_data["data"][0],
                             filtered_cnl_data["data"][1],
-                            title
+                            title,
                         ]
             links.extend(CNL(crypted_data).decrypt())
         except:
             debug("CNL decrypt failed; trying DLC fallback.")
-            if
+            if (
+                "The owner of this folder has deactivated all hosts in this container in their settings."
+                in soup.text
+            ):
                 info(f"Mirror deactivated by the owner: {mirror}")
                 debug("Mirror deactivated detected in page text.")
                 continue
@@ -383,19 +435,25 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
             info("Click'n'Load not found! Falling back to DLC...")
             try:
                 debug("Attempting DLC fallback.")
-                crypted_payload = soup.find("button", {"class": "dlcdownload"}).get(
+                crypted_payload = soup.find("button", {"class": "dlcdownload"}).get(
+                    "onclick"
+                )
                 crypted_data = re.findall(r"'(.*?)'", crypted_payload)
                 dlc_secret = crypted_data[0]
                 domain = urlparse(url).netloc
                 if episode and season:
-                    dlc_link =
+                    dlc_link = (
+                        f"https://{domain}/DLC/{dlc_secret}.dlc?{episode}&{season}"
+                    )
                 else:
                     dlc_link = f"https://{domain}/DLC/{dlc_secret}.dlc"
                 dlc_file = session.get(dlc_link, headers=headers).content
                 links.extend(DLC(shared_state, dlc_file).decrypt())
             except:
                 debug("DLC fallback failed, trying button fallback.")
-                info(
+                info(
+                    "Click'n'Load and DLC not found. Please use the fallback userscript instead!"
+                )
                 return False

     if not links:
@@ -404,7 +462,4 @@ def get_filecrypt_links(shared_state, token, title, url, password=None, mirror=N
         return False

     debug(f"Returning success with {len(links)} extracted links.")
-    return {
-        "status": "success",
-        "links": links
-    }
+    return {"status": "success", "links": links}
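The final hunk collapses the success payload onto one line; the contract is unchanged: `get_filecrypt_links()` returns `False` on failure and a dict with `status` and `links` otherwise. A hypothetical consumer of that return value (the sample values are placeholders):

```python
def handle_decrypt_result(result):
    """result is whatever get_filecrypt_links() returned: False or a status/links dict."""
    if not result:
        print("Filecrypt decryption failed")
        return []
    return result["links"]


print(handle_decrypt_result(False))                                                          # []
print(handle_decrypt_result({"status": "success", "links": ["https://example.com/a.rar"]}))  # ['https://example.com/a.rar']
```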
quasarr/downloads/linkcrypters/hide.py

@@ -4,11 +4,11 @@

 import re
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from typing import
+from typing import Any, Dict, List

 import requests

-from quasarr.providers.log import
+from quasarr.providers.log import debug, info
 from quasarr.providers.statistics import StatsHelper


@@ -57,10 +57,10 @@ def unhide_links(shared_state, url, session):
         container_id = canonical_id
         debug(f"Resolved to canonical container ID: {container_id}")

-    headers = {
+    headers = {"User-Agent": shared_state.values["user_agent"]}
     info(f"Fetching hide.cx container with ID: {container_id}")

-    headers = {
+    headers = {"User-Agent": shared_state.values["user_agent"]}

     container_url = f"https://api.hide.cx/containers/{container_id}"
     response = session.get(container_url, headers=headers)
@@ -81,7 +81,7 @@ def unhide_links(shared_state, url, session):
     # Process links in batches of 10
     batch_size = 10
     for i in range(0, len(link_ids), batch_size):
-        batch = link_ids[i:i + batch_size]
+        batch = link_ids[i : i + batch_size]
         with ThreadPoolExecutor(max_workers=batch_size) as executor:
             futures = [executor.submit(fetch_link, link_id) for link_id in batch]
             for future in as_completed(futures):
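Only the slice spacing changes here (Black-style formatting), but the surrounding pattern is worth seeing on its own: link IDs are processed ten at a time, each batch fanned out over a small thread pool. A generic, runnable sketch with `fetch_one` standing in for the real `fetch_link` helper:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed


def fetch_one(link_id):
    return f"resolved-{link_id}"  # placeholder for the real per-link API call


link_ids = list(range(25))
batch_size = 10
results = []

for i in range(0, len(link_ids), batch_size):
    batch = link_ids[i : i + batch_size]
    with ThreadPoolExecutor(max_workers=batch_size) as executor:
        futures = [executor.submit(fetch_one, link_id) for link_id in batch]
        for future in as_completed(futures):
            results.append(future.result())

print(len(results))  # 25
```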
@@ -147,7 +147,6 @@ def decrypt_links_if_hide(shared_state: Any, items: List[List[str]]) -> Dict[str
             else:
                 debug(f"Not a hide.cx link (skipped): {final_url}")

-
         except requests.RequestException as e:
             info(f"Error resolving URL {original_url}: {e}")
             continue