quasarr-2.4.10-py3-none-any.whl → quasarr-2.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of quasarr has been flagged as potentially problematic.
- quasarr/__init__.py +3 -3
- quasarr/api/__init__.py +7 -7
- quasarr/api/captcha/__init__.py +1 -1
- quasarr/api/config/__init__.py +22 -169
- quasarr/api/sponsors_helper/__init__.py +2 -2
- quasarr/downloads/linkcrypters/hide.py +1 -1
- quasarr/downloads/packages/__init__.py +5 -5
- quasarr/downloads/sources/al.py +1 -1
- quasarr/downloads/sources/dw.py +1 -1
- quasarr/downloads/sources/sf.py +1 -1
- quasarr/providers/notifications.py +1 -1
- quasarr/providers/shared_state.py +0 -6
- quasarr/providers/version.py +1 -1
- quasarr/search/__init__.py +86 -15
- quasarr/search/sources/al.py +0 -5
- quasarr/search/sources/he.py +1 -2
- quasarr/storage/setup.py +517 -236
- quasarr/storage/sqlite_database.py +1 -1
- {quasarr-2.4.10.dist-info → quasarr-2.5.0.dist-info}/METADATA +1 -1
- {quasarr-2.4.10.dist-info → quasarr-2.5.0.dist-info}/RECORD +23 -23
- {quasarr-2.4.10.dist-info → quasarr-2.5.0.dist-info}/WHEEL +0 -0
- {quasarr-2.4.10.dist-info → quasarr-2.5.0.dist-info}/entry_points.txt +0 -0
- {quasarr-2.4.10.dist-info → quasarr-2.5.0.dist-info}/licenses/LICENSE +0 -0
quasarr/__init__.py
CHANGED

@@ -15,7 +15,7 @@ import requests
 import quasarr.providers.web_server
 from quasarr.api import get_api
 from quasarr.providers import shared_state, version
-from quasarr.providers.log import
+from quasarr.providers.log import info
 from quasarr.providers.notifications import send_discord_message
 from quasarr.providers.utils import (
 FALLBACK_USER_AGENT,

@@ -194,7 +194,7 @@ def run():
 print(
 f"You have [{len(hostnames)} of {len(Config._DEFAULT_CONFIG['Hostnames'])}] supported hostnames set up"
 )
-print(
+print("For efficiency it is recommended to set up as few hostnames as needed.")

 # Check credentials for login-required hostnames
 skip_login_db = DataBase("skip_login")

@@ -243,7 +243,7 @@ def run():
 discord_webhook_pattern = r"^https://discord\.com/api/webhooks/\d+/[\w-]+$"
 if re.match(discord_webhook_pattern, arguments.discord):
 shared_state.update("webhook", arguments.discord)
-print(
+print("Using Discord Webhook URL for notifications.")
 discord_url = arguments.discord
 else:
 print(f"Invalid Discord Webhook URL provided: {arguments.discord}")
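As a standalone illustration of the webhook validation in the last hunk, here is a minimal sketch that reuses the same pattern and messages; the candidate URL is a dummy value, not a real webhook.

import re

discord_webhook_pattern = r"^https://discord\.com/api/webhooks/\d+/[\w-]+$"

# Dummy value for illustration only; real webhook URLs follow the same shape.
candidate = "https://discord.com/api/webhooks/123456789012345678/AbCdEfGh-123_456"
if re.match(discord_webhook_pattern, candidate):
    print("Using Discord Webhook URL for notifications.")
else:
    print(f"Invalid Discord Webhook URL provided: {candidate}")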
quasarr/api/__init__.py
CHANGED

@@ -367,15 +367,15 @@ def get_api(shared_state_dict, shared_state_lock):
 /* Dark mode */
 @media (prefers-color-scheme: dark) {{
 :root {{
---status-success-bg: #
---status-success-color: #
---status-success-border: #
+--status-success-bg: #1c4532;
+--status-success-color: #68d391;
+--status-success-border: #276749;
 --status-warning-bg: #3d3520;
 --status-warning-color: #ffb74d;
 --status-warning-border: #d69e2e;
---status-error-bg: #
---status-error-color: #
---status-error-border: #
+--status-error-bg: #3d2d2d;
+--status-error-color: #fc8181;
+--status-error-border: #c53030;
 --alert-warning-bg: #3d3520;
 --alert-warning-border: #d69e2e;
 --card-bg: #2d3748;

@@ -486,6 +486,6 @@ def get_api(shared_state_dict, shared_state_lock):
 @app.get("/regenerate-api-key")
 def regenerate_api_key():
 shared_state.generate_api_key()
-return render_success(
+return render_success("API Key replaced!", 5)

 Server(app, listen="0.0.0.0", port=shared_state.values["port"]).serve_forever()
quasarr/api/captcha/__init__.py
CHANGED

@@ -165,7 +165,7 @@ def setup_captcha_routes(app):
 debug("Redirecting to ToLink CAPTCHA")
 redirect(f"/captcha/tolink?data={quote(encoded_payload)}")
 else:
-debug(
+debug("Redirecting to cutcaptcha")
 redirect(f"/captcha/cutcaptcha?data={quote(encoded_payload)}")

 return render_centered_html(f'''<h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
quasarr/api/config/__init__.py
CHANGED

@@ -3,27 +3,24 @@
 # Project by https://github.com/rix1337

 import os
-import re
 import signal
 import threading
 import time
-from urllib.parse import urlparse

-import
-from bottle import request, response
+from bottle import response

-from quasarr.providers.html_templates import render_button,
+from quasarr.providers.html_templates import render_button, render_form
 from quasarr.providers.log import info
-from quasarr.providers.shared_state import extract_valid_hostname
-from quasarr.providers.utils import (
-check_flaresolverr,
-extract_allowed_keys,
-extract_kv_pairs,
-)
 from quasarr.storage.config import Config
 from quasarr.storage.setup import (
+check_credentials,
+clear_skip_login,
+delete_skip_flaresolverr_preference,
+get_flaresolverr_status_data,
+get_skip_login,
 hostname_form_html,
-
+import_hostnames_from_url,
+save_flaresolverr_url,
 save_hostnames,
 )
 from quasarr.storage.sqlite_database import DataBase

@@ -50,7 +47,6 @@ def setup_config(app, shared_state):
 hostname_form_html(
 shared_state,
 message,
-show_restart_button=True,
 show_skip_management=True,
 )
 + back_button,

@@ -60,97 +56,21 @@ def setup_config(app, shared_state):
 def hostnames_api():
 return save_hostnames(shared_state, timeout=1, first_run=False)

-@app.post("/api/hostnames/
-def
-
-response.content_type = "application/json"
-try:
-data = request.json
-url = data.get("url", "").strip()
-
-if not url:
-return {"success": False, "error": "No URL provided"}
-
-# Validate URL
-parsed = urlparse(url)
-if parsed.scheme not in ("http", "https") or not parsed.netloc:
-return {"success": False, "error": "Invalid URL format"}
-
-# Fetch content
-try:
-resp = requests.get(url, timeout=15)
-resp.raise_for_status()
-content = resp.text
-except requests.RequestException as e:
-info(f"Failed to fetch hostnames URL: {e}")
-return {
-"success": False,
-"error": "Failed to fetch URL. Check the console log for details.",
-}
-
-# Parse hostnames
-allowed_keys = extract_allowed_keys(Config._DEFAULT_CONFIG, "Hostnames")
-results = extract_kv_pairs(content, allowed_keys)
-
-if not results:
-return {
-"success": False,
-"error": "No hostnames found in the provided URL",
-}
+@app.post("/api/hostnames/check-credentials/<shorthand>")
+def check_credentials_api(shorthand):
+return check_credentials(shared_state, shorthand)

-
-
-
-for shorthand, hostname in results.items():
-domain_check = extract_valid_hostname(hostname, shorthand)
-domain = domain_check.get("domain")
-if domain:
-valid_hostnames[shorthand] = domain
-else:
-invalid_hostnames[shorthand] = domain_check.get(
-"message", "Invalid"
-)
-
-if not valid_hostnames:
-return {
-"success": False,
-"error": "No valid hostnames found in the provided URL",
-}
-
-return {
-"success": True,
-"hostnames": valid_hostnames,
-"errors": invalid_hostnames,
-}
-
-except Exception as e:
-return {"success": False, "error": f"Error: {str(e)}"}
+@app.post("/api/hostnames/import-url")
+def import_hostnames_route():
+return import_hostnames_from_url()

 @app.get("/api/skip-login")
-def
-
-response.content_type = "application/json"
-skip_db = DataBase("skip_login")
-login_required_sites = ["al", "dd", "dl", "nx"]
-skipped = []
-for site in login_required_sites:
-if skip_db.retrieve(site):
-skipped.append(site)
-return {"skipped": skipped}
+def get_skip_login_route():
+return get_skip_login()

 @app.delete("/api/skip-login/<shorthand>")
-def
-
-response.content_type = "application/json"
-shorthand = shorthand.lower()
-login_required_sites = ["al", "dd", "dl", "nx"]
-if shorthand not in login_required_sites:
-return {"success": False, "error": f"Invalid shorthand: {shorthand}"}
-
-skip_db = DataBase("skip_login")
-skip_db.delete(shorthand)
-info(f'Skip login preference cleared for "{shorthand.upper()}"')
-return {"success": True}
+def clear_skip_login_route(shorthand):
+return clear_skip_login(shorthand)

 @app.get("/flaresolverr")
 def flaresolverr_ui():

@@ -183,12 +103,6 @@ def setup_config(app, shared_state):
 {form_content}
 {render_button("Save", "primary", {"type": "submit", "id": "submitBtn"})}
 </form>
-<p style="font-size:0.875rem; color:var(--secondary, #6c757d); margin-top:1rem;">
-A restart is recommended after configuring FlareSolverr.
-</p>
-<div class="section-divider" style="margin-top:1.5rem; padding-top:1rem; border-top:1px solid var(--divider-color, #dee2e6);">
-{render_button("Restart Quasarr", "secondary", {"type": "button", "onclick": "confirmRestart()"})}
-</div>
 <p>{render_button("Back", "secondary", {"onclick": "location.href='/';"})}</p>
 <script>
 var formSubmitted = false;

@@ -278,78 +192,17 @@ def setup_config(app, shared_state):
 @app.post("/api/flaresolverr")
 def set_flaresolverr_url():
 """Save FlareSolverr URL from web UI."""
-
-config = Config("FlareSolverr")
-
-if not url:
-return render_fail("Please provide a FlareSolverr URL.")
-
-if not url.startswith("http://") and not url.startswith("https://"):
-url = "http://" + url
-
-# Validate URL format
-if not re.search(r"/v\d+$", url):
-return render_fail(
-"FlareSolverr URL must end with /v1 (or similar version path)."
-)
-
-try:
-headers = {"Content-Type": "application/json"}
-data = {
-"cmd": "request.get",
-"url": "http://www.google.com/",
-"maxTimeout": 30000,
-}
-resp = requests.post(url, headers=headers, json=data, timeout=30)
-if resp.status_code == 200:
-json_data = resp.json()
-if json_data.get("status") == "ok":
-config.save("url", url)
-# Clear skip preference since we now have a working URL
-DataBase("skip_flaresolverr").delete("skipped")
-# Update user agent from FlareSolverr response
-solution = json_data.get("solution", {})
-solution_ua = solution.get("userAgent")
-if solution_ua:
-shared_state.update("user_agent", solution_ua)
-info(f'FlareSolverr URL configured: "{url}"')
-return render_reconnect_success(
-"FlareSolverr URL saved successfully! A restart is recommended."
-)
-else:
-return render_fail(
-f"FlareSolverr returned unexpected status: {json_data.get('status')}"
-)
-except requests.RequestException:
-return render_fail(f"Could not reach FlareSolverr!")
-
-return render_fail(
-"Could not reach FlareSolverr at that URL (expected HTTP 200)."
-)
+return save_flaresolverr_url(shared_state)

 @app.get("/api/flaresolverr/status")
 def get_flaresolverr_status():
 """Return FlareSolverr configuration status."""
-
-skip_db = DataBase("skip_flaresolverr")
-is_skipped = bool(skip_db.retrieve("skipped"))
-current_url = Config("FlareSolverr").get("url") or ""
-
-# Test connection if URL is set
-is_working = False
-if current_url and not is_skipped:
-is_working = check_flaresolverr(shared_state, current_url)
-
-return {"skipped": is_skipped, "url": current_url, "working": is_working}
+return get_flaresolverr_status_data(shared_state)

 @app.delete("/api/skip-flaresolverr")
 def clear_skip_flaresolverr():
 """Clear skip FlareSolverr preference."""
-
-skip_db = DataBase("skip_flaresolverr")
-skip_db.delete("skipped")
-info("Skip FlareSolverr preference cleared")
-return {"success": True}
+return delete_skip_flaresolverr_preference()

 @app.post("/api/restart")
 def restart_quasarr():
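The net effect of the hunks above is that the Bottle routes in quasarr/api/config become thin wrappers around helpers that now live in quasarr.storage.setup. A minimal sketch of that pattern, with a stubbed helper standing in for the real implementation:

from bottle import Bottle

app = Bottle()

def get_skip_login():
    # Stub for illustration; in 2.5.0 the real helper lives in quasarr.storage.setup
    # and reads the skip_login database.
    return {"skipped": []}

@app.get("/api/skip-login")
def get_skip_login_route():
    # The route keeps no logic of its own and simply defers to the setup helper.
    return get_skip_login()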
quasarr/api/sponsors_helper/__init__.py
CHANGED

@@ -27,7 +27,7 @@ def setup_sponsors_helper_routes(app):
 try:
 if not shared_state.values["helper_active"]:
 shared_state.update("helper_active", True)
-info(
+info("Sponsor status activated successfully")

 protected = shared_state.get_db("protected").retrieve_all_titles()
 if not protected:

@@ -174,7 +174,7 @@ def setup_sponsors_helper_routes(app):
 payload = json.loads(data)
 if payload["activate"]:
 shared_state.update("helper_active", True)
-info(
+info("Sponsor status activated successfully")
 return "Sponsor status activated successfully!"
 except:
 pass
quasarr/downloads/linkcrypters/hide.py
CHANGED

@@ -169,7 +169,7 @@ def decrypt_links_if_hide(shared_state: Any, items: List[List[str]]) -> Dict[str
 continue

 if not decrypted_links:
-info(
+info("Could not decrypt any links from hide.cx URLs.")
 return {"status": "error", "results": []}

 return {"status": "success", "results": decrypted_links}
quasarr/downloads/packages/__init__.py
CHANGED

@@ -197,7 +197,7 @@ def get_links_status(package, all_links, is_archive=False):
 break

 if has_extraction_activity:
-debug(
+debug("get_links_status: Package has extraction activity detected")

 # Second pass: check each link's status
 for link in links_in_package:

@@ -856,7 +856,7 @@ def delete_package(shared_state, package_id):
 )
 else:
 debug(
-
+"delete_package: No link IDs found for linkgrabber package"
 )

 elif package_type == "downloader":

@@ -879,7 +879,7 @@ def delete_package(shared_state, package_id):
 debug(f"delete_package: Downloads cleanup failed: {e}")
 else:
 debug(
-
+"delete_package: No link IDs found for downloader package"
 )

 # Always clean up database entries (no state check - just clean whatever exists)

@@ -889,7 +889,7 @@ def delete_package(shared_state, package_id):
 try:
 shared_state.get_db("failed").delete(package_id)
 debug(
-
+"delete_package: Deleted from failed DB (or was not present)"
 )
 except Exception as e:
 debug(

@@ -898,7 +898,7 @@ def delete_package(shared_state, package_id):
 try:
 shared_state.get_db("protected").delete(package_id)
 debug(
-
+"delete_package: Deleted from protected DB (or was not present)"
 )
 except Exception as e:
 debug(
quasarr/downloads/sources/al.py
CHANGED

@@ -754,7 +754,7 @@ def get_al_download_links(shared_state, url, mirror, title, password):
 break
 else:
 info(
-
+"CAPTCHA was solved, but no links are available for the selection!"
 )
 StatsHelper(
 shared_state
quasarr/downloads/sources/dw.py
CHANGED

@@ -70,7 +70,7 @@ def get_dw_download_links(shared_state, url, mirror, title, password):
 button.nextSibling.img["src"].split("/")[-1].replace(".png", "")
 )
 hoster = (
-
+"1fichier" if hoster.startswith("fichier") else hoster
 ) # align with expected mirror name
 if mirror and mirror.lower() not in hoster.lower():
 debug(
quasarr/downloads/sources/sf.py
CHANGED

@@ -130,7 +130,7 @@ def get_sf_download_links(shared_state, url, mirror, title, password):
 + sf
 + "/api/v1/"
 + season_id
-+
++ "/season/ALL?lang=ALL&_="
 + epoch
 )
 r = requests.get(api_url, headers=headers, timeout=10)
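For context, a minimal sketch of how a cache-busted API URL like the one above is assembled; the scheme, hostname, season id, and headers below are placeholders rather than values from this file.

import time
import requests

sf = "sf.example"                        # placeholder hostname
season_id = "abc123"                     # placeholder season id
headers = {"User-Agent": "Mozilla/5.0"}  # placeholder headers

epoch = str(int(time.time()))  # appended as "_" to defeat caching
api_url = "https://" + sf + "/api/v1/" + season_id + "/season/ALL?lang=ALL&_=" + epoch
r = requests.get(api_url, headers=headers, timeout=10)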
quasarr/providers/notifications.py
CHANGED

@@ -71,7 +71,7 @@ def send_discord_message(
 fields.append(
 {
 "name": "SponsorsHelper",
-"value":
+"value": "[Sponsors get automated CAPTCHA solutions!](https://github.com/rix1337/Quasarr?tab=readme-ov-file#sponsorshelper)",
 }
 )
 elif case == "quasarr_update":
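A minimal sketch of how a field like this ends up in a Discord webhook payload; the embed wrapper follows Discord's standard webhook format and the webhook URL is a placeholder, neither is taken from this file.

import requests

fields = [
    {
        "name": "SponsorsHelper",
        "value": "[Sponsors get automated CAPTCHA solutions!](https://github.com/rix1337/Quasarr?tab=readme-ov-file#sponsorshelper)",
    }
]

payload = {"embeds": [{"title": "Quasarr", "fields": fields}]}
# Placeholder webhook URL; a real one matches the pattern validated in quasarr/__init__.py.
requests.post("https://discord.com/api/webhooks/<id>/<token>", json=payload, timeout=10)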
quasarr/providers/shared_state.py
CHANGED

@@ -212,8 +212,6 @@ def connect_device():

 def get_device():
 attempts = 0
-last_backoff_change = 0 # Track when we last changed backoff strategy
-
 while True:
 try:
 if check_device(values["device"]):

@@ -951,14 +949,10 @@ def _month_num(name: str) -> int:
 "january": 1,
 "february": 2,
 "march": 3,
-"april": 4,
 "may": 5,
 "june": 6,
 "july": 7,
-"august": 8,
-"september": 9,
 "october": 10,
-"november": 11,
 "december": 12,
 }
 return mmap.get(name)
quasarr/providers/version.py
CHANGED (not expanded in this view; per the summary above, a one-line change — the version bump to 2.5.0)
quasarr/search/__init__.py
CHANGED

@@ -35,8 +35,6 @@ def get_search_results(
 season="",
 episode="",
 ):
-results = []
-
 if imdb_id and not imdb_id.startswith("tt"):
 imdb_id = f"tt{imdb_id}"

@@ -66,7 +64,7 @@ def get_search_results(

 start_time = time.time()

-
+search_executor = SearchExecutor()

 # Radarr/Sonarr use imdb_id for searches
 imdb_map = [

@@ -127,7 +125,7 @@ def get_search_results(
 )
 for flag, func in imdb_map:
 if flag:
-
+search_executor.add(func, args, kwargs, True)

 elif (
 search_phrase and docs_search

@@ -138,7 +136,7 @@ def get_search_results(
 )
 for flag, func in phrase_map:
 if flag:
-
+search_executor.add(func, args, kwargs)

 elif search_phrase:
 debug(

@@ -149,7 +147,7 @@ def get_search_results(
 args, kwargs = ((shared_state, start_time, request_from), {"mirror": mirror})
 for flag, func in feed_map:
 if flag:
-
+search_executor.add(func, args, kwargs)

 if imdb_id:
 stype = f'IMDb-ID "{imdb_id}"'

@@ -159,21 +157,94 @@ def get_search_results(
 stype = "feed search"

 info(
-f"Starting {len(
+f"Starting {len(search_executor.searches)} searches for {stype}... This may take some time."
+)
+results = search_executor.run_all()
+elapsed_time = time.time() - start_time
+info(
+f"Providing {len(results)} releases to {request_from} for {stype}. Time taken: {elapsed_time:.2f} seconds"
 )

-
-
-
+return results
+
+
+class SearchExecutor:
+def __init__(self):
+self.searches = []
+
+def add(self, func, args, kwargs, use_cache=False):
+# create cache key
+key_args = list(args)
+key_args[1] = None # ignore start_time in cache key
+key_args = tuple(key_args)
+key = hash((func.__name__, key_args, frozenset(kwargs.items())))
+
+self.searches.append((key, lambda: func(*args, **kwargs), use_cache))
+
+def run_all(self):
+results = []
+futures = []
+cache_keys = []
+cache_used = False
+
+with ThreadPoolExecutor() as executor:
+for key, func, use_cache in self.searches:
+if use_cache:
+cached_result = search_cache.get(key)
+if cached_result is not None:
+debug(f"Using cached result for {key}")
+cache_used = True
+results.extend(cached_result)
+continue
+
+futures.append(executor.submit(func))
+cache_keys.append(key if use_cache else None)
+
+for index, future in enumerate(as_completed(futures)):
 try:
 result = future.result()
 results.extend(result)
+
+if cache_keys[index]: # only cache if flag is set
+search_cache.set(cache_keys[index], result)
 except Exception as e:
 info(f"An error occurred: {e}")

-
-
-f"Providing {len(results)} releases to {request_from} for {stype}. Time taken: {elapsed_time:.2f} seconds"
-)
+if cache_used:
+info("Presenting cached results instead of searching online.")

-
+return results
+
+
+class SearchCache:
+def __init__(self):
+self.last_cleaned = time.time()
+self.cache = {}
+
+def clean(self, now):
+if now - self.last_cleaned < 60:
+return
+
+keys_to_delete = [
+key for key, (_, expiry) in self.cache.items() if now >= expiry
+]
+
+for key in keys_to_delete:
+del self.cache[key]
+
+self.last_cleaned = now
+
+def get(self, key):
+value, expiry = self.cache.get(key, (None, 0))
+if time.time() < expiry:
+return value
+
+return None
+
+def set(self, key, value, ttl=300):
+now = time.time()
+self.cache[key] = (value, now + ttl)
+self.clean(now)
+
+
+search_cache = SearchCache()
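A short usage sketch of the SearchCache introduced above, based directly on the code in this hunk; the function name, arguments, and release title used for the key and value are made up for illustration.

from quasarr.search import SearchCache

cache = SearchCache()

# Key construction mirrors SearchExecutor.add(): function name, args with start_time
# masked out, and the kwargs. The concrete values are illustrative.
key = hash(("xx_search", ("shared_state", None, "Radarr", "tt0133093"),
            frozenset({"mirror": None}.items())))

cache.set(key, ["Some.Movie.1999.1080p.WEB"], ttl=300)  # cached for five minutes
assert cache.get(key) == ["Some.Movie.1999.1080p.WEB"]  # hit while the TTL is running
# Once the TTL expires, get() returns None and run_all() queries the sources again.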
quasarr/search/sources/al.py
CHANGED

@@ -23,11 +23,6 @@ hostname = "al"
 supported_mirrors = ["rapidgator", "ddownload"]


-def convert_to_rss_date(date_str: str) -> str:
-parsed = datetime.strptime(date_str, "%d.%m.%Y - %H:%M")
-return parsed.strftime("%a, %d %b %Y %H:%M:%S +0000")
-
-
 import re
 from datetime import datetime, timedelta

quasarr/search/sources/he.py
CHANGED

@@ -28,7 +28,6 @@ def parse_posted_ago(txt):
 return ""
 value = int(m.group(1))
 unit = m.group(2).lower()
-now = datetime.utcnow()
 if unit.startswith("sec"):
 delta = timedelta(seconds=value)
 elif unit.startswith("min"):

@@ -210,7 +209,7 @@ def he_search(
 continue
 else:
 debug(f"{hostname}: imdb link not found for title {title}")
-except Exception
+except Exception:
 debug(f"{hostname}: failed to determine imdb_id for title {title}")
 continue
