quasarr 2.6.1-py3-none-any.whl → 2.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of quasarr might be problematic.
- quasarr/__init__.py +71 -61
- quasarr/api/__init__.py +1 -2
- quasarr/api/arr/__init__.py +159 -56
- quasarr/api/captcha/__init__.py +203 -154
- quasarr/downloads/__init__.py +12 -8
- quasarr/downloads/linkcrypters/al.py +3 -3
- quasarr/downloads/linkcrypters/filecrypt.py +1 -2
- quasarr/downloads/packages/__init__.py +62 -88
- quasarr/downloads/sources/al.py +3 -3
- quasarr/downloads/sources/by.py +3 -3
- quasarr/downloads/sources/he.py +8 -9
- quasarr/downloads/sources/nk.py +3 -3
- quasarr/downloads/sources/sl.py +6 -1
- quasarr/downloads/sources/wd.py +93 -37
- quasarr/downloads/sources/wx.py +11 -17
- quasarr/providers/auth.py +9 -13
- quasarr/providers/cloudflare.py +4 -3
- quasarr/providers/imdb_metadata.py +0 -2
- quasarr/providers/jd_cache.py +64 -90
- quasarr/providers/log.py +226 -8
- quasarr/providers/myjd_api.py +116 -94
- quasarr/providers/sessions/al.py +20 -22
- quasarr/providers/sessions/dd.py +1 -1
- quasarr/providers/sessions/dl.py +8 -10
- quasarr/providers/sessions/nx.py +1 -1
- quasarr/providers/shared_state.py +26 -15
- quasarr/providers/utils.py +15 -6
- quasarr/providers/version.py +1 -1
- quasarr/search/__init__.py +91 -78
- quasarr/search/sources/al.py +19 -23
- quasarr/search/sources/by.py +6 -6
- quasarr/search/sources/dd.py +8 -10
- quasarr/search/sources/dj.py +15 -18
- quasarr/search/sources/dl.py +25 -37
- quasarr/search/sources/dt.py +13 -15
- quasarr/search/sources/dw.py +24 -16
- quasarr/search/sources/fx.py +25 -11
- quasarr/search/sources/he.py +16 -14
- quasarr/search/sources/hs.py +7 -7
- quasarr/search/sources/mb.py +7 -7
- quasarr/search/sources/nk.py +24 -25
- quasarr/search/sources/nx.py +22 -15
- quasarr/search/sources/sf.py +18 -9
- quasarr/search/sources/sj.py +7 -7
- quasarr/search/sources/sl.py +26 -14
- quasarr/search/sources/wd.py +61 -31
- quasarr/search/sources/wx.py +33 -47
- quasarr/storage/config.py +1 -3
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/METADATA +4 -1
- quasarr-2.7.0.dist-info/RECORD +84 -0
- quasarr-2.6.1.dist-info/RECORD +0 -84
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/WHEEL +0 -0
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/entry_points.txt +0 -0
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/licenses/LICENSE +0 -0
quasarr/search/sources/sl.py
CHANGED

@@ -14,9 +14,10 @@ from urllib.parse import quote_plus
 import requests
 from bs4 import BeautifulSoup

+from quasarr.providers.cloudflare import ensure_session_cf_bypassed
 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, info, warn

 hostname = "sl"
 supported_mirrors = [

@@ -66,7 +67,13 @@ def sl_feed(shared_state, start_time, request_from, mirror=None):
 headers = {"User-Agent": shared_state.values["user_agent"]}

 try:
-
+    session = requests.Session()
+    session, headers, r = ensure_session_cf_bypassed(
+        info, shared_state, session, url, headers
+    )
+    if not r:
+        raise requests.RequestException("Cloudflare bypass failed")
+
     r.raise_for_status()
     root = ET.fromstring(r.text)

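The hunks above route `sl_feed` (and, further down, `sl_search`) through the new `ensure_session_cf_bypassed` helper instead of a bare `requests.get`. A minimal sketch of that fetch pattern, assuming only the `(session, headers, response)` return contract implied by the diff; the wrapper function name is illustrative and not part of the package:

```python
import requests

from quasarr.providers.cloudflare import ensure_session_cf_bypassed
from quasarr.providers.log import info


def fetch_text_behind_cloudflare(shared_state, url, headers):
    """Illustrative wrapper around the fetch pattern used in sl_feed / sl_search."""
    session = requests.Session()
    # Per the hunk, the helper returns (session, headers, response) and a falsy
    # response when the Cloudflare bypass did not succeed.
    session, headers, r = ensure_session_cf_bypassed(
        info, shared_state, session, url, headers
    )
    if not r:
        raise requests.RequestException("Cloudflare bypass failed")
    r.raise_for_status()
    return r.text
```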
@@ -122,20 +129,20 @@ def sl_feed(shared_state, start_time, request_from, mirror=None):
 )

 except Exception as e:
-
+    warn(f"Error parsing {hostname.upper()} feed item: {e}")
     mark_hostname_issue(
         hostname, "feed", str(e) if "e" in dir() else "Error occurred"
     )
     continue

 except Exception as e:
-
+    warn(f"Error loading {hostname.upper()} feed: {e}")
     mark_hostname_issue(
         hostname, "feed", str(e) if "e" in dir() else "Error occurred"
     )

 elapsed = time.time() - start_time
-debug(f"Time taken: {elapsed:.2f}s
+debug(f"Time taken: {elapsed:.2f}s")

 if releases:
     clear_hostname_issue(hostname)

@@ -188,12 +195,17 @@ def sl_search(
 # Fetch pages in parallel (so we don't double the slow site latency)
 def fetch(url):
     try:
-        debug(f"Fetching {url}
-
+        debug(f"Fetching {url}")
+        session = requests.Session()
+        session, _, r = ensure_session_cf_bypassed(
+            info, shared_state, session, url, headers
+        )
+        if not r:
+            raise requests.RequestException("Cloudflare bypass failed")
         r.raise_for_status()
         return r.text
     except Exception as e:
-        info(f"Error fetching
+        info(f"Error fetching url {url}: {e}")
         mark_hostname_issue(
             hostname, "search", str(e) if "e" in dir() else "Error occurred"
         )

@@ -206,7 +218,7 @@ def sl_search(
 try:
     html_texts.append(future.result())
 except Exception as e:
-
+    warn(f"Error fetching search page: {e}")
     mark_hostname_issue(
         hostname, "search", str(e) if "e" in dir() else "Error occurred"
     )

@@ -234,6 +246,7 @@ def sl_search(

 if "lazylibrarian" in request_from.lower():
     title = shared_state.normalize_magazine_title(title)
+imdb_id = None

 source = a["href"]
 # dedupe

@@ -252,7 +265,6 @@ def sl_search(
 )

 size = 0
-imdb_id = None

 payload = urlsafe_b64encode(
     f"{title}|{source}|{mirror}|0|{password}|{imdb_id}|{hostname}".encode(

@@ -277,7 +289,7 @@ def sl_search(
 }
 )
 except Exception as e:
-
+    warn(f"Error parsing {hostname.upper()} search item: {e}")
     mark_hostname_issue(
         hostname,
         "search",

@@ -285,20 +297,20 @@ def sl_search(
 )
 continue
 except Exception as e:
-
+    warn(f"Error parsing {hostname.upper()} search HTML: {e}")
     mark_hostname_issue(
         hostname, "search", str(e) if "e" in dir() else "Error occurred"
     )
     continue

 except Exception as e:
-
+    warn(f"Error loading {hostname.upper()} search page: {e}")
     mark_hostname_issue(
         hostname, "search", str(e) if "e" in dir() else "Error occurred"
     )

 elapsed = time.time() - start_time
-debug(f"Search time: {elapsed:.2f}s
+debug(f"Search time: {elapsed:.2f}s")

 if releases:
     clear_hostname_issue(hostname)
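The search hunks also move `imdb_id = None` ahead of the payload construction, so the value is always bound when the pipe-delimited payload is assembled. A sketch of that payload format as it appears in the diff, with an illustrative decoder that is not part of quasarr:

```python
from base64 import urlsafe_b64decode, urlsafe_b64encode


def encode_payload(title, source, mirror, size, password, imdb_id, hostname):
    raw = f"{title}|{source}|{mirror}|{size}|{password}|{imdb_id}|{hostname}"
    return urlsafe_b64encode(raw.encode("utf-8")).decode("utf-8")


def decode_payload(payload):
    fields = urlsafe_b64decode(payload.encode("utf-8")).decode("utf-8").split("|")
    keys = ("title", "source", "mirror", "size", "password", "imdb_id", "hostname")
    return dict(zip(keys, fields))


# With imdb_id initialized up front, a release without an IMDb match simply
# serializes the literal string "None" in that field.
payload = encode_payload("Some.Release.1080p", "https://example.invalid/x", None, 0, "pw", None, "sl")
print(decode_payload(payload))
```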
quasarr/search/sources/wd.py
CHANGED

@@ -9,12 +9,13 @@ from base64 import urlsafe_b64encode
 from datetime import datetime, timedelta
 from urllib.parse import quote, quote_plus

+import requests
 from bs4 import BeautifulSoup

 from quasarr.providers.cloudflare import flaresolverr_get, is_cloudflare_challenge
 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title, get_year
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, error, info
 from quasarr.providers.utils import is_flaresolverr_available

 hostname = "wd"

@@ -57,6 +58,7 @@ def _parse_rows(
 search_string=None,
 season=None,
 episode=None,
+imdb_id=None,
 ):
 """
 Walk the <table> rows, extract one release per row.

@@ -128,7 +130,6 @@ def _parse_rows(
 mb = shared_state.convert_to_mb(sz)
 size_bytes = mb * 1024 * 1024

-imdb_id = None
 published = convert_to_rss_date(date_txt) if date_txt else one_hour_ago

 payload = urlsafe_b64encode(

@@ -154,7 +155,7 @@ def _parse_rows(
 }
 )
 except Exception as e:
-    debug(f"Error parsing
+    debug(f"Error parsing row: {e}")
     continue
 return releases

@@ -171,31 +172,45 @@ def wd_feed(shared_state, start_time, request_from, mirror=None):
 feed_type = "Serien"

 url = f"https://{wd}/{feed_type}"
-
-if not is_flaresolverr_available(shared_state):
-    info(
-        f"FlareSolverr is not configured. Cannot access {hostname.upper()} feed due to Cloudflare protection."
-    )
-    mark_hostname_issue(hostname, "feed", "FlareSolverr missing")
-    return []
+headers = {"User-Agent": shared_state.values["user_agent"]}

 try:
-
-
-
-
-
+    # Try normal request first
+    try:
+        r = requests.get(url, headers=headers, timeout=30)
+    except requests.RequestException:
+        r = None
+
+    # If blocked or failed, try FlareSolverr
+    if r is None or r.status_code == 403 or is_cloudflare_challenge(r.text):
+        if is_flaresolverr_available(shared_state):
+            debug(
+                f"Encountered Cloudflare on {hostname} feed. Trying FlareSolverr..."
+            )
+            r = flaresolverr_get(shared_state, url)
+        elif r is None:
+            raise requests.RequestException(
+                "Connection failed and FlareSolverr not available"
+            )
+        elif r.status_code == 403 or is_cloudflare_challenge(r.text):
+            info(
+                f"Cloudflare protection detected on {hostname} feed but FlareSolverr is not configured."
+            )
+            mark_hostname_issue(
+                hostname, "feed", "Cloudflare protection - FlareSolverr missing"
+            )
+            return []

     r.raise_for_status()
     soup = BeautifulSoup(r.content, "html.parser")
     releases = _parse_rows(soup, shared_state, wd, password, mirror)
 except Exception as e:
-
+    error(f"Error loading feed: {e}")
     mark_hostname_issue(
         hostname, "feed", str(e) if "e" in dir() else "Error occurred"
     )
     releases = []
-debug(f"Time taken: {time.time() - start_time:.2f}s
+debug(f"Time taken: {time.time() - start_time:.2f}s")

 if releases:
     clear_hostname_issue(hostname)
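Where 2.6.1 refused to touch WD at all without FlareSolverr, `wd_feed` now attempts a plain request first and only falls back to FlareSolverr when the response looks like a Cloudflare block; `wd_search` below repeats the same logic. A condensed sketch of that fallback, using only the calls visible in the hunk (the helper name is illustrative and the issue reporting from the diff is omitted):

```python
import requests

from quasarr.providers.cloudflare import flaresolverr_get, is_cloudflare_challenge
from quasarr.providers.utils import is_flaresolverr_available


def get_with_flaresolverr_fallback(shared_state, url, headers):
    """Plain GET first; only consult FlareSolverr when Cloudflare blocks the request."""
    try:
        r = requests.get(url, headers=headers, timeout=30)
    except requests.RequestException:
        r = None

    if r is None or r.status_code == 403 or is_cloudflare_challenge(r.text):
        if is_flaresolverr_available(shared_state):
            r = flaresolverr_get(shared_state, url)  # solve the challenge via FlareSolverr
        elif r is None:
            raise requests.RequestException("Connection failed and FlareSolverr not available")
        else:
            return None  # still behind Cloudflare and FlareSolverr is not configured
    return r
```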
@@ -228,20 +243,34 @@ def wd_search(

 q = quote_plus(search_string)
 url = f"https://{wd}/search?q={q}"
-
-if not is_flaresolverr_available(shared_state):
-    info(
-        f"FlareSolverr is not configured. Cannot access {hostname.upper()} search due to Cloudflare protection."
-    )
-    mark_hostname_issue(hostname, "search", "FlareSolverr missing")
-    return []
+headers = {"User-Agent": shared_state.values["user_agent"]}

 try:
-
-
-
-
-
+    # Try normal request first
+    try:
+        r = requests.get(url, headers=headers, timeout=30)
+    except requests.RequestException:
+        r = None
+
+    # If blocked or failed, try FlareSolverr
+    if r is None or r.status_code == 403 or is_cloudflare_challenge(r.text):
+        if is_flaresolverr_available(shared_state):
+            debug(
+                f"Encountered Cloudflare on {hostname} search. Trying FlareSolverr..."
+            )
+            r = flaresolverr_get(shared_state, url)
+        elif r is None:
+            raise requests.RequestException(
+                "Connection failed and FlareSolverr not available"
+            )
+        elif r.status_code == 403 or is_cloudflare_challenge(r.text):
+            info(
+                f"Cloudflare protection detected on {hostname} search but FlareSolverr is not configured."
+            )
+            mark_hostname_issue(
+                hostname, "search", "Cloudflare protection - FlareSolverr missing"
+            )
+            return []

     r.raise_for_status()
     soup = BeautifulSoup(r.content, "html.parser")

@@ -255,14 +284,15 @@ def wd_search(
 search_string=search_string,
 season=season,
 episode=episode,
+imdb_id=imdb_id,
 )
 except Exception as e:
-
+    error(f"Error loading search: {e}")
     mark_hostname_issue(
         hostname, "search", str(e) if "e" in dir() else "Error occurred"
     )
     releases = []
-debug(f"Time taken: {time.time() - start_time:.2f}s
+debug(f"Time taken: {time.time() - start_time:.2f}s")

 if releases:
     clear_hostname_issue(hostname)
quasarr/search/sources/wx.py
CHANGED

@@ -14,7 +14,7 @@ from bs4 import BeautifulSoup, XMLParsedAsHTMLWarning

 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title, get_year
-from quasarr.providers.log import debug,
+from quasarr.providers.log import debug, error, trace, warn

 warnings.filterwarnings(
     "ignore", category=XMLParsedAsHTMLWarning

@@ -32,9 +32,7 @@ def wx_feed(shared_state, start_time, request_from, mirror=None):
 host = shared_state.values["config"]("Hostnames").get(hostname)

 if "lazylibrarian" in request_from.lower():
-    debug(
-        f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
-    )
+    debug(f"<d>Skipping {request_from}: unsupported media type.</d>")
     return releases

 rss_url = f"https://{host}/rss"

@@ -53,10 +51,10 @@ def wx_feed(shared_state, start_time, request_from, mirror=None):
 items = soup.find_all("item")

 if not items:
-
+    warn("No entries found in RSS feed")
     return releases

-
+trace(f"Found {len(items)} entries in RSS feed")

 for item in items:
     try:

@@ -120,18 +118,18 @@ def wx_feed(shared_state, start_time, request_from, mirror=None):
 )

 except Exception as e:
-    debug(f"
+    debug(f"Error parsing RSS entry: {e}")
     continue

 except Exception as e:
-
+    error(f"Error loading feed: {e}")
     mark_hostname_issue(
         hostname, "feed", str(e) if "e" in dir() else "Error occurred"
     )
     return releases

 elapsed_time = time.time() - start_time
-debug(f"Time taken: {elapsed_time:.2f}s
+debug(f"Time taken: {elapsed_time:.2f}s")

 if releases:
     clear_hostname_issue(hostname)

@@ -155,24 +153,20 @@ def wx_search(
 host = shared_state.values["config"]("Hostnames").get(hostname)

 if "lazylibrarian" in request_from.lower():
-    debug(
-        f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
-    )
+    debug(f"<d>Skipping {request_from}: unsupported media type.</d>")
     return releases

 imdb_id = shared_state.is_imdb_id(search_string)
 if imdb_id:
-    debug(f"
+    debug(f"Received IMDb ID: {imdb_id}")
     title = get_localized_title(shared_state, imdb_id, "de")
     if not title:
-
+        error(f"No title found for IMDb '{imdb_id}'")
         return releases
-
-        f"{hostname.upper()}: Translated IMDb {imdb_id} to German title: '{title}'"
-    )
+    trace(f"Resolved IMDb '{imdb_id}' to: '{title}'")
     search_string = html.unescape(title)
 else:
-    debug(f"
+    debug(f"Using search string directly: '{search_string}'")

 api_url = f"https://api.{host}/start/search"

@@ -202,7 +196,7 @@ def wx_search(
 elif "radarr" in request_from.lower():
     params["types"] = "movie"

-
+trace(f"Searching: '{search_string}'")

 try:
     r = requests.get(api_url, headers=headers, params=params, timeout=10)

@@ -219,7 +213,7 @@ def wx_search(
 else:
     items = data if isinstance(data, list) else []

-
+trace(f"Found {len(items)} items in search results")

 # Track seen titles to deduplicate (mirrors have same fulltitle)
 seen_titles = set()

@@ -228,10 +222,10 @@ def wx_search(
 try:
     uid = item.get("uid")
     if not uid:
-        debug(
+        debug("Item has no UID, skipping")
         continue

-
+    trace(f"Fetching details for UID: {uid}")

     detail_url = f"https://api.{host}/start/d/{uid}"
     detail_r = requests.get(detail_url, headers=headers, timeout=10)

@@ -250,10 +244,13 @@ def wx_search(

 if item_imdb_id and imdb_id and item_imdb_id != imdb_id:
     debug(
-        f"
+        f"IMDb-ID mismatch ({imdb_id} != {item_imdb_id}), skipping item"
     )
     continue

+if item_imdb_id is None:
+    item_imdb_id = imdb_id
+
 source = f"https://{host}/detail/{uid}"

 main_title = (

@@ -270,9 +267,7 @@ def wx_search(
 ):
 # Skip if we've already seen this exact title
 if title in seen_titles:
-    debug(
-        f"{hostname.upper()}: Skipping duplicate main title: {title}"
-    )
+    debug(f"Skipping duplicate main title: {title}")
 else:
     seen_titles.add(title)
     published = detail_item.get(

@@ -285,7 +280,7 @@ def wx_search(
 password = f"www.{host}"

 payload = urlsafe_b64encode(
-    f"{title}|{source}|{mirror}|0|{password}|{item_imdb_id
+    f"{title}|{source}|{mirror}|0|{password}|{item_imdb_id}|{hostname}".encode(
         "utf-8"
     )
 ).decode("utf-8")

@@ -310,9 +305,7 @@ def wx_search(
 if "releases" in detail_item and isinstance(
     detail_item["releases"], list
 ):
-
-    f"{hostname.upper()}: Found {len(detail_item['releases'])} releases for {uid}"
-    )
+    trace(f"Found {len(detail_item['releases'])} releases for {uid}")

 for release in detail_item["releases"]:
     try:

@@ -330,16 +323,11 @@ def wx_search(
 season,
 episode,
 ):
-    debug(
-        f"{hostname.upper()}: ✗ Release filtered out: {release_title}"
-    )
     continue

 # Skip if we've already seen this exact title (deduplication)
 if release_title in seen_titles:
-
-    f"{hostname.upper()}: Skipping duplicate release: {release_title}"
-    )
+    trace(f"Skipping duplicate release: {release_title}")
     continue

 seen_titles.add(release_title)

@@ -365,7 +353,7 @@ def wx_search(
 password = f"www.{host}"

 payload = urlsafe_b64encode(
-    f"{release_title}|{release_source}|{mirror}|{release_size}|{password}|{item_imdb_id
+    f"{release_title}|{release_source}|{mirror}|{release_size}|{password}|{item_imdb_id}|{hostname}".encode(
         "utf-8"
     )
 ).decode("utf-8")

@@ -388,31 +376,29 @@ def wx_search(
 )

 except Exception as e:
-    debug(f"
+    debug(f"Error parsing release: {e}")
     continue
 else:
-    debug(f"
+    debug(f"No releases array found for {uid}")

 except Exception as e:
-    debug(f"
-    debug(f"{
+    debug(f"Error processing item: {e}")
+    debug(f"{traceback.format_exc()}")
     continue

-
-    f"{hostname.upper()}: Returning {len(releases)} total releases (deduplicated)"
-)
+trace(f"Returning {len(releases)} total releases (deduplicated)")

 except Exception as e:
-
+    error(f"Error in search: {e}")
     mark_hostname_issue(
         hostname, "search", str(e) if "e" in dir() else "Error occurred"
     )

-    debug(f"{
+    debug(f"{traceback.format_exc()}")
     return releases

 elapsed_time = time.time() - start_time
-debug(f"Time taken: {elapsed_time:.2f}s
+debug(f"Time taken: {elapsed_time:.2f}s")

 if releases:
     clear_hostname_issue(hostname)
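Throughout `wx.py` the old catch-all `debug(...)` calls are redistributed over the wider set of helpers now exported by `quasarr.providers.log`, which grew by roughly 220 lines in this release and presumably backs the new `loguru` dependency listed in the METADATA change below. A small usage sketch with stand-in data, mirroring the level assignments visible in the hunks above (the imports are taken from the diff; the example values are not from the package):

```python
from quasarr.providers.log import debug, error, trace, warn

items = []  # stand-in for the parsed RSS <item> elements

if not items:
    warn("No entries found in RSS feed")              # recoverable, but worth surfacing
else:
    trace(f"Found {len(items)} entries in RSS feed")  # high-volume detail stays at trace

try:
    raise RuntimeError("connection reset")            # stand-in for a failed feed request
except Exception as e:
    error(f"Error loading feed: {e}")                 # hard failures now escalate to error

debug("Item has no UID, skipping")                    # per-item noise remains at debug
```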
quasarr/storage/config.py
CHANGED

@@ -77,7 +77,7 @@ class Config(object):

 def _set_default_config(self, section):
     self._config.add_section(section)
-    for key,
+    for key, _key_type, value in self._DEFAULT_CONFIG[section]:
         self._config.set(section, key, value)
     with open(self._configfile, "w") as configfile:
         self._config.write(configfile)

@@ -173,8 +173,6 @@ def get_clean_hostnames(shared_state):
 hostnames.save(host, strg)
 if strg and re.match(r".*[A-Z].*", strg):
     hostnames.save(host, strg.lower())
-if strg:
-    print(f'Using "{strg}" as hostname for "{host}"')
 return strg

 for name in shared_state.values["sites"]:
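The `_set_default_config` fix unpacks three-element tuples, which implies `_DEFAULT_CONFIG` maps each section to `(key, type, value)` triples and ignores the type column when writing defaults. A standalone sketch of that shape with an illustrative section and values, not quasarr's actual defaults:

```python
import configparser

# Illustrative defaults in the (key, type, value) shape the fixed loop expects.
_DEFAULT_CONFIG = {
    "ExampleSection": [
        ("port", "int", "8080"),
        ("debug", "bool", "False"),
    ],
}

config = configparser.ConfigParser()
section = "ExampleSection"
config.add_section(section)
for key, _key_type, value in _DEFAULT_CONFIG[section]:
    # Only key and value are persisted; the type column is metadata for later parsing.
    config.set(section, key, value)

with open("example.ini", "w") as configfile:
    config.write(configfile)
```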
{quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: quasarr
-Version: 2.6.1
+Version: 2.7.0
 Summary: Quasarr connects JDownloader with Radarr, Sonarr and LazyLibrarian. It also decrypts links protected by CAPTCHAs.
 Author-email: rix1337 <rix1337@users.noreply.github.com>
 License-File: LICENSE

@@ -11,9 +11,12 @@ Requires-Python: >=3.12
 Requires-Dist: beautifulsoup4>=4.14.3
 Requires-Dist: bottle>=0.13.4
 Requires-Dist: dukpy>=0.5.0
+Requires-Dist: loguru>=0.7.3
 Requires-Dist: pillow>=12.1.0
 Requires-Dist: pycryptodomex>=3.23.0
+Requires-Dist: python-dotenv>=1.2.1
 Requires-Dist: requests>=2.32.5
+Requires-Dist: wcwidth>=0.5.3
 Description-Content-Type: text/markdown

 #
quasarr-2.7.0.dist-info/RECORD
ADDED

@@ -0,0 +1,84 @@
+quasarr/__init__.py,sha256=nh1MU1Evh0G1Pm657qtMMWWX4NSHm6PpETqGFtK2QLE,17197
+quasarr/api/__init__.py,sha256=2CXR0JEjC3zooTB8Bk-z_aZgVM2cPE9ijfO5yJAE9CE,20142
+quasarr/api/arr/__init__.py,sha256=1NcjcfNOjzTQCUDedTGJluK0xU-6krh0T8QSGu7eoeU,22283
+quasarr/api/captcha/__init__.py,sha256=9wBmdYKn0DImiFatHe4y2icV57d4710vfXFncvPKki8,78030
+quasarr/api/config/__init__.py,sha256=FJZHALhL6NExonhCk53vOYnM1ICkmbTRue5UMCy5Yzg,8813
+quasarr/api/jdownloader/__init__.py,sha256=SixcV-sgMAunjAT5LawASb1qSuOOokorQo2F7cQ3jZ4,9427
+quasarr/api/packages/__init__.py,sha256=4T6pw0N1DKpTCj2mAgdPOjo__nhxr56aEqZOiFvPvb0,30679
+quasarr/api/sponsors_helper/__init__.py,sha256=QAFXK_JTtAnstRAlieCbbCsoTwIcBu7ZX8C3U4jZpR0,6475
+quasarr/api/statistics/__init__.py,sha256=rJz6S4jSnpFDWtjU7O-2jECUEqlueOHOEfRUjSb3cMY,7943
+quasarr/downloads/__init__.py,sha256=571QRloySskkg-JRi7JjyrKyfZIRnd9WgotbOOZ9k0s,17364
+quasarr/downloads/linkcrypters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/downloads/linkcrypters/al.py,sha256=sNdEl1gogVn2xerd5fSOkAOgEF2sslQr81g34Jhu5So,8996
+quasarr/downloads/linkcrypters/filecrypt.py,sha256=yMkDM_GVOd3Bl9lgPkL1BDDuYOpMgxnVwlqRtskZ0Xo,17729
+quasarr/downloads/linkcrypters/hide.py,sha256=t9p_Hb5taJDuRAPaWZw7T1GTcLVgd8keD9LlZJ1-Gsg,6266
+quasarr/downloads/packages/__init__.py,sha256=MdKug4D-ex6sJBJuM0mi3_IjXX7AjV5ryUonOs2aupc,34887
+quasarr/downloads/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/downloads/sources/al.py,sha256=Hy7OiTQfc-f7zERRUXJUrLjc34tNLAWZrrfM7Z1-An8,29363
+quasarr/downloads/sources/by.py,sha256=bSmihlaMMLLkoz63KJvX7sU-EhLJPpxQ_G09MrmvwSY,4587
+quasarr/downloads/sources/dd.py,sha256=552p2V9HcfVPcoAuBddJe9DblVaIxH1-R2xZ8njEW34,3600
+quasarr/downloads/sources/dj.py,sha256=QGfiZMcwgcMgT91hSV-MxD3tW3Wbm0ULZSfBZTh9TBM,390
+quasarr/downloads/sources/dl.py,sha256=HYdlRhBhG5TjK6NadrUwBLI532LdL87gpXbAJY_7TkA,15655
+quasarr/downloads/sources/dt.py,sha256=12Iiwr3ZMUivHDG4bjJHyM5vx3Dmfu3IKDd13sYKlUg,3166
+quasarr/downloads/sources/dw.py,sha256=LgkHlYbA2nifLMMuoqZPpun-dLsmTYqIIhM6BB7i898,2993
+quasarr/downloads/sources/he.py,sha256=-6bJhSsdGa_f3ikDMn1Qzs6fhAAWvFTW6BSfjOVxGY0,3971
+quasarr/downloads/sources/hs.py,sha256=aZKtYg0RqZEyvo76_IlRpVWwFSc9BWvAyn-M8IaDxU4,4752
+quasarr/downloads/sources/mb.py,sha256=nKO8y0fj-LP9QTXJs0Wxw21yXqe7N6xEYJ9UK2_mPeE,1947
+quasarr/downloads/sources/nk.py,sha256=0R7JjP4t_yuP_LJb7dkhtWjK6rVgL_W5sDdHer_GkkY,2061
+quasarr/downloads/sources/nx.py,sha256=Kn3Nn87NcrKada3j8jpTlunKJ7-ggDyQOiRSoCdxySk,3670
+quasarr/downloads/sources/sf.py,sha256=f_jC4Shnl2GWCro6JcBLjbmZA8nSPVPr4vdf0WR_r7k,6927
+quasarr/downloads/sources/sj.py,sha256=h3x7F8UUPvcyTf6gkKn6fBLTFeQjvqD7MJ1TtuiqhUU,390
+quasarr/downloads/sources/sl.py,sha256=BDsyfqcZBu8Nrw-x8q3mJYoiDl1qvzdhuHayfVGEmYk,3795
+quasarr/downloads/sources/wd.py,sha256=Xh5cvsGGBfM7iYWmBktmRHaWX6vZBRerBy-L8ycJW0s,8546
+quasarr/downloads/sources/wx.py,sha256=b3_--zovX4BrknzGEmdh_QQw72dtyPfIrI_me_KyVjo,6772
+quasarr/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/providers/auth.py,sha256=qDdXr28SJ078Q8AVZ_50Z1FwVhfuinOtRl6JHF4RgnM,10412
+quasarr/providers/cloudflare.py,sha256=oLtwQ_UElLIWJ-c-qH2c9NUjsZtlmzYpXlAWhQcE1FM,9076
+quasarr/providers/hostname_issues.py,sha256=SpnZAxOLejSXJGFnYkCrRzR8D0IQsTMtylM-O0h21Z0,1462
+quasarr/providers/html_images.py,sha256=xmxfNwqAqQimVaOq7IelkxlBdcRpPZZLGli_MJDOacI,19755
+quasarr/providers/html_templates.py,sha256=e5b66N47y5Uq7Ikwcm6kOWiyXZ7Bz4gqg2DcajIBGgE,16360
+quasarr/providers/imdb_metadata.py,sha256=JP9YQ7jU1H2-dify6q-qE7gpbJ9ospY4evNLQaa4FDY,21946
+quasarr/providers/jd_cache.py,sha256=RZsjw9X8wouVH__T2EG7w18CLUrxKh73BHnk_rpHdgE,13534
+quasarr/providers/log.py,sha256=E5g5Angdn9iflW_Z0PNbAmhVK_ZC6IwLnOaJ_mVarqM,7018
+quasarr/providers/myjd_api.py,sha256=hCWVU5IAl7QQV_icMF0B91y7CLLM_j2xfyByTP7an0g,35206
+quasarr/providers/notifications.py,sha256=fL0HQdk7jBLXToM_URQiJq6y2UAHs0RzMFMCFdb3SHQ,4894
+quasarr/providers/obfuscated.py,sha256=IAN0-5m6UblLjaFdPhRy75ryqDMF0nlbkClq5-n1bQQ,2275634
+quasarr/providers/shared_state.py,sha256=alUxC0KJQEGsERcHUSn-nSY53PcUjmgHk5R04kj_hOs,33247
+quasarr/providers/statistics.py,sha256=1X_Aa7TE3W7ovwkemVMsgIx55Jw3eYMiyUxuCUDgO5s,8666
+quasarr/providers/utils.py,sha256=FR0tGwao1ytYtWbmUocaHwt29pHKqskKMH2YE2bgSFI,12481
+quasarr/providers/version.py,sha256=vYbQKxf4PPBZ1AradCg9Rn9q7TQrQLaNkfDHTi2Cs_k,4424
+quasarr/providers/web_server.py,sha256=tHkMxhV6eaHC8cWsEpbUqD_U29IFE24VsU6tjk-xCEM,1765
+quasarr/providers/sessions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/providers/sessions/al.py,sha256=AOWl1v-wcwxUeo1bRizd7zAzbUludsFbgCGICHCVZFQ,13270
+quasarr/providers/sessions/dd.py,sha256=K503Ny-3zWolzpGVane4ag5Gu1yzPv49eni0I8Hw4v8,3353
+quasarr/providers/sessions/dl.py,sha256=PnyuX_h4gQIk81w0NKYCFxpg-Il0gi72BQxbdLED1ds,5820
+quasarr/providers/sessions/nx.py,sha256=BkEMEVAiJQBlsGQYw4ZTSyys8Ua-WToAmqL0Il41OAg,3491
+quasarr/search/__init__.py,sha256=ggQG8NreFQ4IU6SAigh3YXCScUcQbcQjf3-nyfICOoQ,8162
+quasarr/search/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/search/sources/al.py,sha256=2RsaIfA4o3uMZuJZwPh1tETdLkNeAQ6-ymFVBL706Po,18206
+quasarr/search/sources/by.py,sha256=cgy39DN0LIMqO9Yfs6mx7Uio9unuEk4Our562BKQWz0,8971
+quasarr/search/sources/dd.py,sha256=J5SBHgItYPS3UL_Fu8a9wP1j9Rs_t9-ZbLcZaRIjU-I,6144
+quasarr/search/sources/dj.py,sha256=6mNuyhnG1MEf4fAVYjGGSbY_E_s9ENmiGRX6Eb16Qqw,7665
+quasarr/search/sources/dl.py,sha256=QeKO7nKtMDzXLoWtus1Jl0uADcpKphfoLBNTlO85tYU,13888
+quasarr/search/sources/dt.py,sha256=yAr3MKCLq-KOLaIv7npNprKOxHCaOEJ4eOqQErguohU,10480
+quasarr/search/sources/dw.py,sha256=dbD5XErlPv3lJ2J7iyVKuFAuWmzidNNaOdT9mH_0b3k,9149
+quasarr/search/sources/fx.py,sha256=gJKEdMGNbnQNaj_pibUrajVB3Wei4hUqp8hmHski9Ow,10797
+quasarr/search/sources/he.py,sha256=m9zVU5NmctKQbc2aP7A-Yw-y94yX5HnjanVFOCnmdW0,7789
+quasarr/search/sources/hs.py,sha256=pq-MwK7FGokszTMiojAq3miw-yAqZhRDO7xGwRQdUMg,17815
+quasarr/search/sources/mb.py,sha256=f45R9Yh8kFtCudxhqNLFUwlQngMUfnZCowK65hhE3oM,8198
+quasarr/search/sources/nk.py,sha256=r7t4mU4CP4IU7sr07f9NGa9pdAJnkKA7SeGZoUAdsLI,7497
+quasarr/search/sources/nx.py,sha256=px29xMPSzNs60fM7mk59JgMZJaTHp-vbLAkYNy74uVU,8396
+quasarr/search/sources/sf.py,sha256=l0kZ0crgf-ZOBvZCT7wk_7coS3Siw0KRycXeconHxA0,17434
+quasarr/search/sources/sj.py,sha256=t3dp_SypujEfz0u8hjS5Xcflzf637EYrkUASAKUzhk0,7882
+quasarr/search/sources/sl.py,sha256=9IqxOMJxL-SI5xwDVYO6PPPuatHOAXyh0_0bvRSaIfc,11511
+quasarr/search/sources/wd.py,sha256=lJmeEZ9A3pDGX-BRTomZa7HyaRt1-zUwbPC_2oUNHdI,10389
+quasarr/search/sources/wx.py,sha256=VLWY_BuVnk__MPdfufmQ2zkq4pGU1eD1-lLhWXQQPP4,14663
+quasarr/storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+quasarr/storage/config.py,sha256=sjpfVq_Bxkj9gVwCXB_MAreB9ezf-YEJQKxQmQhUv9s,6540
+quasarr/storage/setup.py,sha256=zb83kvQfxMFHxC7EvWWaVTy0MtG7iEjMRyfY4hdcbOk,61520
+quasarr/storage/sqlite_database.py,sha256=tmHUotMWIwtyH-g244WvcGhMQMMjGokncv7JpFSi8NM,3639
+quasarr-2.7.0.dist-info/METADATA,sha256=NtSLKAF7rVh-4Y_5M1ibnP8fgG7EOS8jdy1h_qZsjdA,14822
+quasarr-2.7.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+quasarr-2.7.0.dist-info/entry_points.txt,sha256=gXi8mUKsIqKVvn-bOc8E5f04sK_KoMCC-ty6b2Hf-jc,40
+quasarr-2.7.0.dist-info/licenses/LICENSE,sha256=QQFCAfDgt7lSA8oSWDHIZ9aTjFbZaBJdjnGOHkuhK7k,1060
+quasarr-2.7.0.dist-info/RECORD,,