quasarr 2.6.1__py3-none-any.whl → 2.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of quasarr has been flagged as potentially problematic.
- quasarr/__init__.py +71 -61
- quasarr/api/__init__.py +1 -2
- quasarr/api/arr/__init__.py +159 -56
- quasarr/api/captcha/__init__.py +203 -154
- quasarr/downloads/__init__.py +12 -8
- quasarr/downloads/linkcrypters/al.py +3 -3
- quasarr/downloads/linkcrypters/filecrypt.py +1 -2
- quasarr/downloads/packages/__init__.py +62 -88
- quasarr/downloads/sources/al.py +3 -3
- quasarr/downloads/sources/by.py +3 -3
- quasarr/downloads/sources/he.py +8 -9
- quasarr/downloads/sources/nk.py +3 -3
- quasarr/downloads/sources/sl.py +6 -1
- quasarr/downloads/sources/wd.py +93 -37
- quasarr/downloads/sources/wx.py +11 -17
- quasarr/providers/auth.py +9 -13
- quasarr/providers/cloudflare.py +4 -3
- quasarr/providers/imdb_metadata.py +0 -2
- quasarr/providers/jd_cache.py +64 -90
- quasarr/providers/log.py +226 -8
- quasarr/providers/myjd_api.py +116 -94
- quasarr/providers/sessions/al.py +20 -22
- quasarr/providers/sessions/dd.py +1 -1
- quasarr/providers/sessions/dl.py +8 -10
- quasarr/providers/sessions/nx.py +1 -1
- quasarr/providers/shared_state.py +26 -15
- quasarr/providers/utils.py +15 -6
- quasarr/providers/version.py +1 -1
- quasarr/search/__init__.py +91 -78
- quasarr/search/sources/al.py +19 -23
- quasarr/search/sources/by.py +6 -6
- quasarr/search/sources/dd.py +8 -10
- quasarr/search/sources/dj.py +15 -18
- quasarr/search/sources/dl.py +25 -37
- quasarr/search/sources/dt.py +13 -15
- quasarr/search/sources/dw.py +24 -16
- quasarr/search/sources/fx.py +25 -11
- quasarr/search/sources/he.py +16 -14
- quasarr/search/sources/hs.py +7 -7
- quasarr/search/sources/mb.py +7 -7
- quasarr/search/sources/nk.py +24 -25
- quasarr/search/sources/nx.py +22 -15
- quasarr/search/sources/sf.py +18 -9
- quasarr/search/sources/sj.py +7 -7
- quasarr/search/sources/sl.py +26 -14
- quasarr/search/sources/wd.py +61 -31
- quasarr/search/sources/wx.py +33 -47
- quasarr/storage/config.py +1 -3
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/METADATA +4 -1
- quasarr-2.7.0.dist-info/RECORD +84 -0
- quasarr-2.6.1.dist-info/RECORD +0 -84
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/WHEEL +0 -0
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/entry_points.txt +0 -0
- {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/licenses/LICENSE +0 -0
quasarr/providers/utils.py
CHANGED
@@ -13,6 +13,8 @@ from urllib.parse import urlparse
 import requests
 from PIL import Image
 
+from quasarr.providers.log import crit, error
+
 # Fallback user agent when FlareSolverr is not available
 FALLBACK_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36"
 
@@ -95,7 +97,7 @@ def check_ip():
 def check_flaresolverr(shared_state, flaresolverr_url):
     # Ensure it ends with /v<digit+>
     if not re.search(r"/v\d+$", flaresolverr_url):
-        …
+        error(f"FlareSolverr URL does not end with /v#: {flaresolverr_url}")
         return False
 
     # Try sending a simple test request
@@ -115,25 +117,32 @@ def check_flaresolverr(shared_state, flaresolverr_url):
             solution_ua = solution.get("userAgent", None)
             if solution_ua:
                 shared_state.update("user_agent", solution_ua)
-            …
+            try:
+                flaresolverr_version = json_data.get("version")
+            except Exception as e:
+                error(f"Could not grab Flaresolverr version: {str(e)}")
+                return False
+            return flaresolverr_version
         else:
-            …
+            error(f"Unexpected FlareSolverr response: {json_data}")
             return False
 
     except Exception as e:
-        …
+        error(f"Failed to connect to FlareSolverr: {e}")
        return False
 
 
 def validate_address(address, name):
     if not address.startswith("http"):
-        …
+        crit(f"Error: {name} '{address}' is invalid. It must start with 'http'.")
+        sys.exit(1)
 
     colon_count = address.count(":")
     if colon_count < 1 or colon_count > 2:
-        …
+        crit(
             f"Error: {name} '{address}' is invalid. It must contain 1 or 2 colons, but it has {colon_count}."
         )
+        sys.exit(1)
 
 
 def is_flaresolverr_available(shared_state):
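
The change worth calling out in this file: check_flaresolverr now logs through the new error/crit helpers and returns the FlareSolverr version string on success, False on every failure path, so callers can branch on truthiness. A minimal caller sketch under that reading (the URL and print calls are illustrative, not from the diff):

    # Hypothetical usage of the reworked helper; shared_state comes from Quasarr's runtime.
    flaresolverr_url = "http://localhost:8191/v1"  # must end in /v<digits> or the check fails early

    version = check_flaresolverr(shared_state, flaresolverr_url)
    if version:
        # A truthy return is the version string FlareSolverr reported, e.g. "3.3.21"
        print(f"FlareSolverr {version} is reachable")
    else:
        # False covers all failure paths: bad URL suffix, unexpected response, connection error
        print("FlareSolverr unavailable; falling back to FALLBACK_USER_AGENT")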
quasarr/providers/version.py
CHANGED
quasarr/search/__init__.py
CHANGED
@@ -39,36 +39,36 @@ def get_search_results(
     if imdb_id and not imdb_id.startswith("tt"):
         imdb_id = f"tt{imdb_id}"
 
-    # Pre-populate IMDb metadata cache to avoid API hammering by search threads
     if imdb_id:
         get_imdb_metadata(imdb_id)
 
     docs_search = "lazylibrarian" in request_from.lower()
 
-    …
+    # Config retrieval
+    config = shared_state.values["config"]("Hostnames")
+    al = config.get("al")
+    by = config.get("by")
+    dd = config.get("dd")
+    dl = config.get("dl")
+    dt = config.get("dt")
+    dj = config.get("dj")
+    dw = config.get("dw")
+    fx = config.get("fx")
+    he = config.get("he")
+    hs = config.get("hs")
+    mb = config.get("mb")
+    nk = config.get("nk")
+    nx = config.get("nx")
+    sf = config.get("sf")
+    sj = config.get("sj")
+    sl = config.get("sl")
+    wd = config.get("wd")
+    wx = config.get("wx")
 
     start_time = time.time()
-    …
     search_executor = SearchExecutor()
 
-    #
+    # Mappings
     imdb_map = [
         (al, al_search),
         (by, by_search),
@@ -90,7 +90,6 @@ def get_search_results(
         (wx, wx_search),
     ]
 
-    # LazyLibrarian uses search_phrase for searches
     phrase_map = [
         (by, by_search),
         (dl, dl_search),
@@ -100,7 +99,6 @@ def get_search_results(
         (wd, wd_search),
     ]
 
-    # Feed searches omit imdb_id and search_phrase
     feed_map = [
         (al, al_feed),
         (by, by_feed),
@@ -122,7 +120,8 @@ def get_search_results(
         (wx, wx_feed),
     ]
 
-    …
+    # Add searches
+    if imdb_id:
         args, kwargs = (
             (shared_state, start_time, request_from, imdb_id),
             {"mirror": mirror, "season": season, "episode": episode},
@@ -131,9 +130,7 @@ def get_search_results(
         if flag:
             search_executor.add(func, args, kwargs, True)
 
-    elif (
-        search_phrase and docs_search
-    ):  # only LazyLibrarian is allowed to use search_phrase
+    elif search_phrase and docs_search:
         args, kwargs = (
             (shared_state, start_time, request_from, search_phrase),
             {"mirror": mirror, "season": season, "episode": episode},
@@ -143,9 +140,7 @@ def get_search_results(
             search_executor.add(func, args, kwargs)
 
     elif search_phrase:
-        debug(
-            f"Search phrase '{search_phrase}' is not supported for {request_from}. Only LazyLibrarian can use search phrases."
-        )
+        debug(f"Search phrase '{search_phrase}' is not supported for {request_from}.")
 
     else:
         args, kwargs = ((shared_state, start_time, request_from), {"mirror": mirror})
@@ -153,20 +148,24 @@ def get_search_results(
         if flag:
             search_executor.add(func, args, kwargs)
 
+    # Clean description for Console UI
     if imdb_id:
-        …
+        desc_text = f"Searching for IMDb-ID {imdb_id}"
+        stype = f"IMDb-ID <b>{imdb_id}</b>"
     elif search_phrase:
-        …
+        desc_text = f"Searching for '{search_phrase}'"
+        stype = f"Search-Phrase <b>{search_phrase}</b>"
     else:
-        …
+        desc_text = "Running Feed Search"
+        stype = "<b>feed</b> search"
+
+    debug(f"Starting <g>{len(search_executor.searches)}</g> searches for {stype}...")
+
+    results = search_executor.run_all(desc_text)
 
-    info(
-        f"Starting {len(search_executor.searches)} searches for {stype}... This may take some time."
-    )
-    results = search_executor.run_all()
     elapsed_time = time.time() - start_time
     info(
-        f"Providing {len(results)} releases to {request_from} for {stype}. Time taken: {elapsed_time:.2f} seconds"
+        f"Providing <g>{len(results)} releases</g> to <d>{request_from}</d> for {stype}. <blue>Time taken: {elapsed_time:.2f} seconds</blue>"
     )
 
     return results
@@ -177,45 +176,67 @@ class SearchExecutor:
         self.searches = []
 
     def add(self, func, args, kwargs, use_cache=False):
-        # create cache key
         key_args = list(args)
-        key_args[1] = None
+        key_args[1] = None
         key_args = tuple(key_args)
         key = hash((func.__name__, key_args, frozenset(kwargs.items())))
-
         self.searches.append((key, lambda: func(*args, **kwargs), use_cache))
 
-    def run_all(self):
+    def run_all(self, description):
         results = []
-
-        cache_keys = []
-        cache_used = False
+        future_to_meta = {}
 
         with ThreadPoolExecutor() as executor:
+            current_index = 0
+            pending_futures = []
+            cache_used = False
+
             for key, func, use_cache in self.searches:
+                cached_result = None
                 if use_cache:
                     cached_result = search_cache.get(key)
-                    if cached_result is not None:
-                        debug(f"Using cached result for {key}")
-                        cache_used = True
-                        results.extend(cached_result)
-                        continue
-
-                futures.append(executor.submit(func))
-                cache_keys.append(key if use_cache else None)
-
-            for index, future in enumerate(as_completed(futures)):
-                try:
-                    result = future.result()
-                    results.extend(result)
-
-                    if cache_keys[index]:  # only cache if flag is set
-                        search_cache.set(cache_keys[index], result)
-                except Exception as e:
-                    info(f"An error occurred: {e}")
 
-
-
+                if cached_result is not None:
+                    debug(f"Using cached result for {key}")
+                    cache_used = True
+                    results.extend(cached_result)
+                else:
+                    future = executor.submit(func)
+                    cache_key = key if use_cache else None
+                    future_to_meta[future] = (current_index, cache_key)
+                    pending_futures.append(future)
+                    current_index += 1
+
+            # Prepare list to track status of each provider
+            # Icons will be filled in as threads complete
+            total_active = len(pending_futures)
+            icons = ["▪️"] * total_active
+
+            for future in as_completed(pending_futures):
+                index, cache_key = future_to_meta[future]
+                try:
+                    res = future.result()
+                    if res and len(res) > 0:
+                        status = "✅"
+                    else:
+                        status = "⚪"
+
+                    icons[index] = status
+
+                    results.extend(res)
+                    if cache_key:
+                        search_cache.set(cache_key, res)
+                except Exception as e:
+                    icons[index] = "❌"
+                    info(f"Search error: {e}")
+
+        # Log the final status summary if any searches were performed
+        if total_active > 0:
+            bar_str = "".join(icons)
+            info(f"{description} [{bar_str}]")
+
+        if cache_used:
+            info("Presenting cached results for some items.")
 
         return results
@@ -228,22 +249,14 @@ class SearchCache:
     def clean(self, now):
         if now - self.last_cleaned < 60:
             return
-        …
-        ]
-
-        for key in keys_to_delete:
-            del self.cache[key]
-
+        keys_to_delete = [k for k, (_, exp) in self.cache.items() if now >= exp]
+        for k in keys_to_delete:
+            del self.cache[k]
         self.last_cleaned = now
 
     def get(self, key):
-        …
-        if time.time() < …
-            return value
-
-        return None
+        val, exp = self.cache.get(key, (None, 0))
+        return val if time.time() < exp else None
 
     def set(self, key, value, ttl=300):
         now = time.time()
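
The run_all rewrite is the core of this file's change: instead of two index-aligned lists (futures, cache_keys) that drift apart whenever a cached provider is skipped, each future now carries its own (index, cache_key) metadata, and a per-provider icon bar is filled in as results arrive. A condensed, self-contained sketch of that pattern (the task callables and labels are made up for illustration):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def run_with_status_bar(tasks, description):
        # tasks: zero-argument callables, each returning a list of results
        results = []
        future_to_index = {}
        icons = ["▪️"] * len(tasks)

        with ThreadPoolExecutor() as executor:
            for index, task in enumerate(tasks):
                # Map each future back to its slot, so icon order is stable
                # even though futures complete in arbitrary order.
                future_to_index[executor.submit(task)] = index

            for future in as_completed(future_to_index):
                index = future_to_index[future]
                try:
                    res = future.result()
                    icons[index] = "✅" if res else "⚪"  # hits vs. empty result
                    results.extend(res)
                except Exception:
                    icons[index] = "❌"  # the provider raised

        print(f"{description} [{''.join(icons)}]")
        return results

    # Three stand-in providers; the second returns nothing, the third raises.
    run_with_status_bar([lambda: ["a", "b"], lambda: [], lambda: 1 / 0], "Searching for 'demo'")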
quasarr/search/sources/al.py
CHANGED
@@ -16,7 +16,7 @@ from quasarr.downloads.sources.al import (
 )
 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title, get_year
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, error, info, trace
 from quasarr.providers.sessions.al import fetch_via_requests_session, invalidate_session
 
 hostname = "al"
@@ -37,9 +37,9 @@ def convert_to_rss_date(date_str: str) -> str:
     try:
         parsed = datetime.strptime(date_str, "%d.%m.%Y - %H:%M")
         return parsed.strftime("%a, %d %b %Y %H:%M:%S +0000")
-    except ValueError:
+    except ValueError as e:
         # If parsing fails, return the original string or handle as needed
-        raise ValueError(f"Could not parse date: {date_str}")
+        raise ValueError(f"Could not parse date: {date_str}") from e
 
 
 def parse_relative_date(raw: str) -> datetime | None:
@@ -122,7 +122,7 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
     host = shared_state.values["config"]("Hostnames").get(hostname)
 
     if not "arr" in request_from.lower():
-        debug(f"…
+        debug(f"<d>Skipping {request_from} search (unsupported media type)!</d>")
         return releases
 
     if "Radarr" in request_from:
@@ -131,7 +131,7 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
         wanted_type = "series"
 
     if mirror and mirror not in supported_mirrors:
-        debug(f'Mirror "{mirror}" not supported…
+        debug(f'Mirror "{mirror}" not supported.')
         return releases
 
     try:
@@ -140,7 +140,7 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
         )
         r.raise_for_status()
     except Exception as e:
-        …
+        error(f"Could not fetch feed: {e}")
         mark_hostname_issue(
             hostname, "feed", str(e) if "e" in dir() else "Error occurred"
         )
@@ -201,7 +201,7 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
             try:
                 date_converted = convert_to_rss_date(raw_date_str)
             except Exception as e:
-                debug(f"…
+                debug(f"Could not parse date '{raw_date_str}': {e}")
 
             # Each of these signifies an individual release block
             mt_blocks = tr.find_all("div", class_="mt10")
@@ -239,13 +239,13 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
                 )
 
             except Exception as e:
-                info(f"…
+                info(f"Error parsing feed item: {e}")
                 mark_hostname_issue(
                     hostname, "feed", str(e) if "e" in dir() else "Error occurred"
                 )
 
     elapsed = time.time() - start_time
-    debug(f"Time taken: {elapsed:.2f}s…
+    debug(f"Time taken: {elapsed:.2f}s")
 
     if releases:
         clear_hostname_issue(hostname)
@@ -272,7 +272,7 @@ def al_search(
     host = shared_state.values["config"]("Hostnames").get(hostname)
 
     if not "arr" in request_from.lower():
-        debug(f"…
+        debug(f"<d>Skipping {request_from} search (unsupported media type)!</d>")
         return releases
 
     if "Radarr" in request_from:
@@ -281,14 +281,14 @@ def al_search(
         valid_type = "series"
 
     if mirror and mirror not in supported_mirrors:
-        debug(f'…
+        debug(f'Mirror "{mirror}" not supported.')
         return releases
 
     imdb_id = shared_state.is_imdb_id(search_string)
     if imdb_id:
         title = get_localized_title(shared_state, imdb_id, "de")
         if not title:
-            info(f"…
+            info(f"No title for IMDb {imdb_id}")
             return releases
         search_string = title
 
@@ -307,7 +307,7 @@ def al_search(
         )
         r.raise_for_status()
     except Exception as e:
-        info(f"…
+        info(f"Search load error: {e}")
         mark_hostname_issue(
             hostname, "search", str(e) if "e" in dir() else "Error occurred"
         )
@@ -322,7 +322,7 @@ def al_search(
             last_redirect.url, redirect_location
         )  # in case of relative URL
         debug(
-            f"{…
+            f"{search_string} redirected to {absolute_redirect_url} instead of search results page"
         )
 
         try:
@@ -350,13 +350,9 @@ def al_search(
             sanitized_search_string = shared_state.sanitize_string(search_string)
             sanitized_title = shared_state.sanitize_string(name)
             if not sanitized_search_string in sanitized_title:
-                debug(
-                    f"{hostname}: Search string '{search_string}' doesn't match '{name}'"
-                )
+                debug(f"Search string '{search_string}' doesn't match '{name}'")
                 continue
-            …
-                f"{hostname}: Matched search string '{search_string}' with result '{name}'"
-            )
+            trace(f"Matched search string '{search_string}' with result '{name}'")
 
             type_label = None
             for lbl in body.select("div.label-group a[href]"):
@@ -388,7 +384,7 @@ def al_search(
             use_cache = ts and ts > datetime.now() - timedelta(seconds=threshold)
 
             if use_cache and entry.get("html"):
-                debug(f"…
+                debug(f"Using cached content for '{url}'")
                 data_html = entry["html"]
             else:
                 entry = {"timestamp": datetime.now()}
@@ -494,13 +490,13 @@ def al_search(
                 )
 
             except Exception as e:
-                info(f"…
+                info(f"Error parsing search item: {e}")
                 mark_hostname_issue(
                     hostname, "search", str(e) if "e" in dir() else "Error occurred"
                 )
 
     elapsed = time.time() - start_time
-    debug(f"Time taken: {elapsed:.2f}s…
+    debug(f"Time taken: {elapsed:.2f}s")
 
     if releases:
         clear_hostname_issue(hostname)
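
Beyond the log-message cleanup, the one semantic change here is convert_to_rss_date re-raising with "from e": the original strptime ValueError is chained as __cause__ instead of being discarded, so tracebacks show both the low-level format mismatch and the high-level message. A minimal illustration (parse_feed_date is a stand-in, not the package's function):

    from datetime import datetime

    def parse_feed_date(date_str: str) -> str:
        try:
            return datetime.strptime(date_str, "%d.%m.%Y - %H:%M").isoformat()
        except ValueError as e:
            # "from e" chains the strptime error beneath the friendlier message,
            # so the "does not match format" detail survives in the traceback.
            raise ValueError(f"Could not parse date: {date_str}") from e

    parse_feed_date("not-a-date")  # traceback shows both exceptions, chained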
quasarr/search/sources/by.py
CHANGED
@@ -14,7 +14,7 @@ from bs4 import BeautifulSoup
 
 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title, get_year
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, error, info
 
 hostname = "by"
 supported_mirrors = ["rapidgator", "ddownload", "nitroflare"]
@@ -168,7 +168,7 @@ def _parse_posts(
             }
         )
     except Exception as e:
-        debug(f"Error parsing…
+        debug(f"Error parsing: {e}")
         continue
 
     return releases
@@ -201,12 +201,12 @@ def by_feed(shared_state, start_time, request_from, mirror=None):
             mirror_filter=mirror,
         )
     except Exception as e:
-        …
+        error(f"Error loading feed: {e}")
         mark_hostname_issue(
             hostname, "feed", str(e) if "e" in dir() else "Error occurred"
         )
         releases = []
-    debug(f"Time taken: {time.time() - start_time:.2f}s…
+    debug(f"Time taken: {time.time() - start_time:.2f}s")
 
     if releases:
         clear_hostname_issue(hostname)
@@ -257,12 +257,12 @@ def by_search(
             episode=episode,
         )
     except Exception as e:
-        …
+        error(f"Error loading search: {e}")
         mark_hostname_issue(
             hostname, "search", str(e) if "e" in dir() else "Error occurred"
        )
         releases = []
-    debug(f"Time taken: {time.time() - start_time:.2f}s…
+    debug(f"Time taken: {time.time() - start_time:.2f}s")
 
     if releases:
         clear_hostname_issue(hostname)
quasarr/search/sources/dd.py
CHANGED
@@ -9,7 +9,7 @@ from datetime import datetime, timezone
 
 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title, get_year
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, error, info, warn
 from quasarr.providers.sessions.dd import (
     create_and_persist_session,
     retrieve_and_validate_session,
@@ -48,9 +48,7 @@ def dd_search(
     password = dd
 
     if not "arr" in request_from.lower():
-        debug(
-            f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
-        )
+        debug(f"<d>Skipping {request_from} search (unsupported media type)!</d>")
         return releases
 
     try:
@@ -65,7 +63,7 @@ def dd_search(
 
     if mirror and mirror not in supported_mirrors:
         debug(
-            f'Mirror "{mirror}" not supported…
+            f'Mirror "{mirror}" not supported. Supported mirrors: {supported_mirrors}.'
             " Skipping search!"
         )
         return releases
@@ -123,7 +121,7 @@ def dd_search(
             try:
                 if release.get("fake"):
                     debug(
-                        f"…
+                        f"Release {release.get('release')} marked as fake. Invalidating session..."
                     )
                     create_and_persist_session(shared_state)
                     return []
@@ -138,7 +136,7 @@ def dd_search(
                 release_imdb = release.get("imdbid", None)
                 if release_imdb and imdb_id and imdb_id != release_imdb:
                     debug(
-                        f"…
+                        f"Release {title} IMDb-ID mismatch ({imdb_id} != {release.get('imdbid', None)})"
                     )
                     continue
 
@@ -169,20 +167,20 @@ def dd_search(
                     }
                 )
             except Exception as e:
-                …
+                warn(f"Error parsing feed: {e}")
                 mark_hostname_issue(
                     hostname, "search", str(e) if "e" in dir() else "Error occurred"
                 )
                 continue
 
     except Exception as e:
-        …
+        error(f"Error loading feed: {e}")
        mark_hostname_issue(
             hostname, search_type, str(e) if "e" in dir() else "Error occurred"
         )
 
     elapsed_time = time.time() - start_time
-    debug(f"Time taken: {elapsed_time:.2f}s…
+    debug(f"Time taken: {elapsed_time:.2f}s")
 
     if releases:
         clear_hostname_issue(hostname)
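
A pattern running through all of these source modules: 2.7.0 replaces generic debug/info calls with severity-appropriate helpers (trace, debug, info, warn, error, crit) imported from the expanded quasarr.providers.log, plus inline tags like <d>…</d> and <g>…</g> that the log module presumably renders as console colors. A rough sketch of the shape such tiered helpers might take (an assumption for illustration, not the module's actual code):

    import logging

    _logger = logging.getLogger("quasarr")

    # Assumed thin wrappers over one logger, so call sites read as plain severity names.
    def trace(msg): _logger.log(5, msg)   # custom level below DEBUG for very chatty output
    def debug(msg): _logger.debug(msg)
    def info(msg): _logger.info(msg)
    def warn(msg): _logger.warning(msg)
    def error(msg): _logger.error(msg)
    def crit(msg): _logger.critical(msg)  # validate_address calls this right before sys.exit(1)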