quasarr-2.1.5-py3-none-any.whl → quasarr-2.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of quasarr might be problematic.

Files changed (57)
  1. quasarr/api/__init__.py +94 -23
  2. quasarr/api/captcha/__init__.py +0 -12
  3. quasarr/api/config/__init__.py +22 -11
  4. quasarr/api/packages/__init__.py +26 -34
  5. quasarr/api/statistics/__init__.py +15 -15
  6. quasarr/downloads/__init__.py +9 -1
  7. quasarr/downloads/packages/__init__.py +2 -2
  8. quasarr/downloads/sources/al.py +6 -0
  9. quasarr/downloads/sources/by.py +29 -20
  10. quasarr/downloads/sources/dd.py +9 -1
  11. quasarr/downloads/sources/dl.py +3 -0
  12. quasarr/downloads/sources/dt.py +16 -7
  13. quasarr/downloads/sources/dw.py +22 -17
  14. quasarr/downloads/sources/he.py +11 -6
  15. quasarr/downloads/sources/mb.py +9 -3
  16. quasarr/downloads/sources/nk.py +9 -3
  17. quasarr/downloads/sources/nx.py +21 -17
  18. quasarr/downloads/sources/sf.py +21 -13
  19. quasarr/downloads/sources/sl.py +10 -2
  20. quasarr/downloads/sources/wd.py +18 -9
  21. quasarr/downloads/sources/wx.py +7 -11
  22. quasarr/providers/auth.py +1 -1
  23. quasarr/providers/cloudflare.py +1 -1
  24. quasarr/providers/hostname_issues.py +63 -0
  25. quasarr/providers/html_images.py +1 -18
  26. quasarr/providers/html_templates.py +104 -12
  27. quasarr/providers/obfuscated.py +11 -11
  28. quasarr/providers/sessions/al.py +27 -11
  29. quasarr/providers/sessions/dd.py +12 -4
  30. quasarr/providers/sessions/dl.py +19 -11
  31. quasarr/providers/sessions/nx.py +12 -4
  32. quasarr/providers/version.py +1 -1
  33. quasarr/search/sources/al.py +12 -1
  34. quasarr/search/sources/by.py +15 -4
  35. quasarr/search/sources/dd.py +22 -3
  36. quasarr/search/sources/dj.py +12 -1
  37. quasarr/search/sources/dl.py +12 -6
  38. quasarr/search/sources/dt.py +17 -4
  39. quasarr/search/sources/dw.py +15 -4
  40. quasarr/search/sources/fx.py +19 -6
  41. quasarr/search/sources/he.py +15 -2
  42. quasarr/search/sources/mb.py +15 -4
  43. quasarr/search/sources/nk.py +15 -2
  44. quasarr/search/sources/nx.py +15 -4
  45. quasarr/search/sources/sf.py +25 -8
  46. quasarr/search/sources/sj.py +14 -1
  47. quasarr/search/sources/sl.py +17 -2
  48. quasarr/search/sources/wd.py +15 -4
  49. quasarr/search/sources/wx.py +16 -18
  50. quasarr/storage/setup.py +150 -35
  51. {quasarr-2.1.5.dist-info → quasarr-2.2.0.dist-info}/METADATA +6 -3
  52. quasarr-2.2.0.dist-info/RECORD +82 -0
  53. {quasarr-2.1.5.dist-info → quasarr-2.2.0.dist-info}/WHEEL +1 -1
  54. quasarr-2.1.5.dist-info/RECORD +0 -81
  55. {quasarr-2.1.5.dist-info → quasarr-2.2.0.dist-info}/entry_points.txt +0 -0
  56. {quasarr-2.1.5.dist-info → quasarr-2.2.0.dist-info}/licenses/LICENSE +0 -0
  57. {quasarr-2.1.5.dist-info → quasarr-2.2.0.dist-info}/top_level.txt +0 -0

quasarr/providers/sessions/al.py

@@ -12,6 +12,7 @@ import requests
 from bs4 import BeautifulSoup
 from requests.exceptions import Timeout, RequestException
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.log import info, debug
 from quasarr.providers.utils import is_site_usable, is_flaresolverr_available
 
@@ -36,6 +37,7 @@ def create_and_persist_session(shared_state):
     if not is_flaresolverr_available(shared_state):
         info(f'"{hostname.upper()}" requires FlareSolverr which is not configured. '
              f'Please configure FlareSolverr in the web UI to use this site.')
+        mark_hostname_issue(hostname, "session", "FlareSolverr required but not configured")
         return None
 
     cfg = shared_state.values["config"]("Hostnames")
@@ -63,16 +65,19 @@ def create_and_persist_session(shared_state):
             fs_resp.raise_for_status()
         except Timeout:
             info(f"{hostname}: FlareSolverr request timed out")
+            mark_hostname_issue(hostname, "session", "FlareSolverr request timed out")
             return None
         except RequestException as e:
             # This covers HTTP errors and connection issues *other than* timeout
             info(f"{hostname}: FlareSolverr server error: {e}")
+            mark_hostname_issue(hostname, "session", str(e))
             return None
 
         fs_json = fs_resp.json()
         # Check if FlareSolverr actually solved the challenge
         if fs_json.get("status") != "ok" or "solution" not in fs_json:
             info(f"{hostname}: FlareSolverr did not return a valid solution")
+            mark_hostname_issue(hostname, "session", "FlareSolverr did not return a valid solution")
             return None
 
         solution = fs_json["solution"]
@@ -92,6 +97,7 @@ def create_and_persist_session(shared_state):
 
     except Exception as e:
         debug(f'Could not prime "{hostname}" session via FlareSolverr: {e}')
+        mark_hostname_issue(hostname, "session", str(e))
         return None
 
     if user and pw:
@@ -113,13 +119,16 @@ def create_and_persist_session(shared_state):
 
         if r.status_code != 200 or "invalid" in r.text.lower():
             info(f'Login failed: "{hostname}" - {r.status_code} - {r.text}')
+            mark_hostname_issue(hostname, "session", "Login failed")
             return None
         info(f'Login successful: "{hostname}"')
     else:
         info(f'Missing credentials for: "{hostname}" - skipping login')
+        mark_hostname_issue(hostname, "session", "Missing credentials")
         return None
 
     _persist_session_to_db(shared_state, sess)
+    clear_hostname_issue(hostname)
     return sess
 
 
@@ -130,6 +139,7 @@ def retrieve_and_validate_session(shared_state):
     # AL requires FlareSolverr - check availability
     if not is_flaresolverr_available(shared_state):
         info(f'"{hostname.upper()}" requires FlareSolverr which is not configured')
+        mark_hostname_issue(hostname, "session", "FlareSolverr required")
         return None
 
     db = shared_state.values["database"]("sessions")
@@ -193,7 +203,7 @@ def _persist_session_to_db(shared_state, sess):
 
 def _load_session_cookies_for_flaresolverr(sess):
     """
-    Convert a requests.Session's cookies into FlareSolverrstyle list of dicts.
+    Convert a requests.Session's cookies into FlareSolverr-style list of dicts.
     """
     cookie_list = []
     for ck in sess.cookies:
@@ -235,9 +245,9 @@ def fetch_via_flaresolverr(shared_state,
     Re-persist the updated session to the DB.
     Return a dict with "status_code", "headers", "json" (parsed - if available), "text" and "cookies".
 
-    method: "GET" or "POST"
-    post_data: dict of formfields if method=="POST"
-    timeout: seconds (FlareSolverr's internal maxTimeout = timeout*1000 ms)
+    - method: "GET" or "POST"
+    - post_data: dict of form-fields if method=="POST"
+    - timeout: seconds (FlareSolverr's internal maxTimeout = timeout*1000 ms)
     """
     # Check if FlareSolverr is available
     if not is_flaresolverr_available(shared_state):
@@ -289,9 +299,9 @@ def fetch_via_flaresolverr(shared_state,
             json=fs_payload,
             timeout=timeout + 10
         )
-        resp.raise_for_status()
     except requests.exceptions.RequestException as e:
         info(f"Could not reach FlareSolverr: {e}")
+        mark_hostname_issue(hostname, "session", f"FlareSolverr error: {e}")
         return {
             "status_code": None,
             "headers": {},
@@ -303,6 +313,10 @@ def fetch_via_flaresolverr(shared_state,
     except Exception as e:
         raise RuntimeError(f"Could not reach FlareSolverr: {e}")
 
+    if resp.status_code >= 400:
+        mark_hostname_issue(hostname, "session", f"Request failed: {resp.status_code}")
+        raise RuntimeError(f"Request failed: {resp.status_code}")
+
     fs_json = resp.json()
     if fs_json.get("status") != "ok" or "solution" not in fs_json:
         raise RuntimeError(f"FlareSolverr did not return a valid solution: {fs_json.get('message', '<no message>')}")
@@ -345,9 +359,9 @@ def fetch_via_flaresolverr(shared_state,
 
 def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None, timeout: int = 30):
     """
-    method: "GET" or "POST"
-    post_data: for POST only (will be sent as form-data unless you explicitly JSON-encode)
-    timeout: seconds
+    - method: "GET" or "POST"
+    - post_data: for POST only (will be sent as form-data unless you explicitly JSON-encode)
+    - timeout: seconds
     """
     sess = retrieve_and_validate_session(shared_state)
     if not sess:
@@ -355,11 +369,13 @@ def fetch_via_requests_session(shared_state, method: str, target_url: str, post_
 
     # Execute request
     if method.upper() == "GET":
-        resp = sess.get(target_url, timeout=timeout)
+        r = sess.get(target_url, timeout=timeout)
     else:  # POST
-        resp = sess.post(target_url, data=post_data, timeout=timeout)
+        r = sess.post(target_url, data=post_data, timeout=timeout)
+
+    r.raise_for_status()
 
     # Re-persist cookies, since the site might have modified them during the request
     _persist_session_to_db(shared_state, sess)
 
-    return resp
+    return r
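
The mark_hostname_issue/clear_hostname_issue calls above land in the new quasarr/providers/hostname_issues module (+63/-0 in the file list), whose body is not part of these hunks. Judging purely from the call sites, a minimal sketch of its interface might look like the following; the in-memory dict and the get_hostname_issues accessor are assumptions, not the released implementation.

# Hypothetical sketch of quasarr/providers/hostname_issues, inferred from its
# call sites; the released module is 63 lines and may persist issues elsewhere.
import time

_issues = {}  # hostname -> {"category": ..., "message": ..., "since": ...}


def mark_hostname_issue(hostname, category, message):
    """Record the latest failure for a host; category is "session", "feed" or "search"."""
    _issues[hostname] = {"category": category, "message": message, "since": time.time()}


def clear_hostname_issue(hostname):
    """Drop any recorded issue once the host works again."""
    _issues.pop(hostname, None)


def get_hostname_issues():
    """Assumed read accessor for the web UI; name not confirmed by the diff."""
    return dict(_issues)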

quasarr/providers/sessions/dd.py

@@ -7,6 +7,7 @@ import pickle
 
 import requests
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.log import info, debug
 from quasarr.providers.utils import is_site_usable
 
@@ -30,24 +31,28 @@ def create_and_persist_session(shared_state):
         'Login': 'true',
     }
 
-    dd_response = dd_session.post(f'https://{dd}/index/index',
+    r = dd_session.post(f'https://{dd}/index/index',
                                   cookies=cookies, headers=headers, data=data, timeout=10)
+    r.raise_for_status()
 
     error = False
-    if dd_response.status_code == 200:
+    if r.status_code == 200:
         try:
-            response_data = dd_response.json()
+            response_data = r.json()
             if not response_data.get('loggedin'):
                 info("DD rejected login.")
+                mark_hostname_issue(hostname, "session", "Login rejected")
                 raise ValueError
-            session_id = dd_response.cookies.get("PHPSESSID")
+            session_id = r.cookies.get("PHPSESSID")
             if session_id:
                 dd_session.cookies.set('PHPSESSID', session_id, domain=dd)
             else:
                 info("Invalid DD response on login.")
+                mark_hostname_issue(hostname, "session", "Invalid login response")
                 error = True
         except ValueError:
             info("Could not parse DD response on login.")
+            mark_hostname_issue(hostname, "session", "Could not parse login response")
            error = True
 
     if error:
@@ -58,9 +63,11 @@ def create_and_persist_session(shared_state):
         serialized_session = pickle.dumps(dd_session)
         session_string = base64.b64encode(serialized_session).decode('utf-8')
         shared_state.values["database"]("sessions").update_store("dd", session_string)
+        clear_hostname_issue(hostname)
         return dd_session
     else:
         info("Could not create DD session")
+        mark_hostname_issue(hostname, "session", f"HTTP {r.status_code}")
         return None
 
 
@@ -80,6 +87,7 @@ def retrieve_and_validate_session(shared_state):
             raise ValueError("Retrieved object is not a valid requests.Session instance.")
     except Exception as e:
         info(f"Session retrieval failed: {e}")
+        mark_hostname_issue(hostname, "session", str(e))
         dd_session = create_and_persist_session(shared_state)
 
     return dd_session
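
As the hunks above show, DD (and NX below) persist the whole requests.Session by pickling it and base64-encoding the bytes into the sessions store. A self-contained sketch of that round-trip, with a plain dict standing in for shared_state.values["database"]("sessions"):

# Sketch of the pickle/base64 session round-trip used by the dd/nx providers;
# "store" is a stand-in for Quasarr's sessions database wrapper.
import base64
import pickle

import requests

store = {}


def save_session(key, sess):
    # pickle.dumps -> base64 text, as in create_and_persist_session()
    store[key] = base64.b64encode(pickle.dumps(sess)).decode("utf-8")


def load_session(key):
    # base64 text -> bytes -> Session, as in retrieve_and_validate_session()
    sess = pickle.loads(base64.b64decode(store[key]))
    if not isinstance(sess, requests.Session):
        raise ValueError("Retrieved object is not a valid requests.Session instance.")
    return sess


sess = requests.Session()
sess.cookies.set("PHPSESSID", "example")
save_session("dd", sess)
assert load_session("dd").cookies.get("PHPSESSID") == "example"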

quasarr/providers/sessions/dl.py

@@ -8,6 +8,7 @@ import pickle
 import requests
 from bs4 import BeautifulSoup
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.log import info, debug
 from quasarr.providers.utils import is_site_usable
 
@@ -39,6 +40,7 @@ def create_and_persist_session(shared_state):
 
     if not user or not password:
         info(f'Missing credentials for: "{hostname}" - user and password are required')
+        mark_hostname_issue(hostname, "session", "Missing credentials")
         return None
 
     sess = requests.Session()
@@ -50,18 +52,17 @@ def create_and_persist_session(shared_state):
     try:
         # Step 1: Get login page to retrieve CSRF token
        login_page_url = f'https://www.{host}/login/'
-        login_page = sess.get(login_page_url, timeout=30)
+        login_r = sess.get(login_page_url, timeout=30)
 
-        if login_page.status_code != 200:
-            info(f'Failed to load login page for: "{hostname}" - Status {login_page.status_code}')
-            return None
+        login_r.raise_for_status()
 
         # Extract CSRF token from login form
-        soup = BeautifulSoup(login_page.text, 'html.parser')
+        soup = BeautifulSoup(login_r.text, 'html.parser')
         csrf_input = soup.find('input', {'name': '_xfToken'})
 
         if not csrf_input or not csrf_input.get('value'):
             info(f'Could not find CSRF token on login page for: "{hostname}"')
+            mark_hostname_issue(hostname, "session", "Could not find CSRF token")
             return None
 
         csrf_token = csrf_input['value']
@@ -76,19 +77,23 @@ def create_and_persist_session(shared_state):
         }
 
         login_url = f'https://www.{host}/login/login'
-        login_response = sess.post(login_url, data=login_data, timeout=30)
+        submit_r = sess.post(login_url, data=login_data, timeout=30)
+        submit_r.raise_for_status()
 
         # Step 3: Verify login success
         # Check if we're logged in by accessing the main page
-        verify_response = sess.get(f'https://www.{host}/', timeout=30)
+        verify_r = sess.get(f'https://www.{host}/', timeout=30)
+        verify_r.raise_for_status()
 
-        if 'data-logged-in="true"' not in verify_response.text:
+        if 'data-logged-in="true"' not in verify_r.text:
             info(f'Login verification failed for: "{hostname}" - invalid credentials or login failed')
+            mark_hostname_issue(hostname, "session", "Login verification failed")
             return None
 
         info(f'Session successfully created for: "{hostname}" using user/password')
     except Exception as e:
         info(f'Failed to create session for: "{hostname}" - {e}')
+        mark_hostname_issue(hostname, "session", str(e))
         return None
 
     # Persist session to database
@@ -96,6 +101,7 @@ def create_and_persist_session(shared_state):
     token = base64.b64encode(blob).decode("utf-8")
     shared_state.values["database"]("sessions").update_store(hostname, token)
 
+    clear_hostname_issue(hostname)
     return sess
 
 
@@ -176,11 +182,13 @@ def fetch_via_requests_session(shared_state, method: str, target_url: str, post_
 
     # Execute request
     if method.upper() == "GET":
-        resp = sess.get(target_url, params=get_params, timeout=timeout)
+        r = sess.get(target_url, params=get_params, timeout=timeout)
     else:  # POST
-        resp = sess.post(target_url, data=post_data, timeout=timeout)
+        r = sess.post(target_url, data=post_data, timeout=timeout)
+
+    r.raise_for_status()
 
     # Re-persist cookies, since the site might have modified them during the request
     _persist_session_to_db(shared_state, sess)
 
-    return resp
+    return r
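
The DL login is a stock XenForo flow: GET /login/, lift the hidden _xfToken input, POST it back with the credentials, then confirm data-logged-in="true" on the main page. A condensed sketch of just the token step, with error handling reduced to exceptions; the URL path and field name are taken from the hunks above, and host is a placeholder:

# Condensed sketch of the CSRF-token step from create_and_persist_session();
# this is an illustration, not the released code.
import requests
from bs4 import BeautifulSoup


def fetch_xf_token(sess: requests.Session, host: str) -> str:
    r = sess.get(f"https://www.{host}/login/", timeout=30)
    r.raise_for_status()  # replaces the old manual status_code != 200 check
    csrf_input = BeautifulSoup(r.text, "html.parser").find("input", {"name": "_xfToken"})
    if not csrf_input or not csrf_input.get("value"):
        raise RuntimeError("Could not find CSRF token on login page")
    return csrf_input["value"]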

quasarr/providers/sessions/nx.py

@@ -7,6 +7,7 @@ import pickle
 
 import requests
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.log import info, debug
 from quasarr.providers.utils import is_site_usable
 
@@ -28,24 +29,28 @@ def create_and_persist_session(shared_state):
         'password': shared_state.values["config"]("NX").get("password")
     }
 
-    nx_response = nx_session.post(f'https://{nx}/api/user/auth', cookies=cookies, headers=headers, json=json_data,
-                                  timeout=10)
+    r = nx_session.post(f'https://{nx}/api/user/auth', cookies=cookies, headers=headers, json=json_data,
+                        timeout=10)
+    r.raise_for_status()
 
     error = False
-    if nx_response.status_code == 200:
+    if r.status_code == 200:
         try:
-            response_data = nx_response.json()
+            response_data = r.json()
             if response_data.get('err', {}).get('status') == 403:
                 info("Invalid NX credentials provided.")
+                mark_hostname_issue(hostname, "session", "Invalid credentials")
                 error = True
             elif response_data.get('user').get('username') != shared_state.values["config"]("NX").get("user"):
                 info("Invalid NX response on login.")
+                mark_hostname_issue(hostname, "session", "Invalid login response")
                 error = True
             else:
                 sessiontoken = response_data.get('user').get('sessiontoken')
                 nx_session.cookies.set('sessiontoken', sessiontoken, domain=nx)
         except ValueError:
             info("Could not parse NX response on login.")
+            mark_hostname_issue(hostname, "session", "Could not parse login response")
             error = True
 
     if error:
@@ -56,9 +61,11 @@ def create_and_persist_session(shared_state):
         serialized_session = pickle.dumps(nx_session)
         session_string = base64.b64encode(serialized_session).decode('utf-8')
         shared_state.values["database"]("sessions").update_store("nx", session_string)
+        clear_hostname_issue(hostname)
         return nx_session
     else:
         info("Could not create NX session")
+        mark_hostname_issue(hostname, "session", f"HTTP {r.status_code}")
         return None
 
 
@@ -78,6 +85,7 @@ def retrieve_and_validate_session(shared_state):
             raise ValueError("Retrieved object is not a valid requests.Session instance.")
     except Exception as e:
         info(f"Session retrieval failed: {e}")
+        mark_hostname_issue(hostname, "session", str(e))
         nx_session = create_and_persist_session(shared_state)
 
     return nx_session
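
A side effect of switching from manual status checks to r.raise_for_status() across these session modules: any 4xx/5xx response now raises requests.exceptions.HTTPError, which the surrounding except blocks translate into a hostname issue. A small illustration of that behavior (the httpbin URL is chosen here purely for demonstration):

# raise_for_status() turns HTTP error codes into exceptions; transport
# failures (DNS, timeouts) raise other RequestException subclasses instead.
import requests

r = requests.get("https://httpbin.org/status/503", timeout=10)
try:
    r.raise_for_status()
except requests.exceptions.HTTPError as e:
    print(e)  # e.g. "503 Server Error: SERVICE UNAVAILABLE for url: ..."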

quasarr/providers/version.py

@@ -8,7 +8,7 @@ import requests
 
 
 def get_version():
-    return "2.1.5"
+    return "2.2.0"
 
 
 def get_latest_version():

quasarr/search/sources/al.py

@@ -11,6 +11,7 @@ from bs4 import BeautifulSoup
 
 from quasarr.downloads.sources.al import (guess_title,
                                           parse_info_from_feed_entry, parse_info_from_download_item)
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
 from quasarr.providers.log import info, debug
 from quasarr.providers.sessions.al import invalidate_session, fetch_via_requests_session
@@ -136,10 +137,11 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
         return releases
 
     try:
-        r = fetch_via_requests_session(shared_state, method="GET", target_url=f'https://www.{host}/', timeout=10)
+        r = fetch_via_requests_session(shared_state, method="GET", target_url=f'https://www.{host}/', timeout=30)
         r.raise_for_status()
     except Exception as e:
         info(f"{hostname}: could not fetch feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
         invalidate_session(shared_state)
         return releases
 
@@ -230,9 +232,13 @@ def al_feed(shared_state, start_time, request_from, mirror=None):
 
         except Exception as e:
             info(f"{hostname}: error parsing feed item: {e}")
+            mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
 
     elapsed = time.time() - start_time
     debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases
 
 
@@ -279,6 +285,7 @@ def al_search(shared_state, start_time, request_from, search_string,
         r.raise_for_status()
     except Exception as e:
         info(f"{hostname}: search load error: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
         invalidate_session(shared_state)
         return releases
 
@@ -440,7 +447,11 @@
 
         except Exception as e:
             info(f"{hostname}: error parsing search item: {e}")
+            mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
 
     elapsed = time.time() - start_time
     debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases

quasarr/search/sources/by.py

@@ -12,6 +12,7 @@ from urllib.parse import quote_plus
 import requests
 from bs4 import BeautifulSoup
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
 from quasarr.providers.log import info, debug
 
@@ -163,13 +164,18 @@ def by_feed(shared_state, start_time, request_from, mirror=None):
     url = f"{base_url}/{feed_type}"
     headers = {'User-Agent': shared_state.values['user_agent']}
     try:
-        html_doc = requests.get(url, headers=headers, timeout=10).content
-        soup = BeautifulSoup(html_doc, 'html.parser')
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
+        soup = BeautifulSoup(r.content, 'html.parser')
         releases = _parse_posts(soup, shared_state, base_url, password, request_from=request_from, mirror_filter=mirror)
     except Exception as e:
         info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
         releases = []
     debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases
 
 
@@ -190,8 +196,9 @@ def by_search(shared_state, start_time, request_from, search_string, mirror=None
     url = f"{base_url}/?q={q}"
     headers = {'User-Agent': shared_state.values['user_agent']}
     try:
-        html_doc = requests.get(url, headers=headers, timeout=10).content
-        soup = BeautifulSoup(html_doc, 'html.parser')
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        soup = BeautifulSoup(r.content, 'html.parser')
         releases = _parse_posts(
             soup, shared_state, base_url, password, mirror_filter=mirror,
             is_search=True, request_from=request_from,
@@ -199,6 +206,10 @@
         )
     except Exception as e:
         info(f"Error loading {hostname.upper()} search: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
         releases = []
     debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases

quasarr/search/sources/dd.py

@@ -7,6 +7,7 @@ import time
 from base64 import urlsafe_b64encode
 from datetime import datetime, timezone
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
 from quasarr.providers.log import info, debug
 from quasarr.providers.sessions.dd import create_and_persist_session, retrieve_and_validate_session
@@ -39,7 +40,12 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
         debug(f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!')
         return releases
 
-    dd_session = retrieve_and_validate_session(shared_state)
+    try:
+        dd_session = retrieve_and_validate_session(shared_state)
+    except Exception as e:
+        mark_hostname_issue(hostname, "search", str(e))
+        return releases
+
     if not dd_session:
         info(f"Could not retrieve valid session for {dd}")
         return releases
@@ -57,6 +63,13 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
         return releases
     search_string = html.unescape(search_string)
 
+    if not search_string:
+        search_type = "feed"
+        timeout = 30
+    else:
+        search_type = "search"
+        timeout = 10
+
     qualities = [
         "disk-480p",
         "web-480p",
@@ -78,7 +91,9 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
     for page in range(0, 100, 20):
         url = f'https://{dd}/index/search/keyword/{search_string}/qualities/{','.join(qualities)}/from/{page}/search'
 
-        releases_on_page = dd_session.get(url, headers=headers, timeout=10).json()
+        r = dd_session.get(url, headers=headers, timeout=timeout)
+        r.raise_for_status()
+        releases_on_page = r.json()
         if releases_on_page:
             release_list.extend(releases_on_page)
 
@@ -125,12 +140,16 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
                 })
             except Exception as e:
                 info(f"Error parsing {hostname.upper()} feed: {e}")
+                mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
                 continue
 
     except Exception as e:
-        info(f"Error loading {hostname.upper()} feed: {e}")
+        info(f"Error loading {hostname.upper()} {search_type}: {e}")
+        mark_hostname_issue(hostname, search_type, str(e) if "e" in dir() else "Error occurred")
 
     elapsed_time = time.time() - start_time
     debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")
 
+    if releases:
+        clear_hostname_issue(hostname)
     return releases

quasarr/search/sources/dj.py

@@ -11,6 +11,7 @@ from datetime import datetime, timedelta
 import requests
 from bs4 import BeautifulSoup
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
 from quasarr.providers.log import info, debug
 
@@ -40,10 +41,12 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
     headers = {"User-Agent": shared_state.values["user_agent"]}
 
     try:
-        r = requests.get(url, headers=headers, timeout=10)
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
         data = json.loads(r.content)
     except Exception as e:
         info(f"{hostname.upper()}: feed load error: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
         return releases
 
     for release in data:
@@ -92,6 +95,9 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
             continue
 
     debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases
 
 
@@ -124,6 +130,7 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
         results = soup.find_all("a", href=re.compile(r"^/serie/"))
     except Exception as e:
         info(f"{hostname.upper()}: search load error: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
         return releases
 
     one_hour_ago = (datetime.now() - timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%S')
@@ -160,6 +167,7 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
             api_url = f"https://{sj_host}/api/media/{media_id}/releases"
 
             r = requests.get(api_url, headers=headers, timeout=10)
+            r.raise_for_status()
             data = json.loads(r.content)
 
             for season_block in data.values():
@@ -210,4 +218,7 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
             continue
 
     debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases

quasarr/search/sources/dl.py

@@ -10,6 +10,7 @@ from html import unescape
 
 from bs4 import BeautifulSoup
 
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
 from quasarr.providers.log import info, debug
 from quasarr.providers.sessions.dl import retrieve_and_validate_session, invalidate_session, fetch_via_requests_session
@@ -75,13 +76,10 @@ def dl_feed(shared_state, start_time, request_from, mirror=None):
         return releases
 
     forum_url = f'https://www.{host}/forums/{forum}/?order=post_date&direction=desc'
-    response = sess.get(forum_url, timeout=30)
+    r = sess.get(forum_url, timeout=30)
+    r.raise_for_status()
 
-    if response.status_code != 200:
-        info(f"{hostname}: Forum request failed with {response.status_code}")
-        return releases
-
-    soup = BeautifulSoup(response.content, 'html.parser')
+    soup = BeautifulSoup(r.content, 'html.parser')
     # Find all thread items in the forum
     items = soup.select('div.structItem.structItem--thread')
 
@@ -147,10 +145,14 @@
 
     except Exception as e:
         info(f"{hostname}: Forum feed error: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
         invalidate_session(shared_state)
 
     elapsed = time.time() - start_time
     debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
     return releases
 
 
@@ -292,6 +294,7 @@ def _search_single_page(shared_state, host, search_string, search_id, page_num,
 
     except Exception as e:
         info(f"{hostname}: [Page {page_num}] error: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
         return page_releases, None
 
 
@@ -355,6 +358,7 @@ def dl_search(shared_state, start_time, request_from, search_string,
 
     except Exception as e:
         info(f"{hostname}: search error: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
         invalidate_session(shared_state)
 
     debug(f"{hostname}: FINAL - Found {len(releases)} valid releases - providing to {request_from}")
@@ -362,4 +366,6 @@
     elapsed = time.time() - start_time
     debug(f"Time taken: {elapsed:.2f}s ({hostname})")
 
+    if releases:
+        clear_hostname_issue(hostname)
     return releases
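
Taken together, the search sources in 2.2.0 converge on one shape: raise_for_status() instead of silent status checks, mark_hostname_issue() on every failure path, and clear_hostname_issue() once a run produces releases. Reduced to a skeleton (fetch_page and parse_releases are placeholders for the per-site logic, not Quasarr functions):

# Skeleton of the feed/search pattern shared by the 2.2.0 search sources;
# a sketch for orientation, not code from the release.
import time

from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.log import info, debug


def generic_source(hostname, url, fetch_page, parse_releases, search_type="feed"):
    start_time = time.time()
    releases = []
    try:
        r = fetch_page(url)   # requests.get(...) or a persisted session
        r.raise_for_status()  # 4xx/5xx now surface as exceptions
        releases = parse_releases(r)
    except Exception as e:
        info(f"Error loading {hostname.upper()} {search_type}: {e}")
        mark_hostname_issue(hostname, search_type, str(e))
    debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
    if releases:
        clear_hostname_issue(hostname)  # a successful run clears the flag
    return releases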