quasarr-1.26.6-py3-none-any.whl → quasarr-1.27.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of quasarr might be problematic. Click here for more details.

quasarr/__init__.py CHANGED
@@ -6,19 +6,18 @@ import argparse
6
6
  import multiprocessing
7
7
  import os
8
8
  import re
9
- import socket
10
9
  import sys
11
10
  import tempfile
12
11
  import time
13
- from urllib.parse import urlparse, urljoin, parse_qs
14
12
 
15
- import dukpy
16
13
  import requests
17
14
 
18
15
  from quasarr.api import get_api
19
16
  from quasarr.providers import shared_state, version
20
17
  from quasarr.providers.log import info, debug
21
18
  from quasarr.providers.notifications import send_discord_message
19
+ from quasarr.providers.utils import extract_allowed_keys, extract_kv_pairs, is_valid_url, check_ip, check_flaresolverr, \
20
+ validate_address, Unbuffered
22
21
  from quasarr.storage.config import Config, get_clean_hostnames
23
22
  from quasarr.storage.setup import path_config, hostnames_config, hostname_credentials_config, flaresolverr_config, \
24
23
  jdownloader_config
@@ -120,19 +119,15 @@ def run():
120
119
  if arguments.hostnames:
121
120
  hostnames_link = arguments.hostnames
122
121
  if is_valid_url(hostnames_link):
123
- if "pastebin.com" in hostnames_link:
124
- hostnames_link = make_raw_pastebin_link(hostnames_link)
125
-
122
+ # Store the hostnames URL for later use in web UI
123
+ Config("Settings").save("hostnames_url", hostnames_link)
126
124
  print(f"Extracting hostnames from {hostnames_link}...")
127
125
  allowed_keys = supported_hostnames
128
126
  max_keys = len(allowed_keys)
129
127
  shorthand_list = ', '.join(
130
128
  [f'"{key}"' for key in allowed_keys[:-1]]) + ' and ' + f'"{allowed_keys[-1]}"'
131
129
  print(f'There are up to {max_keys} hostnames currently supported: {shorthand_list}')
132
- if "/ini.html" in hostnames_link:
133
- data = build_ini_from_ini_html(hostnames_link)
134
- else:
135
- data = requests.get(hostnames_link).text
130
+ data = requests.get(hostnames_link).text
136
131
  results = extract_kv_pairs(data, allowed_keys)
137
132
 
138
133
  extracted_hostnames = 0
@@ -166,33 +161,21 @@ def run():
166
161
  print(f"You have [{len(hostnames)} of {len(Config._DEFAULT_CONFIG['Hostnames'])}] supported hostnames set up")
167
162
  print(f"For efficiency it is recommended to set up as few hostnames as needed.")
168
163
 
169
- al = Config('Hostnames').get('al')
170
- if al:
171
- user = Config('AL').get('user')
172
- password = Config('AL').get('password')
173
- if not user or not password:
174
- hostname_credentials_config(shared_state, "AL", al)
175
-
176
- dd = Config('Hostnames').get('dd')
177
- if dd:
178
- user = Config('DD').get('user')
179
- password = Config('DD').get('password')
180
- if not user or not password:
181
- hostname_credentials_config(shared_state, "DD", dd)
182
-
183
- nx = Config('Hostnames').get('nx')
184
- if nx:
185
- user = Config('NX').get('user')
186
- password = Config('NX').get('password')
187
- if not user or not password:
188
- hostname_credentials_config(shared_state, "NX", nx)
189
-
190
- dl = Config('Hostnames').get('dl')
191
- if dl:
192
- user = Config('DL').get('user')
193
- password = Config('DL').get('password')
194
- if not user or not password:
195
- hostname_credentials_config(shared_state, "DL", dl)
164
+ # Check credentials for login-required hostnames
165
+ skip_login_db = DataBase("skip_login")
166
+ login_required_sites = ['al', 'dd', 'nx', 'dl']
167
+
168
+ for site in login_required_sites:
169
+ hostname = Config('Hostnames').get(site)
170
+ if hostname:
171
+ site_config = Config(site.upper())
172
+ user = site_config.get('user')
173
+ password = site_config.get('password')
174
+ if not user or not password:
175
+ if skip_login_db.retrieve(site):
176
+ info(f'"{site.upper()}" login skipped by user preference')
177
+ else:
178
+ hostname_credentials_config(shared_state, site.upper(), hostname)
196
179
 
197
180
  config = Config('JDownloader')
198
181
  user = config.get('user')
@@ -240,21 +223,21 @@ def run():
240
223
 
241
224
  jdownloader = multiprocessing.Process(
242
225
  target=jdownloader_connection,
243
- args=(shared_state_dict, shared_state_lock)
226
+ args=(shared_state_dict, shared_state_lock),
227
+ daemon=True
244
228
  )
245
229
  jdownloader.start()
246
230
 
247
231
  updater = multiprocessing.Process(
248
232
  target=update_checker,
249
- args=(shared_state_dict, shared_state_lock)
233
+ args=(shared_state_dict, shared_state_lock),
234
+ daemon=True
250
235
  )
251
236
  updater.start()
252
237
 
253
238
  try:
254
239
  get_api(shared_state_dict, shared_state_lock)
255
240
  except KeyboardInterrupt:
256
- jdownloader.kill()
257
- updater.kill()
258
241
  sys.exit(0)
259
242
 
260
243
 
@@ -333,170 +316,3 @@ def jdownloader_connection(shared_state_dict, shared_state_lock):
333
316
 
334
317
  except KeyboardInterrupt:
335
318
  pass
336
-
337
-
338
- class Unbuffered(object):
339
- def __init__(self, stream):
340
- self.stream = stream
341
-
342
- def write(self, data):
343
- self.stream.write(data)
344
- self.stream.flush()
345
-
346
- def writelines(self, datas):
347
- self.stream.writelines(datas)
348
- self.stream.flush()
349
-
350
- def __getattr__(self, attr):
351
- return getattr(self.stream, attr)
352
-
353
-
354
- def check_ip():
355
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
356
- try:
357
- s.connect(('10.255.255.255', 0))
358
- ip = s.getsockname()[0]
359
- except:
360
- ip = '127.0.0.1'
361
- finally:
362
- s.close()
363
- return ip
364
-
365
-
366
- def check_flaresolverr(shared_state, flaresolverr_url):
367
- # Ensure it ends with /v<digit+>
368
- if not re.search(r"/v\d+$", flaresolverr_url):
369
- print(f"FlareSolverr URL does not end with /v#: {flaresolverr_url}")
370
- return False
371
-
372
- # Try sending a simple test request
373
- headers = {"Content-Type": "application/json"}
374
- data = {
375
- "cmd": "request.get",
376
- "url": "http://www.google.com/",
377
- "maxTimeout": 10000
378
- }
379
-
380
- try:
381
- response = requests.post(flaresolverr_url, headers=headers, json=data, timeout=10)
382
- response.raise_for_status()
383
- json_data = response.json()
384
-
385
- # Check if the structure looks like a valid FlareSolverr response
386
- if "status" in json_data and json_data["status"] == "ok":
387
- solution = json_data["solution"]
388
- solution_ua = solution.get("userAgent", None)
389
- if solution_ua:
390
- shared_state.update("user_agent", solution_ua)
391
- return True
392
- else:
393
- print(f"Unexpected FlareSolverr response: {json_data}")
394
- return False
395
-
396
- except Exception as e:
397
- print(f"Failed to connect to FlareSolverr: {e}")
398
- return False
399
-
400
-
401
- def make_raw_pastebin_link(url):
402
- """
403
- Takes a Pastebin URL and ensures it is a raw link.
404
- If it's not a Pastebin URL, it returns the URL unchanged.
405
- """
406
- # Check if the URL is already a raw Pastebin link
407
- if re.match(r"https?://(?:www\.)?pastebin\.com/raw/\w+", url):
408
- return url # Already raw, return as is
409
-
410
- # Check if the URL is a standard Pastebin link
411
- pastebin_pattern = r"https?://(?:www\.)?pastebin\.com/(\w+)"
412
- match = re.match(pastebin_pattern, url)
413
-
414
- if match:
415
- paste_id = match.group(1)
416
- print(f"The link you provided is not a raw Pastebin link. Attempting to convert it to a raw link from {url}...")
417
- return f"https://pastebin.com/raw/{paste_id}"
418
-
419
- return url # Not a Pastebin link, return unchanged
420
-
421
-
422
- def build_ini_from_ini_html(url: str) -> str:
423
- def get(u: str) -> str:
424
- r = requests.get(u, timeout=10)
425
- r.raise_for_status()
426
- return r.text
427
-
428
- parsed = urlparse(url)
429
- params = parse_qs(parsed.query)
430
-
431
- data_js = get(urljoin(f"{parsed.scheme}://{parsed.netloc}", "data.js"))
432
-
433
- hostnames = dukpy.evaljs("""
434
- var window = {};
435
- %s
436
- window.HOSTNAMES;
437
- """ % data_js)
438
-
439
- excluded = set()
440
- if "exclude" in params:
441
- excluded = set(params["exclude"][0].split(","))
442
-
443
- lines = []
444
- for h in hostnames:
445
- if h["key"] not in excluded:
446
- lines.append(f"{h['key']} = {h['name']}")
447
-
448
- return "\n".join(lines) + "\n"
449
-
450
-
451
- def is_valid_url(url):
452
- if "https://pastebin.com/raw/eX4Mpl3" in url:
453
- print("Example URL detected. Please provide a valid URL found on pastebin or another public site!")
454
- return False
455
-
456
- parsed = urlparse(url)
457
- return parsed.scheme in ("http", "https") and bool(parsed.netloc)
458
-
459
-
460
- def validate_address(address, name):
461
- if not address.startswith("http"):
462
- sys.exit(f"Error: {name} '{address}' is invalid. It must start with 'http'.")
463
-
464
- colon_count = address.count(":")
465
- if colon_count < 1 or colon_count > 2:
466
- sys.exit(
467
- f"Error: {name} '{address}' is invalid. It must contain 1 or 2 colons, but it has {colon_count}.")
468
-
469
-
470
- def extract_allowed_keys(config, section):
471
- """
472
- Extracts allowed keys from the specified section in the configuration.
473
-
474
- :param config: The configuration dictionary.
475
- :param section: The section from which to extract keys.
476
- :return: A list of allowed keys.
477
- """
478
- if section not in config:
479
- raise ValueError(f"Section '{section}' not found in configuration.")
480
- return [key for key, *_ in config[section]]
481
-
482
-
483
- def extract_kv_pairs(input_text, allowed_keys):
484
- """
485
- Extracts key-value pairs from the given text where keys match allowed_keys.
486
-
487
- :param input_text: The input text containing key-value pairs.
488
- :param allowed_keys: A list of allowed two-letter shorthand keys.
489
- :return: A dictionary of extracted key-value pairs.
490
- """
491
- kv_pattern = re.compile(rf"^({'|'.join(map(re.escape, allowed_keys))})\s*=\s*(.*)$")
492
- kv_pairs = {}
493
-
494
- for line in input_text.splitlines():
495
- match = kv_pattern.match(line.strip())
496
- if match:
497
- key, value = match.groups()
498
- kv_pairs[key] = value
499
- else:
500
- print(f"Skipping line because it does not contain any supported hostname: {line}")
501
-
502
- return kv_pairs
@@ -2,9 +2,22 @@
2
2
  # Quasarr
3
3
  # Project by https://github.com/rix1337
4
4
 
5
- from quasarr.providers.html_templates import render_form
6
- from quasarr.providers.html_templates import render_button
5
+ import os
6
+ import signal
7
+ import threading
8
+ import time
9
+ from urllib.parse import urlparse
10
+
11
+ import requests
12
+ from bottle import request, response
13
+
14
+ from quasarr.providers.html_templates import render_form, render_button
15
+ from quasarr.providers.log import info
16
+ from quasarr.providers.shared_state import extract_valid_hostname
17
+ from quasarr.providers.utils import extract_kv_pairs, extract_allowed_keys
18
+ from quasarr.storage.config import Config
7
19
  from quasarr.storage.setup import hostname_form_html, save_hostnames
20
+ from quasarr.storage.sqlite_database import DataBase
8
21
 
9
22
 
10
23
  def setup_config(app, shared_state):
@@ -16,8 +29,107 @@ def setup_config(app, shared_state):
16
29
  back_button = f'''<p>
17
30
  {render_button("Back", "secondary", {"onclick": "location.href='/'"})}
18
31
  </p>'''
19
- return render_form("Hostnames", hostname_form_html(shared_state, message) + back_button)
32
+ return render_form("Hostnames",
33
+ hostname_form_html(shared_state, message, show_restart_button=True,
34
+ show_skip_management=True) + back_button)
20
35
 
21
36
  @app.post("/api/hostnames")
22
37
  def hostnames_api():
23
38
  return save_hostnames(shared_state, timeout=1, first_run=False)
39
+
40
+ @app.post("/api/hostnames/import-url")
41
+ def import_hostnames_from_url():
42
+ """Fetch URL and parse hostnames, return JSON for JS to populate fields."""
43
+ response.content_type = 'application/json'
44
+ try:
45
+ data = request.json
46
+ url = data.get('url', '').strip()
47
+
48
+ if not url:
49
+ return {"success": False, "error": "No URL provided"}
50
+
51
+ # Validate URL
52
+ parsed = urlparse(url)
53
+ if parsed.scheme not in ("http", "https") or not parsed.netloc:
54
+ return {"success": False, "error": "Invalid URL format"}
55
+
56
+ if "/raw/eX4Mpl3" in url:
57
+ return {"success": False, "error": "Example URL detected. Please provide a real URL."}
58
+
59
+ # Fetch content
60
+ try:
61
+ resp = requests.get(url, timeout=15)
62
+ resp.raise_for_status()
63
+ content = resp.text
64
+ except requests.RequestException as e:
65
+ return {"success": False, "error": f"Failed to fetch URL: {str(e)}"}
66
+
67
+ # Parse hostnames
68
+ allowed_keys = extract_allowed_keys(Config._DEFAULT_CONFIG, 'Hostnames')
69
+ results = extract_kv_pairs(content, allowed_keys)
70
+
71
+ if not results:
72
+ return {"success": False, "error": "No hostnames found in the provided URL"}
73
+
74
+ # Validate each hostname
75
+ valid_hostnames = {}
76
+ invalid_hostnames = {}
77
+ for shorthand, hostname in results.items():
78
+ domain_check = extract_valid_hostname(hostname, shorthand)
79
+ domain = domain_check.get('domain')
80
+ if domain:
81
+ valid_hostnames[shorthand] = domain
82
+ else:
83
+ invalid_hostnames[shorthand] = domain_check.get('message', 'Invalid')
84
+
85
+ if not valid_hostnames:
86
+ return {"success": False, "error": "No valid hostnames found in the provided URL"}
87
+
88
+ return {
89
+ "success": True,
90
+ "hostnames": valid_hostnames,
91
+ "errors": invalid_hostnames
92
+ }
93
+
94
+ except Exception as e:
95
+ return {"success": False, "error": f"Error: {str(e)}"}
96
+
97
+ @app.get("/api/skip-login")
98
+ def get_skip_login():
99
+ """Return list of hostnames with skipped login."""
100
+ response.content_type = 'application/json'
101
+ skip_db = DataBase("skip_login")
102
+ login_required_sites = ['al', 'dd', 'dl', 'nx']
103
+ skipped = []
104
+ for site in login_required_sites:
105
+ if skip_db.retrieve(site):
106
+ skipped.append(site)
107
+ return {"skipped": skipped}
108
+
109
+ @app.delete("/api/skip-login/<shorthand>")
110
+ def clear_skip_login(shorthand):
111
+ """Clear skip login preference for a hostname."""
112
+ response.content_type = 'application/json'
113
+ shorthand = shorthand.lower()
114
+ login_required_sites = ['al', 'dd', 'dl', 'nx']
115
+ if shorthand not in login_required_sites:
116
+ return {"success": False, "error": f"Invalid shorthand: {shorthand}"}
117
+
118
+ skip_db = DataBase("skip_login")
119
+ skip_db.delete(shorthand)
120
+ info(f'Skip login preference cleared for "{shorthand.upper()}"')
121
+ return {"success": True}
122
+
123
+ @app.post("/api/restart")
124
+ def restart_quasarr():
125
+ """Restart Quasarr. In Docker with the restart loop, exit(0) triggers restart."""
126
+ response.content_type = 'application/json'
127
+ info("Restart requested via web UI")
128
+
129
+ def delayed_exit():
130
+ time.sleep(0.5)
131
+ # Send SIGINT to main process - triggers KeyboardInterrupt handler
132
+ os.kill(os.getpid(), signal.SIGINT)
133
+
134
+ threading.Thread(target=delayed_exit, daemon=True).start()
135
+ return {"success": True, "message": "Restarting..."}
@@ -13,6 +13,12 @@ from bs4 import BeautifulSoup
13
13
  from requests.exceptions import Timeout, RequestException
14
14
 
15
15
  from quasarr.providers.log import info, debug
16
+ from quasarr.providers.utils import is_site_usable
17
+
18
+
19
+ class SkippedSiteError(Exception):
20
+ """Raised when a site is skipped due to missing credentials or login being skipped."""
21
+ pass
16
22
 
17
23
  hostname = "al"
18
24
 
@@ -106,6 +112,9 @@ def create_and_persist_session(shared_state):
106
112
 
107
113
 
108
114
  def retrieve_and_validate_session(shared_state):
115
+ if not is_site_usable(shared_state, hostname):
116
+ return None
117
+
109
118
  db = shared_state.values["database"]("sessions")
110
119
  stored = db.retrieve(hostname)
111
120
  if not stored:
@@ -216,6 +225,16 @@ def fetch_via_flaresolverr(shared_state,
216
225
  flaresolverr_url = shared_state.values["config"]('FlareSolverr').get('url')
217
226
 
218
227
  sess = retrieve_and_validate_session(shared_state)
228
+ if not sess:
229
+ debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
230
+ return {
231
+ "status_code": None,
232
+ "headers": {},
233
+ "json": None,
234
+ "text": "",
235
+ "cookies": [],
236
+ "error": f"Site '{hostname}' is not usable (login skipped or no credentials)"
237
+ }
219
238
 
220
239
  cmd = "request.get" if method.upper() == "GET" else "request.post"
221
240
  fs_payload = {
@@ -301,6 +320,8 @@ def fetch_via_requests_session(shared_state, method: str, target_url: str, post_
301
320
  – timeout: seconds
302
321
  """
303
322
  sess = retrieve_and_validate_session(shared_state)
323
+ if not sess:
324
+ raise SkippedSiteError(f"{hostname}: site not usable (login skipped or no credentials)")
304
325
 
305
326
  # Execute request
306
327
  if method.upper() == "GET":
@@ -7,7 +7,10 @@ import pickle
7
7
 
8
8
  import requests
9
9
 
10
- from quasarr.providers.log import info
10
+ from quasarr.providers.log import info, debug
11
+ from quasarr.providers.utils import is_site_usable
12
+
13
+ hostname = "dd"
11
14
 
12
15
 
13
16
  def create_and_persist_session(shared_state):
@@ -62,6 +65,10 @@ def create_and_persist_session(shared_state):
62
65
 
63
66
 
64
67
  def retrieve_and_validate_session(shared_state):
68
+ if not is_site_usable(shared_state, hostname):
69
+ debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
70
+ return None
71
+
65
72
  session_string = shared_state.values["database"]("sessions").retrieve("dd")
66
73
  if not session_string:
67
74
  dd_session = create_and_persist_session(shared_state)
@@ -9,6 +9,13 @@ import requests
9
9
  from bs4 import BeautifulSoup
10
10
 
11
11
  from quasarr.providers.log import info, debug
12
+ from quasarr.providers.utils import is_site_usable
13
+
14
+
15
+ class SkippedSiteError(Exception):
16
+ """Raised when a site is skipped due to missing credentials or login being skipped."""
17
+ pass
18
+
12
19
 
13
20
  hostname = "dl"
14
21
 
@@ -16,17 +23,17 @@ hostname = "dl"
16
23
  def create_and_persist_session(shared_state):
17
24
  """
18
25
  Create and persist a session using user and password.
19
-
26
+
20
27
  Args:
21
28
  shared_state: Shared state object
22
-
29
+
23
30
  Returns:
24
31
  requests.Session or None
25
32
  """
26
33
  cfg = shared_state.values["config"]("Hostnames")
27
34
  host = cfg.get(hostname)
28
35
  credentials_cfg = shared_state.values["config"](hostname.upper())
29
-
36
+
30
37
  user = credentials_cfg.get("user")
31
38
  password = credentials_cfg.get("password")
32
39
 
@@ -35,30 +42,30 @@ def create_and_persist_session(shared_state):
35
42
  return None
36
43
 
37
44
  sess = requests.Session()
38
-
45
+
39
46
  # Set user agent
40
47
  ua = shared_state.values["user_agent"]
41
48
  sess.headers.update({'User-Agent': ua})
42
-
49
+
43
50
  try:
44
51
  # Step 1: Get login page to retrieve CSRF token
45
52
  login_page_url = f'https://www.{host}/login/'
46
53
  login_page = sess.get(login_page_url, timeout=30)
47
-
54
+
48
55
  if login_page.status_code != 200:
49
56
  info(f'Failed to load login page for: "{hostname}" - Status {login_page.status_code}')
50
57
  return None
51
-
58
+
52
59
  # Extract CSRF token from login form
53
60
  soup = BeautifulSoup(login_page.text, 'html.parser')
54
61
  csrf_input = soup.find('input', {'name': '_xfToken'})
55
-
62
+
56
63
  if not csrf_input or not csrf_input.get('value'):
57
64
  info(f'Could not find CSRF token on login page for: "{hostname}"')
58
65
  return None
59
-
66
+
60
67
  csrf_token = csrf_input['value']
61
-
68
+
62
69
  # Step 2: Submit login form
63
70
  login_data = {
64
71
  'login': user,
@@ -67,18 +74,18 @@ def create_and_persist_session(shared_state):
67
74
  'remember': '1',
68
75
  '_xfRedirect': f'https://www.{host}/'
69
76
  }
70
-
77
+
71
78
  login_url = f'https://www.{host}/login/login'
72
79
  login_response = sess.post(login_url, data=login_data, timeout=30)
73
-
80
+
74
81
  # Step 3: Verify login success
75
82
  # Check if we're logged in by accessing the main page
76
83
  verify_response = sess.get(f'https://www.{host}/', timeout=30)
77
-
84
+
78
85
  if 'data-logged-in="true"' not in verify_response.text:
79
86
  info(f'Login verification failed for: "{hostname}" - invalid credentials or login failed')
80
87
  return None
81
-
88
+
82
89
  info(f'Session successfully created for: "{hostname}" using user/password')
83
90
  except Exception as e:
84
91
  info(f'Failed to create session for: "{hostname}" - {e}')
@@ -88,20 +95,23 @@ def create_and_persist_session(shared_state):
88
95
  blob = pickle.dumps(sess)
89
96
  token = base64.b64encode(blob).decode("utf-8")
90
97
  shared_state.values["database"]("sessions").update_store(hostname, token)
91
-
98
+
92
99
  return sess
93
100
 
94
101
 
95
102
  def retrieve_and_validate_session(shared_state):
96
103
  """
97
104
  Retrieve session from database or create a new one.
98
-
105
+
99
106
  Args:
100
107
  shared_state: Shared state object
101
-
108
+
102
109
  Returns:
103
110
  requests.Session or None
104
111
  """
112
+ if not is_site_usable(shared_state, hostname):
113
+ return None
114
+
105
115
  db = shared_state.values["database"]("sessions")
106
116
  token = db.retrieve(hostname)
107
117
  if not token:
@@ -122,7 +132,7 @@ def retrieve_and_validate_session(shared_state):
122
132
  def invalidate_session(shared_state):
123
133
  """
124
134
  Invalidate the current session.
125
-
135
+
126
136
  Args:
127
137
  shared_state: Shared state object
128
138
  """
@@ -134,7 +144,7 @@ def invalidate_session(shared_state):
134
144
  def _persist_session_to_db(shared_state, sess):
135
145
  """
136
146
  Serialize & store the given requests.Session into the database under `hostname`.
137
-
147
+
138
148
  Args:
139
149
  shared_state: Shared state object
140
150
  sess: requests.Session to persist
@@ -144,10 +154,11 @@ def _persist_session_to_db(shared_state, sess):
144
154
  shared_state.values["database"]("sessions").update_store(hostname, token)
145
155
 
146
156
 
147
- def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None, get_params: dict = None, timeout: int = 30):
157
+ def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None,
158
+ get_params: dict = None, timeout: int = 30):
148
159
  """
149
160
  Execute request using the session.
150
-
161
+
151
162
  Args:
152
163
  shared_state: Shared state object
153
164
  method: "GET" or "POST"
@@ -155,13 +166,13 @@ def fetch_via_requests_session(shared_state, method: str, target_url: str, post_
155
166
  post_data: POST data (for POST requests)
156
167
  get_params: URL parameters (for GET requests)
157
168
  timeout: Request timeout in seconds
158
-
169
+
159
170
  Returns:
160
171
  Response object
161
172
  """
162
173
  sess = retrieve_and_validate_session(shared_state)
163
174
  if not sess:
164
- raise Exception(f"Could not retrieve valid session for {hostname}")
175
+ raise SkippedSiteError(f"{hostname}: site not usable (login skipped or no credentials)")
165
176
 
166
177
  # Execute request
167
178
  if method.upper() == "GET":
@@ -7,7 +7,10 @@ import pickle
7
7
 
8
8
  import requests
9
9
 
10
- from quasarr.providers.log import info
10
+ from quasarr.providers.log import info, debug
11
+ from quasarr.providers.utils import is_site_usable
12
+
13
+ hostname = "nx"
11
14
 
12
15
 
13
16
  def create_and_persist_session(shared_state):
@@ -60,6 +63,10 @@ def create_and_persist_session(shared_state):
60
63
 
61
64
 
62
65
  def retrieve_and_validate_session(shared_state):
66
+ if not is_site_usable(shared_state, hostname):
67
+ debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
68
+ return None
69
+
63
70
  session_string = shared_state.values["database"]("sessions").retrieve("nx")
64
71
  if not session_string:
65
72
  nx_session = create_and_persist_session(shared_state)