quasarr 1.20.7__py3-none-any.whl → 1.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quasarr/__init__.py +7 -0
- quasarr/api/arr/__init__.py +4 -1
- quasarr/downloads/__init__.py +93 -27
- quasarr/downloads/sources/dl.py +196 -0
- quasarr/downloads/sources/nk.py +8 -5
- quasarr/downloads/sources/wx.py +127 -0
- quasarr/providers/html_images.py +2 -0
- quasarr/providers/myjd_api.py +35 -4
- quasarr/providers/sessions/dl.py +175 -0
- quasarr/providers/shared_state.py +21 -5
- quasarr/providers/version.py +1 -1
- quasarr/search/__init__.py +9 -0
- quasarr/search/sources/dl.py +316 -0
- quasarr/search/sources/wx.py +342 -0
- quasarr/storage/config.py +7 -1
- quasarr/storage/setup.py +10 -2
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/METADATA +3 -1
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/RECORD +22 -17
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/WHEEL +0 -0
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/entry_points.txt +0 -0
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/licenses/LICENSE +0 -0
- {quasarr-1.20.7.dist-info → quasarr-1.21.0.dist-info}/top_level.txt +0 -0
quasarr/providers/myjd_api.py
CHANGED
@@ -38,7 +38,7 @@ import requests
 import urllib3
 from Cryptodome.Cipher import AES
 
-from quasarr.providers.log import debug
+from quasarr.providers.log import info, debug
 from quasarr.providers.version import get_version
 
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -569,6 +569,8 @@ class Myjdapi:
     Main class for connecting to JD API.
 
     """
+    # Class variable to track connection failures across all instances
+    _connection_failed_at = None
 
     def __init__(self):
         """
@@ -708,6 +710,15 @@ class Myjdapi:
        :returns: boolean -- True if succesful, False if there was any error.
 
        """
+       # Check if we're in cooldown period (5 minutes = 300 seconds)
+       if Myjdapi._connection_failed_at is not None:
+           time_since_failure = time.time() - Myjdapi._connection_failed_at
+           if time_since_failure < 300:
+               # Silently return False during cooldown - don't log anything
+               return False
+           # Cooldown expired, reset for retry
+           Myjdapi._connection_failed_at = None
+
        self.update_request_id()
        self.__login_secret = None
        self.__device_secret = None
@@ -723,6 +734,15 @@ class Myjdapi:
        response = self.request_api("/my/connect", "GET", [("email", email),
                                                           ("appkey",
                                                            self.__app_key)])
+
+       if response is None:
+           # Log and set failure timestamp
+           info("JDownloader API is currently unavailable! Stopping connection attempts for 5 minutes.")
+           Myjdapi._connection_failed_at = time.time()
+           return False
+
+       # Connection successful, reset failure timestamp
+       Myjdapi._connection_failed_at = None
        self.__connected = True
        self.update_request_id()
        self.__session_token = response["sessiontoken"]
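Taken together, the two hunks above form a simple circuit breaker shared by every `Myjdapi` instance: the first failed `/my/connect` call logs one warning and stamps the class-level `_connection_failed_at`, and for the next five minutes every further `connect()` call returns `False` silently. A minimal standalone sketch of the same pattern (illustrative names, not the package's code):

```python
import time


class CooldownConnector:
    """Illustrative stand-in for Myjdapi's shared cooldown logic."""

    _failed_at = None  # class-level, shared across instances
    COOLDOWN = 300     # seconds

    def connect(self) -> bool:
        cls = type(self)
        if cls._failed_at is not None:
            if time.time() - cls._failed_at < cls.COOLDOWN:
                return False  # inside cooldown: fail silently, no log spam
            cls._failed_at = None  # cooldown expired: allow one retry

        if not self._do_connect():  # stands in for the real /my/connect request
            cls._failed_at = time.time()  # arm the breaker for five minutes
            return False

        cls._failed_at = None  # success resets the breaker
        return True

    def _do_connect(self) -> bool:
        return False  # placeholder so the sketch runs standalone
```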
@@ -826,9 +846,16 @@
             }
             try:
                 encrypted_response = requests.get(api + query, timeout=timeout, headers=headers)
+            except requests.exceptions.ConnectionError:
+                return None
             except Exception:
-                encrypted_response = requests.get(api + query, timeout=timeout, headers=headers, verify=False)
-                debug("Could not establish secure connection to JDownloader.")
+                try:
+                    encrypted_response = requests.get(api + query, timeout=timeout, headers=headers, verify=False)
+                    debug("Could not establish secure connection to JDownloader. Is your time / timezone correct?")
+                except requests.exceptions.ConnectionError:
+                    return None
+                except Exception:
+                    return None
         else:
             params_request = []
             if params is not None:
@@ -863,6 +890,8 @@
                     data=encrypted_data,
                     timeout=timeout
                 )
+            except requests.exceptions.ConnectionError:
+                return None
             except Exception:
                 try:
                     encrypted_response = requests.post(
@@ -875,7 +904,9 @@
                         timeout=timeout,
                         verify=False
                     )
-                    debug("Could not establish secure connection to JDownloader.")
+                    debug("Could not establish secure connection to JDownloader. Is your time / timezone correct?")
+                except requests.exceptions.ConnectionError:
+                    return None
                 except Exception:
                     return None
         if encrypted_response.status_code == 403:
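All of the `request_api` hunks follow the same fallback chain: attempt a TLS-verified request first; on `ConnectionError` give up immediately and return `None` (which `connect()` now interprets as "API unavailable"); on any other error, typically certificate validation failing because the system clock is off, retry once with `verify=False` before returning `None`. A hedged sketch of that chain for a plain GET (the function name is illustrative, not the package's API):

```python
import requests


def fetch_with_tls_fallback(url: str, timeout: int = 10):
    """Sketch of the request_api fallback chain, assuming a simple GET."""
    try:
        return requests.get(url, timeout=timeout)
    except requests.exceptions.ConnectionError:
        return None  # host unreachable: retrying without TLS verification won't help
    except Exception:
        # e.g. SSLError from certificate validation (often a wrong system clock);
        # retry once without verification before giving up
        try:
            return requests.get(url, timeout=timeout, verify=False)
        except Exception:
            return None
```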
quasarr/providers/sessions/dl.py
ADDED
@@ -0,0 +1,175 @@
+# -*- coding: utf-8 -*-
+# Quasarr
+# Project by https://github.com/rix1337
+
+import base64
+import pickle
+
+import requests
+from bs4 import BeautifulSoup
+
+from quasarr.providers.log import info, debug
+
+hostname = "dl"
+
+
+def create_and_persist_session(shared_state):
+    """
+    Create and persist a session using user and password.
+
+    Args:
+        shared_state: Shared state object
+
+    Returns:
+        requests.Session or None
+    """
+    cfg = shared_state.values["config"]("Hostnames")
+    host = cfg.get(hostname)
+    credentials_cfg = shared_state.values["config"](hostname.upper())
+
+    user = credentials_cfg.get("user")
+    password = credentials_cfg.get("password")
+
+    if not user or not password:
+        info(f'Missing credentials for: "{hostname}" - user and password are required')
+        return None
+
+    sess = requests.Session()
+
+    # Set user agent
+    ua = shared_state.values["user_agent"]
+    sess.headers.update({'User-Agent': ua})
+
+    try:
+        # Step 1: Get login page to retrieve CSRF token
+        login_page_url = f'https://www.{host}/login/'
+        login_page = sess.get(login_page_url, timeout=30)
+
+        if login_page.status_code != 200:
+            info(f'Failed to load login page for: "{hostname}" - Status {login_page.status_code}')
+            return None
+
+        # Extract CSRF token from login form
+        soup = BeautifulSoup(login_page.text, 'html.parser')
+        csrf_input = soup.find('input', {'name': '_xfToken'})
+
+        if not csrf_input or not csrf_input.get('value'):
+            info(f'Could not find CSRF token on login page for: "{hostname}"')
+            return None
+
+        csrf_token = csrf_input['value']
+
+        # Step 2: Submit login form
+        login_data = {
+            'login': user,
+            'password': password,
+            '_xfToken': csrf_token,
+            'remember': '1',
+            '_xfRedirect': f'https://www.{host}/'
+        }
+
+        login_url = f'https://www.{host}/login/login'
+        login_response = sess.post(login_url, data=login_data, timeout=30)
+
+        # Step 3: Verify login success
+        # Check if we're logged in by accessing the main page
+        verify_response = sess.get(f'https://www.{host}/', timeout=30)
+
+        if 'data-logged-in="true"' not in verify_response.text:
+            info(f'Login verification failed for: "{hostname}" - invalid credentials or login failed')
+            return None
+
+        info(f'Session successfully created for: "{hostname}" using user/password')
+    except Exception as e:
+        info(f'Failed to create session for: "{hostname}" - {e}')
+        return None
+
+    # Persist session to database
+    blob = pickle.dumps(sess)
+    token = base64.b64encode(blob).decode("utf-8")
+    shared_state.values["database"]("sessions").update_store(hostname, token)
+
+    return sess
+
+
+def retrieve_and_validate_session(shared_state):
+    """
+    Retrieve session from database or create a new one.
+
+    Args:
+        shared_state: Shared state object
+
+    Returns:
+        requests.Session or None
+    """
+    db = shared_state.values["database"]("sessions")
+    token = db.retrieve(hostname)
+    if not token:
+        return create_and_persist_session(shared_state)
+
+    try:
+        blob = base64.b64decode(token.encode("utf-8"))
+        sess = pickle.loads(blob)
+        if not isinstance(sess, requests.Session):
+            raise ValueError("Not a Session")
+    except Exception as e:
+        debug(f"{hostname}: session load failed: {e}")
+        return create_and_persist_session(shared_state)
+
+    return sess
+
+
+def invalidate_session(shared_state):
+    """
+    Invalidate the current session.
+
+    Args:
+        shared_state: Shared state object
+    """
+    db = shared_state.values["database"]("sessions")
+    db.delete(hostname)
+    debug(f'Session for "{hostname}" marked as invalid!')
+
+
+def _persist_session_to_db(shared_state, sess):
+    """
+    Serialize & store the given requests.Session into the database under `hostname`.
+
+    Args:
+        shared_state: Shared state object
+        sess: requests.Session to persist
+    """
+    blob = pickle.dumps(sess)
+    token = base64.b64encode(blob).decode("utf-8")
+    shared_state.values["database"]("sessions").update_store(hostname, token)
+
+
+def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None, get_params: dict = None, timeout: int = 30):
+    """
+    Execute request using the session.
+
+    Args:
+        shared_state: Shared state object
+        method: "GET" or "POST"
+        target_url: URL to fetch
+        post_data: POST data (for POST requests)
+        get_params: URL parameters (for GET requests)
+        timeout: Request timeout in seconds
+
+    Returns:
+        Response object
+    """
+    sess = retrieve_and_validate_session(shared_state)
+    if not sess:
+        raise Exception(f"Could not retrieve valid session for {hostname}")
+
+    # Execute request
+    if method.upper() == "GET":
+        resp = sess.get(target_url, params=get_params, timeout=timeout)
+    else:  # POST
+        resp = sess.post(target_url, data=post_data, timeout=timeout)
+
+    # Re-persist cookies, since the site might have modified them during the request
+    _persist_session_to_db(shared_state, sess)
+
+    return resp
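The new module logs into a XenForo-style board (scrape the `_xfToken` CSRF field from `/login/`, POST the credentials to `/login/login`, confirm `data-logged-in="true"` on the homepage) and persists the authenticated `requests.Session` by pickling it into Quasarr's database. A hypothetical caller might look like this (assumes `shared_state` is Quasarr's fully wired shared state; `dl_session_is_logged_in` is not part of the package):

```python
from quasarr.providers.sessions import dl


def dl_session_is_logged_in(shared_state):
    """Hypothetical helper built on the session module above."""
    sess = dl.retrieve_and_validate_session(shared_state)
    if sess is None:
        return False  # missing credentials or failed login

    host = shared_state.values["config"]("Hostnames").get("dl")
    # Requests made through the helper re-persist refreshed cookies afterwards
    resp = dl.fetch_via_requests_session(shared_state, "GET", f"https://www.{host}/")
    return 'data-logged-in="true"' in resp.text
```

Pickling the whole `Session` keeps cookies and headers across restarts; the trade-off is that the stored blob must only ever be read back from Quasarr's own database, since unpickling untrusted data can execute arbitrary code.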
quasarr/providers/shared_state.py
CHANGED
@@ -188,6 +188,7 @@ def connect_device():
 
 def get_device():
     attempts = 0
+    last_backoff_change = 0  # Track when we last changed backoff strategy
 
     while True:
         try:
@@ -199,14 +200,30 @@ def get_device():
 
             update("device", False)
 
-
-
-
-
+            # Determine sleep time based on failure count
+            if attempts <= 10:
+                # First 10 failures: 3 seconds
+                sleep_time = 3
+                if attempts == 10:
+                    info(f"WARNING: {attempts} consecutive JDownloader connection errors. Switching to 1-minute intervals.")
+            elif attempts <= 15:
+                # Next 5 failures (11-15): 1 minute
+                sleep_time = 60
+                if attempts % 10 == 0:
+                    info(f"WARNING: {attempts} consecutive JDownloader connection errors. Please check your credentials!")
+                if attempts == 15:
+                    info(f"WARNING: Still failing after {attempts} attempts. Switching to 5-minute intervals.")
+            else:
+                # After 15 failures: 5 minutes
+                sleep_time = 300
+                if attempts % 10 == 0:
+                    info(f"WARNING: {attempts} consecutive JDownloader connection errors. Please check your credentials!")
 
             if connect_device():
                 break
 
+            time.sleep(sleep_time)
+
     return values["device"]
 
 
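The retry loop now escalates through three tiers: attempts 1-10 wait 3 seconds, attempts 11-15 wait 60 seconds, and every attempt after that waits 5 minutes, with periodic credential warnings along the way. The schedule in isolation (a sketch; `backoff_seconds` is not a function in the package):

```python
def backoff_seconds(attempts: int) -> int:
    """Tiered retry delay matching the get_device() schedule above."""
    if attempts <= 10:
        return 3    # quick retries while the hiccup may be transient
    if attempts <= 15:
        return 60   # slow down after 10 consecutive failures
    return 300      # long-term: one attempt every five minutes


# Delays around the tier boundaries:
assert [backoff_seconds(n) for n in (1, 10, 11, 15, 16, 20)] == [3, 3, 60, 60, 300, 300]
```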
@@ -608,7 +625,6 @@ def is_valid_release(title: str,
         debug(f"Skipping {title!r} as it doesn't match sanitized search string: {search_string!r}")
         return False
 
-
     # if it's a movie search, don't allow any TV show titles (check for NO season or episode tags in the title)
     if is_movie_search:
         if not MOVIE_REGEX.match(title):
quasarr/providers/version.py
CHANGED
quasarr/search/__init__.py
CHANGED
@@ -10,6 +10,7 @@ from quasarr.search.sources.al import al_feed, al_search
 from quasarr.search.sources.by import by_feed, by_search
 from quasarr.search.sources.dd import dd_search, dd_feed
 from quasarr.search.sources.dj import dj_search, dj_feed
+from quasarr.search.sources.dl import dl_search, dl_feed
 from quasarr.search.sources.dt import dt_feed, dt_search
 from quasarr.search.sources.dw import dw_feed, dw_search
 from quasarr.search.sources.fx import fx_feed, fx_search
@@ -21,6 +22,7 @@ from quasarr.search.sources.sf import sf_feed, sf_search
 from quasarr.search.sources.sj import sj_search, sj_feed
 from quasarr.search.sources.sl import sl_feed, sl_search
 from quasarr.search.sources.wd import wd_feed, wd_search
+from quasarr.search.sources.wx import wx_feed, wx_search
 
 
 def get_search_results(shared_state, request_from, imdb_id="", search_phrase="", mirror=None, season="", episode=""):
@@ -34,6 +36,7 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
     al = shared_state.values["config"]("Hostnames").get("al")
     by = shared_state.values["config"]("Hostnames").get("by")
     dd = shared_state.values["config"]("Hostnames").get("dd")
+    dl = shared_state.values["config"]("Hostnames").get("dl")
     dt = shared_state.values["config"]("Hostnames").get("dt")
     dj = shared_state.values["config"]("Hostnames").get("dj")
     dw = shared_state.values["config"]("Hostnames").get("dw")
@@ -46,6 +49,7 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
     sj = shared_state.values["config"]("Hostnames").get("sj")
     sl = shared_state.values["config"]("Hostnames").get("sl")
     wd = shared_state.values["config"]("Hostnames").get("wd")
+    wx = shared_state.values["config"]("Hostnames").get("wx")
 
     start_time = time.time()
 
@@ -56,6 +60,7 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
         (al, al_search),
         (by, by_search),
         (dd, dd_search),
+        (dl, dl_search),
         (dt, dt_search),
         (dj, dj_search),
         (dw, dw_search),
@@ -68,11 +73,13 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
         (sj, sj_search),
         (sl, sl_search),
         (wd, wd_search),
+        (wx, wx_search),
     ]
 
     # LazyLibrarian uses search_phrase for searches
     phrase_map = [
         (by, by_search),
+        (dl, dl_search),
         (dt, dt_search),
         (nx, nx_search),
         (sl, sl_search),
@@ -85,6 +92,7 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
         (by, by_feed),
         (dd, dd_feed),
         (dj, dj_feed),
+        (dl, dl_feed),
         (dt, dt_feed),
         (dw, dw_feed),
         (fx, fx_feed),
@@ -96,6 +104,7 @@ def get_search_results(shared_state, request_from, imdb_id="", search_phrase="",
         (sj, sj_feed),
         (sl, sl_feed),
         (wd, wd_feed),
+        (wx, wx_feed),
     ]
 
     if imdb_id:  # only Radarr/Sonarr are using imdb_id
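Registering a source therefore means adding its import, reading its hostname from the config, and appending a `(hostname, function)` pair to the relevant search, phrase, and feed maps; entries whose hostname is unset are presumably skipped at dispatch time. A simplified sketch of that gating pattern (not the package's exact dispatch code):

```python
# Hypothetical, simplified illustration of the (hostname, function) maps above.
def run_enabled(source_map, *args):
    results = []
    for host, fn in source_map:
        if not host:  # hostname not configured in "Hostnames" -> source disabled
            continue
        results.extend(fn(*args))
    return results


# e.g. with the second source unset:
# run_enabled([("example.al", lambda q: [f"al:{q}"]), (None, lambda q: ["dd"])], "query")
# -> ["al:query"]
```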
|