quasarr 0.1.6__py3-none-any.whl → 1.23.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of quasarr has been flagged as possibly problematic.
- quasarr/__init__.py +316 -42
- quasarr/api/__init__.py +187 -0
- quasarr/api/arr/__init__.py +387 -0
- quasarr/api/captcha/__init__.py +1189 -0
- quasarr/api/config/__init__.py +23 -0
- quasarr/api/sponsors_helper/__init__.py +166 -0
- quasarr/api/statistics/__init__.py +196 -0
- quasarr/downloads/__init__.py +319 -256
- quasarr/downloads/linkcrypters/__init__.py +0 -0
- quasarr/downloads/linkcrypters/al.py +237 -0
- quasarr/downloads/linkcrypters/filecrypt.py +444 -0
- quasarr/downloads/linkcrypters/hide.py +123 -0
- quasarr/downloads/packages/__init__.py +476 -0
- quasarr/downloads/sources/al.py +697 -0
- quasarr/downloads/sources/by.py +106 -0
- quasarr/downloads/sources/dd.py +76 -0
- quasarr/downloads/sources/dj.py +7 -0
- quasarr/downloads/sources/dl.py +199 -0
- quasarr/downloads/sources/dt.py +66 -0
- quasarr/downloads/sources/dw.py +14 -7
- quasarr/downloads/sources/he.py +112 -0
- quasarr/downloads/sources/mb.py +47 -0
- quasarr/downloads/sources/nk.py +54 -0
- quasarr/downloads/sources/nx.py +42 -83
- quasarr/downloads/sources/sf.py +159 -0
- quasarr/downloads/sources/sj.py +7 -0
- quasarr/downloads/sources/sl.py +90 -0
- quasarr/downloads/sources/wd.py +110 -0
- quasarr/downloads/sources/wx.py +127 -0
- quasarr/providers/cloudflare.py +204 -0
- quasarr/providers/html_images.py +22 -0
- quasarr/providers/html_templates.py +211 -104
- quasarr/providers/imdb_metadata.py +108 -3
- quasarr/providers/log.py +19 -0
- quasarr/providers/myjd_api.py +201 -40
- quasarr/providers/notifications.py +99 -11
- quasarr/providers/obfuscated.py +65 -0
- quasarr/providers/sessions/__init__.py +0 -0
- quasarr/providers/sessions/al.py +286 -0
- quasarr/providers/sessions/dd.py +78 -0
- quasarr/providers/sessions/dl.py +175 -0
- quasarr/providers/sessions/nx.py +76 -0
- quasarr/providers/shared_state.py +656 -79
- quasarr/providers/statistics.py +154 -0
- quasarr/providers/version.py +60 -1
- quasarr/providers/web_server.py +1 -1
- quasarr/search/__init__.py +144 -15
- quasarr/search/sources/al.py +448 -0
- quasarr/search/sources/by.py +204 -0
- quasarr/search/sources/dd.py +135 -0
- quasarr/search/sources/dj.py +213 -0
- quasarr/search/sources/dl.py +354 -0
- quasarr/search/sources/dt.py +265 -0
- quasarr/search/sources/dw.py +94 -67
- quasarr/search/sources/fx.py +89 -33
- quasarr/search/sources/he.py +196 -0
- quasarr/search/sources/mb.py +195 -0
- quasarr/search/sources/nk.py +188 -0
- quasarr/search/sources/nx.py +75 -21
- quasarr/search/sources/sf.py +374 -0
- quasarr/search/sources/sj.py +213 -0
- quasarr/search/sources/sl.py +246 -0
- quasarr/search/sources/wd.py +208 -0
- quasarr/search/sources/wx.py +337 -0
- quasarr/storage/config.py +39 -10
- quasarr/storage/setup.py +269 -97
- quasarr/storage/sqlite_database.py +6 -1
- quasarr-1.23.0.dist-info/METADATA +306 -0
- quasarr-1.23.0.dist-info/RECORD +77 -0
- {quasarr-0.1.6.dist-info → quasarr-1.23.0.dist-info}/WHEEL +1 -1
- quasarr/arr/__init__.py +0 -423
- quasarr/captcha_solver/__init__.py +0 -284
- quasarr-0.1.6.dist-info/METADATA +0 -81
- quasarr-0.1.6.dist-info/RECORD +0 -31
- {quasarr-0.1.6.dist-info → quasarr-1.23.0.dist-info}/entry_points.txt +0 -0
- {quasarr-0.1.6.dist-info → quasarr-1.23.0.dist-info/licenses}/LICENSE +0 -0
- {quasarr-0.1.6.dist-info → quasarr-1.23.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,286 @@ quasarr/providers/sessions/al.py (new file)

# -*- coding: utf-8 -*-
# Quasarr
# Project by https://github.com/rix1337

import base64
import json
import pickle
import urllib.parse

import requests
from bs4 import BeautifulSoup
from requests.exceptions import Timeout, RequestException

from quasarr.providers.log import info, debug

hostname = "al"


def create_and_persist_session(shared_state):
    cfg = shared_state.values["config"]("Hostnames")
    host = cfg.get(hostname)
    credentials_cfg = shared_state.values["config"](hostname.upper())
    user = credentials_cfg.get("user")
    pw = credentials_cfg.get("password")

    flaresolverr_url = shared_state.values["config"]('FlareSolverr').get('url')

    sess = requests.Session()

    # Prime cookies via FlareSolverr
    try:
        info(f'Priming "{hostname}" session via FlareSolverr...')
        fs_headers = {"Content-Type": "application/json"}
        fs_payload = {
            "cmd": "request.get",
            "url": f"https://www.{host}/",
            "maxTimeout": 60000
        }

        try:
            fs_resp = requests.post(flaresolverr_url, headers=fs_headers, json=fs_payload, timeout=30)
            fs_resp.raise_for_status()
        except Timeout:
            info(f"{hostname}: FlareSolverr request timed out")
            return None
        except RequestException as e:
            # This covers HTTP errors and connection issues *other than* timeout
            info(f"{hostname}: FlareSolverr server error: {e}")
            return None

        fs_json = fs_resp.json()
        # Check if FlareSolverr actually solved the challenge
        if fs_json.get("status") != "ok" or "solution" not in fs_json:
            info(f"{hostname}: FlareSolverr did not return a valid solution")
            return None

        solution = fs_json["solution"]
        # store FlareSolverr's UA into our requests.Session
        fl_ua = solution.get("userAgent")
        if fl_ua:
            sess.headers.update({'User-Agent': fl_ua})

        # Extract any cookies returned by FlareSolverr and add them into our session
        for ck in solution.get("cookies", []):
            name = ck.get("name")
            value = ck.get("value")
            domain = ck.get("domain")
            path = ck.get("path", "/")
            # Set cookie on the session (ignoring expires/secure/httpOnly)
            sess.cookies.set(name, value, domain=domain, path=path)

    except Exception as e:
        debug(f'Could not prime "{hostname}" session via FlareSolverr: {e}')
        return None

    if user and pw:
        data = {
            "identity": user,
            "password": pw,
            "remember": "1"
        }
        encoded_data = urllib.parse.urlencode(data)

        login_headers = {
            "Content-Type": "application/x-www-form-urlencoded"
        }

        r = sess.post(f'https://www.{host}/auth/signin',
                      data=encoded_data,
                      headers=login_headers,
                      timeout=30)

        if r.status_code != 200 or "invalid" in r.text.lower():
            info(f'Login failed: "{hostname}" - {r.status_code} - {r.text}')
            return None
        info(f'Login successful: "{hostname}"')
    else:
        info(f'Missing credentials for: "{hostname}" - skipping login')
        return None

    blob = pickle.dumps(sess)
    token = base64.b64encode(blob).decode("utf-8")
    shared_state.values["database"]("sessions").update_store(hostname, token)
    return sess


def retrieve_and_validate_session(shared_state):
    db = shared_state.values["database"]("sessions")
    token = db.retrieve(hostname)
    if not token:
        return create_and_persist_session(shared_state)

    try:
        blob = base64.b64decode(token.encode("utf-8"))
        sess = pickle.loads(blob)
        if not isinstance(sess, requests.Session):
            raise ValueError("Not a Session")
    except Exception as e:
        debug(f"{hostname}: session load failed: {e}")
        return create_and_persist_session(shared_state)

    return sess


def invalidate_session(shared_state):
    db = shared_state.values["database"]("sessions")
    db.delete(hostname)
    debug(f'Session for "{hostname}" marked as invalid!')


def _persist_session_to_db(shared_state, sess):
    """
    Serialize & store the given requests.Session into the database under `hostname`.
    """
    blob = pickle.dumps(sess)
    token = base64.b64encode(blob).decode("utf-8")
    shared_state.values["database"]("sessions").update_store(hostname, token)


def _load_session_cookies_for_flaresolverr(sess):
    """
    Convert a requests.Session's cookies into a FlareSolverr-style list of dicts.
    """
    cookie_list = []
    for ck in sess.cookies:
        cookie_list.append({
            "name": ck.name,
            "value": ck.value,
            "domain": ck.domain,
            "path": ck.path or "/",
        })
    return cookie_list


def unwrap_flaresolverr_body(raw_text: str) -> str:
    """
    Use BeautifulSoup to remove any HTML tags and return the raw text.
    If raw_text is:
        <html><body>{"foo":123}</body></html>
    or:
        <html><body><pre>[...array...]</pre></body></html>
    or even just:
        {"foo":123}
    this will return the inner JSON string in all cases.
    """
    soup = BeautifulSoup(raw_text, "html.parser")
    text = soup.get_text().strip()
    return text


def fetch_via_flaresolverr(shared_state,
                           method: str,
                           target_url: str,
                           post_data: dict = None,
                           timeout: int = 60):
    """
    Load (or recreate) the requests.Session from DB.
    Package its cookies into the FlareSolverr payload.
    Ask FlareSolverr to do a request.get or request.post on target_url.
    Replace the Session's cookies with FlareSolverr's new cookies.
    Re-persist the updated session to the DB.
    Return a dict with "status_code", "headers", "json" (parsed - if available), "text" and "cookies".

    - method: "GET" or "POST"
    - post_data: dict of form fields if method == "POST"
    - timeout: seconds (FlareSolverr's internal maxTimeout = timeout * 1000 ms)
    """
    flaresolverr_url = shared_state.values["config"]('FlareSolverr').get('url')

    sess = retrieve_and_validate_session(shared_state)

    cmd = "request.get" if method.upper() == "GET" else "request.post"
    fs_payload = {
        "cmd": cmd,
        "url": target_url,
        "maxTimeout": timeout * 1000,
        # Inject every cookie from our Python session into FlareSolverr
        "cookies": _load_session_cookies_for_flaresolverr(sess)
    }

    if method.upper() == "POST":
        # FlareSolverr expects postData as urlencoded string
        encoded = urllib.parse.urlencode(post_data or {})
        fs_payload["postData"] = encoded

    # Send the JSON request to FlareSolverr
    fs_headers = {"Content-Type": "application/json"}
    try:
        resp = requests.post(
            flaresolverr_url,
            headers=fs_headers,
            json=fs_payload,
            timeout=timeout + 10
        )
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        info(f"Could not reach FlareSolverr: {e}")
        return {
            "status_code": None,
            "headers": {},
            "json": None,
            "text": "",
            "cookies": [],
            "error": f"FlareSolverr request failed: {e}"
        }
    except Exception as e:
        raise RuntimeError(f"Could not reach FlareSolverr: {e}")

    fs_json = resp.json()
    if fs_json.get("status") != "ok" or "solution" not in fs_json:
        raise RuntimeError(f"FlareSolverr did not return a valid solution: {fs_json.get('message', '<no message>')}")

    solution = fs_json["solution"]

    # Extract the raw HTML/JSON body that FlareSolverr fetched
    raw_body = solution.get("response", "")
    # Get raw body as text, since it might contain JSON
    unwrapped = unwrap_flaresolverr_body(raw_body)

    # Attempt to parse it as JSON
    try:
        parsed_json = json.loads(unwrapped)
    except ValueError:
        parsed_json = None

    # Replace our requests.Session cookies with whatever FlareSolverr solved
    sess.cookies.clear()
    for ck in solution.get("cookies", []):
        sess.cookies.set(
            ck.get("name"),
            ck.get("value"),
            domain=ck.get("domain"),
            path=ck.get("path", "/")
        )

    # Persist the updated Session back into your DB
    _persist_session_to_db(shared_state, sess)

    # Return a small dict containing status, headers, parsed JSON, and cookie list
    return {
        "status_code": solution.get("status"),
        "headers": solution.get("headers", {}),
        "json": parsed_json,
        "text": raw_body,
        "cookies": solution.get("cookies", [])
    }


def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None, timeout: int = 30):
    """
    - method: "GET" or "POST"
    - post_data: for POST only (will be sent as form-data unless you explicitly JSON-encode)
    - timeout: seconds
    """
    sess = retrieve_and_validate_session(shared_state)

    # Execute request
    if method.upper() == "GET":
        resp = sess.get(target_url, timeout=timeout)
    else:  # POST
        resp = sess.post(target_url, data=post_data, timeout=timeout)

    # Re-persist cookies, since the site might have modified them during the request
    _persist_session_to_db(shared_state, sess)

    return resp
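For orientation, here is a minimal usage sketch (not part of the release itself): it shows how a search or download source might call the new "al" session helpers, assuming a Quasarr shared_state whose "config" and "database" factories are already wired up. The function name check_release_page and both example URLs are hypothetical.

# Sketch only - assumes quasarr.providers.sessions.al as added in this release;
# the URLs and the surrounding function are illustrative, not taken from the diff.
from quasarr.providers.sessions import al


def check_release_page(shared_state):
    # Cloudflare-protected JSON endpoints go through FlareSolverr...
    result = al.fetch_via_flaresolverr(
        shared_state, "GET",
        "https://www.example.invalid/api/releases",  # hypothetical endpoint
        timeout=60,
    )
    if result.get("json") is not None:
        return result["json"]

    # ...while plain pages can reuse the pickled requests.Session directly.
    resp = al.fetch_via_requests_session(
        shared_state, "GET", "https://www.example.invalid/releases"
    )
    if resp.status_code in (401, 403):
        # Stale cookies: drop the stored session so the next call rebuilds it.
        al.invalidate_session(shared_state)
        return None
    return resp.text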
@@ -0,0 +1,78 @@ quasarr/providers/sessions/dd.py (new file)

# -*- coding: utf-8 -*-
# Quasarr
# Project by https://github.com/rix1337

import base64
import pickle

import requests

from quasarr.providers.log import info


def create_and_persist_session(shared_state):
    dd = shared_state.values["config"]("Hostnames").get("dd")

    dd_session = requests.Session()

    cookies = {}
    headers = {
        'User-Agent': shared_state.values["user_agent"],
    }

    data = {
        'username': shared_state.values["config"]("DD").get("user"),
        'password': shared_state.values["config"]("DD").get("password"),
        'ajax': 'true',
        'Login': 'true',
    }

    dd_response = dd_session.post(f'https://{dd}/index/index',
                                  cookies=cookies, headers=headers, data=data, timeout=10)

    error = False
    if dd_response.status_code == 200:
        try:
            response_data = dd_response.json()
            if not response_data.get('loggedin'):
                info("DD rejected login.")
                raise ValueError
            session_id = dd_response.cookies.get("PHPSESSID")
            if session_id:
                dd_session.cookies.set('PHPSESSID', session_id, domain=dd)
            else:
                info("Invalid DD response on login.")
                error = True
        except ValueError:
            info("Could not parse DD response on login.")
            error = True

        if error:
            shared_state.values["config"]("DD").save("user", "")
            shared_state.values["config"]("DD").save("password", "")
            return None

        serialized_session = pickle.dumps(dd_session)
        session_string = base64.b64encode(serialized_session).decode('utf-8')
        shared_state.values["database"]("sessions").update_store("dd", session_string)
        return dd_session
    else:
        info("Could not create DD session")
        return None


def retrieve_and_validate_session(shared_state):
    session_string = shared_state.values["database"]("sessions").retrieve("dd")
    if not session_string:
        dd_session = create_and_persist_session(shared_state)
    else:
        try:
            serialized_session = base64.b64decode(session_string.encode('utf-8'))
            dd_session = pickle.loads(serialized_session)
            if not isinstance(dd_session, requests.Session):
                raise ValueError("Retrieved object is not a valid requests.Session instance.")
        except Exception as e:
            info(f"Session retrieval failed: {e}")
            dd_session = create_and_persist_session(shared_state)

    return dd_session
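Again as a hedged sketch (not in the diff): once the DD login succeeds, the pickled session can be pulled back out and used like any requests.Session. The feed path below is an assumption for illustration only.

# Sketch only - reuses the dd helpers added above; the path "/some/feed"
# is a placeholder, not an endpoint documented in this release.
from quasarr.providers.sessions import dd


def fetch_dd_feed(shared_state, host):
    session = dd.retrieve_and_validate_session(shared_state)
    if not session:
        return []  # missing credentials or rejected login
    resp = session.get(f"https://{host}/some/feed", timeout=10)
    return resp.json() if resp.ok else []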
@@ -0,0 +1,175 @@ quasarr/providers/sessions/dl.py (new file)

# -*- coding: utf-8 -*-
# Quasarr
# Project by https://github.com/rix1337

import base64
import pickle

import requests
from bs4 import BeautifulSoup

from quasarr.providers.log import info, debug

hostname = "dl"


def create_and_persist_session(shared_state):
    """
    Create and persist a session using user and password.

    Args:
        shared_state: Shared state object

    Returns:
        requests.Session or None
    """
    cfg = shared_state.values["config"]("Hostnames")
    host = cfg.get(hostname)
    credentials_cfg = shared_state.values["config"](hostname.upper())

    user = credentials_cfg.get("user")
    password = credentials_cfg.get("password")

    if not user or not password:
        info(f'Missing credentials for: "{hostname}" - user and password are required')
        return None

    sess = requests.Session()

    # Set user agent
    ua = shared_state.values["user_agent"]
    sess.headers.update({'User-Agent': ua})

    try:
        # Step 1: Get login page to retrieve CSRF token
        login_page_url = f'https://www.{host}/login/'
        login_page = sess.get(login_page_url, timeout=30)

        if login_page.status_code != 200:
            info(f'Failed to load login page for: "{hostname}" - Status {login_page.status_code}')
            return None

        # Extract CSRF token from login form
        soup = BeautifulSoup(login_page.text, 'html.parser')
        csrf_input = soup.find('input', {'name': '_xfToken'})

        if not csrf_input or not csrf_input.get('value'):
            info(f'Could not find CSRF token on login page for: "{hostname}"')
            return None

        csrf_token = csrf_input['value']

        # Step 2: Submit login form
        login_data = {
            'login': user,
            'password': password,
            '_xfToken': csrf_token,
            'remember': '1',
            '_xfRedirect': f'https://www.{host}/'
        }

        login_url = f'https://www.{host}/login/login'
        login_response = sess.post(login_url, data=login_data, timeout=30)

        # Step 3: Verify login success
        # Check if we're logged in by accessing the main page
        verify_response = sess.get(f'https://www.{host}/', timeout=30)

        if 'data-logged-in="true"' not in verify_response.text:
            info(f'Login verification failed for: "{hostname}" - invalid credentials or login failed')
            return None

        info(f'Session successfully created for: "{hostname}" using user/password')
    except Exception as e:
        info(f'Failed to create session for: "{hostname}" - {e}')
        return None

    # Persist session to database
    blob = pickle.dumps(sess)
    token = base64.b64encode(blob).decode("utf-8")
    shared_state.values["database"]("sessions").update_store(hostname, token)

    return sess


def retrieve_and_validate_session(shared_state):
    """
    Retrieve session from database or create a new one.

    Args:
        shared_state: Shared state object

    Returns:
        requests.Session or None
    """
    db = shared_state.values["database"]("sessions")
    token = db.retrieve(hostname)
    if not token:
        return create_and_persist_session(shared_state)

    try:
        blob = base64.b64decode(token.encode("utf-8"))
        sess = pickle.loads(blob)
        if not isinstance(sess, requests.Session):
            raise ValueError("Not a Session")
    except Exception as e:
        debug(f"{hostname}: session load failed: {e}")
        return create_and_persist_session(shared_state)

    return sess


def invalidate_session(shared_state):
    """
    Invalidate the current session.

    Args:
        shared_state: Shared state object
    """
    db = shared_state.values["database"]("sessions")
    db.delete(hostname)
    debug(f'Session for "{hostname}" marked as invalid!')


def _persist_session_to_db(shared_state, sess):
    """
    Serialize & store the given requests.Session into the database under `hostname`.

    Args:
        shared_state: Shared state object
        sess: requests.Session to persist
    """
    blob = pickle.dumps(sess)
    token = base64.b64encode(blob).decode("utf-8")
    shared_state.values["database"]("sessions").update_store(hostname, token)


def fetch_via_requests_session(shared_state, method: str, target_url: str, post_data: dict = None, get_params: dict = None, timeout: int = 30):
    """
    Execute request using the session.

    Args:
        shared_state: Shared state object
        method: "GET" or "POST"
        target_url: URL to fetch
        post_data: POST data (for POST requests)
        get_params: URL parameters (for GET requests)
        timeout: Request timeout in seconds

    Returns:
        Response object
    """
    sess = retrieve_and_validate_session(shared_state)
    if not sess:
        raise Exception(f"Could not retrieve valid session for {hostname}")

    # Execute request
    if method.upper() == "GET":
        resp = sess.get(target_url, params=get_params, timeout=timeout)
    else:  # POST
        resp = sess.post(target_url, data=post_data, timeout=timeout)

    # Re-persist cookies, since the site might have modified them during the request
    _persist_session_to_db(shared_state, sess)

    return resp
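A similar hedged sketch for the dl helper, showing the get_params path; the search endpoint and parameter name are assumptions for illustration only, not taken from this release.

# Sketch only - the /search/ endpoint and "q" parameter are placeholders.
from quasarr.providers.sessions import dl


def search_dl(shared_state, host, query):
    resp = dl.fetch_via_requests_session(
        shared_state,
        method="GET",
        target_url=f"https://www.{host}/search/",
        get_params={"q": query},
        timeout=30,
    )
    if resp.status_code != 200:
        # Force a fresh CSRF login on the next call.
        dl.invalidate_session(shared_state)
        return None
    return resp.text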
@@ -0,0 +1,76 @@ quasarr/providers/sessions/nx.py (new file)

# -*- coding: utf-8 -*-
# Quasarr
# Project by https://github.com/rix1337

import base64
import pickle

import requests

from quasarr.providers.log import info


def create_and_persist_session(shared_state):
    nx = shared_state.values["config"]("Hostnames").get("nx")

    nx_session = requests.Session()

    cookies = {}
    headers = {
        'User-Agent': shared_state.values["user_agent"],
    }

    json_data = {
        'username': shared_state.values["config"]("NX").get("user"),
        'password': shared_state.values["config"]("NX").get("password")
    }

    nx_response = nx_session.post(f'https://{nx}/api/user/auth', cookies=cookies, headers=headers, json=json_data,
                                  timeout=10)

    error = False
    if nx_response.status_code == 200:
        try:
            response_data = nx_response.json()
            if response_data.get('err', {}).get('status') == 403:
                info("Invalid NX credentials provided.")
                error = True
            elif response_data.get('user').get('username') != shared_state.values["config"]("NX").get("user"):
                info("Invalid NX response on login.")
                error = True
            else:
                sessiontoken = response_data.get('user').get('sessiontoken')
                nx_session.cookies.set('sessiontoken', sessiontoken, domain=nx)
        except ValueError:
            info("Could not parse NX response on login.")
            error = True

        if error:
            shared_state.values["config"]("NX").save("user", "")
            shared_state.values["config"]("NX").save("password", "")
            return None

        serialized_session = pickle.dumps(nx_session)
        session_string = base64.b64encode(serialized_session).decode('utf-8')
        shared_state.values["database"]("sessions").update_store("nx", session_string)
        return nx_session
    else:
        info("Could not create NX session")
        return None


def retrieve_and_validate_session(shared_state):
    session_string = shared_state.values["database"]("sessions").retrieve("nx")
    if not session_string:
        nx_session = create_and_persist_session(shared_state)
    else:
        try:
            serialized_session = base64.b64decode(session_string.encode('utf-8'))
            nx_session = pickle.loads(serialized_session)
            if not isinstance(nx_session, requests.Session):
                raise ValueError("Retrieved object is not a valid requests.Session instance.")
        except Exception as e:
            info(f"Session retrieval failed: {e}")
            nx_session = create_and_persist_session(shared_state)

    return nx_session