quasarr-2.4.8-py3-none-any.whl → quasarr-2.4.10-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of quasarr might be problematic.
- quasarr/__init__.py +134 -70
- quasarr/api/__init__.py +40 -31
- quasarr/api/arr/__init__.py +116 -108
- quasarr/api/captcha/__init__.py +262 -137
- quasarr/api/config/__init__.py +76 -46
- quasarr/api/packages/__init__.py +138 -102
- quasarr/api/sponsors_helper/__init__.py +29 -16
- quasarr/api/statistics/__init__.py +19 -19
- quasarr/downloads/__init__.py +165 -72
- quasarr/downloads/linkcrypters/al.py +35 -18
- quasarr/downloads/linkcrypters/filecrypt.py +107 -52
- quasarr/downloads/linkcrypters/hide.py +5 -6
- quasarr/downloads/packages/__init__.py +342 -177
- quasarr/downloads/sources/al.py +191 -100
- quasarr/downloads/sources/by.py +31 -13
- quasarr/downloads/sources/dd.py +27 -14
- quasarr/downloads/sources/dj.py +1 -3
- quasarr/downloads/sources/dl.py +126 -71
- quasarr/downloads/sources/dt.py +11 -5
- quasarr/downloads/sources/dw.py +28 -14
- quasarr/downloads/sources/he.py +32 -24
- quasarr/downloads/sources/mb.py +19 -9
- quasarr/downloads/sources/nk.py +14 -10
- quasarr/downloads/sources/nx.py +8 -18
- quasarr/downloads/sources/sf.py +45 -20
- quasarr/downloads/sources/sj.py +1 -3
- quasarr/downloads/sources/sl.py +9 -5
- quasarr/downloads/sources/wd.py +32 -12
- quasarr/downloads/sources/wx.py +35 -21
- quasarr/providers/auth.py +42 -37
- quasarr/providers/cloudflare.py +28 -30
- quasarr/providers/hostname_issues.py +2 -1
- quasarr/providers/html_images.py +2 -2
- quasarr/providers/html_templates.py +22 -14
- quasarr/providers/imdb_metadata.py +149 -80
- quasarr/providers/jd_cache.py +131 -39
- quasarr/providers/log.py +1 -1
- quasarr/providers/myjd_api.py +260 -196
- quasarr/providers/notifications.py +53 -41
- quasarr/providers/obfuscated.py +9 -4
- quasarr/providers/sessions/al.py +71 -55
- quasarr/providers/sessions/dd.py +21 -14
- quasarr/providers/sessions/dl.py +30 -19
- quasarr/providers/sessions/nx.py +23 -14
- quasarr/providers/shared_state.py +292 -141
- quasarr/providers/statistics.py +75 -43
- quasarr/providers/utils.py +33 -27
- quasarr/providers/version.py +45 -14
- quasarr/providers/web_server.py +10 -5
- quasarr/search/__init__.py +30 -18
- quasarr/search/sources/al.py +124 -73
- quasarr/search/sources/by.py +110 -59
- quasarr/search/sources/dd.py +57 -35
- quasarr/search/sources/dj.py +69 -48
- quasarr/search/sources/dl.py +159 -100
- quasarr/search/sources/dt.py +110 -74
- quasarr/search/sources/dw.py +121 -61
- quasarr/search/sources/fx.py +108 -62
- quasarr/search/sources/he.py +78 -49
- quasarr/search/sources/mb.py +96 -48
- quasarr/search/sources/nk.py +80 -50
- quasarr/search/sources/nx.py +91 -62
- quasarr/search/sources/sf.py +171 -106
- quasarr/search/sources/sj.py +69 -48
- quasarr/search/sources/sl.py +115 -71
- quasarr/search/sources/wd.py +67 -44
- quasarr/search/sources/wx.py +188 -123
- quasarr/storage/config.py +65 -52
- quasarr/storage/setup.py +238 -140
- quasarr/storage/sqlite_database.py +10 -4
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/METADATA +4 -3
- quasarr-2.4.10.dist-info/RECORD +81 -0
- quasarr-2.4.8.dist-info/RECORD +0 -81
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/WHEEL +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/entry_points.txt +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/licenses/LICENSE +0 -0
quasarr/search/sources/nx.py
CHANGED
@@ -8,9 +8,9 @@ from base64 import urlsafe_b64encode
 
 import requests
 
-from quasarr.providers.hostname_issues import
+from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
-from quasarr.providers.log import
+from quasarr.providers.log import debug, info
 
 hostname = "nx"
 supported_mirrors = ["filer"]
@@ -29,13 +29,17 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):
         category = "episode"
 
     if mirror and mirror not in supported_mirrors:
-        debug(
-
+        debug(
+            f'Mirror "{mirror}" not supported by "{hostname.upper()}". Supported mirrors: {supported_mirrors}.'
+            " Skipping search!"
+        )
         return releases
 
-    url =
+    url = (
+        f"https://{nx}/api/frontend/releases/category/{category}/tag/all/1/51?sort=date"
+    )
     headers = {
-
+        "User-Agent": shared_state.values["user_agent"],
     }
 
     try:
@@ -44,26 +48,30 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):
         feed = r.json()
     except Exception as e:
         info(f"Error loading {hostname.upper()} feed: {e}")
-        mark_hostname_issue(
+        mark_hostname_issue(
+            hostname, "feed", str(e) if "e" in dir() else "Error occurred"
+        )
         return releases
 
-    items = feed[
+    items = feed["result"]["list"]
     for item in items:
         try:
-            title = item[
+            title = item["name"]
 
             if title:
                 try:
-                    if
+                    if "lazylibrarian" in request_from.lower():
                         # lazylibrarian can only detect specific date formats / issue numbering for magazines
                         title = shared_state.normalize_magazine_title(title)
 
                     source = f"https://{nx}/release/{item['slug']}"
-                    imdb_id = item.get(
+                    imdb_id = item.get("_media", {}).get("imdbid", None)
                     mb = shared_state.convert_to_mb(item)
                     payload = urlsafe_b64encode(
-                        f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
-
+                        f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                            "utf-8"
+                        )
+                    ).decode("utf-8")
                     link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
                 except:
                     continue
@@ -74,27 +82,31 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):
                     continue
 
                 try:
-                    published = item[
+                    published = item["publishat"]
                 except:
                     continue
 
-                releases.append(
-
-
-
-
-
-
-
-
-
-
-
-
+                releases.append(
+                    {
+                        "details": {
+                            "title": title,
+                            "hostname": hostname.lower(),
+                            "imdb_id": imdb_id,
+                            "link": link,
+                            "mirror": mirror,
+                            "size": size,
+                            "date": published,
+                            "source": source,
+                        },
+                        "type": "protected",
+                    }
+                )
 
         except Exception as e:
             info(f"Error parsing {hostname.upper()} feed: {e}")
-            mark_hostname_issue(
+            mark_hostname_issue(
+                hostname, "feed", str(e) if "e" in dir() else "Error occurred"
+            )
 
     elapsed_time = time.time() - start_time
     debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")
@@ -104,7 +116,15 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):
     return releases
 
 
-def nx_search(
+def nx_search(
+    shared_state,
+    start_time,
+    request_from,
+    search_string,
+    mirror=None,
+    season=None,
+    episode=None,
+):
     releases = []
     nx = shared_state.values["config"]("Hostnames").get(hostname.lower())
     password = nx
@@ -117,21 +137,23 @@ def nx_search(shared_state, start_time, request_from, search_string, mirror=None
         valid_type = "episode"
 
     if mirror and mirror not in supported_mirrors:
-        debug(
-
+        debug(
+            f'Mirror "{mirror}" not supported by "{hostname.upper()}". Supported mirrors: {supported_mirrors}.'
+            " Skipping search!"
+        )
         return releases
 
     imdb_id = shared_state.is_imdb_id(search_string)
     if imdb_id:
-        search_string = get_localized_title(shared_state, imdb_id,
+        search_string = get_localized_title(shared_state, imdb_id, "de")
         if not search_string:
             info(f"Could not extract title from IMDb-ID {imdb_id}")
             return releases
         search_string = html.unescape(search_string)
 
-    url = f
+    url = f"https://{nx}/api/frontend/search/{search_string}"
     headers = {
-
+        "User-Agent": shared_state.values["user_agent"],
     }
 
     try:
@@ -140,34 +162,37 @@ def nx_search(shared_state, start_time, request_from, search_string, mirror=None
         feed = r.json()
     except Exception as e:
         info(f"Error loading {hostname.upper()} search: {e}")
-        mark_hostname_issue(
+        mark_hostname_issue(
+            hostname, "search", str(e) if "e" in dir() else "Error occurred"
+        )
         return releases
 
-    items = feed[
+    items = feed["result"]["releases"]
     for item in items:
         try:
-            if item[
-                title = item[
+            if item["type"] == valid_type:
+                title = item["name"]
                 if title:
-                    if not shared_state.is_valid_release(
-
-
-                        season,
-                        episode):
+                    if not shared_state.is_valid_release(
+                        title, request_from, search_string, season, episode
+                    ):
                         continue
 
-                    if
+                    if "lazylibrarian" in request_from.lower():
                         # lazylibrarian can only detect specific date formats / issue numbering for magazines
                         title = shared_state.normalize_magazine_title(title)
 
                     try:
                         source = f"https://{nx}/release/{item['slug']}"
                         if not imdb_id:
-                            imdb_id = item.get(
+                            imdb_id = item.get("_media", {}).get("imdbid", None)
 
                         mb = shared_state.convert_to_mb(item)
-                        payload = urlsafe_b64encode(
-
+                        payload = urlsafe_b64encode(
+                            f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}".encode(
+                                "utf-8"
+                            )
+                        ).decode("utf-8")
                         link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
                     except:
                         continue
@@ -178,27 +203,31 @@ def nx_search(shared_state, start_time, request_from, search_string, mirror=None
                         continue
 
                     try:
-                        published = item[
+                        published = item["publishat"]
                     except:
                         published = ""
 
-                    releases.append(
-
-
-
-
-
-
-
-
-
-
-
-
+                    releases.append(
+                        {
+                            "details": {
+                                "title": title,
+                                "hostname": hostname.lower(),
+                                "imdb_id": imdb_id,
+                                "link": link,
+                                "mirror": mirror,
+                                "size": size,
+                                "date": published,
+                                "source": source,
+                            },
+                            "type": "protected",
+                        }
+                    )
 
         except Exception as e:
             info(f"Error parsing {hostname.upper()} search: {e}")
-            mark_hostname_issue(
+            mark_hostname_issue(
+                hostname, "search", str(e) if "e" in dir() else "Error occurred"
+            )
 
     elapsed_time = time.time() - start_time
     debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")
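For context, both nx_feed and nx_search hand each release to Quasarr's /download/ endpoint as a URL-safe Base64 payload of pipe-separated fields; the nx_feed variant appends the hostname as a final field, while nx_search omits it. The sketch below is illustrative only and is not part of the package: every value in it is invented and the internal address is assumed, but it reproduces the encode/decode round-trip visible in the hunks above.

# Illustrative sketch only -- not from the quasarr codebase; all values are invented.
from base64 import urlsafe_b64decode, urlsafe_b64encode

title = "Example.Show.S01E01.German.1080p.WEB.x264-GRP"    # hypothetical release title
source = "https://nx.example/release/example-show-s01e01"  # hypothetical source URL
mirror = "filer"          # the only entry in supported_mirrors
mb = 1234                 # size in megabytes, as convert_to_mb() would return
password = "nx.example"   # in the diff, the password is simply the configured hostname
imdb_id = "tt0000000"     # hypothetical IMDb id
hostname = "nx"

# nx_feed joins seven fields with "|" (nx_search drops the trailing hostname field)
payload = urlsafe_b64encode(
    f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode("utf-8")
).decode("utf-8")

# The payload is appended to the internal download URL (address assumed here)
link = f"http://127.0.0.1:8080/download/?payload={payload}"

# Decoding reverses the round-trip and splits the fields again
fields = urlsafe_b64decode(payload).decode("utf-8").split("|")
print(fields)

Because the fields are joined with a bare pipe and not escaped, a title or URL containing a literal | would shift the split on decode; the Base64 step only makes the string URL-safe rather than concealing it.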