quasarr 2.1.5-py3-none-any.whl → 2.3.0-py3-none-any.whl
This diff shows the content of publicly released versions of the package as they appear in their respective public registries and is provided for informational purposes only.
This release of quasarr has been flagged as potentially problematic.
- quasarr/__init__.py +38 -29
- quasarr/api/__init__.py +94 -23
- quasarr/api/captcha/__init__.py +0 -12
- quasarr/api/config/__init__.py +22 -11
- quasarr/api/packages/__init__.py +26 -34
- quasarr/api/statistics/__init__.py +15 -15
- quasarr/downloads/__init__.py +9 -1
- quasarr/downloads/packages/__init__.py +2 -2
- quasarr/downloads/sources/al.py +6 -0
- quasarr/downloads/sources/by.py +29 -20
- quasarr/downloads/sources/dd.py +9 -1
- quasarr/downloads/sources/dl.py +3 -0
- quasarr/downloads/sources/dt.py +16 -7
- quasarr/downloads/sources/dw.py +22 -17
- quasarr/downloads/sources/he.py +11 -6
- quasarr/downloads/sources/mb.py +9 -3
- quasarr/downloads/sources/nk.py +9 -3
- quasarr/downloads/sources/nx.py +21 -17
- quasarr/downloads/sources/sf.py +21 -13
- quasarr/downloads/sources/sl.py +10 -2
- quasarr/downloads/sources/wd.py +18 -9
- quasarr/downloads/sources/wx.py +7 -11
- quasarr/providers/auth.py +1 -1
- quasarr/providers/cloudflare.py +1 -1
- quasarr/providers/hostname_issues.py +63 -0
- quasarr/providers/html_images.py +1 -18
- quasarr/providers/html_templates.py +104 -12
- quasarr/providers/imdb_metadata.py +288 -75
- quasarr/providers/obfuscated.py +11 -11
- quasarr/providers/sessions/al.py +27 -11
- quasarr/providers/sessions/dd.py +12 -4
- quasarr/providers/sessions/dl.py +19 -11
- quasarr/providers/sessions/nx.py +12 -4
- quasarr/providers/version.py +1 -1
- quasarr/search/__init__.py +5 -0
- quasarr/search/sources/al.py +12 -1
- quasarr/search/sources/by.py +15 -4
- quasarr/search/sources/dd.py +22 -3
- quasarr/search/sources/dj.py +12 -1
- quasarr/search/sources/dl.py +12 -6
- quasarr/search/sources/dt.py +17 -4
- quasarr/search/sources/dw.py +15 -4
- quasarr/search/sources/fx.py +19 -6
- quasarr/search/sources/he.py +22 -3
- quasarr/search/sources/mb.py +15 -4
- quasarr/search/sources/nk.py +19 -3
- quasarr/search/sources/nx.py +15 -4
- quasarr/search/sources/sf.py +25 -8
- quasarr/search/sources/sj.py +14 -1
- quasarr/search/sources/sl.py +17 -2
- quasarr/search/sources/wd.py +15 -4
- quasarr/search/sources/wx.py +16 -18
- quasarr/storage/setup.py +150 -35
- {quasarr-2.1.5.dist-info → quasarr-2.3.0.dist-info}/METADATA +6 -3
- quasarr-2.3.0.dist-info/RECORD +82 -0
- {quasarr-2.1.5.dist-info → quasarr-2.3.0.dist-info}/WHEEL +1 -1
- quasarr-2.1.5.dist-info/RECORD +0 -81
- {quasarr-2.1.5.dist-info → quasarr-2.3.0.dist-info}/entry_points.txt +0 -0
- {quasarr-2.1.5.dist-info → quasarr-2.3.0.dist-info}/licenses/LICENSE +0 -0
- {quasarr-2.1.5.dist-info → quasarr-2.3.0.dist-info}/top_level.txt +0 -0
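The most consequential addition is the new quasarr/providers/hostname_issues.py module (+63 lines), which every search source below now calls through mark_hostname_issue(hostname, context, error) and clear_hostname_issue(hostname). The module itself is not shown in this diff; the following is only a minimal sketch inferred from those call sites — the in-memory dict, the lock, the timestamp field, and the get_hostname_issues helper are assumptions, not the actual implementation:

import threading
import time

_issues = {}                # hostname -> {"context": "feed"|"search", "error": str, "since": float}
_lock = threading.Lock()


def mark_hostname_issue(hostname, context, error):
    """Record that a feed or search request for this hostname failed (sketch only)."""
    with _lock:
        _issues[hostname] = {"context": context, "error": error, "since": time.time()}


def clear_hostname_issue(hostname):
    """Drop the recorded issue once the hostname delivers results again (sketch only)."""
    with _lock:
        _issues.pop(hostname, None)


def get_hostname_issues():
    """Return a snapshot of all currently recorded issues (hypothetical helper)."""
    with _lock:
        return dict(_issues)

Whatever the real storage looks like, the call pattern in the sources is consistent: mark on any load or parse failure, clear only after a run that produced releases.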
quasarr/search/sources/dt.py
CHANGED
@@ -13,6 +13,7 @@ from urllib.parse import quote_plus
import requests
from bs4 import BeautifulSoup

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.imdb_metadata import get_localized_title
from quasarr.providers.log import info, debug

@@ -75,8 +76,9 @@ def dt_feed(shared_state, start_time, request_from, mirror=None):
    headers = {'User-Agent': shared_state.values["user_agent"]}

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
+        feed = BeautifulSoup(r.content, "html.parser")

        for article in feed.find_all('article'):
            try:
@@ -117,6 +119,7 @@ def dt_feed(shared_state, start_time, request_from, mirror=None):

            except Exception as e:
                info(f"Error parsing {hostname.upper()} feed: {e}")
+                mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
                continue

            releases.append({
@@ -135,9 +138,13 @@ def dt_feed(shared_state, start_time, request_from, mirror=None):

    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    elapsed = time.time() - start_time
    debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -188,8 +195,9 @@ def dt_search(shared_state, start_time, request_from, search_string, mirror=None
        )
        headers = {"User-Agent": shared_state.values["user_agent"]}

-
-
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        page = BeautifulSoup(r.content, "html.parser")

        for article in page.find_all("article"):
            try:
@@ -241,6 +249,7 @@ def dt_search(shared_state, start_time, request_from, search_string, mirror=None

            except Exception as e:
                info(f"Error parsing {hostname.upper()} search item: {e}")
+                mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
                continue

            releases.append({
@@ -259,7 +268,11 @@ def dt_search(shared_state, start_time, request_from, search_string, mirror=None

    except Exception as e:
        info(f"Error loading {hostname.upper()} search page: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")

    elapsed = time.time() - start_time
    debug(f"Search time: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases
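The dt.py hunks above show the pattern that repeats in every source below: the HTTP call now checks the response status via raise_for_status(), any load or parse failure is reported through mark_hostname_issue() with a "feed" or "search" context, and a run that actually produced releases clears the flag. Condensed into one hypothetical function (fetch_feed and the trivial parsing are placeholders, not Quasarr code):

import requests
from bs4 import BeautifulSoup

from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue

hostname = "dt"


def fetch_feed(url, headers):
    releases = []
    try:
        r = requests.get(url, headers=headers, timeout=30)
        r.raise_for_status()                      # HTTP 4xx/5xx now surfaces as an exception
        feed = BeautifulSoup(r.content, "html.parser")
        for article in feed.find_all("article"):
            releases.append(article.get_text(strip=True))   # real parsing omitted
    except Exception as e:
        mark_hostname_issue(hostname, "feed", str(e))
    if releases:
        clear_hostname_issue(hostname)            # only a successful, non-empty run clears it
    return releases

(The real hunks pass str(e) if "e" in dir() else "Error occurred" as the message; the sketch shortens that to str(e).)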
quasarr/search/sources/dw.py
CHANGED
@@ -10,6 +10,7 @@ from base64 import urlsafe_b64encode
import requests
from bs4 import BeautifulSoup

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.log import info, debug

hostname = "dw"
@@ -77,8 +78,9 @@ def dw_feed(shared_state, start_time, request_from, mirror=None):
    }

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
+        feed = BeautifulSoup(r.content, "html.parser")
        articles = feed.find_all('h4')

        for article in articles:
@@ -102,6 +104,7 @@ def dw_feed(shared_state, start_time, request_from, mirror=None):
                link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
            except Exception as e:
                info(f"Error parsing {hostname.upper()} feed: {e}")
+                mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
                continue

            releases.append({
@@ -120,10 +123,13 @@ def dw_feed(shared_state, start_time, request_from, mirror=None):

    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -151,12 +157,14 @@ def dw_search(shared_state, start_time, request_from, search_string, mirror=None
    }

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        search = BeautifulSoup(r.content, "html.parser")
        results = search.find_all('h4')

    except Exception as e:
        info(f"Error loading {hostname.upper()} search feed: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        return releases

    imdb_id = shared_state.is_imdb_id(search_string)
@@ -191,6 +199,7 @@ def dw_search(shared_state, start_time, request_from, search_string, mirror=None
            link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
        except Exception as e:
            info(f"Error parsing {hostname.upper()} search: {e}")
+            mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
            continue

        releases.append({
@@ -210,4 +219,6 @@ def dw_search(shared_state, start_time, request_from, search_string, mirror=None
    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/fx.py
CHANGED
@@ -9,6 +9,7 @@ from base64 import urlsafe_b64encode
import requests
from bs4 import BeautifulSoup

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.log import info, debug

hostname = "fx"
@@ -46,11 +47,13 @@ def fx_feed(shared_state, start_time, request_from, mirror=None):
    }

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
+        feed = BeautifulSoup(r.content, "html.parser")
        items = feed.find_all("article")
    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
        return releases

    if items:
@@ -109,10 +112,13 @@ def fx_feed(shared_state, start_time, request_from, mirror=None):

            except Exception as e:
                info(f"Error parsing {hostname.upper()} feed: {e}")
+                mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -136,23 +142,27 @@ def fx_search(shared_state, start_time, request_from, search_string, mirror=None
    }

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        search = BeautifulSoup(r.content, "html.parser")
        results = search.find('h2', class_='entry-title')

    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        return releases

    if results:
        for result in results:
            try:
                result_source = result["href"]
-
-
+                result_r = requests.get(result_source, headers=headers, timeout=10)
+                result_r.raise_for_status()
+                feed = BeautifulSoup(result_r.content, "html.parser")
                items = feed.find_all("article")
            except Exception as e:
                info(f"Error loading {hostname.upper()} feed: {e}")
+                mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
                return releases

            for item in items:
@@ -216,8 +226,11 @@ def fx_search(shared_state, start_time, request_from, search_string, mirror=None

                except Exception as e:
                    info(f"Error parsing {hostname.upper()} search: {e}")
+                    mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/he.py
CHANGED
@@ -11,7 +11,8 @@ from html import unescape
import requests
from bs4 import BeautifulSoup

-from quasarr.providers.
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
+from quasarr.providers.imdb_metadata import get_localized_title, get_year
from quasarr.providers.log import info, debug

hostname = "he"
@@ -83,6 +84,9 @@ def he_search(shared_state, start_time, request_from, search_string="", mirror=N
            if not local_title:
                info(f"{hostname}: no title for IMDb {imdb_id}")
                return releases
+            year = get_year(imdb_id)
+            if year:
+                local_title += f" {year}"
            source_search = local_title
        else:
            return releases
@@ -90,6 +94,13 @@ def he_search(shared_state, start_time, request_from, search_string="", mirror=N
    else:
        imdb_id = None

+    if not source_search:
+        search_type = "feed"
+        timeout=30
+    else:
+        search_type = "search"
+        timeout = 10
+
    if season:
        source_search += f" S{int(season):02d}"

@@ -102,11 +113,13 @@ def he_search(shared_state, start_time, request_from, search_string="", mirror=N
    params = {"s": source_search}

    try:
-        r = requests.get(url, headers=headers, params=params, timeout=
+        r = requests.get(url, headers=headers, params=params, timeout=timeout)
+        r.raise_for_status()
        soup = BeautifulSoup(r.content, 'html.parser')
        results = soup.find_all('div', class_='item')
    except Exception as e:
-        info(f"{hostname}:
+        info(f"{hostname}: {search_type} load error: {e}")
+        mark_hostname_issue(hostname, search_type, str(e) if "e" in dir() else "Error occurred")
        return releases

    if not results:
@@ -163,6 +176,9 @@ def he_search(shared_state, start_time, request_from, search_string="", mirror=N
        try:
            r = requests.get(source, headers=headers, timeout=10)
            soup = BeautifulSoup(r.content, 'html.parser')
+        except Exception as e:
+            mark_hostname_issue(hostname, search_type, str(e) if "e" in dir() else "Error occurred")
+        try:
            imdb_link = soup.find('a', href=re.compile(r"imdb\.com/title/tt\d+", re.IGNORECASE))
            if imdb_link:
                release_imdb_id = re.search(r'tt\d+', imdb_link['href']).group()
@@ -199,4 +215,7 @@ def he_search(shared_state, start_time, request_from, search_string="", mirror=N

    elapsed = time.time() - start_time
    debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases
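Beyond the shared error-tracking pattern, he.py (mirrored in nk.py below) now appends the release year from get_year() to the localized title before querying, and treats an empty query as a feed run (30 s timeout) versus a real search (10 s timeout), both for the request and for the context string recorded on failure. With example values, the effective request looks roughly like this; the host and title are illustrative placeholders, not taken from the diff:

from requests import Request

local_title = "Der Film"        # example result of get_localized_title(...)
year = "2023"                   # example result of get_year(imdb_id); may also be None
source_search = f"{local_title} {year}" if year else local_title

search_type = "feed" if not source_search else "search"   # empty query means a feed run
timeout = 30 if search_type == "feed" else 10

req = Request("GET", "https://example.invalid/", params={"s": source_search}).prepare()
print(req.url, timeout)         # -> https://example.invalid/?s=Der+Film+2023 10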
quasarr/search/sources/mb.py
CHANGED
@@ -12,6 +12,7 @@ from urllib.parse import quote_plus
import requests
from bs4 import BeautifulSoup

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.imdb_metadata import get_localized_title
from quasarr.providers.log import info, debug

@@ -151,13 +152,18 @@ def mb_feed(shared_state, start_time, request_from, mirror=None):
    url = f"https://{mb}/category/{section}/"
    headers = {'User-Agent': shared_state.values["user_agent"]}
    try:
-
-
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
+        soup = BeautifulSoup(r.content, "html.parser")
        releases = _parse_posts(soup, shared_state, password, mirror_filter=mirror)
    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
        releases = []
    debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -181,8 +187,9 @@ def mb_search(shared_state, start_time, request_from, search_string, mirror=None
    url = f"https://{mb}/?s={q}&id=20&post_type=post"
    headers = {'User-Agent': shared_state.values["user_agent"]}
    try:
-
-
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        soup = BeautifulSoup(r.content, "html.parser")
        releases = _parse_posts(
            soup, shared_state, password, mirror_filter=mirror,
            is_search=True, request_from=request_from,
@@ -190,6 +197,10 @@ def mb_search(shared_state, start_time, request_from, search_string, mirror=None
        )
    except Exception as e:
        info(f"Error loading {hostname.upper()} search: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        releases = []
    debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/nk.py
CHANGED
@@ -12,7 +12,8 @@ from urllib.parse import urljoin
import requests
from bs4 import BeautifulSoup

-from quasarr.providers.
+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
+from quasarr.providers.imdb_metadata import get_localized_title, get_year
from quasarr.providers.log import info, debug

hostname = "nk"
@@ -74,6 +75,9 @@ def nk_search(shared_state, start_time, request_from, search_string="", mirror=N
            if not local_title:
                info(f"{hostname}: no title for IMDb {imdb_id}")
                return releases
+            year = get_year(imdb_id)
+            if year:
+                local_title += f" {year}"
            source_search = local_title
        else:
            return releases
@@ -81,6 +85,13 @@ def nk_search(shared_state, start_time, request_from, search_string="", mirror=N
    else:
        imdb_id = None

+    if not source_search:
+        search_type = "feed"
+        timeout = 30
+    else:
+        search_type = "search"
+        timeout = 10
+
    if season:
        source_search += f" S{int(season):02d}"

@@ -92,11 +103,13 @@ def nk_search(shared_state, start_time, request_from, search_string="", mirror=N
    data = {"search": source_search}

    try:
-        r = requests.post(url, headers=headers, data=data, timeout=
+        r = requests.post(url, headers=headers, data=data, timeout=timeout)
+        r.raise_for_status()
        soup = BeautifulSoup(r.content, 'html.parser')
        results = soup.find_all('div', class_='article-right')
    except Exception as e:
-        info(f"{hostname}:
+        info(f"{hostname}: {search_type} load error: {e}")
+        mark_hostname_issue(hostname, search_type, str(e) if "e" in dir() else "Error occurred")
        return releases

    if not results:
@@ -191,4 +204,7 @@ def nk_search(shared_state, start_time, request_from, search_string="", mirror=N

    elapsed = time.time() - start_time
    debug(f"Time taken: {elapsed:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/nx.py
CHANGED
@@ -8,6 +8,7 @@ from base64 import urlsafe_b64encode

import requests

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.imdb_metadata import get_localized_title
from quasarr.providers.log import info, debug

@@ -38,10 +39,12 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):
    }

    try:
-
-
+        r = requests.get(url, headers, timeout=30)
+        r.raise_for_status()
+        feed = r.json()
    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
        return releases

    items = feed['result']['list']
@@ -91,10 +94,13 @@ def nx_feed(shared_state, start_time, request_from, mirror=None):

        except Exception as e:
            info(f"Error parsing {hostname.upper()} feed: {e}")
+            mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -129,10 +135,12 @@ def nx_search(shared_state, start_time, request_from, search_string, mirror=None
    }

    try:
-
-
+        r = requests.get(url, headers, timeout=10)
+        r.raise_for_status()
+        feed = r.json()
    except Exception as e:
        info(f"Error loading {hostname.upper()} search: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        return releases

    items = feed['result']['releases']
@@ -190,8 +198,11 @@ def nx_search(shared_state, start_time, request_from, search_string, mirror=None

        except Exception as e:
            info(f"Error parsing {hostname.upper()} search: {e}")
+            mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/sf.py
CHANGED
@@ -10,6 +10,7 @@ from datetime import datetime, timedelta

import requests

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.imdb_metadata import get_localized_title
from quasarr.providers.log import info, debug

@@ -91,6 +92,7 @@ def parse_mirrors(base_url, entry):
        }
    except Exception as e:
        info(f"Error parsing mirrors: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    return mirrors

@@ -122,12 +124,14 @@ def sf_feed(shared_state, start_time, request_from, mirror=None):
        date -= timedelta(days=1)

    try:
-
+        r = requests.get(f"https://{sf}/updates/{formatted_date}#list", headers, timeout=30)
+        r.raise_for_status()
    except Exception as e:
        info(f"Error loading {hostname.upper()} feed: {e} for {formatted_date}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
        return releases

-    content = BeautifulSoup(
+    content = BeautifulSoup(r.text, "html.parser")
    items = content.find_all("div", {"class": "row"}, style=re.compile("order"))

    for item in items:
@@ -175,10 +179,13 @@ def sf_feed(shared_state, start_time, request_from, mirror=None):

        except Exception as e:
            info(f"Error parsing {hostname.upper()} feed: {e}")
+            mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")

+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -220,10 +227,12 @@ def sf_search(shared_state, start_time, request_from, search_string, mirror=None
    headers = {'User-Agent': shared_state.values["user_agent"]}

    try:
-
-
+        r = requests.get(url, headers=headers, timeout=10)
+        r.raise_for_status()
+        feed = r.json()
    except Exception as e:
        info(f"Error loading {hostname.upper()} search: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        return releases

    results = feed.get('result', [])
@@ -257,12 +266,15 @@ def sf_search(shared_state, start_time, request_from, search_string, mirror=None
        # load series page
        series_url = f"https://{sf}/{series_id}"
        try:
-
+            r = requests.get(series_url, headers=headers, timeout=10)
+            r.raise_for_status()
+            series_page = r.text
            imdb_link = BeautifulSoup(series_page, "html.parser").find("a", href=re.compile(r"imdb\.com"))
            imdb_id = re.search(r'tt\d+', str(imdb_link)).group() if imdb_link else None
            season_id = re.findall(r"initSeason\('(.+?)\',", series_page)[0]
-        except Exception:
+        except Exception as e:
            debug(f"Failed to load or parse series page for {series_id}")
+            mark_hostname_issue(hostname, "search", str(e))
            continue

        # fetch API HTML
@@ -270,14 +282,16 @@ def sf_search(shared_state, start_time, request_from, search_string, mirror=None
        api_url = f'https://{sf}/api/v1/{season_id}/season/ALL?lang=ALL&_={epoch}'
        debug(f"Requesting SF API URL: {api_url}")
        try:
-
-
+            r = requests.get(api_url, headers=headers, timeout=10)
+            r.raise_for_status()
+            resp_json = r.json()
            if resp_json.get('error'):
                info(f"SF API error for series '{series_id}' at URL {api_url}: {resp_json.get('message')}")
                continue
            data_html = resp_json.get("html", "")
        except Exception as e:
            info(f"Error loading SF API for {series_id} at {api_url}: {e}")
+            mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
            continue

        # cache content and imdb_id
@@ -373,4 +387,7 @@ def sf_search(shared_state, start_time, request_from, search_string, mirror=None

    elapsed_time = time.time() - start_time
    debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases
quasarr/search/sources/sj.py
CHANGED
@@ -11,6 +11,7 @@ from datetime import datetime, timedelta
import requests
from bs4 import BeautifulSoup

+from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
from quasarr.providers.imdb_metadata import get_localized_title
from quasarr.providers.log import info, debug

@@ -40,10 +41,12 @@ def sj_feed(shared_state, start_time, request_from, mirror=None):
    headers = {"User-Agent": shared_state.values["user_agent"]}

    try:
-        r = requests.get(url, headers=headers, timeout=
+        r = requests.get(url, headers=headers, timeout=30)
+        r.raise_for_status()
        data = json.loads(r.content)
    except Exception as e:
        info(f"{hostname.upper()}: feed load error: {e}")
+        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
        return releases

    for release in data:
@@ -92,6 +95,9 @@ def sj_feed(shared_state, start_time, request_from, mirror=None):
            continue

    debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases


@@ -120,10 +126,12 @@ def sj_search(shared_state, start_time, request_from, search_string, mirror=None

    try:
        r = requests.get(search_url, headers=headers, params=params, timeout=10)
+        r.raise_for_status()
        soup = BeautifulSoup(r.content, "html.parser")
        results = soup.find_all("a", href=re.compile(r"^/serie/"))
    except Exception as e:
        info(f"{hostname.upper()}: search load error: {e}")
+        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
        return releases

    one_hour_ago = (datetime.now() - timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%S')
@@ -151,6 +159,7 @@ def sj_search(shared_state, start_time, request_from, search_string, mirror=None
            series_url = f"https://{sj_host}{result['href']}"

            r = requests.get(series_url, headers=headers, timeout=10)
+            r.raise_for_status()
            media_id_match = re.search(r'data-mediaid="([^"]+)"', r.text)
            if not media_id_match:
                debug(f"{hostname.upper()}: no media id for {result_title}")
@@ -160,6 +169,7 @@ def sj_search(shared_state, start_time, request_from, search_string, mirror=None
            api_url = f"https://{sj_host}/api/media/{media_id}/releases"

            r = requests.get(api_url, headers=headers, timeout=10)
+            r.raise_for_status()
            data = json.loads(r.content)

            for season_block in data.values():
@@ -210,4 +220,7 @@ def sj_search(shared_state, start_time, request_from, search_string, mirror=None
            continue

    debug(f"Time taken: {time.time() - start_time:.2f}s ({hostname})")
+
+    if releases:
+        clear_hostname_issue(hostname)
    return releases