quasarr 1.3.5__py3-none-any.whl → 1.20.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quasarr/__init__.py +157 -56
- quasarr/api/__init__.py +141 -36
- quasarr/api/arr/__init__.py +197 -78
- quasarr/api/captcha/__init__.py +897 -42
- quasarr/api/config/__init__.py +23 -0
- quasarr/api/sponsors_helper/__init__.py +84 -22
- quasarr/api/statistics/__init__.py +196 -0
- quasarr/downloads/__init__.py +237 -434
- quasarr/downloads/linkcrypters/al.py +237 -0
- quasarr/downloads/linkcrypters/filecrypt.py +178 -31
- quasarr/downloads/linkcrypters/hide.py +123 -0
- quasarr/downloads/packages/__init__.py +461 -0
- quasarr/downloads/sources/al.py +697 -0
- quasarr/downloads/sources/by.py +106 -0
- quasarr/downloads/sources/dd.py +6 -78
- quasarr/downloads/sources/dj.py +7 -0
- quasarr/downloads/sources/dt.py +1 -1
- quasarr/downloads/sources/dw.py +2 -2
- quasarr/downloads/sources/he.py +112 -0
- quasarr/downloads/sources/mb.py +47 -0
- quasarr/downloads/sources/nk.py +51 -0
- quasarr/downloads/sources/nx.py +36 -81
- quasarr/downloads/sources/sf.py +27 -4
- quasarr/downloads/sources/sj.py +7 -0
- quasarr/downloads/sources/sl.py +90 -0
- quasarr/downloads/sources/wd.py +110 -0
- quasarr/providers/cloudflare.py +204 -0
- quasarr/providers/html_images.py +20 -0
- quasarr/providers/html_templates.py +210 -108
- quasarr/providers/imdb_metadata.py +15 -2
- quasarr/providers/myjd_api.py +36 -5
- quasarr/providers/notifications.py +30 -5
- quasarr/providers/obfuscated.py +35 -0
- quasarr/providers/sessions/__init__.py +0 -0
- quasarr/providers/sessions/al.py +286 -0
- quasarr/providers/sessions/dd.py +78 -0
- quasarr/providers/sessions/nx.py +76 -0
- quasarr/providers/shared_state.py +368 -23
- quasarr/providers/statistics.py +154 -0
- quasarr/providers/version.py +60 -1
- quasarr/search/__init__.py +112 -36
- quasarr/search/sources/al.py +448 -0
- quasarr/search/sources/by.py +203 -0
- quasarr/search/sources/dd.py +17 -6
- quasarr/search/sources/dj.py +213 -0
- quasarr/search/sources/dt.py +37 -7
- quasarr/search/sources/dw.py +27 -47
- quasarr/search/sources/fx.py +27 -29
- quasarr/search/sources/he.py +196 -0
- quasarr/search/sources/mb.py +195 -0
- quasarr/search/sources/nk.py +188 -0
- quasarr/search/sources/nx.py +22 -6
- quasarr/search/sources/sf.py +143 -151
- quasarr/search/sources/sj.py +213 -0
- quasarr/search/sources/sl.py +246 -0
- quasarr/search/sources/wd.py +208 -0
- quasarr/storage/config.py +20 -4
- quasarr/storage/setup.py +224 -56
- quasarr-1.20.4.dist-info/METADATA +304 -0
- quasarr-1.20.4.dist-info/RECORD +72 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/WHEEL +1 -1
- quasarr/providers/tvmaze_metadata.py +0 -23
- quasarr-1.3.5.dist-info/METADATA +0 -174
- quasarr-1.3.5.dist-info/RECORD +0 -43
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/entry_points.txt +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/licenses/LICENSE +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/top_level.txt +0 -0
quasarr/downloads/__init__.py
CHANGED
@@ -1,376 +1,194 @@
 # -*- coding: utf-8 -*-
 # Quasarr
 # Project by https://github.com/rix1337
+#
+# Special note: The signatures of all handlers must stay the same so we can neatly call them in download()
+# Same is true for every get_xx_download_links() function in sources/xx.py

 import json
-from collections import defaultdict
-from urllib.parse import urlparse

+from quasarr.downloads.linkcrypters.hide import decrypt_links_if_hide
+from quasarr.downloads.sources.al import get_al_download_links
+from quasarr.downloads.sources.by import get_by_download_links
 from quasarr.downloads.sources.dd import get_dd_download_links
+from quasarr.downloads.sources.dj import get_dj_download_links
 from quasarr.downloads.sources.dt import get_dt_download_links
 from quasarr.downloads.sources.dw import get_dw_download_links
+from quasarr.downloads.sources.he import get_he_download_links
+from quasarr.downloads.sources.mb import get_mb_download_links
+from quasarr.downloads.sources.nk import get_nk_download_links
 from quasarr.downloads.sources.nx import get_nx_download_links
 from quasarr.downloads.sources.sf import get_sf_download_links, resolve_sf_redirect
-from quasarr.…
-from quasarr.…
+from quasarr.downloads.sources.sj import get_sj_download_links
+from quasarr.downloads.sources.sl import get_sl_download_links
+from quasarr.downloads.sources.wd import get_wd_download_links
+from quasarr.providers.log import info
 from quasarr.providers.notifications import send_discord_message
-… (old lines 17-32 not shown in source diff view)
-        link_package_uuid = link.get("packageUUID")
-        if link_package_uuid and link_package_uuid == package_uuid:
-            links_in_package.append(link)
-
-    all_finished = True
-    eta = None
-    error = None
-
-    mirrors = defaultdict(list)
-    for link in links_in_package:
-        url = link.get("url", "")
-        base_domain = urlparse(url).netloc
-        mirrors[base_domain].append(link)
-
-    has_mirror_all_online = False
-    for mirror_links in mirrors.values():
-        if all(link.get('availability', '').lower() == 'online' for link in mirror_links):
-            has_mirror_all_online = True
-            break
-
-    for link in links_in_package:
-        availability = link.get('availability', "")
-        if availability.lower() == "offline" and not has_mirror_all_online:
-            error = "Links offline for all mirrors"
-        link_finished = link.get('finished', False)
-        link_extraction_status = link.get('extractionStatus', '').lower()  # "error" signifies an issue
-        link_eta = link.get('eta', 0) // 1000
-        if not link_finished:
-            all_finished = False
-        elif link_extraction_status and link_extraction_status != 'successful':
-            if link_extraction_status == 'error':
-                error = link.get('status', '')
-        elif link_extraction_status == 'running' and link_eta > 0:
-            if eta and link_eta > eta or not eta:
-                eta = link_eta
-            all_finished = False
-
-    return {"all_finished": all_finished, "eta": eta, "error": error}
-
-
-def get_links_matching_package_uuid(package, package_links):
-    package_uuid = package.get("uuid")
-    link_ids = []
-    if package_uuid:
-        for link in package_links:
-            if link.get("packageUUID") == package_uuid:
-                link_ids.append(link.get("uuid"))
+from quasarr.providers.statistics import StatsHelper
+
+
+def handle_unprotected(shared_state, title, password, package_id, imdb_id, url,
+                       mirror=None, size_mb=None, links=None, func=None, label=""):
+    if func:
+        links = func(shared_state, url, mirror, title)
+
+    if links:
+        info(f"Decrypted {len(links)} download links for {title}")
+        send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id, source=url)
+        added = shared_state.download_package(links, title, password, package_id)
+        if not added:
+            fail(title, package_id, shared_state,
+                 reason=f'Failed to add {len(links)} links for "{title}" to linkgrabber')
+            return {"success": False, "title": title}
     else:
-… (old lines 81-82 not shown in source diff view)
+        fail(title, package_id, shared_state,
+             reason=f'Offline / no links found for "{title}" on {label} - "{url}"')
+        return {"success": False, "title": title}
+
+    StatsHelper(shared_state).increment_package_with_links(links)
+    return {"success": True, "title": title}
+
+
+def handle_protected(shared_state, title, password, package_id, imdb_id, url,
+                     mirror=None, size_mb=None, func=None, label=""):
+    links = func(shared_state, url, mirror, title)
+    if links:
+        valid_links = [pair for pair in links if "/404.html" not in pair[0]]
+
+        # If none left, IP was banned
+        if not valid_links:
+            fail(
+                title,
+                package_id,
+                shared_state,
+                reason=f'IP was banned during download of "{title}" on {label} - "{url}"'
+            )
+            return {"success": False, "title": title}
+        links = valid_links

-… (old lines 84-87 not shown in source diff view)
+        info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
+        send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id, source=url)
+        blob = json.dumps({"title": title, "links": links, "size_mb": size_mb, "password": password})
+        shared_state.values["database"]("protected").update_store(package_id, blob)
     else:
-… (old lines 89-94 not shown in source diff view)
-def …
-… (old lines 96-122 not shown in source diff view)
-            data = json.loads(package[1])
-            details = {
-                "name": data["title"],
-                "bytesLoaded": 0,
-                "saveTo": "/"
-            }
-
-            packages.append({
-                "details": details,
-                "location": "history",
-                "type": "failed",
-                "error": "Too many failed attempts by SponsorsHelper",
-                "comment": package_id,
-                "uuid": package_id
-            })
-    try:
-        linkgrabber_packages = shared_state.get_device().linkgrabber.query_packages()
-        linkgrabber_links = shared_state.get_device().linkgrabber.query_links()
-    except (TokenExpiredException, RequestTimeoutException, MYJDException):
-        linkgrabber_packages = []
-        linkgrabber_links = []
-
-    if linkgrabber_packages:
-        for package in linkgrabber_packages:
-            comment = get_links_comment(package, shared_state.get_device().linkgrabber.query_links())
-            link_details = get_links_status(package, linkgrabber_links)
-            error = link_details["error"]
-            location = "history" if error else "queue"
-            packages.append({
-                "details": package,
-                "location": location,
-                "type": "linkgrabber",
-                "comment": comment,
-                "uuid": package.get("uuid"),
-                "error": error
-            })
-    try:
-        downloader_packages = shared_state.get_device().downloads.query_packages()
-        downloader_links = shared_state.get_device().downloads.query_links()
-    except (TokenExpiredException, RequestTimeoutException, MYJDException):
-        downloader_packages = []
-        downloader_links = []
-
-    if downloader_packages and downloader_links:
-        for package in downloader_packages:
-            comment = get_links_comment(package, downloader_links)
-            link_details = get_links_status(package, downloader_links)
-
-            error = link_details["error"]
-            finished = link_details["all_finished"]
-            if not finished and link_details["eta"]:
-                package["eta"] = link_details["eta"]
-
-            location = "history" if error or finished else "queue"
-
-            packages.append({
-                "details": package,
-                "location": location,
-                "type": "downloader",
-                "comment": comment,
-                "uuid": package.get("uuid"),
-                "error": error
-            })
-
-    downloads = {
-        "queue": [],
-        "history": []
-    }
-    for package in packages:
-        queue_index = 0
-        history_index = 0
-
-        package_id = None
-
-        if package["location"] == "queue":
-            time_left = "23:59:59"
-            if package["type"] == "linkgrabber":
-                details = package["details"]
-                name = f"[Linkgrabber] {details["name"]}"
-                try:
-                    mb = mb_left = int(details["bytesTotal"]) / (1024 * 1024)
-                except KeyError:
-                    mb = mb_left = 0
-                try:
-                    package_id = package["comment"]
-                    if "movies" in package_id:
-                        category = "movies"
-                    else:
-                        category = "tv"
-                except TypeError:
-                    category = "not_quasarr"
-                package_type = "linkgrabber"
-                package_uuid = package["uuid"]
-            elif package["type"] == "downloader":
-                details = package["details"]
-                status = "Downloading"
-                eta = details.get("eta")
-                bytes_total = int(details.get("bytesTotal", 0))
-                bytes_loaded = int(details.get("bytesLoaded", 0))
-
-                mb = bytes_total / (1024 * 1024)
-                mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
-                if mb_left < 0:
-                    mb_left = 0
-
-                if eta is None:
-                    status = "Paused"
-                else:
-                    time_left = format_eta(int(eta))
-                if mb_left == 0:
-                    status = "Extracting"
-
-                name = f"[{status}] {details['name']}"
-
-                try:
-                    package_id = package["comment"]
-                    if "movies" in package_id:
-                        category = "movies"
-                    else:
-                        category = "tv"
-                except TypeError:
-                    category = "not_quasarr"
-                package_type = "downloader"
-                package_uuid = package["uuid"]
-            else:
-                details = package["details"]
-                name = f"[CAPTCHA not solved!] {details["title"]}"
-                mb = mb_left = details["size_mb"]
-                try:
-                    package_id = package["package_id"]
-                    if "movies" in package_id:
-                        category = "movies"
-                    else:
-                        category = "tv"
-                except TypeError:
-                    category = "not_quasarr"
-                package_type = "protected"
-                package_uuid = None
-
-            try:
-                if package_id:
-                    downloads["queue"].append({
-                        "index": queue_index,
-                        "nzo_id": package_id,
-                        "priority": "Normal",
-                        "filename": name,
-                        "cat": category,
-                        "mbleft": int(mb_left),
-                        "mb": int(mb),
-                        "status": "Downloading",
-                        "timeleft": time_left,
-                        "type": package_type,
-                        "uuid": package_uuid
-                    })
-            except:
-                debug(f"Parameters missing for {package}")
-            queue_index += 1
-        elif package["location"] == "history":
-            details = package["details"]
-            name = details["name"]
-            try:
-                size = int(details["bytesLoaded"])
-            except KeyError:
-                size = 0
-            storage = details["saveTo"]
-            try:
-                package_id = package["comment"]
-                if "movies" in package_id:
-                    category = "movies"
-                else:
-                    category = "tv"
-            except TypeError:
-                category = "not_quasarr"
-
-            error = package.get("error")
-            fail_message = ""
-            if error:
-                status = "Failed"
-                fail_message = error
-            else:
-                status = "Completed"
-
-            downloads["history"].append({
-                "fail_message": fail_message,
-                "category": category,
-                "storage": storage,
-                "status": status,
-                "nzo_id": package_id,
-                "name": name,
-                "bytes": int(size),
-                "type": "downloader",
-                "uuid": package["uuid"]
-            })
-            history_index += 1
+        fail(title, package_id, shared_state,
+             reason=f'No protected links found for "{title}" on {label} - "{url}"')
+        return {"success": False, "title": title}
+    return {"success": True, "title": title}
+
+
+def handle_al(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
+    data = get_al_download_links(shared_state, url, mirror, title, password)
+    links = data.get("links", [])
+    title = data.get("title", title)
+    password = data.get("password", "")
+    return handle_unprotected(
+        shared_state, title, password, package_id, imdb_id, url,
+        links=links,
+        label='AL'
+    )
+
+
+def handle_by(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
+    links = get_by_download_links(shared_state, url, mirror, title)
+    if not links:
+        fail(title, package_id, shared_state,
+             reason=f'Offline / no links found for "{title}" on BY - "{url}"')
+        return {"success": False, "title": title}
+
+    decrypted = decrypt_links_if_hide(shared_state, links)
+    if decrypted and decrypted.get("status") != "none":
+        status = decrypted.get("status", "error")
+        links = decrypted.get("results", [])
+        if status == "success":
+            return handle_unprotected(
+                shared_state, title, password, package_id, imdb_id, url,
+                links=links, label='BY'
+            )
         else:
-… (old lines 318-369 not shown in source diff view)
+            fail(title, package_id, shared_state,
+                 reason=f'Error decrypting hide.cx links for "{title}" on BY - "{url}"')
+            return {"success": False, "title": title}
+
+    return handle_protected(
+        shared_state, title, password, package_id, imdb_id, url,
+        mirror=mirror,
+        size_mb=size_mb,
+        func=lambda ss, u, m, t: links,
+        label='BY'
+    )
+
+
+def handle_sf(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
+    if url.startswith(f"https://{shared_state.values['config']('Hostnames').get('sf')}/external"):
+        url = resolve_sf_redirect(url, shared_state.values["user_agent"])
+    elif url.startswith(f"https://{shared_state.values['config']('Hostnames').get('sf')}/"):
+        data = get_sf_download_links(shared_state, url, mirror, title)
+        url = data.get("real_url")
+        if not imdb_id:
+            imdb_id = data.get("imdb_id")
+
+    if not url:
+        fail(title, package_id, shared_state,
+             reason=f'Failed to get download link from SF for "{title}" - "{url}"')
+        return {"success": False, "title": title}
+
+    return handle_protected(
+        shared_state, title, password, package_id, imdb_id, url,
+        mirror=mirror,
+        size_mb=size_mb,
+        func=lambda ss, u, m, t: [[url, "filecrypt"]],
+        label='SF'
+    )
+
+
+def handle_sl(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
+    data = get_sl_download_links(shared_state, url, mirror, title)
+    links = data.get("links")
+    if not imdb_id:
+        imdb_id = data.get("imdb_id")
+    return handle_unprotected(
+        shared_state, title, password, package_id, imdb_id, url,
+        links=links,
+        label='SL'
+    )
+
+
+def handle_wd(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
+    data = get_wd_download_links(shared_state, url, mirror, title)
+    links = data.get("links", []) if data else []
+    if not links:
+        fail(title, package_id, shared_state,
+             reason=f'Offline / no links found for "{title}" on WD - "{url}"')
+        return {"success": False, "title": title}
+
+    decrypted = decrypt_links_if_hide(shared_state, links)
+    if decrypted and decrypted.get("status") != "none":
+        status = decrypted.get("status", "error")
+        links = decrypted.get("results", [])
+        if status == "success":
+            return handle_unprotected(
+                shared_state, title, password, package_id, imdb_id, url,
+                links=links, label='WD'
+            )
+        else:
+            fail(title, package_id, shared_state,
+                 reason=f'Error decrypting hide.cx links for "{title}" on WD - "{url}"')
+            return {"success": False, "title": title}
+
+    return handle_protected(
+        shared_state, title, password, package_id, imdb_id, url,
+        mirror=mirror,
+        size_mb=size_mb,
+        func=lambda ss, u, m, t: links,
+        label='WD'
+    )


 def download(shared_state, request_from, title, url, mirror, size_mb, password, imdb_id=None):
-    if "…
+    if "lazylibrarian" in request_from.lower():
+        category = "docs"
+    elif "radarr" in request_from.lower():
         category = "movies"
     else:
         category = "tv"
@@ -380,85 +198,70 @@ def download(shared_state, request_from, title, url, mirror, size_mb, password,
     if imdb_id is not None and imdb_id.lower() == "none":
         imdb_id = None

-… (old lines 383-399 not shown in source diff view)
-            package_id = None
-
-    elif dt and dt.lower() in url.lower():
-        links = get_dt_download_links(shared_state, url, mirror, title)
-        if links:
-            info(f"Decrypted {len(links)} download links for {title}")
-            send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id)
-            added = shared_state.download_package(links, title, password, package_id)
-            if not added:
-                info(f"Failed to add {title} to linkgrabber")
-                package_id = None
-        else:
-            info(f"Found 0 links decrypting {title}")
-            package_id = None
-
-
-    elif dw and dw.lower() in url.lower():
-        links = get_dw_download_links(shared_state, url, mirror, title)
-        info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
-        send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id)
-        blob = json.dumps({"title": title, "links": links, "size_mb": size_mb, "password": password})
-        shared_state.values["database"]("protected").update_store(package_id, blob)
+    config = shared_state.values["config"]("Hostnames")
+    flags = {
+        'AL': config.get("al"),
+        'BY': config.get("by"),
+        'DD': config.get("dd"),
+        'DJ': config.get("dj"),
+        'DT': config.get("dt"),
+        'DW': config.get("dw"),
+        'HE': config.get("he"),
+        'MB': config.get("mb"),
+        'NK': config.get("nk"),
+        'NX': config.get("nx"),
+        'SF': config.get("sf"),
+        'SJ': config.get("sj"),
+        'SL': config.get("sl"),
+        'WD': config.get("wd")
+    }

-… (old lines 423-448 not shown in source diff view)
+    handlers = [
+        (flags['AL'], handle_al),
+        (flags['BY'], handle_by),
+        (flags['DD'], lambda *a: handle_unprotected(*a, func=get_dd_download_links, label='DD')),
+        (flags['DJ'], lambda *a: handle_protected(*a, func=get_dj_download_links, label='DJ')),
+        (flags['DT'], lambda *a: handle_unprotected(*a, func=get_dt_download_links, label='DT')),
+        (flags['DW'], lambda *a: handle_protected(*a, func=get_dw_download_links, label='DW')),
+        (flags['HE'], lambda *a: handle_unprotected(*a, func=get_he_download_links, label='HE')),
+        (flags['MB'], lambda *a: handle_protected(*a, func=get_mb_download_links, label='MB')),
+        (flags['NK'], lambda *a: handle_protected(*a, func=get_nk_download_links, label='NK')),
+        (flags['NX'], lambda *a: handle_unprotected(*a, func=get_nx_download_links, label='NX')),
+        (flags['SF'], handle_sf),
+        (flags['SJ'], lambda *a: handle_protected(*a, func=get_sj_download_links, label='SJ')),
+        (flags['SL'], handle_sl),
+        (flags['WD'], handle_wd),
+    ]
+
+    for flag, fn in handlers:
+        if flag and flag.lower() in url.lower():
+            return {"package_id": package_id,
+                    **fn(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb)}
+
+    if "filecrypt" in url.lower():
+        return {"package_id": package_id, **handle_protected(
+            shared_state, title, password, package_id, imdb_id, url, mirror, size_mb,
+            func=lambda ss, u, m, t: [[u, "filecrypt"]],
+            label='filecrypt'
+        )}
+
+    info(f'Could not parse URL for "{title}" - "{url}"')
+    StatsHelper(shared_state).increment_failed_downloads()
+    return {"success": False, "package_id": package_id, "title": title}
+
+
+def fail(title, package_id, shared_state, reason="Offline / no links found"):
+    try:
+        info(f"Reason for failure: {reason}")
+        StatsHelper(shared_state).increment_failed_downloads()
+        blob = json.dumps({"title": title, "error": reason})
+        stored = shared_state.get_db("failed").store(package_id, json.dumps(blob))
+        if stored:
+            info(f'Package "{title}" marked as failed!"')
+            return True
         else:
-            info(f…
-… (old lines 451-454 not shown in source diff view)
-            send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id)
-            blob = json.dumps(
-                {"title": title, "links": [[url, "filecrypt"]], "size_mb": size_mb, "password": password, "mirror": mirror})
-            shared_state.values["database"]("protected").update_store(package_id, blob)
-
-    else:
-        package_id = None
-        info(f"Could not parse URL for {title} - {url}")
-
-    return package_id
+            info(f'Failed to mark package "{title}" as failed!"')
+            return False
+    except Exception as e:
+        info(f'Error marking package "{package_id}" as failed: {e}')
+        return False