quasarr 1.4.1__py3-none-any.whl → 1.20.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of quasarr might be problematic; see the registry's advisory page for more details.

Files changed (67)
  1. quasarr/__init__.py +157 -67
  2. quasarr/api/__init__.py +126 -43
  3. quasarr/api/arr/__init__.py +197 -78
  4. quasarr/api/captcha/__init__.py +885 -39
  5. quasarr/api/config/__init__.py +23 -0
  6. quasarr/api/sponsors_helper/__init__.py +84 -22
  7. quasarr/api/statistics/__init__.py +196 -0
  8. quasarr/downloads/__init__.py +236 -487
  9. quasarr/downloads/linkcrypters/al.py +237 -0
  10. quasarr/downloads/linkcrypters/filecrypt.py +178 -31
  11. quasarr/downloads/linkcrypters/hide.py +123 -0
  12. quasarr/downloads/packages/__init__.py +461 -0
  13. quasarr/downloads/sources/al.py +697 -0
  14. quasarr/downloads/sources/by.py +106 -0
  15. quasarr/downloads/sources/dd.py +6 -78
  16. quasarr/downloads/sources/dj.py +7 -0
  17. quasarr/downloads/sources/dt.py +1 -1
  18. quasarr/downloads/sources/dw.py +2 -2
  19. quasarr/downloads/sources/he.py +112 -0
  20. quasarr/downloads/sources/mb.py +47 -0
  21. quasarr/downloads/sources/nk.py +51 -0
  22. quasarr/downloads/sources/nx.py +36 -81
  23. quasarr/downloads/sources/sf.py +27 -4
  24. quasarr/downloads/sources/sj.py +7 -0
  25. quasarr/downloads/sources/sl.py +90 -0
  26. quasarr/downloads/sources/wd.py +110 -0
  27. quasarr/providers/cloudflare.py +204 -0
  28. quasarr/providers/html_images.py +20 -0
  29. quasarr/providers/html_templates.py +48 -39
  30. quasarr/providers/imdb_metadata.py +15 -2
  31. quasarr/providers/myjd_api.py +34 -5
  32. quasarr/providers/notifications.py +30 -5
  33. quasarr/providers/obfuscated.py +35 -0
  34. quasarr/providers/sessions/__init__.py +0 -0
  35. quasarr/providers/sessions/al.py +286 -0
  36. quasarr/providers/sessions/dd.py +78 -0
  37. quasarr/providers/sessions/nx.py +76 -0
  38. quasarr/providers/shared_state.py +347 -20
  39. quasarr/providers/statistics.py +154 -0
  40. quasarr/providers/version.py +1 -1
  41. quasarr/search/__init__.py +112 -36
  42. quasarr/search/sources/al.py +448 -0
  43. quasarr/search/sources/by.py +203 -0
  44. quasarr/search/sources/dd.py +17 -6
  45. quasarr/search/sources/dj.py +213 -0
  46. quasarr/search/sources/dt.py +37 -7
  47. quasarr/search/sources/dw.py +27 -47
  48. quasarr/search/sources/fx.py +27 -29
  49. quasarr/search/sources/he.py +196 -0
  50. quasarr/search/sources/mb.py +195 -0
  51. quasarr/search/sources/nk.py +188 -0
  52. quasarr/search/sources/nx.py +22 -6
  53. quasarr/search/sources/sf.py +143 -151
  54. quasarr/search/sources/sj.py +213 -0
  55. quasarr/search/sources/sl.py +246 -0
  56. quasarr/search/sources/wd.py +208 -0
  57. quasarr/storage/config.py +20 -4
  58. quasarr/storage/setup.py +216 -51
  59. quasarr-1.20.4.dist-info/METADATA +304 -0
  60. quasarr-1.20.4.dist-info/RECORD +72 -0
  61. {quasarr-1.4.1.dist-info → quasarr-1.20.4.dist-info}/WHEEL +1 -1
  62. quasarr/providers/tvmaze_metadata.py +0 -23
  63. quasarr-1.4.1.dist-info/METADATA +0 -174
  64. quasarr-1.4.1.dist-info/RECORD +0 -43
  65. {quasarr-1.4.1.dist-info → quasarr-1.20.4.dist-info}/entry_points.txt +0 -0
  66. {quasarr-1.4.1.dist-info → quasarr-1.20.4.dist-info}/licenses/LICENSE +0 -0
  67. {quasarr-1.4.1.dist-info → quasarr-1.20.4.dist-info}/top_level.txt +0 -0
@@ -1,430 +1,194 @@
1
1
  # -*- coding: utf-8 -*-
2
2
  # Quasarr
3
3
  # Project by https://github.com/rix1337
4
+ #
5
+ # Special note: The signatures of all handlers must stay the same so we can neatly call them in download()
6
+ # Same is true for every get_xx_download_links() function in sources/xx.py
4
7
 
5
8
  import json
6
- from collections import defaultdict
7
- from urllib.parse import urlparse
8
9
 
10
+ from quasarr.downloads.linkcrypters.hide import decrypt_links_if_hide
11
+ from quasarr.downloads.sources.al import get_al_download_links
12
+ from quasarr.downloads.sources.by import get_by_download_links
9
13
  from quasarr.downloads.sources.dd import get_dd_download_links
14
+ from quasarr.downloads.sources.dj import get_dj_download_links
10
15
  from quasarr.downloads.sources.dt import get_dt_download_links
11
16
  from quasarr.downloads.sources.dw import get_dw_download_links
17
+ from quasarr.downloads.sources.he import get_he_download_links
18
+ from quasarr.downloads.sources.mb import get_mb_download_links
19
+ from quasarr.downloads.sources.nk import get_nk_download_links
12
20
  from quasarr.downloads.sources.nx import get_nx_download_links
13
21
  from quasarr.downloads.sources.sf import get_sf_download_links, resolve_sf_redirect
14
- from quasarr.providers.log import info, debug
15
- from quasarr.providers.myjd_api import TokenExpiredException, RequestTimeoutException, MYJDException
22
+ from quasarr.downloads.sources.sj import get_sj_download_links
23
+ from quasarr.downloads.sources.sl import get_sl_download_links
24
+ from quasarr.downloads.sources.wd import get_wd_download_links
25
+ from quasarr.providers.log import info
16
26
  from quasarr.providers.notifications import send_discord_message
17
-
18
-
19
- def get_links_comment(package, package_links):
20
- package_uuid = package.get("uuid")
21
- if package_uuid and package_links:
22
- for link in package_links:
23
- if link.get("packageUUID") == package_uuid:
24
- return link.get("comment")
25
- return None
26
-
27
-
28
- def get_links_status(package, all_links):
29
- links_in_package = []
30
- package_uuid = package.get("uuid")
31
- if package_uuid and all_links:
32
- for link in all_links:
33
- link_package_uuid = link.get("packageUUID")
34
- if link_package_uuid and link_package_uuid == package_uuid:
35
- links_in_package.append(link)
36
-
37
- all_finished = True
38
- eta = None
39
- error = None
40
-
41
- mirrors = defaultdict(list)
42
- for link in links_in_package:
43
- url = link.get("url", "")
44
- base_domain = urlparse(url).netloc
45
- mirrors[base_domain].append(link)
46
-
47
- has_mirror_all_online = False
48
- for mirror_links in mirrors.values():
49
- if all(link.get('availability', '').lower() == 'online' for link in mirror_links):
50
- has_mirror_all_online = True
51
- break
52
-
53
- offline_links = [link for link in links_in_package if link.get('availability', '').lower() == 'offline']
54
- offline_ids = [link.get('uuid') for link in offline_links]
55
- offline_mirror_linkids = offline_ids if has_mirror_all_online else []
56
-
57
- for link in links_in_package:
58
- if link.get('availability', "").lower() == "offline" and not has_mirror_all_online:
59
- error = "Links offline for all mirrors"
60
- if link.get('statusIconKey', '').lower() == "false":
61
- error = "File error in package"
62
- link_finished = link.get('finished', False)
63
- link_extraction_status = link.get('extractionStatus', '').lower() # "error" signifies an issue
64
- link_eta = link.get('eta', 0) // 1000
65
- if not link_finished:
66
- all_finished = False
67
- elif link_extraction_status and link_extraction_status != 'successful':
68
- if link_extraction_status == 'error':
69
- error = link.get('status', '')
70
- elif link_extraction_status == 'running' and link_eta > 0:
71
- if eta and link_eta > eta or not eta:
72
- eta = link_eta
73
- all_finished = False
74
-
75
- return {"all_finished": all_finished, "eta": eta, "error": error, "offline_mirror_linkids": offline_mirror_linkids}
76
-
77
-
78
- def get_links_matching_package_uuid(package, package_links):
79
- package_uuid = package.get("uuid")
80
- link_ids = []
81
- if package_uuid:
82
- for link in package_links:
83
- if link.get("packageUUID") == package_uuid:
84
- link_ids.append(link.get("uuid"))
27
+ from quasarr.providers.statistics import StatsHelper
28
+
29
+
30
+ def handle_unprotected(shared_state, title, password, package_id, imdb_id, url,
31
+ mirror=None, size_mb=None, links=None, func=None, label=""):
32
+ if func:
33
+ links = func(shared_state, url, mirror, title)
34
+
35
+ if links:
36
+ info(f"Decrypted {len(links)} download links for {title}")
37
+ send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id, source=url)
38
+ added = shared_state.download_package(links, title, password, package_id)
39
+ if not added:
40
+ fail(title, package_id, shared_state,
41
+ reason=f'Failed to add {len(links)} links for "{title}" to linkgrabber')
42
+ return {"success": False, "title": title}
85
43
  else:
86
- info("Error - package uuid missing in delete request!")
87
- return link_ids
88
-
44
+ fail(title, package_id, shared_state,
45
+ reason=f'Offline / no links found for "{title}" on {label} - "{url}"')
46
+ return {"success": False, "title": title}
47
+
48
+ StatsHelper(shared_state).increment_package_with_links(links)
49
+ return {"success": True, "title": title}
50
+
51
+
52
+ def handle_protected(shared_state, title, password, package_id, imdb_id, url,
53
+ mirror=None, size_mb=None, func=None, label=""):
54
+ links = func(shared_state, url, mirror, title)
55
+ if links:
56
+ valid_links = [pair for pair in links if "/404.html" not in pair[0]]
57
+
58
+ # If none left, IP was banned
59
+ if not valid_links:
60
+ fail(
61
+ title,
62
+ package_id,
63
+ shared_state,
64
+ reason=f'IP was banned during download of "{title}" on {label} - "{url}"'
65
+ )
66
+ return {"success": False, "title": title}
67
+ links = valid_links
89
68
 
90
- def format_eta(seconds):
91
- if seconds < 0:
92
- return "23:59:59"
69
+ info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
70
+ send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id, source=url)
71
+ blob = json.dumps({"title": title, "links": links, "size_mb": size_mb, "password": password})
72
+ shared_state.values["database"]("protected").update_store(package_id, blob)
93
73
  else:
94
- hours = seconds // 3600
95
- minutes = (seconds % 3600) // 60
96
- seconds = seconds % 60
97
- return f"{hours:02}:{minutes:02}:{seconds:02}"
98
-
99
-
100
- def get_packages(shared_state):
101
- packages = []
102
-
103
- protected_packages = shared_state.get_db("protected").retrieve_all_titles()
104
- if protected_packages:
105
- for package in protected_packages:
106
- package_id = package[0]
107
-
108
- data = json.loads(package[1])
109
- details = {
110
- "title": data["title"],
111
- "urls": data["links"],
112
- "size_mb": data["size_mb"],
113
- "password": data["password"]
114
- }
115
-
116
- packages.append({
117
- "details": details,
118
- "location": "queue",
119
- "type": "protected",
120
- "package_id": package_id
121
- })
122
-
123
- failed_packages = shared_state.get_db("failed").retrieve_all_titles()
124
- if failed_packages:
125
- for package in failed_packages:
126
- package_id = package[0]
127
-
128
- data = json.loads(package[1])
129
- details = {
130
- "name": data["title"],
131
- "bytesLoaded": 0,
132
- "saveTo": "/"
133
- }
134
-
135
- packages.append({
136
- "details": details,
137
- "location": "history",
138
- "type": "failed",
139
- "error": "Too many failed attempts by SponsorsHelper",
140
- "comment": package_id,
141
- "uuid": package_id
142
- })
143
- try:
144
- linkgrabber_packages = shared_state.get_device().linkgrabber.query_packages()
145
- linkgrabber_links = shared_state.get_device().linkgrabber.query_links()
146
- except (TokenExpiredException, RequestTimeoutException, MYJDException):
147
- linkgrabber_packages = []
148
- linkgrabber_links = []
149
-
150
- if linkgrabber_packages:
151
- for package in linkgrabber_packages:
152
- comment = get_links_comment(package, shared_state.get_device().linkgrabber.query_links())
153
- link_details = get_links_status(package, linkgrabber_links)
154
-
155
- error = link_details["error"]
156
- offline_mirror_linkids = link_details["offline_mirror_linkids"]
157
- if offline_mirror_linkids:
158
- shared_state.get_device().linkgrabber.cleanup(
159
- "DELETE_OFFLINE",
160
- "REMOVE_LINKS_ONLY",
161
- "SELECTED",
162
- offline_mirror_linkids,
163
- [package["uuid"]]
164
- )
165
-
166
- location = "history" if error else "queue"
167
- packages.append({
168
- "details": package,
169
- "location": location,
170
- "type": "linkgrabber",
171
- "comment": comment,
172
- "uuid": package.get("uuid"),
173
- "error": error
174
- })
175
- try:
176
- downloader_packages = shared_state.get_device().downloads.query_packages()
177
- downloader_links = shared_state.get_device().downloads.query_links()
178
- except (TokenExpiredException, RequestTimeoutException, MYJDException):
179
- downloader_packages = []
180
- downloader_links = []
181
-
182
- if downloader_packages and downloader_links:
183
- for package in downloader_packages:
184
- comment = get_links_comment(package, downloader_links)
185
- link_details = get_links_status(package, downloader_links)
186
-
187
- error = link_details["error"]
188
- finished = link_details["all_finished"]
189
- if not finished and link_details["eta"]:
190
- package["eta"] = link_details["eta"]
191
-
192
- location = "history" if error or finished else "queue"
193
-
194
- packages.append({
195
- "details": package,
196
- "location": location,
197
- "type": "downloader",
198
- "comment": comment,
199
- "uuid": package.get("uuid"),
200
- "error": error
201
- })
202
-
203
- downloads = {
204
- "queue": [],
205
- "history": []
206
- }
207
- for package in packages:
208
- queue_index = 0
209
- history_index = 0
210
-
211
- package_id = None
212
-
213
- if package["location"] == "queue":
214
- time_left = "23:59:59"
215
- if package["type"] == "linkgrabber":
216
- details = package["details"]
217
- name = f"[Linkgrabber] {details["name"]}"
218
- try:
219
- mb = mb_left = int(details["bytesTotal"]) / (1024 * 1024)
220
- except KeyError:
221
- mb = mb_left = 0
222
- try:
223
- package_id = package["comment"]
224
- if "movies" in package_id:
225
- category = "movies"
226
- else:
227
- category = "tv"
228
- except TypeError:
229
- category = "not_quasarr"
230
- package_type = "linkgrabber"
231
- package_uuid = package["uuid"]
232
- elif package["type"] == "downloader":
233
- details = package["details"]
234
- status = "Downloading"
235
- eta = details.get("eta")
236
- bytes_total = int(details.get("bytesTotal", 0))
237
- bytes_loaded = int(details.get("bytesLoaded", 0))
238
-
239
- mb = bytes_total / (1024 * 1024)
240
- mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
241
- if mb_left < 0:
242
- mb_left = 0
243
-
244
- if eta is None:
245
- status = "Paused"
246
- else:
247
- time_left = format_eta(int(eta))
248
- if mb_left == 0:
249
- status = "Extracting"
250
-
251
- name = f"[{status}] {details['name']}"
252
-
253
- try:
254
- package_id = package["comment"]
255
- if "movies" in package_id:
256
- category = "movies"
257
- else:
258
- category = "tv"
259
- except TypeError:
260
- category = "not_quasarr"
261
- package_type = "downloader"
262
- package_uuid = package["uuid"]
263
- else:
264
- details = package["details"]
265
- name = f"[CAPTCHA not solved!] {details["title"]}"
266
- mb = mb_left = details["size_mb"]
267
- try:
268
- package_id = package["package_id"]
269
- if "movies" in package_id:
270
- category = "movies"
271
- else:
272
- category = "tv"
273
- except TypeError:
274
- category = "not_quasarr"
275
- package_type = "protected"
276
- package_uuid = None
277
-
278
- try:
279
- if package_id:
280
- downloads["queue"].append({
281
- "index": queue_index,
282
- "nzo_id": package_id,
283
- "priority": "Normal",
284
- "filename": name,
285
- "cat": category,
286
- "mbleft": int(mb_left),
287
- "mb": int(mb),
288
- "status": "Downloading",
289
- "timeleft": time_left,
290
- "type": package_type,
291
- "uuid": package_uuid
292
- })
293
- except:
294
- debug(f"Parameters missing for {package}")
295
- queue_index += 1
296
- elif package["location"] == "history":
297
- details = package["details"]
298
- name = details["name"]
299
- try:
300
- size = int(details["bytesLoaded"])
301
- except KeyError:
302
- size = 0
303
- storage = details["saveTo"]
304
- try:
305
- package_id = package["comment"]
306
- if "movies" in package_id:
307
- category = "movies"
308
- else:
309
- category = "tv"
310
- except TypeError:
311
- category = "not_quasarr"
312
-
313
- error = package.get("error")
314
- fail_message = ""
315
- if error:
316
- status = "Failed"
317
- fail_message = error
318
- else:
319
- status = "Completed"
320
-
321
- downloads["history"].append({
322
- "fail_message": fail_message,
323
- "category": category,
324
- "storage": storage,
325
- "status": status,
326
- "nzo_id": package_id,
327
- "name": name,
328
- "bytes": int(size),
329
- "type": "downloader",
330
- "uuid": package["uuid"]
331
- })
332
- history_index += 1
74
+ fail(title, package_id, shared_state,
75
+ reason=f'No protected links found for "{title}" on {label} - "{url}"')
76
+ return {"success": False, "title": title}
77
+ return {"success": True, "title": title}
78
+
79
+
80
+ def handle_al(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
81
+ data = get_al_download_links(shared_state, url, mirror, title, password)
82
+ links = data.get("links", [])
83
+ title = data.get("title", title)
84
+ password = data.get("password", "")
85
+ return handle_unprotected(
86
+ shared_state, title, password, package_id, imdb_id, url,
87
+ links=links,
88
+ label='AL'
89
+ )
90
+
91
+
92
+ def handle_by(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
93
+ links = get_by_download_links(shared_state, url, mirror, title)
94
+ if not links:
95
+ fail(title, package_id, shared_state,
96
+ reason=f'Offline / no links found for "{title}" on BY - "{url}"')
97
+ return {"success": False, "title": title}
98
+
99
+ decrypted = decrypt_links_if_hide(shared_state, links)
100
+ if decrypted and decrypted.get("status") != "none":
101
+ status = decrypted.get("status", "error")
102
+ links = decrypted.get("results", [])
103
+ if status == "success":
104
+ return handle_unprotected(
105
+ shared_state, title, password, package_id, imdb_id, url,
106
+ links=links, label='BY'
107
+ )
333
108
  else:
334
- info(f"Invalid package location {package['location']}")
335
-
336
- if not shared_state.get_device().linkgrabber.is_collecting():
337
- linkgrabber_packages = shared_state.get_device().linkgrabber.query_packages()
338
- linkgrabber_links = shared_state.get_device().linkgrabber.query_links()
339
-
340
- packages_to_start = []
341
- links_to_start = []
342
-
343
- for package in linkgrabber_packages:
344
- comment = get_links_comment(package, shared_state.get_device().linkgrabber.query_links())
345
- if comment.startswith("Quasarr_"):
346
- package_uuid = package.get("uuid")
347
- if package_uuid:
348
- linkgrabber_links = [link.get("uuid") for link in linkgrabber_links if
349
- link.get("packageUUID") == package_uuid]
350
- if linkgrabber_links:
351
- packages_to_start.append(package_uuid)
352
- links_to_start.extend(linkgrabber_links)
353
- else:
354
- info(f"Package {package_uuid} has no links in linkgrabber - skipping start")
355
-
356
- break
357
-
358
- if packages_to_start and links_to_start:
359
- shared_state.get_device().linkgrabber.move_to_downloadlist(links_to_start, packages_to_start)
360
- info(f"Started {len(packages_to_start)} package download"
361
- f"{'s' if len(packages_to_start) > 1 else ''} from linkgrabber")
362
-
363
- return downloads
364
-
365
-
366
- def delete_package(shared_state, package_id):
367
- try:
368
- deleted_title = ""
369
-
370
- packages = get_packages(shared_state)
371
- for package_location in packages:
372
- for package in packages[package_location]:
373
- if package["nzo_id"] == package_id:
374
- if package["type"] == "linkgrabber":
375
- ids = get_links_matching_package_uuid(package,
376
- shared_state.get_device().linkgrabber.query_links())
377
- if ids:
378
- shared_state.get_device().linkgrabber.cleanup(
379
- "DELETE_ALL",
380
- "REMOVE_LINKS_AND_DELETE_FILES",
381
- "SELECTED",
382
- ids,
383
- [package["uuid"]]
384
- )
385
- break
386
- elif package["type"] == "downloader":
387
- ids = get_links_matching_package_uuid(package,
388
- shared_state.get_device().downloads.query_links())
389
- if ids:
390
- shared_state.get_device().downloads.cleanup(
391
- "DELETE_ALL",
392
- "REMOVE_LINKS_AND_DELETE_FILES",
393
- "SELECTED",
394
- ids,
395
- [package["uuid"]]
396
- )
397
- break
398
-
399
- # no state check, just clean up whatever exists with the package id
400
- shared_state.get_db("failed").delete(package_id)
401
- shared_state.get_db("protected").delete(package_id)
402
-
403
- if package_location == "queue":
404
- package_name_field = "filename"
405
- else:
406
- package_name_field = "name"
407
-
408
- try:
409
- deleted_title = package[package_name_field]
410
- except KeyError:
411
- pass
412
-
413
- # Leave the loop
414
- break
415
-
416
- if deleted_title:
417
- info(f'Deleted package "{deleted_title}" with ID "{package_id}"')
109
+ fail(title, package_id, shared_state,
110
+ reason=f'Error decrypting hide.cx links for "{title}" on BY - "{url}"')
111
+ return {"success": False, "title": title}
112
+
113
+ return handle_protected(
114
+ shared_state, title, password, package_id, imdb_id, url,
115
+ mirror=mirror,
116
+ size_mb=size_mb,
117
+ func=lambda ss, u, m, t: links,
118
+ label='BY'
119
+ )
120
+
121
+
122
+ def handle_sf(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
123
+ if url.startswith(f"https://{shared_state.values['config']('Hostnames').get('sf')}/external"):
124
+ url = resolve_sf_redirect(url, shared_state.values["user_agent"])
125
+ elif url.startswith(f"https://{shared_state.values['config']('Hostnames').get('sf')}/"):
126
+ data = get_sf_download_links(shared_state, url, mirror, title)
127
+ url = data.get("real_url")
128
+ if not imdb_id:
129
+ imdb_id = data.get("imdb_id")
130
+
131
+ if not url:
132
+ fail(title, package_id, shared_state,
133
+ reason=f'Failed to get download link from SF for "{title}" - "{url}"')
134
+ return {"success": False, "title": title}
135
+
136
+ return handle_protected(
137
+ shared_state, title, password, package_id, imdb_id, url,
138
+ mirror=mirror,
139
+ size_mb=size_mb,
140
+ func=lambda ss, u, m, t: [[url, "filecrypt"]],
141
+ label='SF'
142
+ )
143
+
144
+
145
+ def handle_sl(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
146
+ data = get_sl_download_links(shared_state, url, mirror, title)
147
+ links = data.get("links")
148
+ if not imdb_id:
149
+ imdb_id = data.get("imdb_id")
150
+ return handle_unprotected(
151
+ shared_state, title, password, package_id, imdb_id, url,
152
+ links=links,
153
+ label='SL'
154
+ )
155
+
156
+
157
+ def handle_wd(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb):
158
+ data = get_wd_download_links(shared_state, url, mirror, title)
159
+ links = data.get("links", []) if data else []
160
+ if not links:
161
+ fail(title, package_id, shared_state,
162
+ reason=f'Offline / no links found for "{title}" on WD - "{url}"')
163
+ return {"success": False, "title": title}
164
+
165
+ decrypted = decrypt_links_if_hide(shared_state, links)
166
+ if decrypted and decrypted.get("status") != "none":
167
+ status = decrypted.get("status", "error")
168
+ links = decrypted.get("results", [])
169
+ if status == "success":
170
+ return handle_unprotected(
171
+ shared_state, title, password, package_id, imdb_id, url,
172
+ links=links, label='WD'
173
+ )
418
174
  else:
419
- info(f'Deleted package "{package_id}"')
420
- except:
421
- info(f"Failed to delete package {package_id}")
422
- return False
423
- return True
175
+ fail(title, package_id, shared_state,
176
+ reason=f'Error decrypting hide.cx links for "{title}" on WD - "{url}"')
177
+ return {"success": False, "title": title}
178
+
179
+ return handle_protected(
180
+ shared_state, title, password, package_id, imdb_id, url,
181
+ mirror=mirror,
182
+ size_mb=size_mb,
183
+ func=lambda ss, u, m, t: links,
184
+ label='WD'
185
+ )
424
186
 
425
187
 
426
188
  def download(shared_state, request_from, title, url, mirror, size_mb, password, imdb_id=None):
427
- if "radarr".lower() in request_from.lower():
189
+ if "lazylibrarian" in request_from.lower():
190
+ category = "docs"
191
+ elif "radarr" in request_from.lower():
428
192
  category = "movies"
429
193
  else:
430
194
  category = "tv"
@@ -434,85 +198,70 @@ def download(shared_state, request_from, title, url, mirror, size_mb, password,
434
198
  if imdb_id is not None and imdb_id.lower() == "none":
435
199
  imdb_id = None
436
200
 
437
- dd = shared_state.values["config"]("Hostnames").get("dd")
438
- dt = shared_state.values["config"]("Hostnames").get("dt")
439
- dw = shared_state.values["config"]("Hostnames").get("dw")
440
- nx = shared_state.values["config"]("Hostnames").get("nx")
441
- sf = shared_state.values["config"]("Hostnames").get("sf")
442
-
443
- if dd and dd.lower() in url.lower():
444
- links = get_dd_download_links(shared_state, mirror, title)
445
- if links:
446
- info(f"Decrypted {len(links)} download links for {title}")
447
- send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id)
448
- added = shared_state.download_package(links, title, password, package_id)
449
- if not added:
450
- info(f"Failed to add {title} to linkgrabber")
451
- package_id = None
452
- else:
453
- info(f"Found 0 links decrypting {title}")
454
- package_id = None
455
-
456
- elif dt and dt.lower() in url.lower():
457
- links = get_dt_download_links(shared_state, url, mirror, title)
458
- if links:
459
- info(f"Decrypted {len(links)} download links for {title}")
460
- send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id)
461
- added = shared_state.download_package(links, title, password, package_id)
462
- if not added:
463
- info(f"Failed to add {title} to linkgrabber")
464
- package_id = None
465
- else:
466
- info(f"Found 0 links decrypting {title}")
467
- package_id = None
468
-
469
-
470
- elif dw and dw.lower() in url.lower():
471
- links = get_dw_download_links(shared_state, url, mirror, title)
472
- info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
473
- send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id)
474
- blob = json.dumps({"title": title, "links": links, "size_mb": size_mb, "password": password})
475
- shared_state.values["database"]("protected").update_store(package_id, blob)
201
+ config = shared_state.values["config"]("Hostnames")
202
+ flags = {
203
+ 'AL': config.get("al"),
204
+ 'BY': config.get("by"),
205
+ 'DD': config.get("dd"),
206
+ 'DJ': config.get("dj"),
207
+ 'DT': config.get("dt"),
208
+ 'DW': config.get("dw"),
209
+ 'HE': config.get("he"),
210
+ 'MB': config.get("mb"),
211
+ 'NK': config.get("nk"),
212
+ 'NX': config.get("nx"),
213
+ 'SF': config.get("sf"),
214
+ 'SJ': config.get("sj"),
215
+ 'SL': config.get("sl"),
216
+ 'WD': config.get("wd")
217
+ }
476
218
 
477
- elif nx and nx.lower() in url.lower():
478
- links = get_nx_download_links(shared_state, url, title)
479
- if links:
480
- info(f"Decrypted {len(links)} download links for {title}")
481
- send_discord_message(shared_state, title=title, case="unprotected", imdb_id=imdb_id)
482
- added = shared_state.download_package(links, title, password, package_id)
483
- if not added:
484
- info(f"Failed to add {title} to linkgrabber")
485
- package_id = None
486
- else:
487
- info(f"Found 0 links decrypting {title}")
488
- package_id = None
489
-
490
- elif sf and sf.lower() in url.lower():
491
- if url.startswith(f"https://{sf}/external"): # from interactive search
492
- url = resolve_sf_redirect(url, shared_state.values["user_agent"])
493
- elif url.startswith(f"https://{sf}/"): # from feed search
494
- url = get_sf_download_links(shared_state, url, mirror, title)
495
-
496
- if url:
497
- info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
498
- send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id)
499
- blob = json.dumps(
500
- {"title": title, "links": [[url, "filecrypt"]], "size_mb": size_mb, "password": password,
501
- "mirror": mirror})
502
- shared_state.values["database"]("protected").update_store(package_id, blob)
219
+ handlers = [
220
+ (flags['AL'], handle_al),
221
+ (flags['BY'], handle_by),
222
+ (flags['DD'], lambda *a: handle_unprotected(*a, func=get_dd_download_links, label='DD')),
223
+ (flags['DJ'], lambda *a: handle_protected(*a, func=get_dj_download_links, label='DJ')),
224
+ (flags['DT'], lambda *a: handle_unprotected(*a, func=get_dt_download_links, label='DT')),
225
+ (flags['DW'], lambda *a: handle_protected(*a, func=get_dw_download_links, label='DW')),
226
+ (flags['HE'], lambda *a: handle_unprotected(*a, func=get_he_download_links, label='HE')),
227
+ (flags['MB'], lambda *a: handle_protected(*a, func=get_mb_download_links, label='MB')),
228
+ (flags['NK'], lambda *a: handle_protected(*a, func=get_nk_download_links, label='NK')),
229
+ (flags['NX'], lambda *a: handle_unprotected(*a, func=get_nx_download_links, label='NX')),
230
+ (flags['SF'], handle_sf),
231
+ (flags['SJ'], lambda *a: handle_protected(*a, func=get_sj_download_links, label='SJ')),
232
+ (flags['SL'], handle_sl),
233
+ (flags['WD'], handle_wd),
234
+ ]
235
+
236
+ for flag, fn in handlers:
237
+ if flag and flag.lower() in url.lower():
238
+ return {"package_id": package_id,
239
+ **fn(shared_state, title, password, package_id, imdb_id, url, mirror, size_mb)}
240
+
241
+ if "filecrypt" in url.lower():
242
+ return {"package_id": package_id, **handle_protected(
243
+ shared_state, title, password, package_id, imdb_id, url, mirror, size_mb,
244
+ func=lambda ss, u, m, t: [[u, "filecrypt"]],
245
+ label='filecrypt'
246
+ )}
247
+
248
+ info(f'Could not parse URL for "{title}" - "{url}"')
249
+ StatsHelper(shared_state).increment_failed_downloads()
250
+ return {"success": False, "package_id": package_id, "title": title}
251
+
252
+
253
+ def fail(title, package_id, shared_state, reason="Offline / no links found"):
254
+ try:
255
+ info(f"Reason for failure: {reason}")
256
+ StatsHelper(shared_state).increment_failed_downloads()
257
+ blob = json.dumps({"title": title, "error": reason})
258
+ stored = shared_state.get_db("failed").store(package_id, json.dumps(blob))
259
+ if stored:
260
+ info(f'Package "{title}" marked as failed!"')
261
+ return True
503
262
  else:
504
- info(f"Failed to get download link from SF for {title} - {url}")
505
- package_id = None
506
-
507
- elif "filecrypt".lower() in url.lower():
508
- info(f'CAPTCHA-Solution required for "{title}" at: "{shared_state.values['external_address']}/captcha"')
509
- send_discord_message(shared_state, title=title, case="captcha", imdb_id=imdb_id)
510
- blob = json.dumps(
511
- {"title": title, "links": [[url, "filecrypt"]], "size_mb": size_mb, "password": password, "mirror": mirror})
512
- shared_state.values["database"]("protected").update_store(package_id, blob)
513
-
514
- else:
515
- package_id = None
516
- info(f"Could not parse URL for {title} - {url}")
517
-
518
- return package_id
263
+ info(f'Failed to mark package "{title}" as failed!"')
264
+ return False
265
+ except Exception as e:
266
+ info(f'Error marking package "{package_id}" as failed: {e}')
267
+ return False