quasarr 1.3.5__py3-none-any.whl → 1.20.4__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of quasarr might be problematic.
- quasarr/__init__.py +157 -56
- quasarr/api/__init__.py +141 -36
- quasarr/api/arr/__init__.py +197 -78
- quasarr/api/captcha/__init__.py +897 -42
- quasarr/api/config/__init__.py +23 -0
- quasarr/api/sponsors_helper/__init__.py +84 -22
- quasarr/api/statistics/__init__.py +196 -0
- quasarr/downloads/__init__.py +237 -434
- quasarr/downloads/linkcrypters/al.py +237 -0
- quasarr/downloads/linkcrypters/filecrypt.py +178 -31
- quasarr/downloads/linkcrypters/hide.py +123 -0
- quasarr/downloads/packages/__init__.py +461 -0
- quasarr/downloads/sources/al.py +697 -0
- quasarr/downloads/sources/by.py +106 -0
- quasarr/downloads/sources/dd.py +6 -78
- quasarr/downloads/sources/dj.py +7 -0
- quasarr/downloads/sources/dt.py +1 -1
- quasarr/downloads/sources/dw.py +2 -2
- quasarr/downloads/sources/he.py +112 -0
- quasarr/downloads/sources/mb.py +47 -0
- quasarr/downloads/sources/nk.py +51 -0
- quasarr/downloads/sources/nx.py +36 -81
- quasarr/downloads/sources/sf.py +27 -4
- quasarr/downloads/sources/sj.py +7 -0
- quasarr/downloads/sources/sl.py +90 -0
- quasarr/downloads/sources/wd.py +110 -0
- quasarr/providers/cloudflare.py +204 -0
- quasarr/providers/html_images.py +20 -0
- quasarr/providers/html_templates.py +210 -108
- quasarr/providers/imdb_metadata.py +15 -2
- quasarr/providers/myjd_api.py +36 -5
- quasarr/providers/notifications.py +30 -5
- quasarr/providers/obfuscated.py +35 -0
- quasarr/providers/sessions/__init__.py +0 -0
- quasarr/providers/sessions/al.py +286 -0
- quasarr/providers/sessions/dd.py +78 -0
- quasarr/providers/sessions/nx.py +76 -0
- quasarr/providers/shared_state.py +368 -23
- quasarr/providers/statistics.py +154 -0
- quasarr/providers/version.py +60 -1
- quasarr/search/__init__.py +112 -36
- quasarr/search/sources/al.py +448 -0
- quasarr/search/sources/by.py +203 -0
- quasarr/search/sources/dd.py +17 -6
- quasarr/search/sources/dj.py +213 -0
- quasarr/search/sources/dt.py +37 -7
- quasarr/search/sources/dw.py +27 -47
- quasarr/search/sources/fx.py +27 -29
- quasarr/search/sources/he.py +196 -0
- quasarr/search/sources/mb.py +195 -0
- quasarr/search/sources/nk.py +188 -0
- quasarr/search/sources/nx.py +22 -6
- quasarr/search/sources/sf.py +143 -151
- quasarr/search/sources/sj.py +213 -0
- quasarr/search/sources/sl.py +246 -0
- quasarr/search/sources/wd.py +208 -0
- quasarr/storage/config.py +20 -4
- quasarr/storage/setup.py +224 -56
- quasarr-1.20.4.dist-info/METADATA +304 -0
- quasarr-1.20.4.dist-info/RECORD +72 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/WHEEL +1 -1
- quasarr/providers/tvmaze_metadata.py +0 -23
- quasarr-1.3.5.dist-info/METADATA +0 -174
- quasarr-1.3.5.dist-info/RECORD +0 -43
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/entry_points.txt +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/licenses/LICENSE +0 -0
- {quasarr-1.3.5.dist-info → quasarr-1.20.4.dist-info}/top_level.txt +0 -0
quasarr/downloads/packages/__init__.py (new file)
@@ -0,0 +1,461 @@
# -*- coding: utf-8 -*-
# Quasarr
# Project by https://github.com/rix1337

import json
from collections import defaultdict
from urllib.parse import urlparse

from quasarr.providers.log import info, debug
from quasarr.providers.myjd_api import TokenExpiredException, RequestTimeoutException, MYJDException


def get_links_comment(package, package_links):
    package_uuid = package.get("uuid")
    if package_uuid and package_links:
        for link in package_links:
            if link.get("packageUUID") == package_uuid:
                return link.get("comment")
    return None


def get_links_status(package, all_links, is_archive=False):
    links_in_package = []
    package_uuid = package.get("uuid")
    if package_uuid and all_links:
        for link in all_links:
            link_package_uuid = link.get("packageUUID")
            if link_package_uuid and link_package_uuid == package_uuid:
                links_in_package.append(link)

    all_finished = True
    eta = None
    error = None

    mirrors = defaultdict(list)
    for link in links_in_package:
        url = link.get("url", "")
        base_domain = urlparse(url).netloc
        mirrors[base_domain].append(link)

    has_mirror_all_online = False
    for mirror_links in mirrors.values():
        if all(link.get('availability', '').lower() == 'online' for link in mirror_links):
            has_mirror_all_online = True
            break

    offline_links = [link for link in links_in_package if link.get('availability', '').lower() == 'offline']
    offline_ids = [link.get('uuid') for link in offline_links]
    offline_mirror_linkids = offline_ids if has_mirror_all_online else []

    for link in links_in_package:
        if link.get('availability', "").lower() == "offline" and not has_mirror_all_online:
            error = "Links offline for all mirrors"
        if link.get('statusIconKey', '').lower() == "false":
            error = "File error in package"
        link_finished = link.get('finished', False)
        link_extraction_status = link.get('extractionStatus', '').lower()  # "error" signifies an issue
        link_eta = link.get('eta', 0) // 1000
        if not link_finished:
            all_finished = False
        elif link_extraction_status and link_extraction_status != 'successful':
            if link_extraction_status == 'error':
                error = link.get('status', '')
            elif link_extraction_status == 'running' and link_eta > 0:
                if eta and link_eta > eta or not eta:
                    eta = link_eta
            all_finished = False
        elif is_archive and link.get('status', '').lower() != 'extraction ok':
            all_finished = False

    return {"all_finished": all_finished, "eta": eta, "error": error, "offline_mirror_linkids": offline_mirror_linkids}


def get_links_matching_package_uuid(package, package_links):
    package_uuid = package.get("uuid")
    link_ids = []

    if not isinstance(package_links, list):
        debug("Error - expected a list of package_links, got: %r" % type(package_links).__name__)
        return link_ids

    if package_uuid:
        for link in package_links:
            if link.get("packageUUID") == package_uuid:
                link_ids.append(link.get("uuid"))
    else:
        info("Error - package uuid missing in delete request!")
    return link_ids


def format_eta(seconds):
    if seconds < 0:
        return "23:59:59"
    else:
        hours = seconds // 3600
        minutes = (seconds % 3600) // 60
        seconds = seconds % 60
        return f"{hours:02}:{minutes:02}:{seconds:02}"


def get_packages(shared_state):
    packages = []

    protected_packages = shared_state.get_db("protected").retrieve_all_titles()
    if protected_packages:
        for package in protected_packages:
            package_id = package[0]

            data = json.loads(package[1])
            details = {
                "title": data["title"],
                "urls": data["links"],
                "size_mb": data["size_mb"],
                "password": data["password"]
            }

            packages.append({
                "details": details,
                "location": "queue",
                "type": "protected",
                "package_id": package_id
            })

    failed_packages = shared_state.get_db("failed").retrieve_all_titles()
    if failed_packages:
        for package in failed_packages:
            package_id = package[0]

            data = json.loads(package[1])
            try:
                if type(data) is str:
                    data = json.loads(data)
            except json.JSONDecodeError:
                pass
            details = {
                "name": data["title"],
                "bytesLoaded": 0,
                "saveTo": "/"
            }

            error = data.get("error", "Unknown error")

            packages.append({
                "details": details,
                "location": "history",
                "type": "failed",
                "error": error,
                "comment": package_id,
                "uuid": package_id
            })
    try:
        linkgrabber_packages = shared_state.get_device().linkgrabber.query_packages()
        linkgrabber_links = shared_state.get_device().linkgrabber.query_links()
    except (TokenExpiredException, RequestTimeoutException, MYJDException):
        linkgrabber_packages = []
        linkgrabber_links = []

    if linkgrabber_packages:
        for package in linkgrabber_packages:
            comment = get_links_comment(package, shared_state.get_device().linkgrabber.query_links())
            link_details = get_links_status(package, linkgrabber_links)

            error = link_details["error"]
            offline_mirror_linkids = link_details["offline_mirror_linkids"]
            if offline_mirror_linkids:
                shared_state.get_device().linkgrabber.cleanup(
                    "DELETE_OFFLINE",
                    "REMOVE_LINKS_ONLY",
                    "SELECTED",
                    offline_mirror_linkids,
                    [package["uuid"]]
                )

            location = "history" if error else "queue"
            packages.append({
                "details": package,
                "location": location,
                "type": "linkgrabber",
                "comment": comment,
                "uuid": package.get("uuid"),
                "error": error
            })
    try:
        downloader_packages = shared_state.get_device().downloads.query_packages()
        downloader_links = shared_state.get_device().downloads.query_links()
    except (TokenExpiredException, RequestTimeoutException, MYJDException):
        downloader_packages = []
        downloader_links = []

    if downloader_packages and downloader_links:
        for package in downloader_packages:
            comment = get_links_comment(package, downloader_links)

            try:
                archive_info = shared_state.get_device().extraction.get_archive_info([], [package.get("uuid")])
                is_archive = True if archive_info and archive_info[0] else False
            except:
                is_archive = True  # in case of error assume archive to avoid false finished state
            link_details = get_links_status(package, downloader_links, is_archive)

            error = link_details["error"]
            finished = link_details["all_finished"]
            if not finished and link_details["eta"]:
                package["eta"] = link_details["eta"]

            location = "history" if error or finished else "queue"

            packages.append({
                "details": package,
                "location": location,
                "type": "downloader",
                "comment": comment,
                "uuid": package.get("uuid"),
                "error": error
            })

    downloads = {
        "queue": [],
        "history": []
    }
    for package in packages:
        queue_index = 0
        history_index = 0

        package_id = None

        if package["location"] == "queue":
            time_left = "23:59:59"
            if package["type"] == "linkgrabber":
                details = package["details"]
                name = f"[Linkgrabber] {details["name"]}"
                try:
                    mb = mb_left = int(details["bytesTotal"]) / (1024 * 1024)
                except KeyError:
                    mb = mb_left = 0
                try:
                    package_id = package["comment"]
                    if "movies" in package_id:
                        category = "movies"
                    elif "docs" in package_id:
                        category = "docs"
                    else:
                        category = "tv"
                except TypeError:
                    category = "not_quasarr"
                package_type = "linkgrabber"
                package_uuid = package["uuid"]
elif package["type"] == "downloader":
|
|
249
|
+
details = package["details"]
|
|
250
|
+
status = "Downloading"
|
|
251
|
+
eta = details.get("eta")
|
|
252
|
+
bytes_total = int(details.get("bytesTotal", 0))
|
|
253
|
+
bytes_loaded = int(details.get("bytesLoaded", 0))
|
|
254
|
+
|
|
255
|
+
mb = bytes_total / (1024 * 1024)
|
|
256
|
+
mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
|
|
257
|
+
if mb_left < 0:
|
|
258
|
+
mb_left = 0
|
|
259
|
+
|
|
260
|
+
# Check if package is actually finished (should be in history, not queue)
|
|
261
|
+
# This handles the case where finished packages haven't been moved to history yet
|
|
262
|
+
if eta is None:
|
|
263
|
+
# No ETA could mean paused OR finished
|
|
264
|
+
# Check if download is complete
|
|
265
|
+
if bytes_total > 0 and bytes_loaded >= bytes_total:
|
|
266
|
+
status = "Completed"
|
|
267
|
+
else:
|
|
268
|
+
status = "Paused"
|
|
269
|
+
else:
|
|
270
|
+
time_left = format_eta(int(eta))
|
|
271
|
+
if mb_left == 0:
|
|
272
|
+
status = "Extracting"
|
|
273
|
+
|
|
274
|
+
name = f"[{status}] {details['name']}"
|
|
275
|
+
|
|
276
|
+
try:
|
|
277
|
+
package_id = package["comment"]
|
|
278
|
+
if "movies" in package_id:
|
|
279
|
+
category = "movies"
|
|
280
|
+
elif "docs" in package_id:
|
|
281
|
+
category = "docs"
|
|
282
|
+
else:
|
|
283
|
+
category = "tv"
|
|
284
|
+
except TypeError:
|
|
285
|
+
category = "not_quasarr"
|
|
286
|
+
package_type = "downloader"
|
|
287
|
+
package_uuid = package["uuid"]
|
|
288
|
+
            else:
                details = package["details"]
                name = f"[CAPTCHA not solved!] {details["title"]}"
                mb = mb_left = details["size_mb"]
                try:
                    package_id = package["package_id"]
                    if "movies" in package_id:
                        category = "movies"
                    elif "docs" in package_id:
                        category = "docs"
                    else:
                        category = "tv"
                except TypeError:
                    category = "not_quasarr"
                package_type = "protected"
                package_uuid = None

            try:
                if package_id:
                    mb_left = int(mb_left)
                    mb = int(mb)
                    try:
                        percentage = int(100 * (mb - mb_left) / mb)
                    except ZeroDivisionError:
                        percentage = 0

                    downloads["queue"].append({
                        "index": queue_index,
                        "nzo_id": package_id,
                        "priority": "Normal",
                        "filename": name,
                        "cat": category,
                        "mbleft": mb_left,
                        "mb": mb,
                        "status": "Downloading",
                        "percentage": percentage,
                        "timeleft": time_left,
                        "type": package_type,
                        "uuid": package_uuid
                    })
            except:
                debug(f"Parameters missing for {package}")
            queue_index += 1
elif package["location"] == "history":
|
|
332
|
+
details = package["details"]
|
|
333
|
+
name = details["name"]
|
|
334
|
+
try:
|
|
335
|
+
size = int(details["bytesLoaded"])
|
|
336
|
+
except KeyError:
|
|
337
|
+
size = 0
|
|
338
|
+
storage = details["saveTo"]
|
|
339
|
+
try:
|
|
340
|
+
package_id = package["comment"]
|
|
341
|
+
if "movies" in package_id:
|
|
342
|
+
category = "movies"
|
|
343
|
+
elif "docs" in package_id:
|
|
344
|
+
category = "docs"
|
|
345
|
+
else:
|
|
346
|
+
category = "tv"
|
|
347
|
+
except TypeError:
|
|
348
|
+
category = "not_quasarr"
|
|
349
|
+
|
|
350
|
+
error = package.get("error")
|
|
351
|
+
fail_message = ""
|
|
352
|
+
if error:
|
|
353
|
+
status = "Failed"
|
|
354
|
+
fail_message = error
|
|
355
|
+
else:
|
|
356
|
+
status = "Completed"
|
|
357
|
+
|
|
358
|
+
downloads["history"].append({
|
|
359
|
+
"fail_message": fail_message,
|
|
360
|
+
"category": category,
|
|
361
|
+
"storage": storage,
|
|
362
|
+
"status": status,
|
|
363
|
+
"nzo_id": package_id,
|
|
364
|
+
"name": name,
|
|
365
|
+
"bytes": int(size),
|
|
366
|
+
"percentage": 100,
|
|
367
|
+
"type": "downloader",
|
|
368
|
+
"uuid": package["uuid"]
|
|
369
|
+
})
|
|
370
|
+
history_index += 1
|
|
371
|
+
else:
|
|
372
|
+
info(f"Invalid package location {package['location']}")
|
|
373
|
+
|
|
374
|
+
    if not shared_state.get_device().linkgrabber.is_collecting():
        linkgrabber_packages = shared_state.get_device().linkgrabber.query_packages()
        linkgrabber_links = shared_state.get_device().linkgrabber.query_links()

        packages_to_start = []
        links_to_start = []

        for package in linkgrabber_packages:
            comment = get_links_comment(package, shared_state.get_device().linkgrabber.query_links())
            if comment and comment.startswith("Quasarr_"):
                package_uuid = package.get("uuid")
                if package_uuid:
                    linkgrabber_links = [link.get("uuid") for link in linkgrabber_links if
                                         link.get("packageUUID") == package_uuid]
                    if linkgrabber_links:
                        packages_to_start.append(package_uuid)
                        links_to_start.extend(linkgrabber_links)
                    else:
                        info(f"Package {package_uuid} has no links in linkgrabber - skipping start")

                break

        if packages_to_start and links_to_start:
            shared_state.get_device().linkgrabber.move_to_downloadlist(links_to_start, packages_to_start)
            info(f"Started {len(packages_to_start)} package download"
                 f"{'s' if len(packages_to_start) > 1 else ''} from linkgrabber")

    return downloads


def delete_package(shared_state, package_id):
    try:
        deleted_title = ""

        packages = get_packages(shared_state)
        for package_location in packages:
            for package in packages[package_location]:
                if package["nzo_id"] == package_id:
                    if package["type"] == "linkgrabber":
                        ids = get_links_matching_package_uuid(package,
                                                              shared_state.get_device().linkgrabber.query_links())
                        if ids:
                            shared_state.get_device().linkgrabber.cleanup(
                                "DELETE_ALL",
                                "REMOVE_LINKS_AND_DELETE_FILES",
                                "SELECTED",
                                ids,
                                [package["uuid"]]
                            )
                            break
                    elif package["type"] == "downloader":
                        ids = get_links_matching_package_uuid(package,
                                                              shared_state.get_device().downloads.query_links())
                        if ids:
                            shared_state.get_device().downloads.cleanup(
                                "DELETE_ALL",
                                "REMOVE_LINKS_AND_DELETE_FILES",
                                "SELECTED",
                                ids,
                                [package["uuid"]]
                            )
                            break

                    # no state check, just clean up whatever exists with the package id
                    shared_state.get_db("failed").delete(package_id)
                    shared_state.get_db("protected").delete(package_id)

                    if package_location == "queue":
                        package_name_field = "filename"
                    else:
                        package_name_field = "name"

                    try:
                        deleted_title = package[package_name_field]
                    except KeyError:
                        pass

                    # Leave the loop
                    break

        if deleted_title:
            info(f'Deleted package "{deleted_title}" with ID "{package_id}"')
        else:
            info(f'Deleted package "{package_id}"')
    except:
        info(f"Failed to delete package {package_id}")
        return False
    return True
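For orientation, a minimal sketch of how this new module's helpers could be exercised, assuming the hunk above is quasarr/downloads/packages/__init__.py (as the +461/-0 entry in the file list suggests) and that a fully configured shared_state from quasarr.providers.shared_state is available; the wrapper function below is illustrative only and not part of the release:

# Illustrative sketch, not part of the package: assumes a configured shared_state
# (connected JDownloader device, "protected"/"failed" databases initialised).
from quasarr.downloads.packages import get_packages, delete_package, format_eta  # assumed module path

def print_sabnzbd_style_status(shared_state):
    # get_packages() returns a SABnzbd-like dict with "queue" and "history" lists
    downloads = get_packages(shared_state)
    for slot in downloads["queue"]:
        print(slot["nzo_id"], slot["filename"], slot["timeleft"], f"{slot['percentage']}%")
    for item in downloads["history"]:
        print(item["nzo_id"], item["name"], item["status"], item["fail_message"])
    return downloads

# format_eta() renders seconds as HH:MM:SS and maps negative (unknown) values to a cap
assert format_eta(3725) == "01:02:05"
assert format_eta(-1) == "23:59:59"

A stale or failed entry can then be removed via its SABnzbd id, e.g. delete_package(shared_state, slot["nzo_id"]), which returns True on success and False if the cleanup raised.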