quasarr-2.4.8-py3-none-any.whl → quasarr-2.4.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quasarr/__init__.py +134 -70
- quasarr/api/__init__.py +40 -31
- quasarr/api/arr/__init__.py +116 -108
- quasarr/api/captcha/__init__.py +262 -137
- quasarr/api/config/__init__.py +76 -46
- quasarr/api/packages/__init__.py +138 -102
- quasarr/api/sponsors_helper/__init__.py +29 -16
- quasarr/api/statistics/__init__.py +19 -19
- quasarr/downloads/__init__.py +165 -72
- quasarr/downloads/linkcrypters/al.py +35 -18
- quasarr/downloads/linkcrypters/filecrypt.py +107 -52
- quasarr/downloads/linkcrypters/hide.py +5 -6
- quasarr/downloads/packages/__init__.py +342 -177
- quasarr/downloads/sources/al.py +191 -100
- quasarr/downloads/sources/by.py +31 -13
- quasarr/downloads/sources/dd.py +27 -14
- quasarr/downloads/sources/dj.py +1 -3
- quasarr/downloads/sources/dl.py +126 -71
- quasarr/downloads/sources/dt.py +11 -5
- quasarr/downloads/sources/dw.py +28 -14
- quasarr/downloads/sources/he.py +32 -24
- quasarr/downloads/sources/mb.py +19 -9
- quasarr/downloads/sources/nk.py +14 -10
- quasarr/downloads/sources/nx.py +8 -18
- quasarr/downloads/sources/sf.py +45 -20
- quasarr/downloads/sources/sj.py +1 -3
- quasarr/downloads/sources/sl.py +9 -5
- quasarr/downloads/sources/wd.py +32 -12
- quasarr/downloads/sources/wx.py +35 -21
- quasarr/providers/auth.py +42 -37
- quasarr/providers/cloudflare.py +28 -30
- quasarr/providers/hostname_issues.py +2 -1
- quasarr/providers/html_images.py +2 -2
- quasarr/providers/html_templates.py +22 -14
- quasarr/providers/imdb_metadata.py +149 -80
- quasarr/providers/jd_cache.py +131 -39
- quasarr/providers/log.py +1 -1
- quasarr/providers/myjd_api.py +260 -196
- quasarr/providers/notifications.py +53 -41
- quasarr/providers/obfuscated.py +9 -4
- quasarr/providers/sessions/al.py +71 -55
- quasarr/providers/sessions/dd.py +21 -14
- quasarr/providers/sessions/dl.py +30 -19
- quasarr/providers/sessions/nx.py +23 -14
- quasarr/providers/shared_state.py +292 -141
- quasarr/providers/statistics.py +75 -43
- quasarr/providers/utils.py +33 -27
- quasarr/providers/version.py +45 -14
- quasarr/providers/web_server.py +10 -5
- quasarr/search/__init__.py +30 -18
- quasarr/search/sources/al.py +124 -73
- quasarr/search/sources/by.py +110 -59
- quasarr/search/sources/dd.py +57 -35
- quasarr/search/sources/dj.py +69 -48
- quasarr/search/sources/dl.py +159 -100
- quasarr/search/sources/dt.py +110 -74
- quasarr/search/sources/dw.py +121 -61
- quasarr/search/sources/fx.py +108 -62
- quasarr/search/sources/he.py +78 -49
- quasarr/search/sources/mb.py +96 -48
- quasarr/search/sources/nk.py +80 -50
- quasarr/search/sources/nx.py +91 -62
- quasarr/search/sources/sf.py +171 -106
- quasarr/search/sources/sj.py +69 -48
- quasarr/search/sources/sl.py +115 -71
- quasarr/search/sources/wd.py +67 -44
- quasarr/search/sources/wx.py +188 -123
- quasarr/storage/config.py +65 -52
- quasarr/storage/setup.py +238 -140
- quasarr/storage/sqlite_database.py +10 -4
- {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/METADATA +2 -2
- quasarr-2.4.9.dist-info/RECORD +81 -0
- quasarr-2.4.8.dist-info/RECORD +0 -81
- {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/WHEEL +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/entry_points.txt +0 -0
- {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/licenses/LICENSE +0 -0
@@ -8,7 +8,7 @@ from collections import defaultdict
 from urllib.parse import urlparse
 
 from quasarr.providers.jd_cache import JDPackageCache
-from quasarr.providers.log import
+from quasarr.providers.log import debug, info
 
 # =============================================================================
 # CONSTANTS
@@ -23,19 +23,48 @@ CATEGORY_DOCS = "docs"
 CATEGORY_NOT_QUASARR = "not_quasarr"
 
 # Known archive extensions for file detection
-ARCHIVE_EXTENSIONS = frozenset(
-
-
-
-
-
-
+ARCHIVE_EXTENSIONS = frozenset(
+    [
+        ".rar",
+        ".zip",
+        ".7z",
+        ".tar",
+        ".gz",
+        ".bz2",
+        ".xz",
+        ".001",
+        ".002",
+        ".003",
+        ".004",
+        ".005",
+        ".006",
+        ".007",
+        ".008",
+        ".009",
+        ".r00",
+        ".r01",
+        ".r02",
+        ".r03",
+        ".r04",
+        ".r05",
+        ".r06",
+        ".r07",
+        ".r08",
+        ".r09",
+        ".part1.rar",
+        ".part01.rar",
+        ".part001.rar",
+        ".part2.rar",
+        ".part02.rar",
+        ".part002.rar",
+    ]
+)
 
 # JDownloader extraction complete status markers (checked case-insensitively)
 # Add new languages here as needed
 EXTRACTION_COMPLETE_MARKERS = (
-
-
+    "extraction ok",  # English
+    "entpacken ok",  # German
 )
 
 
@@ -43,6 +72,7 @@ EXTRACTION_COMPLETE_MARKERS = (
 # HELPER FUNCTIONS
 # =============================================================================
 
+
 def is_extraction_complete(status):
     """Check if a JDownloader status string indicates extraction is complete (case-insensitive)."""
     if not status:
@@ -51,7 +81,7 @@ def is_extraction_complete(status):
     return any(marker in status_lower for marker in EXTRACTION_COMPLETE_MARKERS)
 
 
-def is_archive_file(filename, extraction_status=
+def is_archive_file(filename, extraction_status=""):
     """Check if a file is an archive based on extension or extraction status."""
     if extraction_status:
         return True
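The two helpers above drive all later archive handling: `is_extraction_complete` does a case-insensitive substring match against `EXTRACTION_COMPLETE_MARKERS`, and `is_archive_file` returns True as soon as JDownloader reports any extraction status. A minimal, self-contained sketch of that behavior (the extension fallback against `ARCHIVE_EXTENSIONS` is an assumption, since the rest of the function body falls outside this hunk):

```python
# Sketch only: trimmed constants, and the extension fallback is assumed.
ARCHIVE_EXTENSIONS = frozenset([".rar", ".zip", ".7z", ".part01.rar"])
EXTRACTION_COMPLETE_MARKERS = ("extraction ok", "entpacken ok")


def is_extraction_complete(status):
    """Case-insensitive check for an 'extraction finished' marker."""
    if not status:
        return False
    status_lower = status.lower()
    return any(marker in status_lower for marker in EXTRACTION_COMPLETE_MARKERS)


def is_archive_file(filename, extraction_status=""):
    """Any extraction status counts as an archive; otherwise match the extension."""
    if extraction_status:
        return True
    return any(filename.lower().endswith(ext) for ext in ARCHIVE_EXTENSIONS)


print(is_extraction_complete("Entpacken OK"))   # True (German marker)
print(is_archive_file("movie.part01.rar"))      # True via extension
print(is_archive_file("movie.mkv", "running"))  # True via extraction status
print(is_archive_file("movie.mkv"))             # False
```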
@@ -88,7 +118,9 @@ def get_links_comment(package, package_links):
         if link.get("packageUUID") == package_uuid:
             comment = link.get("comment")
             if comment:
-                debug(
+                debug(
+                    f"get_links_comment: Found comment '{comment}' for package {package_uuid}"
+                )
                 return comment
     return None
 
@@ -105,7 +137,9 @@ def get_links_status(package, all_links, is_archive=False):
     """
     package_uuid = package.get("uuid")
     package_name = package.get("name", "unknown")
-    debug(
+    debug(
+        f"get_links_status: Checking package '{package_name}' ({package_uuid}), is_archive={is_archive}"
+    )
 
     links_in_package = []
     if package_uuid and all_links:
@@ -133,22 +167,32 @@ def get_links_status(package, all_links, is_archive=False):
     # Check if any mirror has all links online
     has_mirror_all_online = False
     for domain, mirror_links in mirrors.items():
-        if all(
+        if all(
+            link.get("availability", "").lower() == "online" for link in mirror_links
+        ):
             has_mirror_all_online = True
-            debug(
+            debug(
+                f"get_links_status: Mirror '{domain}' has all {len(mirror_links)} links online"
+            )
             break
 
     # Collect offline link IDs (only if there's an online mirror available)
-    offline_links = [
-
+    offline_links = [
+        link
+        for link in links_in_package
+        if link.get("availability", "").lower() == "offline"
+    ]
+    offline_ids = [link.get("uuid") for link in offline_links]
     offline_mirror_linkids = offline_ids if has_mirror_all_online else []
 
     if offline_links:
-        debug(
+        debug(
+            f"get_links_status: {len(offline_links)} offline links, has_mirror_all_online={has_mirror_all_online}"
+        )
 
     # First pass: detect if ANY link has extraction activity (for safety override)
     for link in links_in_package:
-        if link.get(
+        if link.get("extractionStatus", ""):
             has_extraction_activity = True
             break
 
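Offline links are only queued for removal when at least one mirror has every link online. A short sketch of that selection, assuming `mirrors` is built further up by grouping the package's links per hoster domain (e.g. with `urlparse`, which the module imports):

```python
from collections import defaultdict
from urllib.parse import urlparse

links_in_package = [
    {"uuid": 1, "url": "https://hoster-a.example/f/1", "availability": "ONLINE"},
    {"uuid": 2, "url": "https://hoster-a.example/f/2", "availability": "ONLINE"},
    {"uuid": 3, "url": "https://hoster-b.example/f/1", "availability": "OFFLINE"},
]

# Assumed grouping step (it happens above this hunk): one bucket per hoster domain.
mirrors = defaultdict(list)
for link in links_in_package:
    mirrors[urlparse(link["url"]).netloc].append(link)

has_mirror_all_online = any(
    all(link.get("availability", "").lower() == "online" for link in mirror_links)
    for mirror_links in mirrors.values()
)

offline_links = [
    link
    for link in links_in_package
    if link.get("availability", "").lower() == "offline"
]
offline_ids = [link["uuid"] for link in offline_links]

# Only worth deleting the offline mirror if another mirror is fully online.
offline_mirror_linkids = offline_ids if has_mirror_all_online else []
print(offline_mirror_linkids)  # [3]
```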
@@ -157,27 +201,33 @@ def get_links_status(package, all_links, is_archive=False):
 
     # Second pass: check each link's status
     for link in links_in_package:
-        link_name = link.get(
-        link_finished = link.get(
-        link_availability = link.get(
-        link_extraction_status = link.get(
-        link_status = link.get(
-        link_status_icon = link.get(
-        link_eta = link.get(
+        link_name = link.get("name", "unknown")
+        link_finished = link.get("finished", False)
+        link_availability = link.get("availability", "").lower()
+        link_extraction_status = link.get("extractionStatus", "").lower()
+        link_status = link.get("status", "")
+        link_status_icon = link.get("statusIconKey", "").lower()
+        link_eta = link.get("eta", 0) // 1000 if link.get("eta") else 0
 
         # Determine if THIS LINK is an archive file
         link_is_archive_file = is_archive_file(link_name, link_extraction_status)
 
-        link_status_preview =
+        link_status_preview = (
+            link_status[:50] + "..." if len(link_status) > 50 else link_status
+        )
 
-        debug(
-
-
+        debug(
+            f"get_links_status: Link '{link_name}': finished={link_finished}, "
+            f"is_archive_file={link_is_archive_file}, availability={link_availability}, "
+            f"extractionStatus='{link_extraction_status}', status='{link_status_preview}'"
+        )
 
         # Check for offline links
         if link_availability == "offline" and not has_mirror_all_online:
             error = "Links offline for all mirrors"
-            debug(
+            debug(
+                f"get_links_status: ERROR - Link offline with no online mirror: {link_name}"
+            )
 
         # Check for file errors
         if link_status_icon == "false":
@@ -189,20 +239,26 @@ def get_links_status(package, all_links, is_archive=False):
         if not link_finished:
             # Download not complete
             all_finished = False
-            debug(
+            debug(
+                f"get_links_status: Link not finished (download in progress): {link_name}"
+            )
 
-        elif link_extraction_status and link_extraction_status !=
+        elif link_extraction_status and link_extraction_status != "successful":
             # Extraction is running or errored (applies to archive files only)
-            if link_extraction_status ==
-                error = link.get(
+            if link_extraction_status == "error":
+                error = link.get("status", "Extraction error")
                 debug(f"get_links_status: Extraction ERROR on {link_name}: {error}")
-            elif link_extraction_status ==
-                debug(
+            elif link_extraction_status == "running":
+                debug(
+                    f"get_links_status: Extraction RUNNING on {link_name}, eta={link_eta}s"
+                )
                 if link_eta > 0:
                     if eta is None or link_eta > eta:
                         eta = link_eta
             else:
-                debug(
+                debug(
+                    f"get_links_status: Extraction status '{link_extraction_status}' on {link_name}"
+                )
             all_finished = False
 
         elif link_is_archive_file:
@@ -210,27 +266,33 @@ def get_links_status(package, all_links, is_archive=False):
             if is_extraction_complete(link_status):
                 debug(f"get_links_status: Archive link COMPLETE: {link_name}")
             else:
-                debug(
+                debug(
+                    f"get_links_status: Archive link WAITING for extraction: {link_name}, status='{link_status}'"
+                )
                 all_finished = False
 
         elif is_archive or has_extraction_activity:
             # Package is marked as archive but THIS link doesn't look like an archive file
             # (e.g., .mkv in a package with .rar files)
             # These non-archive files are finished when download is complete
-            debug(
+            debug(
+                f"get_links_status: Non-archive link in archive package COMPLETE: {link_name}"
+            )
 
         else:
             # Non-archive file in non-archive package - finished when downloaded
             debug(f"get_links_status: Non-archive link COMPLETE: {link_name}")
 
-    debug(
-
+    debug(
+        f"get_links_status: RESULT for '{package_name}': all_finished={all_finished}, "
+        f"eta={eta}, error={error}, is_archive={is_archive}, has_extraction_activity={has_extraction_activity}"
+    )
 
     return {
         "all_finished": all_finished,
         "eta": eta,
         "error": error,
-        "offline_mirror_linkids": offline_mirror_linkids
+        "offline_mirror_linkids": offline_mirror_linkids,
     }
 
 
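`get_links_status` always returns the same four keys, so callers can branch on `error` for queue/history placement and pass `offline_mirror_linkids` straight into the linkgrabber cleanup (see the linkgrabber hunk further down). An illustrative consumer:

```python
# Illustrative values; the key set matches the return statement above.
link_details = {
    "all_finished": False,
    "eta": 42,                     # seconds left on the slowest running extraction
    "error": None,
    "offline_mirror_linkids": [],  # link UUIDs that are safe to remove
}

location = "history" if link_details["error"] else "queue"
if link_details["offline_mirror_linkids"]:
    pass  # would call shared_state.get_device().linkgrabber.cleanup(...) here
print(location, link_details["all_finished"])  # queue False
```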
@@ -240,14 +302,18 @@ def get_links_matching_package_uuid(package, package_links):
     link_ids = []
 
     if not isinstance(package_links, list):
-        debug(
+        debug(
+            f"get_links_matching_package_uuid: ERROR - expected list, got {type(package_links).__name__}"
+        )
         return link_ids
 
     if package_uuid:
         for link in package_links:
             if link.get("packageUUID") == package_uuid:
                 link_ids.append(link.get("uuid"))
-        debug(
+        debug(
+            f"get_links_matching_package_uuid: Found {len(link_ids)} links for package {package_uuid}"
+        )
     else:
         info("Error - package uuid missing in delete request!")
     return link_ids
@@ -267,6 +333,7 @@ def format_eta(seconds):
 # MAIN FUNCTIONS
 # =============================================================================
 
+
 def get_packages(shared_state, _cache=None):
     """
     Get all packages from protected DB, failed DB, linkgrabber, and downloader.
@@ -289,7 +356,9 @@ def get_packages(shared_state, _cache=None):
 
     # === PROTECTED PACKAGES (CAPTCHA required) ===
     protected_packages = shared_state.get_db("protected").retrieve_all_titles()
-    debug(
+    debug(
+        f"get_packages: Found {len(protected_packages) if protected_packages else 0} protected packages"
+    )
 
     if protected_packages:
         for package in protected_packages:
@@ -300,21 +369,29 @@ def get_packages(shared_state, _cache=None):
                     "title": data["title"],
                     "urls": data["links"],
                     "size_mb": data.get("size_mb"),
-                    "password": data.get("password")
+                    "password": data.get("password"),
                 }
-                packages.append(
-
-
-
-
-
-
+                packages.append(
+                    {
+                        "details": details,
+                        "location": "queue",
+                        "type": "protected",
+                        "package_id": package_id,
+                    }
+                )
+                debug(
+                    f"get_packages: Added protected package '{data['title']}' ({package_id})"
+                )
             except (json.JSONDecodeError, KeyError) as e:
-                debug(
+                debug(
+                    f"get_packages: Failed to parse protected package {package_id}: {e}"
+                )
 
     # === FAILED PACKAGES ===
     failed_packages = shared_state.get_db("failed").retrieve_all_titles()
-    debug(
+    debug(
+        f"get_packages: Found {len(failed_packages) if failed_packages else 0} failed packages"
+    )
 
     if failed_packages:
         for package in failed_packages:
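The protected entries are stored as JSON and re-parsed here. The keys read above imply a record roughly shaped like the sketch below; the tuple layout and all values are hypothetical, and `size_mb`/`password` are optional since they are read with `.get()`:

```python
import json

# Hypothetical row as it might come out of the "protected" DB: (package_id, payload).
package = (
    "SABnzbd_nzo_abc123",
    json.dumps(
        {
            "title": "Example.Release.2160p-GRP",
            "links": ["https://filecrypt.example/container/xyz"],
            "size_mb": 4200,
            "password": "example.org",
        }
    ),
)

package_id = package[0]
data = json.loads(package[1])
details = {
    "title": data["title"],
    "urls": data["links"],
    "size_mb": data.get("size_mb"),
    "password": data.get("password"),
}
print(details["title"], len(details["urls"]))  # Example.Release.2160p-GRP 1
```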
@@ -328,19 +405,23 @@ def get_packages(shared_state, _cache=None):
                 details = {
                     "name": data.get("title", "Unknown"),
                     "bytesLoaded": 0,
-                    "saveTo": "/"
+                    "saveTo": "/",
                 }
                 error = data.get("error", "Unknown error")
 
-                packages.append(
-
-
-
-
-
-
-
-
+                packages.append(
+                    {
+                        "details": details,
+                        "location": "history",
+                        "type": "failed",
+                        "error": error,
+                        "comment": package_id,
+                        "uuid": package_id,
+                    }
+                )
+                debug(
+                    f"get_packages: Added failed package '{details['name']}' ({package_id}): {error}"
+                )
             except (json.JSONDecodeError, KeyError, TypeError) as e:
                 debug(f"get_packages: Failed to parse failed package {package_id}: {e}")
 
@@ -356,46 +437,60 @@ def get_packages(shared_state, _cache=None):
             package_uuid = package.get("uuid")
 
             comment = get_links_comment(package, linkgrabber_links)
-            link_details = get_links_status(
+            link_details = get_links_status(
+                package, linkgrabber_links, is_archive=False
+            )
 
             error = link_details["error"]
             offline_mirror_linkids = link_details["offline_mirror_linkids"]
 
             # Clean up offline links if we have online mirrors
             if offline_mirror_linkids:
-                debug(
+                debug(
+                    f"get_packages: Cleaning up {len(offline_mirror_linkids)} offline links from '{package_name}'"
+                )
                 try:
                     shared_state.get_device().linkgrabber.cleanup(
                         "DELETE_OFFLINE",
                         "REMOVE_LINKS_ONLY",
                         "SELECTED",
                         offline_mirror_linkids,
-                        [package_uuid]
+                        [package_uuid],
                     )
                 except Exception as e:
                     debug(f"get_packages: Failed to cleanup offline links: {e}")
 
             location = "history" if error else "queue"
-            packages.append(
-
-
-
-
-
-
-
-
+            packages.append(
+                {
+                    "details": package,
+                    "location": location,
+                    "type": "linkgrabber",
+                    "comment": comment,
+                    "uuid": package_uuid,
+                    "error": error,
+                }
+            )
+            debug(
+                f"get_packages: Added linkgrabber package '{package_name}' -> {location}"
+            )
 
     # === DOWNLOADER PACKAGES ===
     downloader_packages = cache.downloader_packages
     downloader_links = cache.downloader_links
 
-    debug(
+    debug(
+        f"get_packages: Processing {len(downloader_packages)} downloader packages with {len(downloader_links)} links"
+    )
 
     if downloader_packages and downloader_links:
         # ONE bulk API call for all archive detection, with safety fallbacks
-        archive_package_uuids = cache.detect_all_archives(
-
+        archive_package_uuids = cache.detect_all_archives(
+            downloader_packages, downloader_links
+        )
+        debug(
+            f"get_packages: Archive detection complete - {len(archive_package_uuids)} packages are archives"
+        )
 
         for package in downloader_packages:
             package_name = package.get("name", "unknown")
@@ -404,7 +499,9 @@ def get_packages(shared_state, _cache=None):
             comment = get_links_comment(package, downloader_links)
 
             # Lookup from cache (populated by detect_all_archives above)
-            is_archive =
+            is_archive = (
+                package_uuid in archive_package_uuids if package_uuid else False
+            )
             debug(f"get_packages: Package '{package_name}' is_archive={is_archive}")
 
             link_details = get_links_status(package, downloader_links, is_archive)
@@ -424,36 +521,39 @@ def get_packages(shared_state, _cache=None):
                 # Only mark as finished if it's not an archive
                 if not is_archive:
                     debug(
-                        f"get_packages: Package '{package_name}' bytes complete and not archive -> marking finished"
+                        f"get_packages: Package '{package_name}' bytes complete and not archive -> marking finished"
+                    )
                     finished = True
                 else:
                     debug(
-                        f"get_packages: Package '{package_name}' bytes complete BUT is_archive=True -> NOT marking finished yet"
+                        f"get_packages: Package '{package_name}' bytes complete BUT is_archive=True -> NOT marking finished yet"
+                    )
 
             if not finished and link_details["eta"]:
                 package["eta"] = link_details["eta"]
 
             location = "history" if error or finished else "queue"
 
-            debug(
-
-
-
-
-
-
-
-
-
-
-
-
+            debug(
+                f"get_packages: Package '{package_name}' -> location={location}, "
+                f"finished={finished}, error={error}, is_archive={is_archive}"
+            )
+
+            packages.append(
+                {
+                    "details": package,
+                    "location": location,
+                    "type": "downloader",
+                    "comment": comment,
+                    "uuid": package_uuid,
+                    "error": error,
+                    "is_archive": is_archive,
+                    "extraction_ok": finished and is_archive,
+                }
+            )
 
     # === BUILD RESPONSE ===
-    downloads = {
-        "queue": [],
-        "history": []
-    }
+    downloads = {"queue": [], "history": []}
 
     queue_index = 0
     history_index = 0
@@ -489,7 +589,9 @@ def get_packages(shared_state, _cache=None):
             bytes_loaded = int(details.get("bytesLoaded", 0))
 
             mb = bytes_total / (1024 * 1024)
-            mb_left = (
+            mb_left = (
+                (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
+            )
             if mb_left < 0:
                 mb_left = 0
 
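A quick sanity check of the size math above: for a package reporting 1.5 GiB total with 512 MiB already loaded, the computed values come out as follows.

```python
bytes_total = 1536 * 1024 * 1024   # 1.5 GiB reported by JDownloader
bytes_loaded = 512 * 1024 * 1024   # 512 MiB downloaded so far

mb = bytes_total / (1024 * 1024)
mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
if mb_left < 0:  # clamp, as in the hunk above
    mb_left = 0

print(int(mb), int(mb_left))  # 1536 1024
```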
@@ -525,33 +627,39 @@ def get_packages(shared_state, _cache=None):
             except (ZeroDivisionError, ValueError, TypeError):
                 percentage = 0
 
-            downloads["queue"].append(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            downloads["queue"].append(
+                {
+                    "index": queue_index,
+                    "nzo_id": effective_id,
+                    "priority": "Normal",
+                    "filename": name,
+                    "cat": category,
+                    "mbleft": int(mb_left) if mb_left else 0,
+                    "mb": int(mb) if mb else 0,
+                    "bytes": bytes_total,
+                    "status": "Downloading",
+                    "percentage": percentage,
+                    "timeleft": time_left,
+                    "type": package_type,
+                    "uuid": package_uuid,
+                    "is_archive": package.get("is_archive", False),
+                    "storage": storage,
+                }
+            )
             queue_index += 1
         else:
-            debug(
+            debug(
+                f"get_packages: Skipping queue package without package_id or uuid: {name}"
+            )
 
     elif package["location"] == "history":
         details = package["details"]
         name = details.get("name", "unknown")
         try:
             # Use bytesLoaded first, fall back to bytesTotal for failed/incomplete downloads
-            size = int(details.get("bytesLoaded", 0)) or int(
+            size = int(details.get("bytesLoaded", 0)) or int(
+                details.get("bytesTotal", 0)
+            )
         except (KeyError, TypeError, ValueError):
             size = 0
         storage = details.get("saveTo", "/")
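The queue entry assembled above uses SABnzbd-style field names (`nzo_id`, `cat`, `mbleft`, `timeleft`), which suggests it feeds an SABnzbd-compatible download-client API. An illustrative entry with made-up values, matching the key set of the append:

```python
queue_item = {
    "index": 0,
    "nzo_id": "SABnzbd_nzo_abc123",  # hypothetical package id
    "priority": "Normal",
    "filename": "Example.Release.2160p-GRP",
    "cat": "movies",
    "mbleft": 1024,
    "mb": 1536,
    "bytes": 1610612736,
    "status": "Downloading",
    "percentage": 33,
    "timeleft": "0:05:00",
    "type": "downloader",
    "uuid": 1234567890,
    "is_archive": True,
    "storage": "/downloads/Example.Release.2160p-GRP",
}
print(queue_item["nzo_id"], queue_item["percentage"])
```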
@@ -569,29 +677,36 @@ def get_packages(shared_state, _cache=None):
             else:
                 status = "Completed"
 
-            downloads["history"].append(
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    "
-
+            downloads["history"].append(
+                {
+                    "fail_message": fail_message,
+                    "category": category,
+                    "storage": storage,
+                    "status": status,
+                    "nzo_id": effective_id,
+                    "name": name,
+                    "bytes": int(size),
+                    "percentage": 100,
+                    "type": "downloader",
+                    "uuid": package.get("uuid"),
+                    "is_archive": package.get("is_archive", False),
+                    "extraction_ok": package.get("extraction_ok", False),
+                    "extraction_status": "SUCCESSFUL"
+                    if package.get("extraction_ok", False)
+                    else "RUNNING"
+                    if package.get("is_archive", False)
+                    else "",
+                }
+            )
             history_index += 1
         else:
             info(f"Invalid package location {package['location']}")
 
     # === AUTO-START QUASARR PACKAGES ===
     if not cache.is_collecting:
-        debug(
+        debug(
+            "get_packages: Linkgrabber not collecting, checking for packages to auto-start"
+        )
 
         packages_to_start = []
         links_to_start = []
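The chained conditional that fills `extraction_status` in the history entry resolves to exactly three outcomes. Pulled out into a plain function for readability (equivalent logic, hypothetical helper name):

```python
def extraction_status(is_archive: bool, extraction_ok: bool) -> str:
    # Same outcome as the chained conditional in the history entry above.
    if extraction_ok:
        return "SUCCESSFUL"
    if is_archive:
        return "RUNNING"
    return ""


print(extraction_status(True, True))    # SUCCESSFUL
print(extraction_status(True, False))   # RUNNING
print(extraction_status(False, False))  # "" (not an archive)
```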
@@ -602,67 +717,90 @@ def get_packages(shared_state, _cache=None):
             package_uuid = package.get("uuid")
             if package_uuid:
                 package_link_ids = [
-                    link.get("uuid")
+                    link.get("uuid")
+                    for link in linkgrabber_links
                     if link.get("packageUUID") == package_uuid and link.get("uuid")
                 ]
                 if package_link_ids:
                     debug(
-                        f"get_packages: Found Quasarr package to start: {package.get('name')} with {len(package_link_ids)} links"
+                        f"get_packages: Found Quasarr package to start: {package.get('name')} with {len(package_link_ids)} links"
+                    )
                     packages_to_start.append(package_uuid)
                     links_to_start.extend(package_link_ids)
                 else:
-                    info(
+                    info(
+                        f"Package {package_uuid} has no links in linkgrabber - skipping start"
+                    )
                 # Only start one package at a time
                 break
 
         if packages_to_start and links_to_start:
             debug(
-                f"get_packages: Moving {len(packages_to_start)} packages with {len(links_to_start)} links to download list"
+                f"get_packages: Moving {len(packages_to_start)} packages with {len(links_to_start)} links to download list"
+            )
             try:
-                shared_state.get_device().linkgrabber.move_to_downloadlist(
+                shared_state.get_device().linkgrabber.move_to_downloadlist(
+                    links_to_start, packages_to_start
+                )
                 info(
-                    f"Started {len(packages_to_start)} package download{'s' if len(packages_to_start) > 1 else ''} from linkgrabber"
+                    f"Started {len(packages_to_start)} package download{'s' if len(packages_to_start) > 1 else ''} from linkgrabber"
+                )
             except Exception as e:
                 debug(f"get_packages: Failed to move packages to download list: {e}")
     else:
         debug("get_packages: Linkgrabber is collecting, skipping auto-start")
 
-    debug(
+    debug(
+        f"get_packages: COMPLETE - queue={len(downloads['queue'])}, history={len(downloads['history'])}"
+    )
 
     # Summary overview for quick debugging
-    if downloads[
+    if downloads["queue"] or downloads["history"]:
        debug("=" * 60)
        debug("PACKAGE SUMMARY")
        debug("=" * 60)
        debug(f" CACHE: {cache.get_stats()}")
        debug("-" * 60)
-        for item in downloads[
-            is_archive = item.get(
+        for item in downloads["queue"]:
+            is_archive = item.get("is_archive", False)
            archive_indicator = "[ARCHIVE]" if is_archive else ""
-            mb = item.get(
+            mb = item.get("mb", 0)
            size_str = f"{mb:.0f} MB" if mb < 1024 else f"{mb / 1024:.1f} GB"
-            debug(f" QUEUE: {item['filename'][:50]}{'...' if len(item['filename']) > 50 else ''}")
            debug(
-                f"
-
-
-
-
+                f" QUEUE: {item['filename'][:50]}{'...' if len(item['filename']) > 50 else ''}"
+            )
+            debug(
+                f" -> {item['percentage']}% | {item['timeleft']} | {size_str} | {item['cat']} {archive_indicator}"
+            )
+        for item in downloads["history"]:
+            status_icon = "✅" if item["status"] == "Completed" else "✗"
+            is_archive = item.get("is_archive")
+            extraction_ok = item.get("extraction_ok", False)
            # Only show archive status if we know it's an archive
            if is_archive:
-                archive_status =
+                archive_status = (
+                    f"[ARCHIVE: {'EXTRACTED ✅' if extraction_ok else 'NOT EXTRACTED'}]"
+                )
            else:
                archive_status = ""
            # Format size
-            size_bytes = item.get(
+            size_bytes = item.get("bytes", 0)
            if size_bytes > 0:
                size_mb = size_bytes / (1024 * 1024)
-                size_str =
+                size_str = (
+                    f"{size_mb:.0f} MB"
+                    if size_mb < 1024
+                    else f"{size_mb / 1024:.1f} GB"
+                )
            else:
                size_str = "? MB"
-            debug(
-
-
+            debug(
+                f" HISTORY: {item['name'][:50]}{'...' if len(item['name']) > 50 else ''}"
+            )
+            debug(
+                f" -> {status_icon} {item['status']} | {size_str} | {item['category']} {archive_status}"
+            )
+            if item.get("fail_message"):
                debug(f" Error: {item['fail_message']}")
        debug("=" * 60)
 
@@ -692,55 +830,80 @@ def delete_package(shared_state, package_id):
             package_uuid = package.get("uuid")
 
             debug(
-                f"delete_package: Found package to delete - type={package_type}, uuid={package_uuid}, location={package_location}"
+                f"delete_package: Found package to delete - type={package_type}, uuid={package_uuid}, location={package_location}"
+            )
 
             # Clean up JDownloader links if applicable
             if package_type == "linkgrabber":
-                ids = get_links_matching_package_uuid(
+                ids = get_links_matching_package_uuid(
+                    package, cache.linkgrabber_links
+                )
                 if ids:
-                    debug(
+                    debug(
+                        f"delete_package: Deleting {len(ids)} links from linkgrabber"
+                    )
                     try:
                         shared_state.get_device().linkgrabber.cleanup(
                             "DELETE_ALL",
                             "REMOVE_LINKS_AND_DELETE_FILES",
                             "SELECTED",
                             ids,
-                            [package_uuid]
+                            [package_uuid],
                         )
                     except Exception as e:
-                        debug(
+                        debug(
+                            f"delete_package: Linkgrabber cleanup failed: {e}"
+                        )
                 else:
-                    debug(
+                    debug(
+                        f"delete_package: No link IDs found for linkgrabber package"
+                    )
 
             elif package_type == "downloader":
-                ids = get_links_matching_package_uuid(
+                ids = get_links_matching_package_uuid(
+                    package, cache.downloader_links
+                )
                 if ids:
-                    debug(
+                    debug(
+                        f"delete_package: Deleting {len(ids)} links from downloader"
+                    )
                     try:
                         shared_state.get_device().downloads.cleanup(
                             "DELETE_ALL",
                             "REMOVE_LINKS_AND_DELETE_FILES",
                             "SELECTED",
                             ids,
-                            [package_uuid]
+                            [package_uuid],
                         )
                     except Exception as e:
                         debug(f"delete_package: Downloads cleanup failed: {e}")
                 else:
-                    debug(
+                    debug(
+                        f"delete_package: No link IDs found for downloader package"
+                    )
 
             # Always clean up database entries (no state check - just clean whatever exists)
-            debug(
+            debug(
+                f"delete_package: Cleaning up database entries for {package_id}"
+            )
             try:
                 shared_state.get_db("failed").delete(package_id)
-                debug(
+                debug(
+                    f"delete_package: Deleted from failed DB (or was not present)"
+                )
             except Exception as e:
-                debug(
+                debug(
+                    f"delete_package: Failed DB delete exception (may be normal): {e}"
+                )
             try:
                 shared_state.get_db("protected").delete(package_id)
-                debug(
+                debug(
+                    f"delete_package: Deleted from protected DB (or was not present)"
+                )
             except Exception as e:
-                debug(
+                debug(
+                    f"delete_package: Protected DB delete exception (may be normal): {e}"
+                )
 
             # Get title for logging
             if package_location == "queue":
@@ -758,7 +921,9 @@ def delete_package(shared_state, package_id):
         else:
             info(f'Deleted package "{package_id}"')
 
-        debug(
+        debug(
+            f"delete_package: Successfully completed deletion for package {package_id}, found={found}"
+        )
 
         return True
 
     except Exception as e: