quasarr 1.31.0__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of quasarr might be problematic.
- quasarr/api/__init__.py +324 -106
- quasarr/api/captcha/__init__.py +26 -1
- quasarr/api/packages/__init__.py +374 -0
- quasarr/api/sponsors_helper/__init__.py +4 -0
- quasarr/downloads/__init__.py +2 -0
- quasarr/downloads/linkcrypters/hide.py +45 -6
- quasarr/downloads/packages/__init__.py +482 -219
- quasarr/providers/auth.py +250 -0
- quasarr/providers/jd_cache.py +211 -53
- quasarr/providers/obfuscated.py +9 -7
- quasarr/providers/shared_state.py +24 -0
- quasarr/providers/version.py +1 -1
- quasarr/search/sources/dl.py +3 -2
- quasarr/storage/setup.py +15 -1
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/METADATA +12 -2
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/RECORD +20 -18
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/WHEEL +0 -0
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/entry_points.txt +0 -0
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/licenses/LICENSE +0 -0
- {quasarr-1.31.0.dist-info → quasarr-2.0.0.dist-info}/top_level.txt +0 -0
quasarr/downloads/packages/__init__.py
@@ -3,106 +3,270 @@
 # Project by https://github.com/rix1337
 
 import json
+import traceback
 from collections import defaultdict
 from urllib.parse import urlparse
 
 from quasarr.providers.jd_cache import JDPackageCache
 from quasarr.providers.log import info, debug
-
+
+# =============================================================================
+# CONSTANTS
+# =============================================================================
+
+PACKAGE_ID_PREFIX = "Quasarr_"
+
+# Categories used for package classification
+CATEGORY_MOVIES = "movies"
+CATEGORY_TV = "tv"
+CATEGORY_DOCS = "docs"
+CATEGORY_NOT_QUASARR = "not_quasarr"
+
+# Known archive extensions for file detection
+ARCHIVE_EXTENSIONS = frozenset([
+    '.rar', '.zip', '.7z', '.tar', '.gz', '.bz2', '.xz',
+    '.001', '.002', '.003', '.004', '.005', '.006', '.007', '.008', '.009',
+    '.r00', '.r01', '.r02', '.r03', '.r04', '.r05', '.r06', '.r07', '.r08', '.r09',
+    '.part1.rar', '.part01.rar', '.part001.rar',
+    '.part2.rar', '.part02.rar', '.part002.rar',
+])
+
+# JDownloader extraction complete status markers (checked case-insensitively)
+# Add new languages here as needed
+EXTRACTION_COMPLETE_MARKERS = (
+    'extraction ok',  # English
+    'entpacken ok',  # German
+)
+
+
+# =============================================================================
+# HELPER FUNCTIONS
+# =============================================================================
+
+def is_extraction_complete(status):
+    """Check if a JDownloader status string indicates extraction is complete (case-insensitive)."""
+    if not status:
+        return False
+    status_lower = status.lower()
+    return any(marker in status_lower for marker in EXTRACTION_COMPLETE_MARKERS)
+
+
+def is_archive_file(filename, extraction_status=''):
+    """Check if a file is an archive based on extension or extraction status."""
+    if extraction_status:
+        return True
+    if not filename:
+        return False
+    filename_lower = filename.lower()
+    return any(filename_lower.endswith(ext) for ext in ARCHIVE_EXTENSIONS)
+
+
+def get_category_from_package_id(package_id):
+    """Extract category from a Quasarr package ID."""
+    if not package_id:
+        return CATEGORY_NOT_QUASARR
+    if CATEGORY_MOVIES in package_id:
+        return CATEGORY_MOVIES
+    elif CATEGORY_DOCS in package_id:
+        return CATEGORY_DOCS
+    elif PACKAGE_ID_PREFIX in package_id:
+        return CATEGORY_TV
+    else:
+        return CATEGORY_NOT_QUASARR
+
+
+def is_quasarr_package(package_id):
+    """Check if a package ID belongs to Quasarr."""
+    return bool(package_id) and package_id.startswith(PACKAGE_ID_PREFIX)
 
 
 def get_links_comment(package, package_links):
+    """Get comment from the first link matching the package UUID."""
     package_uuid = package.get("uuid")
     if package_uuid and package_links:
         for link in package_links:
             if link.get("packageUUID") == package_uuid:
-
+                comment = link.get("comment")
+                if comment:
+                    debug(f"get_links_comment: Found comment '{comment}' for package {package_uuid}")
+                    return comment
     return None
 
 
 def get_links_status(package, all_links, is_archive=False):
-
+    """
+    Determine the status of links in a package.
+
+    Returns dict with:
+    - all_finished: bool - True if all links are done (download + extraction if applicable)
+    - eta: int or None - estimated time remaining
+    - error: str or None - error message if any
+    - offline_mirror_linkids: list - link UUIDs that are offline but have online mirrors
+    """
     package_uuid = package.get("uuid")
+    package_name = package.get("name", "unknown")
+    debug(f"get_links_status: Checking package '{package_name}' ({package_uuid}), is_archive={is_archive}")
+
+    links_in_package = []
     if package_uuid and all_links:
         for link in all_links:
-
-            if link_package_uuid and link_package_uuid == package_uuid:
+            if link.get("packageUUID") == package_uuid:
                 links_in_package.append(link)
 
+    debug(f"get_links_status: Found {len(links_in_package)} links in package")
+
     all_finished = True
     eta = None
     error = None
 
+    # SAFETY: Track if ANY link has extraction activity - this overrides is_archive=False
+    # Catches cases where archive detection failed but extraction is clearly happening
+    has_extraction_activity = False
+
+    # Group links by mirror domain
     mirrors = defaultdict(list)
     for link in links_in_package:
         url = link.get("url", "")
         base_domain = urlparse(url).netloc
         mirrors[base_domain].append(link)
 
+    # Check if any mirror has all links online
     has_mirror_all_online = False
-    for mirror_links in mirrors.
+    for domain, mirror_links in mirrors.items():
         if all(link.get('availability', '').lower() == 'online' for link in mirror_links):
             has_mirror_all_online = True
+            debug(f"get_links_status: Mirror '{domain}' has all {len(mirror_links)} links online")
             break
 
+    # Collect offline link IDs (only if there's an online mirror available)
     offline_links = [link for link in links_in_package if link.get('availability', '').lower() == 'offline']
     offline_ids = [link.get('uuid') for link in offline_links]
     offline_mirror_linkids = offline_ids if has_mirror_all_online else []
 
+    if offline_links:
+        debug(f"get_links_status: {len(offline_links)} offline links, has_mirror_all_online={has_mirror_all_online}")
+
+    # First pass: detect if ANY link has extraction activity (for safety override)
+    for link in links_in_package:
+        if link.get('extractionStatus', ''):
+            has_extraction_activity = True
+            break
+
+    if has_extraction_activity:
+        debug(f"get_links_status: Package has extraction activity detected")
+
+    # Second pass: check each link's status
     for link in links_in_package:
-
+        link_name = link.get('name', 'unknown')
+        link_finished = link.get('finished', False)
+        link_availability = link.get('availability', '').lower()
+        link_extraction_status = link.get('extractionStatus', '').lower()
+        link_status = link.get('status', '')
+        link_status_icon = link.get('statusIconKey', '').lower()
+        link_eta = link.get('eta', 0) // 1000 if link.get('eta') else 0
+
+        # Determine if THIS LINK is an archive file
+        link_is_archive_file = is_archive_file(link_name, link_extraction_status)
+
+        link_status_preview = link_status[:50] + '...' if len(link_status) > 50 else link_status
+
+        debug(f"get_links_status: Link '{link_name}': finished={link_finished}, "
+              f"is_archive_file={link_is_archive_file}, availability={link_availability}, "
+              f"extractionStatus='{link_extraction_status}', status='{link_status_preview}'")
+
+        # Check for offline links
+        if link_availability == "offline" and not has_mirror_all_online:
             error = "Links offline for all mirrors"
-
+            debug(f"get_links_status: ERROR - Link offline with no online mirror: {link_name}")
+
+        # Check for file errors
+        if link_status_icon == "false":
             error = "File error in package"
-
-
-
+            debug(f"get_links_status: ERROR - File error in link: {link_name}")
+
+        # === MAIN LINK STATUS LOGIC ===
+
         if not link_finished:
+            # Download not complete
             all_finished = False
+            debug(f"get_links_status: Link not finished (download in progress): {link_name}")
+
         elif link_extraction_status and link_extraction_status != 'successful':
+            # Extraction is running or errored (applies to archive files only)
             if link_extraction_status == 'error':
-                error = link.get('status', '')
-
-
-
+                error = link.get('status', 'Extraction error')
+                debug(f"get_links_status: Extraction ERROR on {link_name}: {error}")
+            elif link_extraction_status == 'running':
+                debug(f"get_links_status: Extraction RUNNING on {link_name}, eta={link_eta}s")
+                if link_eta > 0:
+                    if eta is None or link_eta > eta:
+                        eta = link_eta
+            else:
+                debug(f"get_links_status: Extraction status '{link_extraction_status}' on {link_name}")
             all_finished = False
-
-
-
-
-
+
+        elif link_is_archive_file:
+            # This specific link IS an archive file - must have "extraction ok"
+            if is_extraction_complete(link_status):
+                debug(f"get_links_status: Archive link COMPLETE: {link_name}")
+            else:
+                debug(f"get_links_status: Archive link WAITING for extraction: {link_name}, status='{link_status}'")
                 all_finished = False
 
-
+        elif is_archive or has_extraction_activity:
+            # Package is marked as archive but THIS link doesn't look like an archive file
+            # (e.g., .mkv in a package with .rar files)
+            # These non-archive files are finished when download is complete
+            debug(f"get_links_status: Non-archive link in archive package COMPLETE: {link_name}")
+
+        else:
+            # Non-archive file in non-archive package - finished when downloaded
+            debug(f"get_links_status: Non-archive link COMPLETE: {link_name}")
+
+    debug(f"get_links_status: RESULT for '{package_name}': all_finished={all_finished}, "
+          f"eta={eta}, error={error}, is_archive={is_archive}, has_extraction_activity={has_extraction_activity}")
+
+    return {
+        "all_finished": all_finished,
+        "eta": eta,
+        "error": error,
+        "offline_mirror_linkids": offline_mirror_linkids
+    }
 
 
 def get_links_matching_package_uuid(package, package_links):
+    """Get all link UUIDs belonging to a package."""
     package_uuid = package.get("uuid")
     link_ids = []
 
     if not isinstance(package_links, list):
-        debug("
+        debug(f"get_links_matching_package_uuid: ERROR - expected list, got {type(package_links).__name__}")
         return link_ids
 
     if package_uuid:
         for link in package_links:
             if link.get("packageUUID") == package_uuid:
                 link_ids.append(link.get("uuid"))
+        debug(f"get_links_matching_package_uuid: Found {len(link_ids)} links for package {package_uuid}")
     else:
         info("Error - package uuid missing in delete request!")
     return link_ids
 
 
 def format_eta(seconds):
-
+    """Format seconds as HH:MM:SS."""
+    if seconds is None or seconds < 0:
         return "23:59:59"
-
-
-
-
-    return f"{hours:02}:{minutes:02}:{seconds:02}"
+    hours = seconds // 3600
+    minutes = (seconds % 3600) // 60
+    secs = seconds % 60
+    return f"{hours:02}:{minutes:02}:{secs:02}"
 
 
+# =============================================================================
+# MAIN FUNCTIONS
+# =============================================================================
+
 def get_packages(shared_state, _cache=None):
     """
     Get all packages from protected DB, failed DB, linkgrabber, and downloader.
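The constants and helpers introduced in this hunk replace string checks that were previously inlined at each call site. A quick illustration of how they behave (illustrative only; it assumes the helpers are importable from quasarr.downloads.packages, the module this diff appears to patch):

from quasarr.downloads.packages import (
    is_archive_file,
    is_extraction_complete,
    get_category_from_package_id,
    format_eta,
)

print(is_archive_file("movie.part01.rar"))      # True - multi-part RAR extension
print(is_archive_file("movie.mkv"))             # False - not in ARCHIVE_EXTENSIONS
print(is_archive_file("movie.mkv", "running"))  # True - any extraction status wins
print(is_extraction_complete("Entpacken OK"))   # True - German marker, case-insensitive
print(get_category_from_package_id("Quasarr_movies_abc"))  # "movies"
print(get_category_from_package_id("other_tool_xyz"))      # "not_quasarr" - no Quasarr_ prefix
print(format_eta(3725))                         # "01:02:05"
print(format_eta(None))                         # "23:59:59" - sentinel for unknown ETA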
@@ -112,82 +276,104 @@ def get_packages(shared_state, _cache=None):
     _cache: INTERNAL USE ONLY. Used by delete_package() to share cached data
         within a single request. External callers should never pass this.
     """
+    debug("get_packages: Starting package retrieval")
     packages = []
 
     # Create cache for this request - only valid for duration of this call
     if _cache is None:
-
-
-
+        cache = JDPackageCache(shared_state.get_device())
+        debug("get_packages: Created new JDPackageCache")
+    else:
+        cache = _cache
+        debug("get_packages: Using provided cache instance")
 
+    # === PROTECTED PACKAGES (CAPTCHA required) ===
     protected_packages = shared_state.get_db("protected").retrieve_all_titles()
+    debug(f"get_packages: Found {len(protected_packages) if protected_packages else 0} protected packages")
+
     if protected_packages:
         for package in protected_packages:
             package_id = package[0]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            try:
+                data = json.loads(package[1])
+                details = {
+                    "title": data["title"],
+                    "urls": data["links"],
+                    "size_mb": data.get("size_mb"),
+                    "password": data.get("password")
+                }
+                packages.append({
+                    "details": details,
+                    "location": "queue",
+                    "type": "protected",
+                    "package_id": package_id
+                })
+                debug(f"get_packages: Added protected package '{data['title']}' ({package_id})")
+            except (json.JSONDecodeError, KeyError) as e:
+                debug(f"get_packages: Failed to parse protected package {package_id}: {e}")
+
+    # === FAILED PACKAGES ===
     failed_packages = shared_state.get_db("failed").retrieve_all_titles()
+    debug(f"get_packages: Found {len(failed_packages) if failed_packages else 0} failed packages")
+
     if failed_packages:
         for package in failed_packages:
             package_id = package[0]
-
-            data = json.loads(package[1])
             try:
-
+                data = json.loads(package[1])
+                # Handle double-encoded JSON
+                if isinstance(data, str):
                     data = json.loads(data)
-            except json.JSONDecodeError:
-                pass
-            details = {
-                "name": data["title"],
-                "bytesLoaded": 0,
-                "saveTo": "/"
-            }
-
-            error = data.get("error", "Unknown error")
-
-            packages.append({
-                "details": details,
-                "location": "history",
-                "type": "failed",
-                "error": error,
-                "comment": package_id,
-                "uuid": package_id
-            })
 
-
+                details = {
+                    "name": data.get("title", "Unknown"),
+                    "bytesLoaded": 0,
+                    "saveTo": "/"
+                }
+                error = data.get("error", "Unknown error")
+
+                packages.append({
+                    "details": details,
+                    "location": "history",
+                    "type": "failed",
+                    "error": error,
+                    "comment": package_id,
+                    "uuid": package_id
+                })
+                debug(f"get_packages: Added failed package '{details['name']}' ({package_id}): {error}")
+            except (json.JSONDecodeError, KeyError, TypeError) as e:
+                debug(f"get_packages: Failed to parse failed package {package_id}: {e}")
+
+    # === LINKGRABBER PACKAGES ===
     linkgrabber_packages = cache.linkgrabber_packages
     linkgrabber_links = cache.linkgrabber_links
 
+    debug(f"get_packages: Processing {len(linkgrabber_packages)} linkgrabber packages")
+
     if linkgrabber_packages:
         for package in linkgrabber_packages:
-
+            package_name = package.get("name", "unknown")
+            package_uuid = package.get("uuid")
+
             comment = get_links_comment(package, linkgrabber_links)
             link_details = get_links_status(package, linkgrabber_links, is_archive=False)
 
             error = link_details["error"]
             offline_mirror_linkids = link_details["offline_mirror_linkids"]
+
+            # Clean up offline links if we have online mirrors
             if offline_mirror_linkids:
-
-
-
-
-
-
-
+                debug(f"get_packages: Cleaning up {len(offline_mirror_linkids)} offline links from '{package_name}'")
+                try:
+                    shared_state.get_device().linkgrabber.cleanup(
+                        "DELETE_OFFLINE",
+                        "REMOVE_LINKS_ONLY",
+                        "SELECTED",
+                        offline_mirror_linkids,
+                        [package_uuid]
+                    )
+                except Exception as e:
+                    debug(f"get_packages: Failed to cleanup offline links: {e}")
 
             location = "history" if error else "queue"
             packages.append({
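The failed-package branch now tolerates rows whose payload was JSON-encoded twice. A minimal standalone sketch of that guard, with a made-up payload:

import json

raw = json.dumps(json.dumps({"title": "Example.Release", "error": "Unknown error"}))
data = json.loads(raw)
if isinstance(data, str):  # double-encoded: the first pass returned a string
    data = json.loads(data)
print(data["title"])  # Example.Release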
@@ -195,24 +381,31 @@ def get_packages(shared_state, _cache=None):
                 "location": location,
                 "type": "linkgrabber",
                 "comment": comment,
-                "uuid":
+                "uuid": package_uuid,
                 "error": error
             })
+            debug(f"get_packages: Added linkgrabber package '{package_name}' -> {location}")
 
-    #
+    # === DOWNLOADER PACKAGES ===
     downloader_packages = cache.downloader_packages
     downloader_links = cache.downloader_links
 
+    debug(f"get_packages: Processing {len(downloader_packages)} downloader packages with {len(downloader_links)} links")
+
     if downloader_packages and downloader_links:
-        #
-        archive_package_uuids = cache.
+        # ONE bulk API call for all archive detection, with safety fallbacks
+        archive_package_uuids = cache.detect_all_archives(downloader_packages, downloader_links)
+        debug(f"get_packages: Archive detection complete - {len(archive_package_uuids)} packages are archives")
 
         for package in downloader_packages:
+            package_name = package.get("name", "unknown")
+            package_uuid = package.get("uuid")
+
             comment = get_links_comment(package, downloader_links)
 
-            #
-            package_uuid = package.get("uuid")
+            # Lookup from cache (populated by detect_all_archives above)
             is_archive = package_uuid in archive_package_uuids if package_uuid else False
+            debug(f"get_packages: Package '{package_name}' is_archive={is_archive}")
 
             link_details = get_links_status(package, downloader_links, is_archive)
 
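detect_all_archives (added to JDPackageCache elsewhere in this release) is called once per request and returns the collection of package UUIDs that contain archives, so the per-package test reduces to a membership check. A sketch with hypothetical UUID values:

archive_package_uuids = {1594829633407, 1594829633501}  # hypothetical JDownloader UUIDs

package = {"uuid": 1594829633407, "name": "Some.Release.1080p"}
package_uuid = package.get("uuid")
is_archive = package_uuid in archive_package_uuids if package_uuid else False
print(is_archive)  # True - one cached lookup instead of an API call per package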
@@ -224,63 +417,69 @@ def get_packages(shared_state, _cache=None):
             if not finished and not error:
                 bytes_total = int(package.get("bytesTotal", 0))
                 bytes_loaded = int(package.get("bytesLoaded", 0))
-
+                pkg_eta = package.get("eta")
 
                 # If download is complete and no ETA (paused/finished state)
-                if bytes_total > 0 and bytes_loaded >= bytes_total and
-                # Only mark as finished if it's not an archive
+                if bytes_total > 0 and bytes_loaded >= bytes_total and pkg_eta is None:
+                    # Only mark as finished if it's not an archive
                     if not is_archive:
+                        debug(
+                            f"get_packages: Package '{package_name}' bytes complete and not archive -> marking finished")
                         finished = True
+                    else:
+                        debug(
+                            f"get_packages: Package '{package_name}' bytes complete BUT is_archive=True -> NOT marking finished yet")
 
             if not finished and link_details["eta"]:
                 package["eta"] = link_details["eta"]
 
             location = "history" if error or finished else "queue"
 
+            debug(f"get_packages: Package '{package_name}' -> location={location}, "
+                  f"finished={finished}, error={error}, is_archive={is_archive}")
+
             packages.append({
                 "details": package,
                 "location": location,
                 "type": "downloader",
                 "comment": comment,
-                "uuid":
-                "error": error
+                "uuid": package_uuid,
+                "error": error,
+                "is_archive": is_archive,
+                "extraction_ok": finished and is_archive
             })
 
+    # === BUILD RESPONSE ===
     downloads = {
         "queue": [],
         "history": []
     }
-    for package in packages:
-        queue_index = 0
-        history_index = 0
 
+    queue_index = 0
+    history_index = 0
+
+    for package in packages:
         package_id = None
 
         if package["location"] == "queue":
             time_left = "23:59:59"
+
             if package["type"] == "linkgrabber":
                 details = package["details"]
-                name = f"[Linkgrabber] {details
+                name = f"[Linkgrabber] {details.get('name', 'unknown')}"
                 try:
-                    mb = mb_left = int(details
-                except KeyError:
+                    mb = mb_left = int(details.get("bytesTotal", 0)) / (1024 * 1024)
+                except (KeyError, TypeError, ValueError):
                     mb = mb_left = 0
-
-
-                if "movies" in package_id:
-                    category = "movies"
-                elif "docs" in package_id:
-                    category = "docs"
-                else:
-                    category = "tv"
-            except TypeError:
-                category = "not_quasarr"
+                package_id = package["comment"]
+                category = get_category_from_package_id(package_id)
                 package_type = "linkgrabber"
                 package_uuid = package["uuid"]
+
             elif package["type"] == "downloader":
                 details = package["details"]
                 status = "Downloading"
-
+                pkg_eta = details.get("eta")
                 bytes_total = int(details.get("bytesTotal", 0))
                 bytes_loaded = int(details.get("bytesLoaded", 0))
 
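The completion heuristic above treats a downloader package as finished only when all bytes are loaded, JDownloader no longer reports an ETA, and the package is not an archive (archives must additionally pass the extraction checks in get_links_status). The branch in isolation, with sample values:

package = {"bytesTotal": 1_000_000, "bytesLoaded": 1_000_000, "eta": None}
is_archive = False

bytes_total = int(package.get("bytesTotal", 0))
bytes_loaded = int(package.get("bytesLoaded", 0))
pkg_eta = package.get("eta")

# Mirrors the diff: byte-complete, no ETA, and not an archive
finished = bytes_total > 0 and bytes_loaded >= bytes_total and pkg_eta is None and not is_archive
print(finished)  # True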
@@ -289,94 +488,72 @@ def get_packages(shared_state, _cache=None):
                 if mb_left < 0:
                     mb_left = 0
 
-                if
+                if pkg_eta is None:
                     status = "Paused"
                 else:
-                    time_left = format_eta(int(
+                    time_left = format_eta(int(pkg_eta))
                     if mb_left == 0:
                         status = "Extracting"
 
-                name = f"[{status}] {details
-
-
-                package_id = package["comment"]
-                if "movies" in package_id:
-                    category = "movies"
-                elif "docs" in package_id:
-                    category = "docs"
-                else:
-                    category = "tv"
-            except TypeError:
-                category = "not_quasarr"
+                name = f"[{status}] {details.get('name', 'unknown')}"
+                package_id = package["comment"]
+                category = get_category_from_package_id(package_id)
                 package_type = "downloader"
                 package_uuid = package["uuid"]
-
+
+            else:  # protected
                 details = package["details"]
-                name = f"[CAPTCHA not solved!] {details
-                mb = mb_left = details
-
-
-                if "movies" in package_id:
-                    category = "movies"
-                elif "docs" in package_id:
-                    category = "docs"
-                else:
-                    category = "tv"
-            except TypeError:
-                category = "not_quasarr"
+                name = f"[CAPTCHA not solved!] {details.get('title', 'unknown')}"
+                mb = mb_left = details.get("size_mb") or 0
+                package_id = package.get("package_id")
+                category = get_category_from_package_id(package_id)
                 package_type = "protected"
                 package_uuid = None
 
-
-
-            mb_left = int(mb_left)
-            mb = int(mb)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if package_id:
+                try:
+                    mb_left = int(mb_left) if mb_left else 0
+                    mb = int(mb) if mb else 0
+                    percentage = int(100 * (mb - mb_left) / mb) if mb > 0 else 0
+                except (ZeroDivisionError, ValueError, TypeError):
+                    percentage = 0
+
+                downloads["queue"].append({
+                    "index": queue_index,
+                    "nzo_id": package_id,
+                    "priority": "Normal",
+                    "filename": name,
+                    "cat": category,
+                    "mbleft": mb_left,
+                    "mb": mb,
+                    "status": "Downloading",
+                    "percentage": percentage,
+                    "timeleft": time_left,
+                    "type": package_type,
+                    "uuid": package_uuid,
+                    "is_archive": package.get("is_archive", False)
+                })
+                queue_index += 1
+            else:
+                debug(f"get_packages: Skipping queue package without package_id: {name}")
+
         elif package["location"] == "history":
             details = package["details"]
-            name = details
+            name = details.get("name", "unknown")
             try:
-                size = int(details
-            except KeyError:
+                size = int(details.get("bytesLoaded", 0))
+            except (KeyError, TypeError, ValueError):
                 size = 0
-            storage = details
-
-
-
-                category = "movies"
-            elif "docs" in package_id:
-                category = "docs"
-            else:
-                category = "tv"
-        except TypeError:
-            category = "not_quasarr"
+            storage = details.get("saveTo", "/")
+
+            package_id = package.get("comment")
+            category = get_category_from_package_id(package_id)
 
             error = package.get("error")
             fail_message = ""
             if error:
                 status = "Failed"
-                fail_message = error
+                fail_message = str(error)
             else:
                 status = "Completed"
 
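The queue entries built here follow a SABnzbd-style queue-slot shape (nzo_id, cat, mbleft, timeleft, ...), which is what Sonarr/Radarr poll Quasarr for. The percentage guard from the added block, shown standalone with sample sizes:

mb, mb_left = 2048, 512  # sample sizes in MB

try:
    mb_left = int(mb_left) if mb_left else 0
    mb = int(mb) if mb else 0
    percentage = int(100 * (mb - mb_left) / mb) if mb > 0 else 0
except (ZeroDivisionError, ValueError, TypeError):
    percentage = 0

print(percentage)  # 75 - three quarters downloaded, division guarded against mb == 0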
@@ -390,43 +567,98 @@ def get_packages(shared_state, _cache=None):
                 "bytes": int(size),
                 "percentage": 100,
                 "type": "downloader",
-                "uuid": package
+                "uuid": package.get("uuid"),
+                "is_archive": package.get("is_archive", False),
+                "extraction_ok": package.get("extraction_ok", False)
             })
             history_index += 1
         else:
             info(f"Invalid package location {package['location']}")
 
-    #
+    # === AUTO-START QUASARR PACKAGES ===
     if not cache.is_collecting:
-
+        debug("get_packages: Linkgrabber not collecting, checking for packages to auto-start")
+
         packages_to_start = []
         links_to_start = []
 
         for package in linkgrabber_packages:
-            # Use cached linkgrabber_links instead of re-querying
             comment = get_links_comment(package, linkgrabber_links)
-            if comment
+            if is_quasarr_package(comment):
                 package_uuid = package.get("uuid")
                 if package_uuid:
-                    package_link_ids = [
-
+                    package_link_ids = [
+                        link.get("uuid") for link in linkgrabber_links
+                        if link.get("packageUUID") == package_uuid and link.get("uuid")
+                    ]
                     if package_link_ids:
+                        debug(
+                            f"get_packages: Found Quasarr package to start: {package.get('name')} with {len(package_link_ids)} links")
                         packages_to_start.append(package_uuid)
                         links_to_start.extend(package_link_ids)
                     else:
                         info(f"Package {package_uuid} has no links in linkgrabber - skipping start")
-
+                # Only start one package at a time
                 break
 
         if packages_to_start and links_to_start:
-
-
-
+            debug(
+                f"get_packages: Moving {len(packages_to_start)} packages with {len(links_to_start)} links to download list")
+            try:
+                shared_state.get_device().linkgrabber.move_to_downloadlist(links_to_start, packages_to_start)
+                info(
+                    f"Started {len(packages_to_start)} package download{'s' if len(packages_to_start) > 1 else ''} from linkgrabber")
+            except Exception as e:
+                debug(f"get_packages: Failed to move packages to download list: {e}")
+    else:
+        debug("get_packages: Linkgrabber is collecting, skipping auto-start")
+
+    debug(f"get_packages: COMPLETE - queue={len(downloads['queue'])}, history={len(downloads['history'])}")
+
+    # Summary overview for quick debugging
+    if downloads['queue'] or downloads['history']:
+        debug("=" * 60)
+        debug("PACKAGE SUMMARY")
+        debug("=" * 60)
+        debug(f"  CACHE: {cache.get_stats()}")
+        debug("-" * 60)
+        for item in downloads['queue']:
+            is_archive = item.get('is_archive', False)
+            archive_indicator = "[ARCHIVE]" if is_archive else ""
+            mb = item.get('mb', 0)
+            size_str = f"{mb:.0f} MB" if mb < 1024 else f"{mb / 1024:.1f} GB"
+            debug(f"  QUEUE: {item['filename'][:50]}{'...' if len(item['filename']) > 50 else ''}")
+            debug(
+                f"    -> {item['percentage']}% | {item['timeleft']} | {size_str} | {item['cat']} {archive_indicator}")
+        for item in downloads['history']:
+            status_icon = "✓" if item['status'] == 'Completed' else "✗"
+            is_archive = item.get('is_archive')
+            extraction_ok = item.get('extraction_ok', False)
+            # Only show archive status if we know it's an archive
+            if is_archive:
+                archive_status = f"[ARCHIVE: {'EXTRACTED ✓' if extraction_ok else 'NOT EXTRACTED'}]"
+            else:
+                archive_status = ""
+            # Format size
+            size_bytes = item.get('bytes', 0)
+            if size_bytes > 0:
+                size_mb = size_bytes / (1024 * 1024)
+                size_str = f"{size_mb:.0f} MB" if size_mb < 1024 else f"{size_mb / 1024:.1f} GB"
+            else:
+                size_str = "? MB"
+            debug(f"  HISTORY: {item['name'][:50]}{'...' if len(item['name']) > 50 else ''}")
+            debug(f"    -> {status_icon} {item['status']} | {size_str} | {item['category']} {archive_status}")
+            if item.get('fail_message'):
+                debug(f"       Error: {item['fail_message']}")
+        debug("=" * 60)
 
     return downloads
 
 
 def delete_package(shared_state, package_id):
+    """Delete a package from JDownloader and/or the database."""
+    debug(f"delete_package: Starting deletion of package {package_id}")
+
     try:
         deleted_title = ""
 
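The summary logger added in the hunk above formats sizes as whole MB below one GiB and one decimal of GB above it; the one-liner in isolation:

for mb in (512, 4300):
    size_str = f"{mb:.0f} MB" if mb < 1024 else f"{mb / 1024:.1f} GB"
    print(size_str)  # "512 MB", then "4.2 GB"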
@@ -435,56 +667,87 @@ def delete_package(shared_state, package_id):
         cache = JDPackageCache(shared_state.get_device())
 
         packages = get_packages(shared_state, _cache=cache)
+
+        found = False
         for package_location in packages:
             for package in packages[package_location]:
-                if package
-
-
+                if package.get("nzo_id") == package_id:
+                    found = True
+                    package_type = package.get("type")
+                    package_uuid = package.get("uuid")
+
+                    debug(
+                        f"delete_package: Found package to delete - type={package_type}, uuid={package_uuid}, location={package_location}")
+
+                    # Clean up JDownloader links if applicable
+                    if package_type == "linkgrabber":
                         ids = get_links_matching_package_uuid(package, cache.linkgrabber_links)
                         if ids:
-
-
-
-
-
-
-
-
-
-
+                            debug(f"delete_package: Deleting {len(ids)} links from linkgrabber")
+                            try:
+                                shared_state.get_device().linkgrabber.cleanup(
+                                    "DELETE_ALL",
+                                    "REMOVE_LINKS_AND_DELETE_FILES",
+                                    "SELECTED",
+                                    ids,
+                                    [package_uuid]
+                                )
+                            except Exception as e:
+                                debug(f"delete_package: Linkgrabber cleanup failed: {e}")
+                        else:
+                            debug(f"delete_package: No link IDs found for linkgrabber package")
+
+                    elif package_type == "downloader":
                         ids = get_links_matching_package_uuid(package, cache.downloader_links)
                         if ids:
-
-
-
-
-
-
-
-
-
-
-
-
+                            debug(f"delete_package: Deleting {len(ids)} links from downloader")
+                            try:
+                                shared_state.get_device().downloads.cleanup(
+                                    "DELETE_ALL",
+                                    "REMOVE_LINKS_AND_DELETE_FILES",
+                                    "SELECTED",
+                                    ids,
+                                    [package_uuid]
+                                )
+                            except Exception as e:
+                                debug(f"delete_package: Downloads cleanup failed: {e}")
+                        else:
+                            debug(f"delete_package: No link IDs found for downloader package")
+
+                    # Always clean up database entries (no state check - just clean whatever exists)
+                    debug(f"delete_package: Cleaning up database entries for {package_id}")
+                    try:
+                        shared_state.get_db("failed").delete(package_id)
+                        debug(f"delete_package: Deleted from failed DB (or was not present)")
+                    except Exception as e:
+                        debug(f"delete_package: Failed DB delete exception (may be normal): {e}")
+                    try:
+                        shared_state.get_db("protected").delete(package_id)
+                        debug(f"delete_package: Deleted from protected DB (or was not present)")
+                    except Exception as e:
+                        debug(f"delete_package: Protected DB delete exception (may be normal): {e}")
 
+                    # Get title for logging
                     if package_location == "queue":
-
+                        deleted_title = package.get("filename", "")
                     else:
-
+                        deleted_title = package.get("name", "")
 
-
-            deleted_title = package[package_name_field]
-        except KeyError:
-            pass
+                    break  # Exit inner loop - we found and processed the package
 
-
-
+            if found:
+                break  # Exit outer loop
 
         if deleted_title:
             info(f'Deleted package "{deleted_title}" with ID "{package_id}"')
         else:
             info(f'Deleted package "{package_id}"')
-
+
+        debug(f"delete_package: Successfully completed deletion for package {package_id}, found={found}")
+        return True
+
+    except Exception as e:
         info(f"Failed to delete package {package_id}")
+        debug(f"delete_package: Exception during deletion: {type(e).__name__}: {e}")
+        debug(f"delete_package: Traceback: {traceback.format_exc()}")
         return False
-    return True