quasarr 2.4.8-py3-none-any.whl → 2.4.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of quasarr might be problematic.

Files changed (76)
  1. quasarr/__init__.py +134 -70
  2. quasarr/api/__init__.py +40 -31
  3. quasarr/api/arr/__init__.py +116 -108
  4. quasarr/api/captcha/__init__.py +262 -137
  5. quasarr/api/config/__init__.py +76 -46
  6. quasarr/api/packages/__init__.py +138 -102
  7. quasarr/api/sponsors_helper/__init__.py +29 -16
  8. quasarr/api/statistics/__init__.py +19 -19
  9. quasarr/downloads/__init__.py +165 -72
  10. quasarr/downloads/linkcrypters/al.py +35 -18
  11. quasarr/downloads/linkcrypters/filecrypt.py +107 -52
  12. quasarr/downloads/linkcrypters/hide.py +5 -6
  13. quasarr/downloads/packages/__init__.py +342 -177
  14. quasarr/downloads/sources/al.py +191 -100
  15. quasarr/downloads/sources/by.py +31 -13
  16. quasarr/downloads/sources/dd.py +27 -14
  17. quasarr/downloads/sources/dj.py +1 -3
  18. quasarr/downloads/sources/dl.py +126 -71
  19. quasarr/downloads/sources/dt.py +11 -5
  20. quasarr/downloads/sources/dw.py +28 -14
  21. quasarr/downloads/sources/he.py +32 -24
  22. quasarr/downloads/sources/mb.py +19 -9
  23. quasarr/downloads/sources/nk.py +14 -10
  24. quasarr/downloads/sources/nx.py +8 -18
  25. quasarr/downloads/sources/sf.py +45 -20
  26. quasarr/downloads/sources/sj.py +1 -3
  27. quasarr/downloads/sources/sl.py +9 -5
  28. quasarr/downloads/sources/wd.py +32 -12
  29. quasarr/downloads/sources/wx.py +35 -21
  30. quasarr/providers/auth.py +42 -37
  31. quasarr/providers/cloudflare.py +28 -30
  32. quasarr/providers/hostname_issues.py +2 -1
  33. quasarr/providers/html_images.py +2 -2
  34. quasarr/providers/html_templates.py +22 -14
  35. quasarr/providers/imdb_metadata.py +149 -80
  36. quasarr/providers/jd_cache.py +131 -39
  37. quasarr/providers/log.py +1 -1
  38. quasarr/providers/myjd_api.py +260 -196
  39. quasarr/providers/notifications.py +53 -41
  40. quasarr/providers/obfuscated.py +9 -4
  41. quasarr/providers/sessions/al.py +71 -55
  42. quasarr/providers/sessions/dd.py +21 -14
  43. quasarr/providers/sessions/dl.py +30 -19
  44. quasarr/providers/sessions/nx.py +23 -14
  45. quasarr/providers/shared_state.py +292 -141
  46. quasarr/providers/statistics.py +75 -43
  47. quasarr/providers/utils.py +33 -27
  48. quasarr/providers/version.py +45 -14
  49. quasarr/providers/web_server.py +10 -5
  50. quasarr/search/__init__.py +30 -18
  51. quasarr/search/sources/al.py +124 -73
  52. quasarr/search/sources/by.py +110 -59
  53. quasarr/search/sources/dd.py +57 -35
  54. quasarr/search/sources/dj.py +69 -48
  55. quasarr/search/sources/dl.py +159 -100
  56. quasarr/search/sources/dt.py +110 -74
  57. quasarr/search/sources/dw.py +121 -61
  58. quasarr/search/sources/fx.py +108 -62
  59. quasarr/search/sources/he.py +78 -49
  60. quasarr/search/sources/mb.py +96 -48
  61. quasarr/search/sources/nk.py +80 -50
  62. quasarr/search/sources/nx.py +91 -62
  63. quasarr/search/sources/sf.py +171 -106
  64. quasarr/search/sources/sj.py +69 -48
  65. quasarr/search/sources/sl.py +115 -71
  66. quasarr/search/sources/wd.py +67 -44
  67. quasarr/search/sources/wx.py +188 -123
  68. quasarr/storage/config.py +65 -52
  69. quasarr/storage/setup.py +238 -140
  70. quasarr/storage/sqlite_database.py +10 -4
  71. {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/METADATA +4 -3
  72. quasarr-2.4.10.dist-info/RECORD +81 -0
  73. quasarr-2.4.8.dist-info/RECORD +0 -81
  74. {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/WHEEL +0 -0
  75. {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/entry_points.txt +0 -0
  76. {quasarr-2.4.8.dist-info → quasarr-2.4.10.dist-info}/licenses/LICENSE +0 -0
@@ -8,7 +8,7 @@ from collections import defaultdict
 from urllib.parse import urlparse
 
 from quasarr.providers.jd_cache import JDPackageCache
-from quasarr.providers.log import info, debug
+from quasarr.providers.log import debug, info
 
 # =============================================================================
 # CONSTANTS
@@ -23,19 +23,48 @@ CATEGORY_DOCS = "docs"
 CATEGORY_NOT_QUASARR = "not_quasarr"
 
 # Known archive extensions for file detection
-ARCHIVE_EXTENSIONS = frozenset([
-    '.rar', '.zip', '.7z', '.tar', '.gz', '.bz2', '.xz',
-    '.001', '.002', '.003', '.004', '.005', '.006', '.007', '.008', '.009',
-    '.r00', '.r01', '.r02', '.r03', '.r04', '.r05', '.r06', '.r07', '.r08', '.r09',
-    '.part1.rar', '.part01.rar', '.part001.rar',
-    '.part2.rar', '.part02.rar', '.part002.rar',
-])
+ARCHIVE_EXTENSIONS = frozenset(
+    [
+        ".rar",
+        ".zip",
+        ".7z",
+        ".tar",
+        ".gz",
+        ".bz2",
+        ".xz",
+        ".001",
+        ".002",
+        ".003",
+        ".004",
+        ".005",
+        ".006",
+        ".007",
+        ".008",
+        ".009",
+        ".r00",
+        ".r01",
+        ".r02",
+        ".r03",
+        ".r04",
+        ".r05",
+        ".r06",
+        ".r07",
+        ".r08",
+        ".r09",
+        ".part1.rar",
+        ".part01.rar",
+        ".part001.rar",
+        ".part2.rar",
+        ".part02.rar",
+        ".part002.rar",
+    ]
+)
 
 # JDownloader extraction complete status markers (checked case-insensitively)
 # Add new languages here as needed
 EXTRACTION_COMPLETE_MARKERS = (
-    'extraction ok',  # English
-    'entpacken ok',  # German
+    "extraction ok",  # English
+    "entpacken ok",  # German
 )
 
 
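A note on the two constants above: the markers are matched case-insensitively as substrings (see is_extraction_complete() further down), and the extensions are plain lowercase suffixes. A small, hypothetical demo of both checks, not part of the package:

# Hypothetical demo of how the constants above behave; not quasarr code.
status = "Entpacken OK (archive.rar)"
assert any(m in status.lower() for m in EXTRACTION_COMPLETE_MARKERS)

# str.endswith() accepts a tuple, so multi-part suffixes such as
# ".part01.rar" are covered alongside plain ".rar".
assert "show.s01.part01.rar".endswith(tuple(ARCHIVE_EXTENSIONS))
assert not "show.s01.mkv".endswith(tuple(ARCHIVE_EXTENSIONS))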
@@ -43,6 +72,7 @@ EXTRACTION_COMPLETE_MARKERS = (
 # HELPER FUNCTIONS
 # =============================================================================
 
+
 def is_extraction_complete(status):
     """Check if a JDownloader status string indicates extraction is complete (case-insensitive)."""
     if not status:
@@ -51,7 +81,7 @@ def is_extraction_complete(status):
     return any(marker in status_lower for marker in EXTRACTION_COMPLETE_MARKERS)
 
 
-def is_archive_file(filename, extraction_status=''):
+def is_archive_file(filename, extraction_status=""):
     """Check if a file is an archive based on extension or extraction status."""
     if extraction_status:
         return True
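The hunk above cuts off after the early return. Assuming the remainder of is_archive_file() is a suffix test against ARCHIVE_EXTENSIONS, a plausible sketch of the complete helper would be:

# Hedged reconstruction - only the first three lines appear in the hunk
# above; the suffix fallback is an assumption.
def is_archive_file(filename, extraction_status=""):
    if extraction_status:
        # Any extraction activity already proves the file is an archive.
        return True
    if not filename:
        return False
    return filename.lower().endswith(tuple(ARCHIVE_EXTENSIONS))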
@@ -88,7 +118,9 @@ def get_links_comment(package, package_links):
         if link.get("packageUUID") == package_uuid:
             comment = link.get("comment")
             if comment:
-                debug(f"get_links_comment: Found comment '{comment}' for package {package_uuid}")
+                debug(
+                    f"get_links_comment: Found comment '{comment}' for package {package_uuid}"
+                )
                 return comment
     return None
 
@@ -105,7 +137,9 @@ def get_links_status(package, all_links, is_archive=False):
     """
     package_uuid = package.get("uuid")
     package_name = package.get("name", "unknown")
-    debug(f"get_links_status: Checking package '{package_name}' ({package_uuid}), is_archive={is_archive}")
+    debug(
+        f"get_links_status: Checking package '{package_name}' ({package_uuid}), is_archive={is_archive}"
+    )
 
     links_in_package = []
     if package_uuid and all_links:
@@ -133,22 +167,32 @@ def get_links_status(package, all_links, is_archive=False):
     # Check if any mirror has all links online
     has_mirror_all_online = False
     for domain, mirror_links in mirrors.items():
-        if all(link.get('availability', '').lower() == 'online' for link in mirror_links):
+        if all(
+            link.get("availability", "").lower() == "online" for link in mirror_links
+        ):
             has_mirror_all_online = True
-            debug(f"get_links_status: Mirror '{domain}' has all {len(mirror_links)} links online")
+            debug(
+                f"get_links_status: Mirror '{domain}' has all {len(mirror_links)} links online"
+            )
             break
 
     # Collect offline link IDs (only if there's an online mirror available)
-    offline_links = [link for link in links_in_package if link.get('availability', '').lower() == 'offline']
-    offline_ids = [link.get('uuid') for link in offline_links]
+    offline_links = [
+        link
+        for link in links_in_package
+        if link.get("availability", "").lower() == "offline"
+    ]
+    offline_ids = [link.get("uuid") for link in offline_links]
     offline_mirror_linkids = offline_ids if has_mirror_all_online else []
 
     if offline_links:
-        debug(f"get_links_status: {len(offline_links)} offline links, has_mirror_all_online={has_mirror_all_online}")
+        debug(
+            f"get_links_status: {len(offline_links)} offline links, has_mirror_all_online={has_mirror_all_online}"
+        )
 
     # First pass: detect if ANY link has extraction activity (for safety override)
     for link in links_in_package:
-        if link.get('extractionStatus', ''):
+        if link.get("extractionStatus", ""):
             has_extraction_activity = True
             break
 
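The mirrors mapping iterated here is built outside this hunk. Given the urlparse and defaultdict imports at the top of the file, it plausibly groups a package's links by hoster domain; a hedged sketch of that grouping (the "url" key and the grouping itself are assumptions):

from collections import defaultdict
from urllib.parse import urlparse

# Hedged sketch: group links by hoster so each domain forms one mirror.
mirrors = defaultdict(list)
for link in links_in_package:
    domain = urlparse(link.get("url", "")).hostname or "unknown"
    mirrors[domain].append(link)

# A mirror only counts when every one of its links is online -
# the same all() condition used in the hunk above.
online_mirrors = [
    domain
    for domain, mirror_links in mirrors.items()
    if all(lnk.get("availability", "").lower() == "online" for lnk in mirror_links)
]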
@@ -157,27 +201,33 @@ def get_links_status(package, all_links, is_archive=False):
 
     # Second pass: check each link's status
     for link in links_in_package:
-        link_name = link.get('name', 'unknown')
-        link_finished = link.get('finished', False)
-        link_availability = link.get('availability', '').lower()
-        link_extraction_status = link.get('extractionStatus', '').lower()
-        link_status = link.get('status', '')
-        link_status_icon = link.get('statusIconKey', '').lower()
-        link_eta = link.get('eta', 0) // 1000 if link.get('eta') else 0
+        link_name = link.get("name", "unknown")
+        link_finished = link.get("finished", False)
+        link_availability = link.get("availability", "").lower()
+        link_extraction_status = link.get("extractionStatus", "").lower()
+        link_status = link.get("status", "")
+        link_status_icon = link.get("statusIconKey", "").lower()
+        link_eta = link.get("eta", 0) // 1000 if link.get("eta") else 0
 
         # Determine if THIS LINK is an archive file
         link_is_archive_file = is_archive_file(link_name, link_extraction_status)
 
-        link_status_preview = link_status[:50] + '...' if len(link_status) > 50 else link_status
+        link_status_preview = (
+            link_status[:50] + "..." if len(link_status) > 50 else link_status
+        )
 
-        debug(f"get_links_status: Link '{link_name}': finished={link_finished}, "
-              f"is_archive_file={link_is_archive_file}, availability={link_availability}, "
-              f"extractionStatus='{link_extraction_status}', status='{link_status_preview}'")
+        debug(
+            f"get_links_status: Link '{link_name}': finished={link_finished}, "
+            f"is_archive_file={link_is_archive_file}, availability={link_availability}, "
+            f"extractionStatus='{link_extraction_status}', status='{link_status_preview}'"
+        )
 
         # Check for offline links
         if link_availability == "offline" and not has_mirror_all_online:
             error = "Links offline for all mirrors"
-            debug(f"get_links_status: ERROR - Link offline with no online mirror: {link_name}")
+            debug(
+                f"get_links_status: ERROR - Link offline with no online mirror: {link_name}"
+            )
 
         # Check for file errors
         if link_status_icon == "false":
@@ -189,20 +239,26 @@ def get_links_status(package, all_links, is_archive=False):
         if not link_finished:
             # Download not complete
             all_finished = False
-            debug(f"get_links_status: Link not finished (download in progress): {link_name}")
+            debug(
+                f"get_links_status: Link not finished (download in progress): {link_name}"
+            )
 
-        elif link_extraction_status and link_extraction_status != 'successful':
+        elif link_extraction_status and link_extraction_status != "successful":
             # Extraction is running or errored (applies to archive files only)
-            if link_extraction_status == 'error':
-                error = link.get('status', 'Extraction error')
+            if link_extraction_status == "error":
+                error = link.get("status", "Extraction error")
                 debug(f"get_links_status: Extraction ERROR on {link_name}: {error}")
-            elif link_extraction_status == 'running':
-                debug(f"get_links_status: Extraction RUNNING on {link_name}, eta={link_eta}s")
+            elif link_extraction_status == "running":
+                debug(
+                    f"get_links_status: Extraction RUNNING on {link_name}, eta={link_eta}s"
+                )
                 if link_eta > 0:
                     if eta is None or link_eta > eta:
                         eta = link_eta
             else:
-                debug(f"get_links_status: Extraction status '{link_extraction_status}' on {link_name}")
+                debug(
+                    f"get_links_status: Extraction status '{link_extraction_status}' on {link_name}"
+                )
                 all_finished = False
 
         elif link_is_archive_file:
@@ -210,27 +266,33 @@ def get_links_status(package, all_links, is_archive=False):
             if is_extraction_complete(link_status):
                 debug(f"get_links_status: Archive link COMPLETE: {link_name}")
             else:
-                debug(f"get_links_status: Archive link WAITING for extraction: {link_name}, status='{link_status}'")
+                debug(
+                    f"get_links_status: Archive link WAITING for extraction: {link_name}, status='{link_status}'"
+                )
                 all_finished = False
 
         elif is_archive or has_extraction_activity:
             # Package is marked as archive but THIS link doesn't look like an archive file
             # (e.g., .mkv in a package with .rar files)
             # These non-archive files are finished when download is complete
-            debug(f"get_links_status: Non-archive link in archive package COMPLETE: {link_name}")
+            debug(
+                f"get_links_status: Non-archive link in archive package COMPLETE: {link_name}"
+            )
 
         else:
             # Non-archive file in non-archive package - finished when downloaded
             debug(f"get_links_status: Non-archive link COMPLETE: {link_name}")
 
-    debug(f"get_links_status: RESULT for '{package_name}': all_finished={all_finished}, "
-          f"eta={eta}, error={error}, is_archive={is_archive}, has_extraction_activity={has_extraction_activity}")
+    debug(
+        f"get_links_status: RESULT for '{package_name}': all_finished={all_finished}, "
+        f"eta={eta}, error={error}, is_archive={is_archive}, has_extraction_activity={has_extraction_activity}"
+    )
 
     return {
         "all_finished": all_finished,
         "eta": eta,
         "error": error,
-        "offline_mirror_linkids": offline_mirror_linkids
+        "offline_mirror_linkids": offline_mirror_linkids,
     }
 
 
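For orientation, the linkgrabber branch of get_packages() later in this diff consumes this return value as sketched below; the sample values are invented, only the key names come from the return statement above.

# Hedged sketch of consuming the get_links_status() result (values invented).
link_details = {
    "all_finished": False,         # every link done (and extracted, for archives)
    "eta": 42,                     # largest extraction ETA seen, in seconds, or None
    "error": None,                 # first error string encountered, or None
    "offline_mirror_linkids": [],  # offline links that are safe to prune
}
error = link_details["error"]
offline_mirror_linkids = link_details["offline_mirror_linkids"]
location = "history" if error else "queue"  # as in the linkgrabber branch below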
@@ -240,14 +302,18 @@ def get_links_matching_package_uuid(package, package_links):
     link_ids = []
 
     if not isinstance(package_links, list):
-        debug(f"get_links_matching_package_uuid: ERROR - expected list, got {type(package_links).__name__}")
+        debug(
+            f"get_links_matching_package_uuid: ERROR - expected list, got {type(package_links).__name__}"
+        )
         return link_ids
 
     if package_uuid:
         for link in package_links:
             if link.get("packageUUID") == package_uuid:
                 link_ids.append(link.get("uuid"))
-        debug(f"get_links_matching_package_uuid: Found {len(link_ids)} links for package {package_uuid}")
+        debug(
+            f"get_links_matching_package_uuid: Found {len(link_ids)} links for package {package_uuid}"
+        )
     else:
         info("Error - package uuid missing in delete request!")
     return link_ids
@@ -267,6 +333,7 @@ def format_eta(seconds):
 # MAIN FUNCTIONS
 # =============================================================================
 
+
 def get_packages(shared_state, _cache=None):
     """
     Get all packages from protected DB, failed DB, linkgrabber, and downloader.
@@ -289,7 +356,9 @@ def get_packages(shared_state, _cache=None):
 
     # === PROTECTED PACKAGES (CAPTCHA required) ===
    protected_packages = shared_state.get_db("protected").retrieve_all_titles()
-    debug(f"get_packages: Found {len(protected_packages) if protected_packages else 0} protected packages")
+    debug(
+        f"get_packages: Found {len(protected_packages) if protected_packages else 0} protected packages"
+    )
 
     if protected_packages:
         for package in protected_packages:
@@ -300,21 +369,29 @@ def get_packages(shared_state, _cache=None):
                     "title": data["title"],
                     "urls": data["links"],
                     "size_mb": data.get("size_mb"),
-                    "password": data.get("password")
+                    "password": data.get("password"),
                 }
-                packages.append({
-                    "details": details,
-                    "location": "queue",
-                    "type": "protected",
-                    "package_id": package_id
-                })
-                debug(f"get_packages: Added protected package '{data['title']}' ({package_id})")
+                packages.append(
+                    {
+                        "details": details,
+                        "location": "queue",
+                        "type": "protected",
+                        "package_id": package_id,
+                    }
+                )
+                debug(
+                    f"get_packages: Added protected package '{data['title']}' ({package_id})"
+                )
             except (json.JSONDecodeError, KeyError) as e:
-                debug(f"get_packages: Failed to parse protected package {package_id}: {e}")
+                debug(
+                    f"get_packages: Failed to parse protected package {package_id}: {e}"
+                )
 
     # === FAILED PACKAGES ===
     failed_packages = shared_state.get_db("failed").retrieve_all_titles()
-    debug(f"get_packages: Found {len(failed_packages) if failed_packages else 0} failed packages")
+    debug(
+        f"get_packages: Found {len(failed_packages) if failed_packages else 0} failed packages"
+    )
 
     if failed_packages:
         for package in failed_packages:
@@ -328,19 +405,23 @@ def get_packages(shared_state, _cache=None):
                 details = {
                     "name": data.get("title", "Unknown"),
                     "bytesLoaded": 0,
-                    "saveTo": "/"
+                    "saveTo": "/",
                 }
                 error = data.get("error", "Unknown error")
 
-                packages.append({
-                    "details": details,
-                    "location": "history",
-                    "type": "failed",
-                    "error": error,
-                    "comment": package_id,
-                    "uuid": package_id
-                })
-                debug(f"get_packages: Added failed package '{details['name']}' ({package_id}): {error}")
+                packages.append(
+                    {
+                        "details": details,
+                        "location": "history",
+                        "type": "failed",
+                        "error": error,
+                        "comment": package_id,
+                        "uuid": package_id,
+                    }
+                )
+                debug(
+                    f"get_packages: Added failed package '{details['name']}' ({package_id}): {error}"
+                )
             except (json.JSONDecodeError, KeyError, TypeError) as e:
                 debug(f"get_packages: Failed to parse failed package {package_id}: {e}")
 
@@ -356,46 +437,60 @@ def get_packages(shared_state, _cache=None):
             package_uuid = package.get("uuid")
 
             comment = get_links_comment(package, linkgrabber_links)
-            link_details = get_links_status(package, linkgrabber_links, is_archive=False)
+            link_details = get_links_status(
+                package, linkgrabber_links, is_archive=False
+            )
 
             error = link_details["error"]
             offline_mirror_linkids = link_details["offline_mirror_linkids"]
 
             # Clean up offline links if we have online mirrors
             if offline_mirror_linkids:
-                debug(f"get_packages: Cleaning up {len(offline_mirror_linkids)} offline links from '{package_name}'")
+                debug(
+                    f"get_packages: Cleaning up {len(offline_mirror_linkids)} offline links from '{package_name}'"
+                )
                 try:
                     shared_state.get_device().linkgrabber.cleanup(
                         "DELETE_OFFLINE",
                         "REMOVE_LINKS_ONLY",
                         "SELECTED",
                         offline_mirror_linkids,
-                        [package_uuid]
+                        [package_uuid],
                     )
                 except Exception as e:
                     debug(f"get_packages: Failed to cleanup offline links: {e}")
 
             location = "history" if error else "queue"
-            packages.append({
-                "details": package,
-                "location": location,
-                "type": "linkgrabber",
-                "comment": comment,
-                "uuid": package_uuid,
-                "error": error
-            })
-            debug(f"get_packages: Added linkgrabber package '{package_name}' -> {location}")
+            packages.append(
+                {
+                    "details": package,
+                    "location": location,
+                    "type": "linkgrabber",
+                    "comment": comment,
+                    "uuid": package_uuid,
+                    "error": error,
+                }
+            )
+            debug(
+                f"get_packages: Added linkgrabber package '{package_name}' -> {location}"
+            )
 
     # === DOWNLOADER PACKAGES ===
     downloader_packages = cache.downloader_packages
     downloader_links = cache.downloader_links
 
-    debug(f"get_packages: Processing {len(downloader_packages)} downloader packages with {len(downloader_links)} links")
+    debug(
+        f"get_packages: Processing {len(downloader_packages)} downloader packages with {len(downloader_links)} links"
+    )
 
     if downloader_packages and downloader_links:
         # ONE bulk API call for all archive detection, with safety fallbacks
-        archive_package_uuids = cache.detect_all_archives(downloader_packages, downloader_links)
-        debug(f"get_packages: Archive detection complete - {len(archive_package_uuids)} packages are archives")
+        archive_package_uuids = cache.detect_all_archives(
+            downloader_packages, downloader_links
+        )
+        debug(
+            f"get_packages: Archive detection complete - {len(archive_package_uuids)} packages are archives"
+        )
 
         for package in downloader_packages:
             package_name = package.get("name", "unknown")
@@ -404,7 +499,9 @@ def get_packages(shared_state, _cache=None):
             comment = get_links_comment(package, downloader_links)
 
             # Lookup from cache (populated by detect_all_archives above)
-            is_archive = package_uuid in archive_package_uuids if package_uuid else False
+            is_archive = (
+                package_uuid in archive_package_uuids if package_uuid else False
+            )
             debug(f"get_packages: Package '{package_name}' is_archive={is_archive}")
 
             link_details = get_links_status(package, downloader_links, is_archive)
@@ -424,36 +521,39 @@ def get_packages(shared_state, _cache=None):
                 # Only mark as finished if it's not an archive
                 if not is_archive:
                     debug(
-                        f"get_packages: Package '{package_name}' bytes complete and not archive -> marking finished")
+                        f"get_packages: Package '{package_name}' bytes complete and not archive -> marking finished"
+                    )
                     finished = True
                 else:
                     debug(
-                        f"get_packages: Package '{package_name}' bytes complete BUT is_archive=True -> NOT marking finished yet")
+                        f"get_packages: Package '{package_name}' bytes complete BUT is_archive=True -> NOT marking finished yet"
+                    )
 
             if not finished and link_details["eta"]:
                 package["eta"] = link_details["eta"]
 
             location = "history" if error or finished else "queue"
 
-            debug(f"get_packages: Package '{package_name}' -> location={location}, "
-                  f"finished={finished}, error={error}, is_archive={is_archive}")
-
-            packages.append({
-                "details": package,
-                "location": location,
-                "type": "downloader",
-                "comment": comment,
-                "uuid": package_uuid,
-                "error": error,
-                "is_archive": is_archive,
-                "extraction_ok": finished and is_archive
-            })
+            debug(
+                f"get_packages: Package '{package_name}' -> location={location}, "
+                f"finished={finished}, error={error}, is_archive={is_archive}"
+            )
+
+            packages.append(
+                {
+                    "details": package,
+                    "location": location,
+                    "type": "downloader",
+                    "comment": comment,
+                    "uuid": package_uuid,
+                    "error": error,
+                    "is_archive": is_archive,
+                    "extraction_ok": finished and is_archive,
+                }
+            )
 
     # === BUILD RESPONSE ===
-    downloads = {
-        "queue": [],
-        "history": []
-    }
+    downloads = {"queue": [], "history": []}
 
     queue_index = 0
     history_index = 0
@@ -489,7 +589,9 @@ def get_packages(shared_state, _cache=None):
                 bytes_loaded = int(details.get("bytesLoaded", 0))
 
                 mb = bytes_total / (1024 * 1024)
-                mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
+                mb_left = (
+                    (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
+                )
                 if mb_left < 0:
                     mb_left = 0
 
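A quick worked example of the size math above, with invented numbers: if JDownloader briefly over-reports bytesLoaded, mb_left goes negative and is clamped to zero.

# Worked example of the size math above; the numbers are invented.
bytes_total = 1_500_000_000   # ~1.4 GiB package
bytes_loaded = 1_600_000_000  # transient over-report from the API
mb = bytes_total / (1024 * 1024)  # ~1430.5 MiB
mb_left = (bytes_total - bytes_loaded) / (1024 * 1024) if bytes_total else 0
if mb_left < 0:
    mb_left = 0  # clamp, exactly as the hunk above does
assert int(mb_left) == 0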
@@ -525,33 +627,39 @@ def get_packages(shared_state, _cache=None):
                 except (ZeroDivisionError, ValueError, TypeError):
                     percentage = 0
 
-                downloads["queue"].append({
-                    "index": queue_index,
-                    "nzo_id": effective_id,
-                    "priority": "Normal",
-                    "filename": name,
-                    "cat": category,
-                    "mbleft": int(mb_left) if mb_left else 0,
-                    "mb": int(mb) if mb else 0,
-                    "bytes": bytes_total,
-                    "status": "Downloading",
-                    "percentage": percentage,
-                    "timeleft": time_left,
-                    "type": package_type,
-                    "uuid": package_uuid,
-                    "is_archive": package.get("is_archive", False),
-                    "storage": storage
-                })
+                downloads["queue"].append(
+                    {
+                        "index": queue_index,
+                        "nzo_id": effective_id,
+                        "priority": "Normal",
+                        "filename": name,
+                        "cat": category,
+                        "mbleft": int(mb_left) if mb_left else 0,
+                        "mb": int(mb) if mb else 0,
+                        "bytes": bytes_total,
+                        "status": "Downloading",
+                        "percentage": percentage,
+                        "timeleft": time_left,
+                        "type": package_type,
+                        "uuid": package_uuid,
+                        "is_archive": package.get("is_archive", False),
+                        "storage": storage,
+                    }
+                )
                 queue_index += 1
             else:
-                debug(f"get_packages: Skipping queue package without package_id or uuid: {name}")
+                debug(
+                    f"get_packages: Skipping queue package without package_id or uuid: {name}"
+                )
 
         elif package["location"] == "history":
             details = package["details"]
             name = details.get("name", "unknown")
             try:
                 # Use bytesLoaded first, fall back to bytesTotal for failed/incomplete downloads
-                size = int(details.get("bytesLoaded", 0)) or int(details.get("bytesTotal", 0))
+                size = int(details.get("bytesLoaded", 0)) or int(
+                    details.get("bytesTotal", 0)
+                )
             except (KeyError, TypeError, ValueError):
                 size = 0
             storage = details.get("saveTo", "/")
@@ -569,29 +677,36 @@ def get_packages(shared_state, _cache=None):
             else:
                 status = "Completed"
 
-            downloads["history"].append({
-                "fail_message": fail_message,
-                "category": category,
-                "storage": storage,
-                "status": status,
-                "nzo_id": effective_id,
-                "name": name,
-                "bytes": int(size),
-                "percentage": 100,
-                "type": "downloader",
-                "uuid": package.get("uuid"),
-                "is_archive": package.get("is_archive", False),
-                "extraction_ok": package.get("extraction_ok", False),
-                "extraction_status": "SUCCESSFUL" if package.get("extraction_ok", False) else "RUNNING" if package.get(
-                    "is_archive", False) else ""
-            })
+            downloads["history"].append(
+                {
+                    "fail_message": fail_message,
+                    "category": category,
+                    "storage": storage,
+                    "status": status,
+                    "nzo_id": effective_id,
+                    "name": name,
+                    "bytes": int(size),
+                    "percentage": 100,
+                    "type": "downloader",
+                    "uuid": package.get("uuid"),
+                    "is_archive": package.get("is_archive", False),
+                    "extraction_ok": package.get("extraction_ok", False),
+                    "extraction_status": "SUCCESSFUL"
+                    if package.get("extraction_ok", False)
+                    else "RUNNING"
+                    if package.get("is_archive", False)
+                    else "",
+                }
+            )
             history_index += 1
         else:
             info(f"Invalid package location {package['location']}")
 
     # === AUTO-START QUASARR PACKAGES ===
     if not cache.is_collecting:
-        debug("get_packages: Linkgrabber not collecting, checking for packages to auto-start")
+        debug(
+            "get_packages: Linkgrabber not collecting, checking for packages to auto-start"
+        )
 
         packages_to_start = []
         links_to_start = []
602
717
  package_uuid = package.get("uuid")
603
718
  if package_uuid:
604
719
  package_link_ids = [
605
- link.get("uuid") for link in linkgrabber_links
720
+ link.get("uuid")
721
+ for link in linkgrabber_links
606
722
  if link.get("packageUUID") == package_uuid and link.get("uuid")
607
723
  ]
608
724
  if package_link_ids:
609
725
  debug(
610
- f"get_packages: Found Quasarr package to start: {package.get('name')} with {len(package_link_ids)} links")
726
+ f"get_packages: Found Quasarr package to start: {package.get('name')} with {len(package_link_ids)} links"
727
+ )
611
728
  packages_to_start.append(package_uuid)
612
729
  links_to_start.extend(package_link_ids)
613
730
  else:
614
- info(f"Package {package_uuid} has no links in linkgrabber - skipping start")
731
+ info(
732
+ f"Package {package_uuid} has no links in linkgrabber - skipping start"
733
+ )
615
734
  # Only start one package at a time
616
735
  break
617
736
 
618
737
  if packages_to_start and links_to_start:
619
738
  debug(
620
- f"get_packages: Moving {len(packages_to_start)} packages with {len(links_to_start)} links to download list")
739
+ f"get_packages: Moving {len(packages_to_start)} packages with {len(links_to_start)} links to download list"
740
+ )
621
741
  try:
622
- shared_state.get_device().linkgrabber.move_to_downloadlist(links_to_start, packages_to_start)
742
+ shared_state.get_device().linkgrabber.move_to_downloadlist(
743
+ links_to_start, packages_to_start
744
+ )
623
745
  info(
624
- f"Started {len(packages_to_start)} package download{'s' if len(packages_to_start) > 1 else ''} from linkgrabber")
746
+ f"Started {len(packages_to_start)} package download{'s' if len(packages_to_start) > 1 else ''} from linkgrabber"
747
+ )
625
748
  except Exception as e:
626
749
  debug(f"get_packages: Failed to move packages to download list: {e}")
627
750
  else:
628
751
  debug("get_packages: Linkgrabber is collecting, skipping auto-start")
629
752
 
630
- debug(f"get_packages: COMPLETE - queue={len(downloads['queue'])}, history={len(downloads['history'])}")
753
+ debug(
754
+ f"get_packages: COMPLETE - queue={len(downloads['queue'])}, history={len(downloads['history'])}"
755
+ )
631
756
 
632
757
  # Summary overview for quick debugging
633
- if downloads['queue'] or downloads['history']:
758
+ if downloads["queue"] or downloads["history"]:
634
759
  debug("=" * 60)
635
760
  debug("PACKAGE SUMMARY")
636
761
  debug("=" * 60)
637
762
  debug(f" CACHE: {cache.get_stats()}")
638
763
  debug("-" * 60)
639
- for item in downloads['queue']:
640
- is_archive = item.get('is_archive', False)
764
+ for item in downloads["queue"]:
765
+ is_archive = item.get("is_archive", False)
641
766
  archive_indicator = "[ARCHIVE]" if is_archive else ""
642
- mb = item.get('mb', 0)
767
+ mb = item.get("mb", 0)
643
768
  size_str = f"{mb:.0f} MB" if mb < 1024 else f"{mb / 1024:.1f} GB"
644
- debug(f" QUEUE: {item['filename'][:50]}{'...' if len(item['filename']) > 50 else ''}")
645
769
  debug(
646
- f" -> {item['percentage']}% | {item['timeleft']} | {size_str} | {item['cat']} {archive_indicator}")
647
- for item in downloads['history']:
648
- status_icon = "✅" if item['status'] == 'Completed' else "✗"
649
- is_archive = item.get('is_archive')
650
- extraction_ok = item.get('extraction_ok', False)
770
+ f" QUEUE: {item['filename'][:50]}{'...' if len(item['filename']) > 50 else ''}"
771
+ )
772
+ debug(
773
+ f" -> {item['percentage']}% | {item['timeleft']} | {size_str} | {item['cat']} {archive_indicator}"
774
+ )
775
+ for item in downloads["history"]:
776
+ status_icon = "✅" if item["status"] == "Completed" else "✗"
777
+ is_archive = item.get("is_archive")
778
+ extraction_ok = item.get("extraction_ok", False)
651
779
  # Only show archive status if we know it's an archive
652
780
  if is_archive:
653
- archive_status = f"[ARCHIVE: {'EXTRACTED ✅' if extraction_ok else 'NOT EXTRACTED'}]"
781
+ archive_status = (
782
+ f"[ARCHIVE: {'EXTRACTED ✅' if extraction_ok else 'NOT EXTRACTED'}]"
783
+ )
654
784
  else:
655
785
  archive_status = ""
656
786
  # Format size
657
- size_bytes = item.get('bytes', 0)
787
+ size_bytes = item.get("bytes", 0)
658
788
  if size_bytes > 0:
659
789
  size_mb = size_bytes / (1024 * 1024)
660
- size_str = f"{size_mb:.0f} MB" if size_mb < 1024 else f"{size_mb / 1024:.1f} GB"
790
+ size_str = (
791
+ f"{size_mb:.0f} MB"
792
+ if size_mb < 1024
793
+ else f"{size_mb / 1024:.1f} GB"
794
+ )
661
795
  else:
662
796
  size_str = "? MB"
663
- debug(f" HISTORY: {item['name'][:50]}{'...' if len(item['name']) > 50 else ''}")
664
- debug(f" -> {status_icon} {item['status']} | {size_str} | {item['category']} {archive_status}")
665
- if item.get('fail_message'):
797
+ debug(
798
+ f" HISTORY: {item['name'][:50]}{'...' if len(item['name']) > 50 else ''}"
799
+ )
800
+ debug(
801
+ f" -> {status_icon} {item['status']} | {size_str} | {item['category']} {archive_status}"
802
+ )
803
+ if item.get("fail_message"):
666
804
  debug(f" Error: {item['fail_message']}")
667
805
  debug("=" * 60)
668
806
 
@@ -692,55 +830,80 @@ def delete_package(shared_state, package_id):
                     package_uuid = package.get("uuid")
 
                     debug(
-                        f"delete_package: Found package to delete - type={package_type}, uuid={package_uuid}, location={package_location}")
+                        f"delete_package: Found package to delete - type={package_type}, uuid={package_uuid}, location={package_location}"
+                    )
 
                     # Clean up JDownloader links if applicable
                     if package_type == "linkgrabber":
-                        ids = get_links_matching_package_uuid(package, cache.linkgrabber_links)
+                        ids = get_links_matching_package_uuid(
+                            package, cache.linkgrabber_links
+                        )
                         if ids:
-                            debug(f"delete_package: Deleting {len(ids)} links from linkgrabber")
+                            debug(
+                                f"delete_package: Deleting {len(ids)} links from linkgrabber"
+                            )
                             try:
                                 shared_state.get_device().linkgrabber.cleanup(
                                     "DELETE_ALL",
                                     "REMOVE_LINKS_AND_DELETE_FILES",
                                     "SELECTED",
                                     ids,
-                                    [package_uuid]
+                                    [package_uuid],
                                 )
                             except Exception as e:
-                                debug(f"delete_package: Linkgrabber cleanup failed: {e}")
+                                debug(
+                                    f"delete_package: Linkgrabber cleanup failed: {e}"
+                                )
                         else:
-                            debug(f"delete_package: No link IDs found for linkgrabber package")
+                            debug(
+                                f"delete_package: No link IDs found for linkgrabber package"
+                            )
 
                     elif package_type == "downloader":
-                        ids = get_links_matching_package_uuid(package, cache.downloader_links)
+                        ids = get_links_matching_package_uuid(
+                            package, cache.downloader_links
+                        )
                         if ids:
-                            debug(f"delete_package: Deleting {len(ids)} links from downloader")
+                            debug(
+                                f"delete_package: Deleting {len(ids)} links from downloader"
+                            )
                             try:
                                 shared_state.get_device().downloads.cleanup(
                                     "DELETE_ALL",
                                     "REMOVE_LINKS_AND_DELETE_FILES",
                                     "SELECTED",
                                     ids,
-                                    [package_uuid]
+                                    [package_uuid],
                                 )
                             except Exception as e:
                                 debug(f"delete_package: Downloads cleanup failed: {e}")
                         else:
-                            debug(f"delete_package: No link IDs found for downloader package")
+                            debug(
+                                f"delete_package: No link IDs found for downloader package"
+                            )
 
                     # Always clean up database entries (no state check - just clean whatever exists)
-                    debug(f"delete_package: Cleaning up database entries for {package_id}")
+                    debug(
+                        f"delete_package: Cleaning up database entries for {package_id}"
+                    )
                     try:
                         shared_state.get_db("failed").delete(package_id)
-                        debug(f"delete_package: Deleted from failed DB (or was not present)")
+                        debug(
+                            f"delete_package: Deleted from failed DB (or was not present)"
+                        )
                     except Exception as e:
-                        debug(f"delete_package: Failed DB delete exception (may be normal): {e}")
+                        debug(
+                            f"delete_package: Failed DB delete exception (may be normal): {e}"
+                        )
                     try:
                         shared_state.get_db("protected").delete(package_id)
-                        debug(f"delete_package: Deleted from protected DB (or was not present)")
+                        debug(
+                            f"delete_package: Deleted from protected DB (or was not present)"
+                        )
                     except Exception as e:
-                        debug(f"delete_package: Protected DB delete exception (may be normal): {e}")
+                        debug(
+                            f"delete_package: Protected DB delete exception (may be normal): {e}"
+                        )
 
                     # Get title for logging
                     if package_location == "queue":
@@ -758,7 +921,9 @@ def delete_package(shared_state, package_id):
         else:
             info(f'Deleted package "{package_id}"')
 
-        debug(f"delete_package: Successfully completed deletion for package {package_id}, found={found}")
+        debug(
+            f"delete_package: Successfully completed deletion for package {package_id}, found={found}"
+        )
         return True
 
     except Exception as e: