qBitrr2 5.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. qBitrr/__init__.py +14 -0
  2. qBitrr/arss.py +7100 -0
  3. qBitrr/auto_update.py +382 -0
  4. qBitrr/bundled_data.py +7 -0
  5. qBitrr/config.py +192 -0
  6. qBitrr/config_version.py +144 -0
  7. qBitrr/db_lock.py +400 -0
  8. qBitrr/db_recovery.py +202 -0
  9. qBitrr/env_config.py +73 -0
  10. qBitrr/errors.py +41 -0
  11. qBitrr/ffprobe.py +105 -0
  12. qBitrr/gen_config.py +1331 -0
  13. qBitrr/home_path.py +23 -0
  14. qBitrr/logger.py +235 -0
  15. qBitrr/main.py +790 -0
  16. qBitrr/search_activity_store.py +92 -0
  17. qBitrr/static/assets/ArrView.js +2 -0
  18. qBitrr/static/assets/ArrView.js.map +1 -0
  19. qBitrr/static/assets/ConfigView.js +4 -0
  20. qBitrr/static/assets/ConfigView.js.map +1 -0
  21. qBitrr/static/assets/LogsView.js +2 -0
  22. qBitrr/static/assets/LogsView.js.map +1 -0
  23. qBitrr/static/assets/ProcessesView.js +2 -0
  24. qBitrr/static/assets/ProcessesView.js.map +1 -0
  25. qBitrr/static/assets/app.css +1 -0
  26. qBitrr/static/assets/app.js +11 -0
  27. qBitrr/static/assets/app.js.map +1 -0
  28. qBitrr/static/assets/build.svg +3 -0
  29. qBitrr/static/assets/check-mark.svg +5 -0
  30. qBitrr/static/assets/close.svg +4 -0
  31. qBitrr/static/assets/download.svg +5 -0
  32. qBitrr/static/assets/gear.svg +5 -0
  33. qBitrr/static/assets/live-streaming.svg +8 -0
  34. qBitrr/static/assets/log.svg +3 -0
  35. qBitrr/static/assets/logo.svg +48 -0
  36. qBitrr/static/assets/plus.svg +4 -0
  37. qBitrr/static/assets/process.svg +15 -0
  38. qBitrr/static/assets/react-select.esm.js +7 -0
  39. qBitrr/static/assets/react-select.esm.js.map +1 -0
  40. qBitrr/static/assets/refresh-arrow.svg +3 -0
  41. qBitrr/static/assets/table.js +5 -0
  42. qBitrr/static/assets/table.js.map +1 -0
  43. qBitrr/static/assets/trash.svg +8 -0
  44. qBitrr/static/assets/up-arrow.svg +3 -0
  45. qBitrr/static/assets/useInterval.js +2 -0
  46. qBitrr/static/assets/useInterval.js.map +1 -0
  47. qBitrr/static/assets/vendor.js +2 -0
  48. qBitrr/static/assets/vendor.js.map +1 -0
  49. qBitrr/static/assets/visibility.svg +9 -0
  50. qBitrr/static/index.html +33 -0
  51. qBitrr/static/logov2-clean.svg +48 -0
  52. qBitrr/static/manifest.json +23 -0
  53. qBitrr/static/sw.js +87 -0
  54. qBitrr/static/vite.svg +1 -0
  55. qBitrr/tables.py +143 -0
  56. qBitrr/utils.py +274 -0
  57. qBitrr/versioning.py +136 -0
  58. qBitrr/webui.py +3114 -0
  59. qbitrr2-5.5.5.dist-info/METADATA +1191 -0
  60. qbitrr2-5.5.5.dist-info/RECORD +64 -0
  61. qbitrr2-5.5.5.dist-info/WHEEL +5 -0
  62. qbitrr2-5.5.5.dist-info/entry_points.txt +2 -0
  63. qbitrr2-5.5.5.dist-info/licenses/LICENSE +21 -0
  64. qbitrr2-5.5.5.dist-info/top_level.txt +1 -0
qBitrr/webui.py ADDED
@@ -0,0 +1,3114 @@
1
+ from __future__ import annotations
2
+
3
+ import io
4
+ import logging
5
+ import os
6
+ import re
7
+ import secrets
8
+ import threading
9
+ import time
10
+ from collections.abc import Mapping
11
+ from datetime import datetime, timedelta, timezone
12
+ from pathlib import Path
13
+ from typing import Any
14
+
15
+ from flask import Flask, jsonify, redirect, request, send_file
16
+ from peewee import fn
17
+
18
+ from qBitrr.arss import FreeSpaceManager, PlaceHolderArr
19
+ from qBitrr.bundled_data import patched_version, tagged_version
20
+ from qBitrr.config import CONFIG, HOME_PATH
21
+ from qBitrr.logger import run_logs
22
+ from qBitrr.search_activity_store import (
23
+ clear_search_activity,
24
+ fetch_search_activities,
25
+ )
26
+ from qBitrr.versioning import fetch_latest_release, fetch_release_by_tag
27
+
28
+
29
def _toml_set(doc, dotted_key: str, value: Any):
    """Assign *value* at *dotted_key* (``"A.B.C"``) inside a tomlkit document.

    Missing (or non-table) intermediate segments are replaced with fresh
    tomlkit tables.  Plain Python ``dict`` values are converted to tomlkit
    inline tables so they serialize as ``{key = "value"}`` rather than as a
    ``[section]`` header.
    """
    from tomlkit import inline_table, table

    *parents, leaf = dotted_key.split(".")
    node = doc
    for segment in parents:
        if segment not in node or not isinstance(node[segment], dict):
            node[segment] = table()
        node = node[segment]

    # tomlkit items expose ``as_string``; only bare dicts need wrapping.
    if isinstance(value, dict) and not hasattr(value, "as_string"):
        wrapped = inline_table()
        wrapped.update(value)
        node[leaf] = wrapped
    else:
        node[leaf] = value
47
+
48
+
49
+ def _toml_delete(doc, dotted_key: str) -> None:
50
+ keys = dotted_key.split(".")
51
+ cur = doc
52
+ parents = []
53
+ for k in keys[:-1]:
54
+ next_cur = cur.get(k)
55
+ if not isinstance(next_cur, dict):
56
+ return
57
+ parents.append((cur, k))
58
+ cur = next_cur
59
+ cur.pop(keys[-1], None)
60
+ for parent, key in reversed(parents):
61
+ node = parent.get(key)
62
+ if isinstance(node, dict) and not node:
63
+ parent.pop(key, None)
64
+ else:
65
+ break
66
+
67
+
68
+ def _toml_to_jsonable(obj: Any) -> Any:
69
+ try:
70
+ if hasattr(obj, "unwrap"):
71
+ return _toml_to_jsonable(obj.unwrap())
72
+ if isinstance(obj, dict):
73
+ return {k: _toml_to_jsonable(v) for k, v in obj.items()}
74
+ if isinstance(obj, list):
75
+ return [_toml_to_jsonable(v) for v in obj]
76
+ return obj
77
+ except Exception:
78
+ return obj
79
+
80
+
81
+ class WebUI:
82
    def __init__(self, manager, host: str = "0.0.0.0", port: int = 6969):
        """Build the Flask app, logging, token, and version-cache state.

        Args:
            manager: owning process manager; used for auto-update hooks and
                restart requests (see ``_run_manual_update``).
            host: bind address; a wildcard bind triggers a security warning.
            port: TCP port for the WebUI.
        """
        self.manager = manager
        self.host = host
        self.port = port
        self.app = Flask(__name__)
        self.logger = logging.getLogger("qBitrr.WebUI")
        run_logs(self.logger, "WebUI")
        self.logger.info("Initialising WebUI on %s:%s", self.host, self.port)
        if self.host in {"0.0.0.0", "::"}:
            self.logger.warning(
                "WebUI configured to listen on %s. Expose this only behind a trusted reverse proxy.",
                self.host,
            )
        # Route Flask's and Werkzeug's own log records through qBitrr's
        # handlers instead of their defaults.
        self.app.logger.handlers.clear()
        self.app.logger.propagate = True
        self.app.logger.setLevel(self.logger.level)
        werkzeug_logger = logging.getLogger("werkzeug")
        werkzeug_logger.handlers.clear()
        werkzeug_logger.propagate = True
        werkzeug_logger.setLevel(self.logger.level)

        # Add cache control for static files to support config reload
        @self.app.after_request
        def add_cache_headers(response):
            # Prevent caching of index.html and service worker to ensure fresh config loads
            if request.path in ("/static/index.html", "/ui", "/static/sw.js", "/sw.js"):
                response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
                response.headers["Pragma"] = "no-cache"
                response.headers["Expires"] = "0"
            return response

        # Security token (optional) - auto-generate and persist if empty
        self.token = CONFIG.get("WebUI.Token", fallback=None)
        if not self.token:
            self.token = secrets.token_hex(32)
            try:
                _toml_set(CONFIG.config, "WebUI.Token", self.token)
                CONFIG.save()
            except Exception:
                # Best-effort persistence; the in-memory token still works
                # for this process even if the config could not be written.
                pass
            else:
                # NOTE: ``notice`` is a custom log level presumably installed
                # by run_logs — confirm against qBitrr.logger.
                self.logger.notice("Generated new WebUI token")
        self._github_repo = "Feramance/qBitrr"
        # Guards _version_cache, _version_cache_expiry and _update_state.
        self._version_lock = threading.Lock()
        self._version_cache = {
            "current_version": patched_version,
            "latest_version": None,
            "changelog": "",  # Latest version changelog
            "current_version_changelog": "",  # Current version changelog
            "changelog_url": f"https://github.com/{self._github_repo}/releases",
            "repository_url": f"https://github.com/{self._github_repo}",
            "homepage_url": f"https://github.com/{self._github_repo}",
            "update_available": False,
            "last_checked": None,
            "error": None,
            "installation_type": "unknown",
            "binary_download_url": None,
            "binary_download_name": None,
            "binary_download_size": None,
            "binary_download_error": None,
        }
        # Expiry in the past forces a refresh on the first version query.
        self._version_cache_expiry = datetime.utcnow() - timedelta(seconds=1)
        self._update_state = {
            "in_progress": False,
            "last_result": None,
            "last_error": None,
            "completed_at": None,
        }
        self._update_thread: threading.Thread | None = None
        self._rebuilding_arrs = False
        self._register_routes()
        static_root = Path(__file__).with_name("static")
        if not (static_root / "index.html").exists():
            self.logger.warning(
                "WebUI static bundle is missing. Install npm and run "
                "'npm ci && npm run build' inside the 'webui' folder before packaging."
            )
        self._thread: threading.Thread | None = None
        self._use_dev_server: bool | None = None

        # Shutdown control for graceful restart
        self._shutdown_event = threading.Event()
        self._restart_requested = False
        self._server = None  # Will hold Waitress server reference
166
+
167
+ def _fetch_version_info(self) -> dict[str, Any]:
168
+ info = fetch_latest_release(self._github_repo)
169
+ if info.get("error"):
170
+ self.logger.debug("Failed to fetch latest release information: %s", info["error"])
171
+ return {"error": info["error"]}
172
+ latest_display = info.get("raw_tag") or info.get("normalized")
173
+ return {
174
+ "latest_version": latest_display,
175
+ "update_available": bool(info.get("update_available")),
176
+ "changelog": info.get("changelog") or "",
177
+ "changelog_url": info.get("changelog_url"),
178
+ "error": None,
179
+ }
180
+
181
+ def _fetch_current_version_changelog(self) -> dict[str, Any]:
182
+ """Fetch changelog for the current running version."""
183
+ current_ver = tagged_version
184
+ if not current_ver:
185
+ return {
186
+ "changelog": "",
187
+ "changelog_url": f"https://github.com/{self._github_repo}/releases",
188
+ "error": "No current version",
189
+ }
190
+
191
+ info = fetch_release_by_tag(current_ver, self._github_repo)
192
+ if info.get("error"):
193
+ self.logger.debug("Failed to fetch current version changelog: %s", info["error"])
194
+ # Fallback to generic releases page
195
+ return {
196
+ "changelog": "",
197
+ "changelog_url": f"https://github.com/{self._github_repo}/releases",
198
+ "error": info["error"],
199
+ }
200
+
201
+ return {
202
+ "changelog": info.get("changelog") or "",
203
+ "changelog_url": info.get("changelog_url")
204
+ or f"https://github.com/{self._github_repo}/releases/tag/v{current_ver}",
205
+ "error": None,
206
+ }
207
+
208
    def _ensure_version_info(self, force: bool = False) -> dict[str, Any]:
        """Return a snapshot of version/update metadata, refreshing if stale.

        The cache is refreshed when expired or when *force* is true.  The
        network fetches run outside the lock; an optimistic expiry bump
        prevents a thundering herd of concurrent refreshes.

        Returns:
            A copy of ``_version_cache`` with an ``update_state`` copy added.
        """
        now = datetime.utcnow()
        with self._version_lock:
            if not force and now < self._version_cache_expiry:
                # Cache still fresh: return copies so callers can't mutate state.
                snapshot = dict(self._version_cache)
                snapshot["update_state"] = dict(self._update_state)
                return snapshot
            # optimistic expiry to avoid concurrent fetches
            self._version_cache_expiry = now + timedelta(minutes=5)

        # Network I/O happens outside the lock.
        latest_info = self._fetch_version_info()
        current_ver_info = self._fetch_current_version_changelog()

        with self._version_lock:
            # Merge only the fields the fetch actually produced, so a partial
            # failure does not clobber previously-known values.
            if latest_info:
                if latest_info.get("latest_version") is not None:
                    self._version_cache["latest_version"] = latest_info["latest_version"]
                if latest_info.get("changelog") is not None:
                    self._version_cache["changelog"] = latest_info.get("changelog") or ""
                if latest_info.get("changelog_url"):
                    self._version_cache["changelog_url"] = latest_info["changelog_url"]
                if "update_available" in latest_info:
                    self._version_cache["update_available"] = bool(latest_info["update_available"])
                if "error" in latest_info:
                    self._version_cache["error"] = latest_info["error"]
            # Store current version changelog
            if current_ver_info and not current_ver_info.get("error"):
                self._version_cache["current_version_changelog"] = (
                    current_ver_info.get("changelog") or ""
                )

            self._version_cache["current_version"] = patched_version
            self._version_cache["last_checked"] = now.isoformat()

            # Add installation type and binary download info
            from qBitrr.auto_update import get_binary_download_url, get_installation_type

            install_type = get_installation_type()
            self._version_cache["installation_type"] = install_type

            # If binary and update available, get download URL
            if install_type == "binary" and self._version_cache.get("update_available"):
                latest_version = self._version_cache.get("latest_version")
                if latest_version:
                    binary_info = get_binary_download_url(latest_version, self.logger)
                    self._version_cache["binary_download_url"] = binary_info.get("url")
                    self._version_cache["binary_download_name"] = binary_info.get("name")
                    self._version_cache["binary_download_size"] = binary_info.get("size")
                    if binary_info.get("error"):
                        self._version_cache["binary_download_error"] = binary_info["error"]

            # Extend cache validity if fetch succeeded; otherwise allow quick retry.
            if not latest_info or latest_info.get("error"):
                self._version_cache_expiry = now + timedelta(minutes=5)
            else:
                self._version_cache_expiry = now + timedelta(hours=1)
            snapshot = dict(self._version_cache)
            snapshot["update_state"] = dict(self._update_state)
            return snapshot
267
+
268
+ def _trigger_manual_update(self) -> tuple[bool, str]:
269
+ with self._version_lock:
270
+ if self._update_state["in_progress"]:
271
+ return False, "An update is already in progress."
272
+ update_thread = threading.Thread(
273
+ target=self._run_manual_update, name="ManualUpdater", daemon=True
274
+ )
275
+ self._update_state["in_progress"] = True
276
+ self._update_state["last_error"] = None
277
+ self._update_state["last_result"] = None
278
+ self._update_thread = update_thread
279
+ update_thread.start()
280
+ return True, "started"
281
+
282
    def _run_manual_update(self) -> None:
        """Worker body for a manual update (runs on the ManualUpdater thread).

        Prefers the manager's ``_perform_auto_update``; if that hook does not
        exist, falls back to ``perform_self_update`` plus a restart request.
        Always records the outcome in ``_update_state``, invalidates the
        version cache, and attempts to re-arm auto-update scheduling.
        """
        result = "success"
        error_message: str | None = None
        try:
            self.logger.notice("Manual update triggered from WebUI")
            try:
                self.manager._perform_auto_update()
            except AttributeError:
                # Older/alternative manager without the hook: self-update
                # via pip, then ask the manager to restart the process.
                from qBitrr.auto_update import perform_self_update

                if not perform_self_update(self.manager.logger):
                    raise RuntimeError("pip upgrade did not complete successfully")
                try:
                    self.manager.request_restart()
                except Exception:
                    self.logger.warning(
                        "Update applied but restart request failed; exiting manually."
                    )
        except Exception as exc:
            result = "error"
            error_message = str(exc)
            self.logger.exception("Manual update failed")
        finally:
            completed_at = datetime.utcnow().isoformat()
            with self._version_lock:
                self._update_state.update(
                    {
                        "in_progress": False,
                        "last_result": result,
                        "last_error": error_message,
                        "completed_at": completed_at,
                    }
                )
                self._update_thread = None
                # Expire the cache so the next query reflects the new version.
                self._version_cache_expiry = datetime.utcnow() - timedelta(seconds=1)
            try:
                self.manager.configure_auto_update()
            except Exception:
                self.logger.exception("Failed to reconfigure auto update after manual update")
            try:
                self._ensure_version_info(force=True)
            except Exception:
                self.logger.debug("Version metadata refresh after update failed", exc_info=True)
325
+
326
+ @staticmethod
327
+ def _safe_str(value: Any) -> str:
328
+ if value is None:
329
+ return ""
330
+ return str(value)
331
+
332
+ def _ensure_arr_db(self, arr) -> bool:
333
+ if not getattr(arr, "search_setup_completed", False):
334
+ try:
335
+ arr.register_search_mode()
336
+ except Exception:
337
+ return False
338
+ if not getattr(arr, "search_setup_completed", False):
339
+ return False
340
+ if not getattr(arr, "_webui_db_loaded", False):
341
+ try:
342
+ arr.db_update()
343
+ arr._webui_db_loaded = True
344
+ except Exception:
345
+ arr._webui_db_loaded = False
346
+ return True
347
+
348
+ @staticmethod
349
+ def _safe_bool(value: Any) -> bool:
350
+ return bool(value) and str(value).lower() not in {"0", "false", "none"}
351
+
352
    def _radarr_movies_from_db(
        self,
        arr,
        search: str | None,
        page: int,
        page_size: int,
        year_min: int | None = None,
        year_max: int | None = None,
        monitored: bool | None = None,
        has_file: bool | None = None,
        quality_met: bool | None = None,
        is_request: bool | None = None,
    ) -> dict[str, Any]:
        """Return a paginated, filtered view of Radarr movies from the local DB.

        Args:
            arr: Radarr instance whose ``model_file`` (peewee model) and
                ``db`` are queried.
            search: optional substring match on ``Title``.
            page: zero-based page index (clamped to >= 0).
            page_size: items per page (clamped to >= 1).
            year_min/year_max/monitored/has_file/quality_met/is_request:
                optional filters; ``None`` means "don't filter".

        Returns:
            Dict with global (unfiltered) ``counts``, the filtered ``total``,
            echoing ``page``/``page_size``, and the ``movies`` payload list.
        """
        if not self._ensure_arr_db(arr):
            # Search DB unavailable: empty payload with zeroed counts.
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                    "quality_met": 0,
                    "requests": 0,
                },
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "movies": [],
            }
        model = getattr(arr, "model_file", None)
        db = getattr(arr, "db", None)
        if model is None or db is None:
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                    "quality_met": 0,
                    "requests": 0,
                },
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "movies": [],
            }
        page = max(page, 0)
        page_size = max(page_size, 1)
        with db.connection_context():
            base_query = model.select()

            # Calculate counts (global, ignoring the request's filters).
            monitored_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.Monitored == True)  # noqa: E712
                .scalar()
                or 0
            )
            # "Available" = monitored AND has a real file id (non-null, non-zero).
            available_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(
                    (model.Monitored == True)  # noqa: E712
                    & (model.MovieFileId.is_null(False))
                    & (model.MovieFileId != 0)
                )
                .scalar()
                or 0
            )
            missing_count = max(monitored_count - available_count, 0)
            quality_met_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.QualityMet == True)  # noqa: E712
                .scalar()
                or 0
            )
            request_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.IsRequest == True)  # noqa: E712
                .scalar()
                or 0
            )

            # Build filtered query
            query = base_query
            if search:
                query = query.where(model.Title.contains(search))
            if year_min is not None:
                query = query.where(model.Year >= year_min)
            if year_max is not None:
                query = query.where(model.Year <= year_max)
            if monitored is not None:
                query = query.where(model.Monitored == monitored)
            if has_file is not None:
                if has_file:
                    query = query.where(
                        (model.MovieFileId.is_null(False)) & (model.MovieFileId != 0)
                    )
                else:
                    query = query.where(
                        (model.MovieFileId.is_null(True)) | (model.MovieFileId == 0)
                    )
            if quality_met is not None:
                query = query.where(model.QualityMet == quality_met)
            if is_request is not None:
                query = query.where(model.IsRequest == is_request)

            total = query.count()
            # peewee paginate() is 1-based, the API page index is 0-based.
            page_items = query.order_by(model.Title.asc()).paginate(page + 1, page_size).iterator()
            movies = []
            for movie in page_items:
                # Read quality profile from database (columns may not exist
                # on older schemas, hence the hasattr guards).
                quality_profile_id = (
                    getattr(movie, "QualityProfileId", None)
                    if hasattr(model, "QualityProfileId")
                    else None
                )
                quality_profile_name = (
                    getattr(movie, "QualityProfileName", None)
                    if hasattr(model, "QualityProfileName")
                    else None
                )

                movies.append(
                    {
                        "id": movie.EntryId,
                        "title": movie.Title or "",
                        "year": movie.Year,
                        "monitored": self._safe_bool(movie.Monitored),
                        "hasFile": self._safe_bool(movie.MovieFileId),
                        "qualityMet": self._safe_bool(movie.QualityMet),
                        "isRequest": self._safe_bool(movie.IsRequest),
                        "upgrade": self._safe_bool(movie.Upgrade),
                        "customFormatScore": movie.CustomFormatScore,
                        "minCustomFormatScore": movie.MinCustomFormatScore,
                        "customFormatMet": self._safe_bool(movie.CustomFormatMet),
                        "reason": movie.Reason,
                        "qualityProfileId": quality_profile_id,
                        "qualityProfileName": quality_profile_name,
                    }
                )
        return {
            "counts": {
                "available": available_count,
                "monitored": monitored_count,
                "missing": missing_count,
                "quality_met": quality_met_count,
                "requests": request_count,
            },
            "total": total,
            "page": page,
            "page_size": page_size,
            "movies": movies,
        }
502
+
503
    def _lidarr_albums_from_db(
        self,
        arr,
        search: str | None,
        page: int,
        page_size: int,
        monitored: bool | None = None,
        has_file: bool | None = None,
        quality_met: bool | None = None,
        is_request: bool | None = None,
        group_by_artist: bool = True,
    ) -> dict[str, Any]:
        """Return a paginated, filtered view of Lidarr albums (with tracks).

        When *group_by_artist* is true, pagination is by distinct artist (all
        of a paged artist's albums are returned together and ``total`` counts
        artists); otherwise pagination and ``total`` are per-album.

        Returns:
            Dict with global ``counts``, the ``total`` (artists or albums),
            ``page``/``page_size``, and ``albums`` — each entry bundling the
            album payload, per-track totals, and the track list.
        """
        if not self._ensure_arr_db(arr):
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                    "quality_met": 0,
                    "requests": 0,
                },
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "albums": [],
            }
        model = getattr(arr, "model_file", None)
        db = getattr(arr, "db", None)
        if model is None or db is None:
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                    "quality_met": 0,
                    "requests": 0,
                },
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "albums": [],
            }
        page = max(page, 0)
        page_size = max(page_size, 1)

        # Quality profiles are now stored in the database
        # No need to fetch from API

        with db.connection_context():
            base_query = model.select()

            # Calculate counts (global, ignoring the request's filters).
            monitored_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.Monitored == True)  # noqa: E712
                .scalar()
                or 0
            )
            available_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(
                    (model.Monitored == True)  # noqa: E712
                    & (model.AlbumFileId.is_null(False))
                    & (model.AlbumFileId != 0)
                )
                .scalar()
                or 0
            )
            missing_count = max(monitored_count - available_count, 0)
            quality_met_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.QualityMet == True)  # noqa: E712
                .scalar()
                or 0
            )
            request_count = (
                model.select(fn.COUNT(model.EntryId))
                .where(model.IsRequest == True)  # noqa: E712
                .scalar()
                or 0
            )

            # Build filtered query
            query = base_query
            if search:
                query = query.where(model.Title.contains(search))
            if monitored is not None:
                query = query.where(model.Monitored == monitored)
            if has_file is not None:
                if has_file:
                    query = query.where(
                        (model.AlbumFileId.is_null(False)) & (model.AlbumFileId != 0)
                    )
                else:
                    query = query.where(
                        (model.AlbumFileId.is_null(True)) | (model.AlbumFileId == 0)
                    )
            if quality_met is not None:
                query = query.where(model.QualityMet == quality_met)
            if is_request is not None:
                query = query.where(model.IsRequest == is_request)

            albums = []

            if group_by_artist:
                # Paginate by artists: Two-pass approach with Peewee
                # First, get all distinct artist names from the filtered query
                # Use a subquery to get distinct artists efficiently
                artists_subquery = (
                    query.select(model.ArtistTitle).distinct().order_by(model.ArtistTitle)
                )

                # Convert to list to avoid multiple iterations
                all_artists = [row.ArtistTitle for row in artists_subquery]
                total = len(all_artists)

                # Paginate the artist list in Python
                start_idx = page * page_size
                end_idx = start_idx + page_size
                paginated_artists = all_artists[start_idx:end_idx]

                # Fetch all albums for these paginated artists
                if paginated_artists:
                    album_results = list(
                        query.where(model.ArtistTitle.in_(paginated_artists)).order_by(
                            model.ArtistTitle, model.ReleaseDate
                        )
                    )
                else:
                    album_results = []
            else:
                # Flat mode: paginate by albums as before
                total = query.count()
                album_results = list(query.order_by(model.Title).paginate(page + 1, page_size))

            for album in album_results:
                # Always fetch tracks from database (Lidarr only)
                track_model = getattr(arr, "track_file_model", None)
                tracks_list = []
                track_monitored_count = 0
                track_available_count = 0

                if track_model:
                    try:
                        # Query tracks from database for this album
                        track_query = (
                            track_model.select()
                            .where(track_model.AlbumId == album.EntryId)
                            .order_by(track_model.TrackNumber)
                        )
                        track_count = track_query.count()
                        self.logger.debug(
                            f"Album {album.EntryId} ({album.Title}): Found {track_count} tracks in database"
                        )

                        for track in track_query:
                            is_monitored = self._safe_bool(track.Monitored)
                            # NOTE(review): this rebinds the ``has_file``
                            # parameter; harmless because the album filter was
                            # already applied above, but worth renaming.
                            has_file = self._safe_bool(track.HasFile)

                            if is_monitored:
                                track_monitored_count += 1
                                if has_file:
                                    track_available_count += 1

                            tracks_list.append(
                                {
                                    "id": track.EntryId,
                                    "trackNumber": track.TrackNumber,
                                    "title": track.Title,
                                    "duration": track.Duration,
                                    "hasFile": has_file,
                                    "trackFileId": track.TrackFileId,
                                    "monitored": is_monitored,
                                }
                            )
                    except Exception as e:
                        # Track fetch is best-effort; the album is still listed.
                        self.logger.warning(
                            f"Failed to fetch tracks for album {album.EntryId} ({album.Title}): {e}"
                        )

                track_missing_count = max(track_monitored_count - track_available_count, 0)

                # Get quality profile from database model
                quality_profile_id = getattr(album, "QualityProfileId", None)
                quality_profile_name = getattr(album, "QualityProfileName", None)

                # Build album data in Sonarr-like structure
                album_item = {
                    "album": {
                        "id": album.EntryId,
                        "title": album.Title,
                        "artistId": album.ArtistId,
                        "artistName": album.ArtistTitle,
                        "monitored": self._safe_bool(album.Monitored),
                        "hasFile": bool(album.AlbumFileId and album.AlbumFileId != 0),
                        "foreignAlbumId": album.ForeignAlbumId,
                        "releaseDate": (
                            album.ReleaseDate.isoformat()
                            if album.ReleaseDate and hasattr(album.ReleaseDate, "isoformat")
                            else album.ReleaseDate if isinstance(album.ReleaseDate, str) else None
                        ),
                        "qualityMet": self._safe_bool(album.QualityMet),
                        "isRequest": self._safe_bool(album.IsRequest),
                        "upgrade": self._safe_bool(album.Upgrade),
                        "customFormatScore": album.CustomFormatScore,
                        "minCustomFormatScore": album.MinCustomFormatScore,
                        "customFormatMet": self._safe_bool(album.CustomFormatMet),
                        "reason": album.Reason,
                        "qualityProfileId": quality_profile_id,
                        "qualityProfileName": quality_profile_name,
                    },
                    "totals": {
                        "available": track_available_count,
                        "monitored": track_monitored_count,
                        "missing": track_missing_count,
                    },
                    "tracks": tracks_list,
                }

                albums.append(album_item)
        return {
            "counts": {
                "available": available_count,
                "monitored": monitored_count,
                "missing": missing_count,
                "quality_met": quality_met_count,
                "requests": request_count,
            },
            "total": total,
            "page": page,
            "page_size": page_size,
            "albums": albums,
        }
736
+
737
    def _lidarr_tracks_from_db(
        self,
        arr,
        search: str | None,
        page: int,
        page_size: int,
        monitored: bool | None = None,
        has_file: bool | None = None,
    ) -> dict[str, Any]:
        """Return a flat, paginated list of Lidarr tracks joined with albums.

        Search matches track title, album title, or artist title.  Counts
        are global (join-wide), not restricted by the request's filters.
        NOTE(review): unlike the sibling methods, ``page``/``page_size`` are
        not clamped here — callers are presumed to pass sane values.
        """
        if not self._ensure_arr_db(arr):
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                },
                "total": 0,
                "page": page,
                "page_size": page_size,
                "tracks": [],
            }

        track_model = getattr(arr, "track_file_model", None)
        album_model = getattr(arr, "model_file", None)

        if not track_model or not album_model:
            return {
                "counts": {
                    "available": 0,
                    "monitored": 0,
                    "missing": 0,
                },
                "total": 0,
                "page": page,
                "page_size": page_size,
                "tracks": [],
            }

        try:
            # Join tracks with albums to get artist/album info
            query = (
                track_model.select(
                    track_model,
                    album_model.Title.alias("AlbumTitle"),
                    album_model.ArtistTitle,
                    album_model.ArtistId,
                )
                .join(album_model, on=(track_model.AlbumId == album_model.EntryId))
                # NOTE(review): ``.where(True)`` looks like a no-op seed so
                # later ``.where`` calls can be chained — confirm intent.
                .where(True)
            )

            # Apply filters
            if monitored is not None:
                query = query.where(track_model.Monitored == monitored)
            if has_file is not None:
                query = query.where(track_model.HasFile == has_file)
            if search:
                query = query.where(
                    (track_model.Title.contains(search))
                    | (album_model.Title.contains(search))
                    | (album_model.ArtistTitle.contains(search))
                )

            # Get counts
            available_count = (
                track_model.select()
                .join(album_model, on=(track_model.AlbumId == album_model.EntryId))
                .where(track_model.HasFile == True)
                .count()
            )
            monitored_count = (
                track_model.select()
                .join(album_model, on=(track_model.AlbumId == album_model.EntryId))
                .where(track_model.Monitored == True)
                .count()
            )
            missing_count = (
                track_model.select()
                .join(album_model, on=(track_model.AlbumId == album_model.EntryId))
                .where(track_model.HasFile == False)
                .count()
            )

            total = query.count()

            # Apply pagination (peewee paginate() is 1-based).
            query = query.order_by(
                album_model.ArtistTitle, album_model.Title, track_model.TrackNumber
            ).paginate(page + 1, page_size)

            tracks = []
            for track in query:
                tracks.append(
                    {
                        "id": track.EntryId,
                        "trackNumber": track.TrackNumber,
                        "title": track.Title,
                        "duration": track.Duration,
                        "hasFile": track.HasFile,
                        "trackFileId": track.TrackFileId,
                        "monitored": track.Monitored,
                        "albumId": track.AlbumId,
                        "albumTitle": track.AlbumTitle,
                        "artistTitle": track.ArtistTitle,
                        "artistId": track.ArtistId,
                    }
                )

            return {
                "counts": {
                    "available": available_count,
                    "monitored": monitored_count,
                    "missing": missing_count,
                },
                "total": total,
                "page": page,
                "page_size": page_size,
                "tracks": tracks,
            }
        except Exception as e:
            # Degrade to an empty payload rather than surfacing a 500.
            self.logger.error(f"Error fetching Lidarr tracks: {e}")
            return {
                "counts": {"available": 0, "monitored": 0, "missing": 0},
                "total": 0,
                "page": page,
                "page_size": page_size,
                "tracks": [],
            }
865
+
866
    def _sonarr_series_from_db(
        self,
        arr,
        search: str | None,
        page: int,
        page_size: int,
        *,
        missing_only: bool = False,
    ) -> dict[str, Any]:
        """Build a paginated, season-grouped payload of Sonarr series from the local qBitrr DB.

        Two data paths are used:

        1. If the Arr instance has a dedicated series table (``series_file_model``),
           series rows are paginated by title and their episodes aggregated per season.
        2. Otherwise (or if path 1 yields nothing), series are reconstructed from
           distinct ``SeriesId``/``SeriesTitle`` values on the episode table.

        Args:
            arr: Managed Arr object; must expose ``model_file`` (episodes table) and
                ``db`` (peewee database). ``series_file_model`` and ``client`` are
                optional.
            search: Optional case-style substring filter on series/episode titles.
            page: Zero-based page index (clamped to the valid range).
            page_size: Rows per page (clamped to >= 1).
            missing_only: If True, restrict output to monitored episodes without a file
                and drop series/seasons that end up empty.

        Returns:
            Dict with global ``counts`` (available/monitored/missing episode totals),
            ``total`` (series count after filtering), the resolved ``page``,
            ``page_size`` and the ``series`` payload list. On an unusable DB the same
            shape is returned with zeroed counts and an empty series list.
        """
        # Bail out early with an empty-but-well-formed payload if the Arr DB
        # cannot be (re)opened or the required models are absent.
        if not self._ensure_arr_db(arr):
            return {
                "counts": {"available": 0, "monitored": 0, "missing": 0},
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "series": [],
            }
        episodes_model = getattr(arr, "model_file", None)
        series_model = getattr(arr, "series_file_model", None)
        db = getattr(arr, "db", None)
        if episodes_model is None or db is None:
            return {
                "counts": {"available": 0, "monitored": 0, "missing": 0},
                "total": 0,
                "page": max(page, 0),
                "page_size": max(page_size, 1),
                "series": [],
            }
        # Normalize paging inputs; resolved_page may later be clamped down once
        # the real total is known.
        page = max(page, 0)
        page_size = max(page_size, 1)
        resolved_page = page
        # An episode counts as "missing" when it has no file reference at all
        # (NULL) or a zero EpisodeFileId sentinel.
        missing_condition = episodes_model.EpisodeFileId.is_null(True) | (
            episodes_model.EpisodeFileId == 0
        )

        with db.connection_context():
            # Global counters across the whole library (not just the current page).
            monitored_count = (
                episodes_model.select(fn.COUNT(episodes_model.EntryId))
                .where(episodes_model.Monitored == True)  # noqa: E712
                .scalar()
                or 0
            )
            available_count = (
                episodes_model.select(fn.COUNT(episodes_model.EntryId))
                .where(
                    (episodes_model.Monitored == True)  # noqa: E712
                    & (episodes_model.EpisodeFileId.is_null(False))
                    & (episodes_model.EpisodeFileId != 0)
                )
                .scalar()
                or 0
            )
            missing_count = max(monitored_count - available_count, 0)
            missing_series_ids: list[int] = []
            if missing_only:
                # Pre-compute which series have at least one monitored+missing
                # episode so the series query below can be restricted to them.
                missing_series_ids = [
                    row.SeriesId
                    for row in episodes_model.select(episodes_model.SeriesId)
                    .where((episodes_model.Monitored == True) & missing_condition)  # noqa: E712
                    .distinct()
                    if getattr(row, "SeriesId", None) is not None
                ]
                if not missing_series_ids:
                    # Nothing missing anywhere: return counts with an empty page.
                    return {
                        "counts": {
                            "available": available_count,
                            "monitored": monitored_count,
                            "missing": missing_count,
                        },
                        "total": 0,
                        "page": resolved_page,
                        "page_size": page_size,
                        "series": [],
                    }
            payload: list[dict[str, Any]] = []
            total_series = 0

            # --- Path 1: dedicated series table ---------------------------------
            if series_model is not None:
                series_query = series_model.select()
                if search:
                    series_query = series_query.where(series_model.Title.contains(search))
                if missing_only and missing_series_ids:
                    series_query = series_query.where(series_model.EntryId.in_(missing_series_ids))
                total_series = series_query.count()
                if total_series:
                    # Clamp the requested page to the last available page.
                    max_pages = (total_series + page_size - 1) // page_size
                    if max_pages:
                        resolved_page = min(resolved_page, max_pages - 1)
                resolved_page = max(resolved_page, 0)
                series_rows = (
                    series_query.order_by(series_model.Title.asc())
                    .paginate(resolved_page + 1, page_size)  # peewee pages are 1-based
                    .iterator()
                )
                for series in series_rows:
                    episodes_query = episodes_model.select().where(
                        episodes_model.SeriesId == series.EntryId
                    )
                    if missing_only:
                        episodes_query = episodes_query.where(missing_condition)
                    episodes_query = episodes_query.order_by(
                        episodes_model.SeasonNumber.asc(),
                        episodes_model.EpisodeNumber.asc(),
                    )
                    episodes = episodes_query.iterator()
                    episodes_list = list(episodes)
                    self.logger.debug(
                        f"[Sonarr Series] Series {getattr(series, 'Title', 'unknown')} (ID {getattr(series, 'EntryId', '?')}) has {len(episodes_list)} episodes (missing_only={missing_only})"
                    )
                    # Group this series' episodes into per-season buckets while
                    # tallying series-level monitored/available figures.
                    seasons: dict[str, dict[str, Any]] = {}
                    series_monitored = 0
                    series_available = 0
                    for ep in episodes_list:
                        season_value = getattr(ep, "SeasonNumber", None)
                        season_key = (
                            str(season_value) if season_value is not None else "unknown"
                        )
                        season_bucket = seasons.setdefault(
                            season_key,
                            {"monitored": 0, "available": 0, "episodes": []},
                        )
                        is_monitored = self._safe_bool(getattr(ep, "Monitored", None))
                        has_file = self._safe_bool(getattr(ep, "EpisodeFileId", None))
                        if is_monitored:
                            season_bucket["monitored"] += 1
                            series_monitored += 1
                        if has_file:
                            season_bucket["available"] += 1
                            if is_monitored:
                                series_available += 1
                        # AirDateUtc may be a datetime, an ISO string, or absent;
                        # always emit a string (possibly empty).
                        air_date = getattr(ep, "AirDateUtc", None)
                        if hasattr(air_date, "isoformat"):
                            try:
                                air_value = air_date.isoformat()
                            except Exception:
                                air_value = str(air_date)
                        elif isinstance(air_date, str):
                            air_value = air_date
                        else:
                            air_value = ""
                        # In missing_only mode, only episodes without a file are listed.
                        if (not missing_only) or (not has_file):
                            season_bucket["episodes"].append(
                                {
                                    "episodeNumber": getattr(ep, "EpisodeNumber", None),
                                    "title": getattr(ep, "Title", "") or "",
                                    "monitored": is_monitored,
                                    "hasFile": has_file,
                                    "airDateUtc": air_value,
                                    "reason": getattr(ep, "Reason", None),
                                }
                            )
                    # Derive per-season missing counts from the tallies above.
                    for bucket in seasons.values():
                        monitored_eps = int(bucket.get("monitored", 0) or 0)
                        available_eps = int(bucket.get("available", 0) or 0)
                        bucket["missing"] = max(
                            monitored_eps - min(available_eps, monitored_eps), 0
                        )
                    series_missing = max(series_monitored - series_available, 0)
                    if missing_only:
                        # Drop seasons that ended up with no listed episodes, and
                        # skip the series entirely if nothing remains.
                        seasons = {
                            key: data for key, data in seasons.items() if data["episodes"]
                        }
                        if not seasons:
                            continue

                    # Get quality profile for this series from database
                    series_id = getattr(series, "EntryId", None)
                    quality_profile_id = (
                        getattr(series, "QualityProfileId", None)
                        if hasattr(series_model, "QualityProfileId")
                        else None
                    )
                    quality_profile_name = (
                        getattr(series, "QualityProfileName", None)
                        if hasattr(series_model, "QualityProfileName")
                        else None
                    )

                    payload.append(
                        {
                            "series": {
                                "id": series_id,
                                "title": getattr(series, "Title", "") or "",
                                "qualityProfileId": quality_profile_id,
                                "qualityProfileName": quality_profile_name,
                            },
                            "totals": {
                                "available": series_available,
                                "monitored": series_monitored,
                                "missing": series_missing,
                            },
                            "seasons": seasons,
                        }
                    )

            # --- Path 2: reconstruct series from episode rows --------------------
            if not payload:
                # Fallback: construct series payload from episode data (episode mode)
                base_episode_query = episodes_model.select()
                if search:
                    # OR together title filters: series title (when the column
                    # exists) plus episode title.
                    search_filters = []
                    if hasattr(episodes_model, "SeriesTitle"):
                        search_filters.append(episodes_model.SeriesTitle.contains(search))
                    search_filters.append(episodes_model.Title.contains(search))
                    expr = search_filters[0]
                    for extra in search_filters[1:]:
                        expr |= extra
                    base_episode_query = base_episode_query.where(expr)
                if missing_only:
                    base_episode_query = base_episode_query.where(missing_condition)

                series_id_field = (
                    getattr(episodes_model, "SeriesId", None)
                    if hasattr(episodes_model, "SeriesId")
                    else None
                )
                series_title_field = (
                    getattr(episodes_model, "SeriesTitle", None)
                    if hasattr(episodes_model, "SeriesTitle")
                    else None
                )

                # Build the distinct "series key" column list from whichever
                # identifying columns the episode table actually has.
                distinct_fields = []
                field_names: list[str] = []
                if series_id_field is not None:
                    distinct_fields.append(series_id_field)
                    field_names.append("SeriesId")
                if series_title_field is not None:
                    distinct_fields.append(series_title_field)
                    field_names.append("SeriesTitle")
                if not distinct_fields:
                    # Fall back to title only to avoid empty select
                    distinct_fields.append(episodes_model.Title.alias("SeriesTitle"))
                    field_names.append("SeriesTitle")

                distinct_query = (
                    base_episode_query.select(*distinct_fields)
                    .distinct()
                    .order_by(
                        series_title_field.asc()
                        if series_title_field is not None
                        else episodes_model.Title.asc()
                    )
                )
                series_key_rows = list(distinct_query.tuples())
                total_series = len(series_key_rows)
                if total_series:
                    # Manual pagination over the materialized key list.
                    max_pages = (total_series + page_size - 1) // page_size
                    resolved_page = min(resolved_page, max_pages - 1)
                    resolved_page = max(resolved_page, 0)
                    start = resolved_page * page_size
                    end = start + page_size
                    page_keys = series_key_rows[start:end]
                else:
                    resolved_page = 0
                    page_keys = []

                payload = []
                for key in page_keys:
                    key_data = dict(zip(field_names, key))
                    series_id = key_data.get("SeriesId")
                    series_title = key_data.get("SeriesTitle")
                    # AND together whichever key parts are present to select this
                    # pseudo-series' episodes.
                    episode_conditions = []
                    if series_id is not None:
                        episode_conditions.append(episodes_model.SeriesId == series_id)
                    if series_title is not None:
                        episode_conditions.append(episodes_model.SeriesTitle == series_title)
                    episodes_query = episodes_model.select()
                    if episode_conditions:
                        condition = episode_conditions[0]
                        for extra in episode_conditions[1:]:
                            condition &= extra
                        episodes_query = episodes_query.where(condition)
                    if missing_only:
                        episodes_query = episodes_query.where(missing_condition)
                    episodes_query = episodes_query.order_by(
                        episodes_model.SeasonNumber.asc(),
                        episodes_model.EpisodeNumber.asc(),
                    )
                    seasons: dict[str, dict[str, Any]] = {}
                    series_monitored = 0
                    series_available = 0
                    # Track quality profile from first episode (all episodes in a series share the same profile)
                    quality_profile_id = None
                    quality_profile_name = None
                    for ep in episodes_query.iterator():
                        # Capture quality profile from first episode if available
                        if quality_profile_id is None and hasattr(ep, "QualityProfileId"):
                            quality_profile_id = getattr(ep, "QualityProfileId", None)
                        if quality_profile_name is None and hasattr(ep, "QualityProfileName"):
                            quality_profile_name = getattr(ep, "QualityProfileName", None)
                        season_value = getattr(ep, "SeasonNumber", None)
                        season_key = str(season_value) if season_value is not None else "unknown"
                        season_bucket = seasons.setdefault(
                            season_key,
                            {"monitored": 0, "available": 0, "episodes": []},
                        )
                        is_monitored = self._safe_bool(getattr(ep, "Monitored", None))
                        has_file = self._safe_bool(getattr(ep, "EpisodeFileId", None))
                        if is_monitored:
                            season_bucket["monitored"] += 1
                            series_monitored += 1
                        if has_file:
                            season_bucket["available"] += 1
                            if is_monitored:
                                series_available += 1
                        air_date = getattr(ep, "AirDateUtc", None)
                        if hasattr(air_date, "isoformat"):
                            try:
                                air_value = air_date.isoformat()
                            except Exception:
                                air_value = str(air_date)
                        elif isinstance(air_date, str):
                            air_value = air_date
                        else:
                            air_value = ""
                        # NOTE(review): unlike path 1, this branch appends every
                        # episode even in missing_only mode; the query itself was
                        # already restricted by missing_condition above.
                        season_bucket["episodes"].append(
                            {
                                "episodeNumber": getattr(ep, "EpisodeNumber", None),
                                "title": getattr(ep, "Title", "") or "",
                                "monitored": is_monitored,
                                "hasFile": has_file,
                                "airDateUtc": air_value,
                                "reason": getattr(ep, "Reason", None),
                            }
                        )
                    for bucket in seasons.values():
                        monitored_eps = int(bucket.get("monitored", 0) or 0)
                        available_eps = int(bucket.get("available", 0) or 0)
                        bucket["missing"] = max(
                            monitored_eps - min(available_eps, monitored_eps), 0
                        )
                    series_missing = max(series_monitored - series_available, 0)
                    if missing_only:
                        seasons = {key: data for key, data in seasons.items() if data["episodes"]}
                        if not seasons:
                            continue

                    # If quality profile is still None, fetch from Sonarr API
                    if quality_profile_id is None and series_id is not None:
                        try:
                            client = getattr(arr, "client", None)
                            if client and hasattr(client, "get_series"):
                                series_data = client.get_series(series_id)
                                if series_data:
                                    quality_profile_id = series_data.get("qualityProfileId")
                                    # Get quality profile name from cache or API
                                    if quality_profile_id:
                                        quality_cache = getattr(arr, "_quality_profile_cache", {})
                                        if quality_profile_id in quality_cache:
                                            quality_profile_name = quality_cache[
                                                quality_profile_id
                                            ].get("name")
                                        elif hasattr(client, "get_quality_profile"):
                                            try:
                                                profile = client.get_quality_profile(
                                                    quality_profile_id
                                                )
                                                quality_profile_name = (
                                                    profile.get("name") if profile else None
                                                )
                                            except Exception:
                                                pass
                        except Exception:
                            # Best-effort enrichment only; the payload is valid
                            # without a quality profile.
                            pass

                    payload.append(
                        {
                            "series": {
                                "id": series_id,
                                "title": (
                                    series_title
                                    or (
                                        f"Series {len(payload) + 1}"
                                        if series_id is None
                                        else str(series_id)
                                    )
                                ),
                                "qualityProfileId": quality_profile_id,
                                "qualityProfileName": quality_profile_name,
                            },
                            "totals": {
                                "available": series_available,
                                "monitored": series_monitored,
                                "missing": series_missing,
                            },
                            "seasons": seasons,
                        }
                    )

        # Assemble the final response envelope.
        result = {
            "counts": {
                "available": available_count,
                "monitored": monitored_count,
                "missing": missing_count,
            },
            "total": total_series,
            "page": resolved_page,
            "page_size": page_size,
            "series": payload,
        }
        if payload:
            # Log a summary of the first series as a sanity check on the shape
            # of what the API is about to return.
            first_series = payload[0]
            first_seasons = first_series.get("seasons", {})
            total_episodes_in_response = sum(
                len(season.get("episodes", [])) for season in first_seasons.values()
            )
            self.logger.info(
                f"[Sonarr API] Returning {len(payload)} series, "
                f"first series '{first_series.get('series', {}).get('title', '?')}' has "
                f"{len(first_seasons)} seasons, {total_episodes_in_response} episodes "
                f"(missing_only={missing_only})"
            )
        return result
1279
+
1280
+ # Routes
1281
+ def _register_routes(self):
1282
+ app = self.app
1283
+ logs_root = (HOME_PATH / "logs").resolve()
1284
+
1285
+ def _resolve_log_file(name: str) -> Path | None:
1286
+ try:
1287
+ candidate = (logs_root / name).resolve(strict=False)
1288
+ except Exception:
1289
+ return None
1290
+ try:
1291
+ candidate.relative_to(logs_root)
1292
+ except ValueError:
1293
+ return None
1294
+ return candidate
1295
+
1296
+ def _managed_objects() -> dict[str, Any]:
1297
+ arr_manager = getattr(self.manager, "arr_manager", None)
1298
+ return getattr(arr_manager, "managed_objects", {}) if arr_manager else {}
1299
+
1300
+ def _ensure_arr_manager_ready() -> bool:
1301
+ return getattr(self.manager, "arr_manager", None) is not None
1302
+
1303
+ @app.get("/health")
1304
+ def health():
1305
+ return jsonify({"status": "ok"})
1306
+
1307
+ @app.get("/")
1308
+ def index():
1309
+ return redirect("/ui")
1310
+
1311
+ def _authorized():
1312
+ if not self.token:
1313
+ return True
1314
+ supplied = request.headers.get("Authorization", "").removeprefix(
1315
+ "Bearer "
1316
+ ) or request.args.get("token")
1317
+ return supplied == self.token
1318
+
1319
+ def require_token():
1320
+ if not _authorized():
1321
+ return jsonify({"error": "unauthorized"}), 401
1322
+ return None
1323
+
1324
+ @app.get("/ui")
1325
+ def ui_index():
1326
+ # Serve UI without requiring a token; API remains protected
1327
+ # Add cache-busting parameter based on config reload timestamp
1328
+ from flask import make_response
1329
+
1330
+ response = make_response(redirect("/static/index.html"))
1331
+ # Prevent caching of the UI entry point
1332
+ response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
1333
+ response.headers["Pragma"] = "no-cache"
1334
+ response.headers["Expires"] = "0"
1335
+ return response
1336
+
1337
+ @app.get("/sw.js")
1338
+ def service_worker():
1339
+ # Service worker must be served directly (not redirected) for PWA support
1340
+ # This allows the endpoint to be whitelisted in auth proxies (e.g., Authentik)
1341
+ import os
1342
+
1343
+ from flask import send_from_directory
1344
+
1345
+ static_dir = os.path.join(os.path.dirname(__file__), "static")
1346
+ response = send_from_directory(static_dir, "sw.js")
1347
+ # Prevent caching of the service worker to ensure updates are picked up
1348
+ response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
1349
+ response.headers["Pragma"] = "no-cache"
1350
+ response.headers["Expires"] = "0"
1351
+ return response
1352
+
1353
+ def _processes_payload() -> dict[str, Any]:
1354
+ procs = []
1355
+ search_activity_map = fetch_search_activities()
1356
+
1357
+ def _parse_timestamp(raw_value):
1358
+ if not raw_value:
1359
+ return None
1360
+ try:
1361
+ if isinstance(raw_value, (int, float)):
1362
+ return datetime.fromtimestamp(raw_value, timezone.utc).isoformat()
1363
+ if isinstance(raw_value, str):
1364
+ trimmed = raw_value.rstrip("Z")
1365
+ dt = datetime.fromisoformat(trimmed)
1366
+ if raw_value.endswith("Z"):
1367
+ dt = dt.replace(tzinfo=timezone.utc)
1368
+ elif dt.tzinfo is None:
1369
+ dt = dt.replace(tzinfo=timezone.utc)
1370
+ return dt.astimezone(timezone.utc).isoformat()
1371
+ except Exception:
1372
+ return None
1373
+ return None
1374
+
1375
+ def _format_queue_summary(arr_obj, record):
1376
+ if not isinstance(record, dict):
1377
+ return None
1378
+ pieces = []
1379
+ arr_type = (getattr(arr_obj, "type", "") or "").lower()
1380
+ if arr_type == "radarr":
1381
+ movie_info = record.get("movie") or {}
1382
+ title = movie_info.get("title")
1383
+ year = movie_info.get("year")
1384
+ release_title = record.get("title") or ""
1385
+ release_name = ""
1386
+ release_year = None
1387
+ if release_title:
1388
+ cleaned = release_title.split("/")[-1]
1389
+ cleaned = re.sub(r"\.[^.]+$", "", cleaned)
1390
+ cleaned = re.sub(r"[-_.]+", " ", cleaned).strip()
1391
+ release_name = cleaned
1392
+ match = re.match(
1393
+ r"(?P<name>.+?)\s+(?P<year>(?:19|20)\d{2})(?:\s|$)",
1394
+ cleaned,
1395
+ )
1396
+ if match:
1397
+ extracted_name = (match.group("name") or "").strip(" .-_")
1398
+ if extracted_name:
1399
+ release_name = re.sub(r"[-_.]+", " ", extracted_name).strip()
1400
+ release_year = match.group("year")
1401
+ if not title and release_name:
1402
+ title = release_name
1403
+ elif title and release_title and title == release_title and release_name:
1404
+ title = release_name
1405
+ if not year:
1406
+ year = release_year or record.get("year")
1407
+ if title:
1408
+ pieces.append(title)
1409
+ if year:
1410
+ pieces.append(str(year))
1411
+ elif arr_type == "sonarr":
1412
+ series = (record.get("series") or {}).get("title")
1413
+ episode = record.get("episode")
1414
+ if series:
1415
+ pieces.append(series)
1416
+ season = None
1417
+ episode_number = None
1418
+ if isinstance(episode, dict):
1419
+ season = episode.get("seasonNumber")
1420
+ episode_number = episode.get("episodeNumber")
1421
+ if season is not None and episode_number is not None:
1422
+ pieces.append(f"S{int(season):02d}E{int(episode_number):02d}")
1423
+ # Intentionally omit individual episode titles/status values
1424
+ else:
1425
+ title = record.get("title")
1426
+ if title:
1427
+ pieces.append(title)
1428
+ cleaned = [str(part) for part in pieces if part]
1429
+ return " | ".join(cleaned) if cleaned else None
1430
+
1431
+ def _collect_metrics(arr_obj):
1432
+ metrics = {
1433
+ "queue": None,
1434
+ "category": None,
1435
+ "summary": None,
1436
+ "timestamp": None,
1437
+ "metric_type": None,
1438
+ }
1439
+ manager_ref = getattr(arr_obj, "manager", None)
1440
+ if manager_ref and hasattr(manager_ref, "qbit_manager"):
1441
+ qbit_manager = manager_ref.qbit_manager
1442
+ else:
1443
+ qbit_manager = getattr(self.manager, "qbit_manager", self.manager)
1444
+ qbit_client = getattr(qbit_manager, "client", None)
1445
+ category = getattr(arr_obj, "category", None)
1446
+
1447
+ if isinstance(arr_obj, FreeSpaceManager):
1448
+ metrics["metric_type"] = "free-space"
1449
+ if qbit_client:
1450
+ try:
1451
+ torrents = qbit_client.torrents_info(status_filter="all")
1452
+ count = 0
1453
+ for torrent in torrents:
1454
+ tags = getattr(torrent, "tags", "") or ""
1455
+ if "qBitrr-free_space_paused" in str(tags):
1456
+ count += 1
1457
+ metrics["category"] = count
1458
+ metrics["queue"] = count
1459
+ except Exception:
1460
+ pass
1461
+ return metrics
1462
+
1463
+ if isinstance(arr_obj, PlaceHolderArr):
1464
+ metrics["metric_type"] = "category"
1465
+ if qbit_client and category:
1466
+ try:
1467
+ torrents = qbit_client.torrents_info(
1468
+ status_filter="all", category=category
1469
+ )
1470
+ count = sum(
1471
+ 1
1472
+ for torrent in torrents
1473
+ if getattr(torrent, "category", None) == category
1474
+ )
1475
+ metrics["queue"] = count
1476
+ metrics["category"] = count
1477
+ except Exception:
1478
+ pass
1479
+ return metrics
1480
+
1481
+ # Standard Arr (Radarr/Sonarr)
1482
+ records = []
1483
+ client = getattr(arr_obj, "client", None)
1484
+ if client is not None:
1485
+ try:
1486
+ raw_queue = arr_obj.get_queue(
1487
+ page=1, page_size=50, sort_direction="descending"
1488
+ )
1489
+ if isinstance(raw_queue, dict):
1490
+ records = raw_queue.get("records", []) or []
1491
+ else:
1492
+ records = list(raw_queue or [])
1493
+ except Exception:
1494
+ records = []
1495
+ queue_count = len(records)
1496
+ if queue_count:
1497
+ metrics["queue"] = queue_count
1498
+ records[0]
1499
+ if qbit_client and category:
1500
+ try:
1501
+ torrents = qbit_client.torrents_info(
1502
+ status_filter="all", category=category
1503
+ )
1504
+ metrics["category"] = sum(
1505
+ 1
1506
+ for torrent in torrents
1507
+ if getattr(torrent, "category", None) == category
1508
+ )
1509
+ except Exception:
1510
+ pass
1511
+ category_key = getattr(arr_obj, "category", None)
1512
+ if category_key:
1513
+ entry = search_activity_map.get(str(category_key))
1514
+ if isinstance(entry, Mapping):
1515
+ summary = entry.get("summary")
1516
+ timestamp = entry.get("timestamp")
1517
+ if summary:
1518
+ metrics["summary"] = summary
1519
+ if timestamp:
1520
+ metrics["timestamp"] = timestamp
1521
+ if metrics["summary"] is None and not getattr(arr_obj, "_webui_db_loaded", True):
1522
+ metrics["summary"] = "Updating database"
1523
+ return metrics
1524
+
1525
+ metrics_cache: dict[int, dict[str, object]] = {}
1526
+
1527
+ def _populate_process_metadata(arr_obj, proc_kind, payload_dict):
1528
+ metrics = metrics_cache.get(id(arr_obj))
1529
+ if metrics is None:
1530
+ metrics = _collect_metrics(arr_obj)
1531
+ metrics_cache[id(arr_obj)] = metrics
1532
+ if proc_kind == "search":
1533
+ category_key = getattr(arr_obj, "category", None)
1534
+ entry = None
1535
+ if category_key:
1536
+ entry = search_activity_map.get(str(category_key))
1537
+ summary = None
1538
+ timestamp = None
1539
+ if isinstance(entry, Mapping):
1540
+ summary = entry.get("summary")
1541
+ timestamp = entry.get("timestamp")
1542
+ if summary is None:
1543
+ summary = getattr(arr_obj, "last_search_description", None)
1544
+ timestamp = getattr(arr_obj, "last_search_timestamp", None)
1545
+ if summary is None:
1546
+ metrics_summary = metrics.get("summary")
1547
+ if metrics_summary:
1548
+ summary = metrics_summary
1549
+ metrics_timestamp = metrics.get("timestamp")
1550
+ if metrics_timestamp:
1551
+ timestamp = metrics_timestamp
1552
+ if summary:
1553
+ payload_dict["searchSummary"] = summary
1554
+ if timestamp:
1555
+ if isinstance(timestamp, datetime):
1556
+ payload_dict["searchTimestamp"] = timestamp.astimezone(
1557
+ timezone.utc
1558
+ ).isoformat()
1559
+ else:
1560
+ payload_dict["searchTimestamp"] = str(timestamp)
1561
+ elif category_key:
1562
+ key = str(category_key)
1563
+ clear_search_activity(key)
1564
+ search_activity_map.pop(key, None)
1565
+ elif proc_kind == "torrent":
1566
+ queue_count = metrics.get("queue")
1567
+ if queue_count is None:
1568
+ queue_count = getattr(arr_obj, "queue_active_count", None)
1569
+ category_count = metrics.get("category")
1570
+ if category_count is None:
1571
+ category_count = getattr(arr_obj, "category_torrent_count", None)
1572
+ metric_type = metrics.get("metric_type")
1573
+ if queue_count is not None:
1574
+ payload_dict["queueCount"] = queue_count
1575
+ if category_count is not None:
1576
+ payload_dict["categoryCount"] = category_count
1577
+ if metric_type:
1578
+ payload_dict["metricType"] = metric_type
1579
+
1580
+ for arr in _managed_objects().values():
1581
+ name = getattr(arr, "_name", "unknown")
1582
+ cat = getattr(arr, "category", name)
1583
+ for kind in ("search", "torrent"):
1584
+ p = getattr(arr, f"process_{kind}_loop", None)
1585
+ if p is None:
1586
+ continue
1587
+ try:
1588
+ payload = {
1589
+ "category": cat,
1590
+ "name": name,
1591
+ "kind": kind,
1592
+ "pid": getattr(p, "pid", None),
1593
+ "alive": bool(p.is_alive()),
1594
+ "rebuilding": self._rebuilding_arrs,
1595
+ }
1596
+ _populate_process_metadata(arr, kind, payload)
1597
+ procs.append(payload)
1598
+ except Exception:
1599
+ payload = {
1600
+ "category": cat,
1601
+ "name": name,
1602
+ "kind": kind,
1603
+ "pid": getattr(p, "pid", None),
1604
+ "alive": False,
1605
+ "rebuilding": self._rebuilding_arrs,
1606
+ }
1607
+ _populate_process_metadata(arr, kind, payload)
1608
+ procs.append(payload)
1609
+ return {"processes": procs}
1610
+
1611
+ @app.get("/api/processes")
1612
+ def api_processes():
1613
+ if (resp := require_token()) is not None:
1614
+ return resp
1615
+ return jsonify(_processes_payload())
1616
+
1617
+ # UI endpoints (mirror of /api/* for first-party WebUI clients)
1618
+ @app.get("/web/processes")
1619
+ def web_processes():
1620
+ return jsonify(_processes_payload())
1621
+
1622
+ def _restart_process(category: str, kind: str):
1623
+ kind_normalized = kind.lower()
1624
+ if kind_normalized not in ("search", "torrent", "all"):
1625
+ return jsonify({"error": "kind must be search, torrent or all"}), 400
1626
+ managed = _managed_objects()
1627
+ if not managed:
1628
+ if not _ensure_arr_manager_ready():
1629
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1630
+ arr = managed.get(category)
1631
+ if arr is None:
1632
+ return jsonify({"error": f"Unknown category {category}"}), 404
1633
+ restarted: list[str] = []
1634
+ for loop_kind in ("search", "torrent"):
1635
+ if kind_normalized != "all" and loop_kind != kind_normalized:
1636
+ continue
1637
+ proc_attr = f"process_{loop_kind}_loop"
1638
+ process = getattr(arr, proc_attr, None)
1639
+ if process is not None:
1640
+ try:
1641
+ process.kill()
1642
+ except Exception:
1643
+ pass
1644
+ try:
1645
+ process.terminate()
1646
+ except Exception:
1647
+ pass
1648
+ try:
1649
+ self.manager.child_processes.remove(process)
1650
+ except Exception:
1651
+ pass
1652
+ target = getattr(arr, f"run_{loop_kind}_loop", None)
1653
+ if target is None:
1654
+ continue
1655
+ import pathos
1656
+
1657
+ new_process = pathos.helpers.mp.Process(target=target, daemon=False)
1658
+ setattr(arr, proc_attr, new_process)
1659
+ self.manager.child_processes.append(new_process)
1660
+ new_process.start()
1661
+ restarted.append(loop_kind)
1662
+ return jsonify({"status": "ok", "restarted": restarted})
1663
+
1664
+ @app.post("/api/processes/<category>/<kind>/restart")
1665
+ def api_restart_process(category: str, kind: str):
1666
+ if (resp := require_token()) is not None:
1667
+ return resp
1668
+ return _restart_process(category, kind)
1669
+
1670
+ @app.post("/web/processes/<category>/<kind>/restart")
1671
+ def web_restart_process(category: str, kind: str):
1672
+ return _restart_process(category, kind)
1673
+
1674
+ @app.post("/api/processes/restart_all")
1675
+ def api_restart_all():
1676
+ if (resp := require_token()) is not None:
1677
+ return resp
1678
+ self._reload_all()
1679
+ return jsonify({"status": "ok"})
1680
+
1681
+ @app.post("/web/processes/restart_all")
1682
+ def web_restart_all():
1683
+ self._reload_all()
1684
+ return jsonify({"status": "ok"})
1685
+
1686
+ @app.post("/api/loglevel")
1687
+ def api_loglevel():
1688
+ if (resp := require_token()) is not None:
1689
+ return resp
1690
+ body = request.get_json(silent=True) or {}
1691
+ level = str(body.get("level", "INFO")).upper()
1692
+ valid = {"CRITICAL", "ERROR", "WARNING", "NOTICE", "INFO", "DEBUG", "TRACE"}
1693
+ if level not in valid:
1694
+ return jsonify({"error": f"invalid level {level}"}), 400
1695
+ target_level = getattr(logging, level, logging.INFO)
1696
+ logging.getLogger().setLevel(target_level)
1697
+ for name, lg in logging.root.manager.loggerDict.items():
1698
+ if isinstance(lg, logging.Logger) and str(name).startswith("qBitrr"):
1699
+ lg.setLevel(target_level)
1700
+ try:
1701
+ _toml_set(CONFIG.config, "Settings.ConsoleLevel", level)
1702
+ CONFIG.save()
1703
+ except Exception:
1704
+ pass
1705
+ return jsonify({"status": "ok", "level": level})
1706
+
1707
+ @app.post("/web/loglevel")
1708
+ def web_loglevel():
1709
+ body = request.get_json(silent=True) or {}
1710
+ level = str(body.get("level", "INFO")).upper()
1711
+ valid = {"CRITICAL", "ERROR", "WARNING", "NOTICE", "INFO", "DEBUG", "TRACE"}
1712
+ if level not in valid:
1713
+ return jsonify({"error": f"invalid level {level}"}), 400
1714
+ target_level = getattr(logging, level, logging.INFO)
1715
+ logging.getLogger().setLevel(target_level)
1716
+ for name, lg in logging.root.manager.loggerDict.items():
1717
+ if isinstance(lg, logging.Logger) and str(name).startswith("qBitrr"):
1718
+ lg.setLevel(target_level)
1719
+ try:
1720
+ _toml_set(CONFIG.config, "Settings.ConsoleLevel", level)
1721
+ CONFIG.save()
1722
+ except Exception:
1723
+ pass
1724
+ return jsonify({"status": "ok", "level": level})
1725
+
1726
+ @app.post("/api/arr/rebuild")
1727
+ def api_arr_rebuild():
1728
+ if (resp := require_token()) is not None:
1729
+ return resp
1730
+ self._reload_all()
1731
+ return jsonify({"status": "ok"})
1732
+
1733
+ @app.post("/web/arr/rebuild")
1734
+ def web_arr_rebuild():
1735
+ self._reload_all()
1736
+ return jsonify({"status": "ok"})
1737
+
1738
+ def _list_logs() -> list[str]:
1739
+ if not logs_root.exists():
1740
+ return []
1741
+ log_files = sorted(f.name for f in logs_root.glob("*.log*"))
1742
+ return log_files
1743
+
1744
+ @app.get("/api/logs")
1745
+ def api_logs():
1746
+ if (resp := require_token()) is not None:
1747
+ return resp
1748
+ return jsonify({"files": _list_logs()})
1749
+
1750
+ @app.get("/web/logs")
1751
+ def web_logs():
1752
+ return jsonify({"files": _list_logs()})
1753
+
1754
+ @app.get("/api/logs/<name>")
1755
+ def api_log(name: str):
1756
+ if (resp := require_token()) is not None:
1757
+ return resp
1758
+ file = _resolve_log_file(name)
1759
+ if file is None or not file.exists():
1760
+ return jsonify({"error": "not found"}), 404
1761
+
1762
+ # Stream full log file to support dynamic loading in LazyLog
1763
+ try:
1764
+ content = file.read_text(encoding="utf-8", errors="ignore")
1765
+ except Exception:
1766
+ content = ""
1767
+ response = send_file(
1768
+ io.BytesIO(content.encode("utf-8")),
1769
+ mimetype="text/plain",
1770
+ as_attachment=False,
1771
+ )
1772
+ response.headers["Content-Type"] = "text/plain; charset=utf-8"
1773
+ response.headers["Cache-Control"] = "no-cache"
1774
+ return response
1775
+
1776
+ @app.get("/web/logs/<name>")
1777
+ def web_log(name: str):
1778
+ # Public endpoint for Authentik bypass - no token required
1779
+ file = _resolve_log_file(name)
1780
+ if file is None or not file.exists():
1781
+ return jsonify({"error": "not found"}), 404
1782
+
1783
+ # Stream full log file to support dynamic loading in LazyLog
1784
+ try:
1785
+ content = file.read_text(encoding="utf-8", errors="ignore")
1786
+ except Exception:
1787
+ content = ""
1788
+ response = send_file(
1789
+ io.BytesIO(content.encode("utf-8")),
1790
+ mimetype="text/plain",
1791
+ as_attachment=False,
1792
+ )
1793
+ response.headers["Content-Type"] = "text/plain; charset=utf-8"
1794
+ response.headers["Cache-Control"] = "no-cache"
1795
+ return response
1796
+
1797
+ @app.get("/api/logs/<name>/download")
1798
+ def api_log_download(name: str):
1799
+ if (resp := require_token()) is not None:
1800
+ return resp
1801
+ file = _resolve_log_file(name)
1802
+ if file is None or not file.exists():
1803
+ return jsonify({"error": "not found"}), 404
1804
+ return send_file(file, as_attachment=True)
1805
+
1806
+ @app.get("/web/logs/<name>/download")
1807
+ def web_log_download(name: str):
1808
+ file = _resolve_log_file(name)
1809
+ if file is None or not file.exists():
1810
+ return jsonify({"error": "not found"}), 404
1811
+ return send_file(file, as_attachment=True)
1812
+
1813
+ @app.get("/api/radarr/<category>/movies")
1814
+ def api_radarr_movies(category: str):
1815
+ if (resp := require_token()) is not None:
1816
+ return resp
1817
+ managed = _managed_objects()
1818
+ if not managed:
1819
+ if not _ensure_arr_manager_ready():
1820
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1821
+ arr = managed.get(category)
1822
+ if arr is None or getattr(arr, "type", None) != "radarr":
1823
+ return jsonify({"error": f"Unknown radarr category {category}"}), 404
1824
+ q = request.args.get("q", default=None, type=str)
1825
+ page = request.args.get("page", default=0, type=int)
1826
+ page_size = request.args.get("page_size", default=50, type=int)
1827
+ year_min = request.args.get("year_min", default=None, type=int)
1828
+ year_max = request.args.get("year_max", default=None, type=int)
1829
+ monitored = (
1830
+ self._safe_bool(request.args.get("monitored"))
1831
+ if "monitored" in request.args
1832
+ else None
1833
+ )
1834
+ has_file = (
1835
+ self._safe_bool(request.args.get("has_file"))
1836
+ if "has_file" in request.args
1837
+ else None
1838
+ )
1839
+ quality_met = (
1840
+ self._safe_bool(request.args.get("quality_met"))
1841
+ if "quality_met" in request.args
1842
+ else None
1843
+ )
1844
+ is_request = (
1845
+ self._safe_bool(request.args.get("is_request"))
1846
+ if "is_request" in request.args
1847
+ else None
1848
+ )
1849
+ payload = self._radarr_movies_from_db(
1850
+ arr,
1851
+ q,
1852
+ page,
1853
+ page_size,
1854
+ year_min=year_min,
1855
+ year_max=year_max,
1856
+ monitored=monitored,
1857
+ has_file=has_file,
1858
+ quality_met=quality_met,
1859
+ is_request=is_request,
1860
+ )
1861
+ payload["category"] = category
1862
+ return jsonify(payload)
1863
+
1864
+ @app.get("/web/radarr/<category>/movies")
1865
+ def web_radarr_movies(category: str):
1866
+ managed = _managed_objects()
1867
+ if not managed:
1868
+ if not _ensure_arr_manager_ready():
1869
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1870
+ arr = managed.get(category)
1871
+ if arr is None or getattr(arr, "type", None) != "radarr":
1872
+ return jsonify({"error": f"Unknown radarr category {category}"}), 404
1873
+ q = request.args.get("q", default=None, type=str)
1874
+ page = request.args.get("page", default=0, type=int)
1875
+ page_size = request.args.get("page_size", default=50, type=int)
1876
+ year_min = request.args.get("year_min", default=None, type=int)
1877
+ year_max = request.args.get("year_max", default=None, type=int)
1878
+ monitored = (
1879
+ self._safe_bool(request.args.get("monitored"))
1880
+ if "monitored" in request.args
1881
+ else None
1882
+ )
1883
+ has_file = (
1884
+ self._safe_bool(request.args.get("has_file"))
1885
+ if "has_file" in request.args
1886
+ else None
1887
+ )
1888
+ quality_met = (
1889
+ self._safe_bool(request.args.get("quality_met"))
1890
+ if "quality_met" in request.args
1891
+ else None
1892
+ )
1893
+ is_request = (
1894
+ self._safe_bool(request.args.get("is_request"))
1895
+ if "is_request" in request.args
1896
+ else None
1897
+ )
1898
+ payload = self._radarr_movies_from_db(
1899
+ arr,
1900
+ q,
1901
+ page,
1902
+ page_size,
1903
+ year_min=year_min,
1904
+ year_max=year_max,
1905
+ monitored=monitored,
1906
+ has_file=has_file,
1907
+ quality_met=quality_met,
1908
+ is_request=is_request,
1909
+ )
1910
+ payload["category"] = category
1911
+ return jsonify(payload)
1912
+
1913
+ @app.get("/api/sonarr/<category>/series")
1914
+ def api_sonarr_series(category: str):
1915
+ if (resp := require_token()) is not None:
1916
+ return resp
1917
+ managed = _managed_objects()
1918
+ if not managed:
1919
+ if not _ensure_arr_manager_ready():
1920
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1921
+ arr = managed.get(category)
1922
+ if arr is None or getattr(arr, "type", None) != "sonarr":
1923
+ return jsonify({"error": f"Unknown sonarr category {category}"}), 404
1924
+ q = request.args.get("q", default=None, type=str)
1925
+ page = request.args.get("page", default=0, type=int)
1926
+ page_size = request.args.get("page_size", default=25, type=int)
1927
+ missing_only = self._safe_bool(
1928
+ request.args.get("missing") or request.args.get("only_missing")
1929
+ )
1930
+ payload = self._sonarr_series_from_db(
1931
+ arr, q, page, page_size, missing_only=missing_only
1932
+ )
1933
+ payload["category"] = category
1934
+ return jsonify(payload)
1935
+
1936
+ @app.get("/web/sonarr/<category>/series")
1937
+ def web_sonarr_series(category: str):
1938
+ managed = _managed_objects()
1939
+ if not managed:
1940
+ if not _ensure_arr_manager_ready():
1941
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1942
+ arr = managed.get(category)
1943
+ if arr is None or getattr(arr, "type", None) != "sonarr":
1944
+ return jsonify({"error": f"Unknown sonarr category {category}"}), 404
1945
+ q = request.args.get("q", default=None, type=str)
1946
+ page = request.args.get("page", default=0, type=int)
1947
+ page_size = request.args.get("page_size", default=25, type=int)
1948
+ missing_only = self._safe_bool(
1949
+ request.args.get("missing") or request.args.get("only_missing")
1950
+ )
1951
+ payload = self._sonarr_series_from_db(
1952
+ arr, q, page, page_size, missing_only=missing_only
1953
+ )
1954
+ payload["category"] = category
1955
+ return jsonify(payload)
1956
+
1957
+ @app.get("/web/lidarr/<category>/albums")
1958
+ def web_lidarr_albums(category: str):
1959
+ managed = _managed_objects()
1960
+ if not managed:
1961
+ if not _ensure_arr_manager_ready():
1962
+ return jsonify({"error": "Arr manager is still initialising"}), 503
1963
+ arr = managed.get(category)
1964
+ if arr is None or getattr(arr, "type", None) != "lidarr":
1965
+ return jsonify({"error": f"Unknown lidarr category {category}"}), 404
1966
+ q = request.args.get("q", default=None, type=str)
1967
+ page = request.args.get("page", default=0, type=int)
1968
+ page_size = request.args.get("page_size", default=50, type=int)
1969
+ monitored = (
1970
+ self._safe_bool(request.args.get("monitored"))
1971
+ if "monitored" in request.args
1972
+ else None
1973
+ )
1974
+ has_file = (
1975
+ self._safe_bool(request.args.get("has_file"))
1976
+ if "has_file" in request.args
1977
+ else None
1978
+ )
1979
+ quality_met = (
1980
+ self._safe_bool(request.args.get("quality_met"))
1981
+ if "quality_met" in request.args
1982
+ else None
1983
+ )
1984
+ is_request = (
1985
+ self._safe_bool(request.args.get("is_request"))
1986
+ if "is_request" in request.args
1987
+ else None
1988
+ )
1989
+ flat_mode = self._safe_bool(request.args.get("flat_mode", False))
1990
+
1991
+ if flat_mode:
1992
+ # Flat mode: return tracks directly
1993
+ payload = self._lidarr_tracks_from_db(
1994
+ arr,
1995
+ q,
1996
+ page,
1997
+ page_size,
1998
+ monitored=monitored,
1999
+ has_file=has_file,
2000
+ )
2001
+ else:
2002
+ # Grouped mode: return albums with tracks, paginated by artist
2003
+ payload = self._lidarr_albums_from_db(
2004
+ arr,
2005
+ q,
2006
+ page,
2007
+ page_size,
2008
+ monitored=monitored,
2009
+ has_file=has_file,
2010
+ quality_met=quality_met,
2011
+ is_request=is_request,
2012
+ group_by_artist=True,
2013
+ )
2014
+ payload["category"] = category
2015
+ return jsonify(payload)
2016
+
2017
+ def _arr_list_payload() -> dict[str, Any]:
2018
+ items = []
2019
+ for k, arr in _managed_objects().items():
2020
+ t = getattr(arr, "type", None)
2021
+ if t in ("radarr", "sonarr", "lidarr"):
2022
+ name = getattr(arr, "_name", k)
2023
+ category = getattr(arr, "category", k)
2024
+ items.append({"category": category, "name": name, "type": t})
2025
+ return {"arr": items, "ready": _ensure_arr_manager_ready()}
2026
+
2027
+ @app.get("/api/arr")
2028
+ def api_arr_list():
2029
+ if (resp := require_token()) is not None:
2030
+ return resp
2031
+ return jsonify(_arr_list_payload())
2032
+
2033
+ @app.get("/web/arr")
2034
+ def web_arr_list():
2035
+ return jsonify(_arr_list_payload())
2036
+
2037
+ @app.get("/api/meta")
2038
+ def api_meta():
2039
+ if (resp := require_token()) is not None:
2040
+ return resp
2041
+ force = self._safe_bool(request.args.get("force"))
2042
+ return jsonify(self._ensure_version_info(force=force))
2043
+
2044
+ @app.get("/web/meta")
2045
+ def web_meta():
2046
+ force = self._safe_bool(request.args.get("force"))
2047
+ return jsonify(self._ensure_version_info(force=force))
2048
+
2049
+ @app.post("/api/update")
2050
+ def api_update():
2051
+ if (resp := require_token()) is not None:
2052
+ return resp
2053
+ ok, message = self._trigger_manual_update()
2054
+ if not ok:
2055
+ return jsonify({"error": message}), 409
2056
+ return jsonify({"status": "started"})
2057
+
2058
+ @app.post("/web/update")
2059
+ def web_update():
2060
+ ok, message = self._trigger_manual_update()
2061
+ if not ok:
2062
+ return jsonify({"error": message}), 409
2063
+ return jsonify({"status": "started"})
2064
+
2065
+ @app.get("/api/download-update")
2066
+ def api_download_update():
2067
+ """Redirect to binary download URL for current platform."""
2068
+ if (resp := require_token()) is not None:
2069
+ return resp
2070
+
2071
+ from qBitrr.auto_update import get_installation_type
2072
+
2073
+ install_type = get_installation_type()
2074
+
2075
+ if install_type != "binary":
2076
+ return jsonify({"error": "Download only available for binary installations"}), 400
2077
+
2078
+ # Get latest version info
2079
+ version_info = self._ensure_version_info()
2080
+
2081
+ if not version_info.get("update_available"):
2082
+ return jsonify({"error": "No update available"}), 404
2083
+
2084
+ download_url = version_info.get("binary_download_url")
2085
+ if not download_url:
2086
+ error = version_info.get(
2087
+ "binary_download_error", "No binary available for your platform"
2088
+ )
2089
+ return jsonify({"error": error}), 404
2090
+
2091
+ # Redirect to GitHub download URL
2092
+ from flask import redirect
2093
+
2094
+ return redirect(download_url)
2095
+
2096
+ @app.get("/web/download-update")
2097
+ def web_download_update():
2098
+ """Redirect to binary download URL for current platform."""
2099
+ from qBitrr.auto_update import get_installation_type
2100
+
2101
+ install_type = get_installation_type()
2102
+
2103
+ if install_type != "binary":
2104
+ return jsonify({"error": "Download only available for binary installations"}), 400
2105
+
2106
+ # Get latest version info
2107
+ version_info = self._ensure_version_info()
2108
+
2109
+ if not version_info.get("update_available"):
2110
+ return jsonify({"error": "No update available"}), 404
2111
+
2112
+ download_url = version_info.get("binary_download_url")
2113
+ if not download_url:
2114
+ error = version_info.get(
2115
+ "binary_download_error", "No binary available for your platform"
2116
+ )
2117
+ return jsonify({"error": error}), 404
2118
+
2119
+ # Redirect to GitHub download URL
2120
+ from flask import redirect
2121
+
2122
+ return redirect(download_url)
2123
+
2124
+ def _status_payload() -> dict[str, Any]:
2125
+ qb = {
2126
+ "alive": bool(self.manager.is_alive),
2127
+ "host": self.manager.qBit_Host,
2128
+ "port": self.manager.qBit_Port,
2129
+ "version": (
2130
+ str(self.manager.current_qbit_version)
2131
+ if self.manager.current_qbit_version
2132
+ else None
2133
+ ),
2134
+ }
2135
+ arrs = []
2136
+ for k, arr in _managed_objects().items():
2137
+ t = getattr(arr, "type", None)
2138
+ if t in ("radarr", "sonarr", "lidarr"):
2139
+ # Determine liveness based on child search/torrent processes
2140
+ alive = False
2141
+ for loop in ("search", "torrent"):
2142
+ p = getattr(arr, f"process_{loop}_loop", None)
2143
+ if p is not None:
2144
+ try:
2145
+ if p.is_alive():
2146
+ alive = True
2147
+ break
2148
+ except Exception:
2149
+ pass
2150
+ name = getattr(arr, "_name", k)
2151
+ category = getattr(arr, "category", k)
2152
+ arrs.append({"category": category, "name": name, "type": t, "alive": alive})
2153
+ return {"qbit": qb, "arrs": arrs, "ready": _ensure_arr_manager_ready()}
2154
+
2155
+ @app.get("/api/status")
2156
+ def api_status():
2157
+ if (resp := require_token()) is not None:
2158
+ return resp
2159
+ return jsonify(_status_payload())
2160
+
2161
+ @app.get("/web/status")
2162
+ def web_status():
2163
+ return jsonify(_status_payload())
2164
+
2165
+ @app.get("/api/token")
2166
+ def api_token():
2167
+ if (resp := require_token()) is not None:
2168
+ return resp
2169
+ # Expose token for API clients only; UI uses /web endpoints
2170
+ return jsonify({"token": self.token})
2171
+
2172
+ @app.post("/api/arr/<section>/restart")
2173
+ def api_arr_restart(section: str):
2174
+ if (resp := require_token()) is not None:
2175
+ return resp
2176
+ # Section is the category key in managed_objects
2177
+ managed = _managed_objects()
2178
+ if not managed:
2179
+ if not _ensure_arr_manager_ready():
2180
+ return jsonify({"error": "Arr manager is still initialising"}), 503
2181
+ if section not in managed:
2182
+ return jsonify({"error": f"Unknown section {section}"}), 404
2183
+ arr = managed[section]
2184
+ # Restart both loops for this arr
2185
+ restarted = []
2186
+ for k in ("search", "torrent"):
2187
+ proc_attr = f"process_{k}_loop"
2188
+ p = getattr(arr, proc_attr, None)
2189
+ if p is not None:
2190
+ try:
2191
+ p.kill()
2192
+ except Exception:
2193
+ pass
2194
+ try:
2195
+ p.terminate()
2196
+ except Exception:
2197
+ pass
2198
+ try:
2199
+ self.manager.child_processes.remove(p)
2200
+ except Exception:
2201
+ pass
2202
+ import pathos
2203
+
2204
+ target = getattr(arr, f"run_{k}_loop", None)
2205
+ if target is None:
2206
+ continue
2207
+ new_p = pathos.helpers.mp.Process(target=target, daemon=False)
2208
+ setattr(arr, proc_attr, new_p)
2209
+ self.manager.child_processes.append(new_p)
2210
+ new_p.start()
2211
+ restarted.append(k)
2212
+ return jsonify({"status": "ok", "restarted": restarted})
2213
+
2214
+ @app.post("/web/arr/<section>/restart")
2215
+ def web_arr_restart(section: str):
2216
+ managed = _managed_objects()
2217
+ if not managed:
2218
+ if not _ensure_arr_manager_ready():
2219
+ return jsonify({"error": "Arr manager is still initialising"}), 503
2220
+ if section not in managed:
2221
+ return jsonify({"error": f"Unknown section {section}"}), 404
2222
+ arr = managed[section]
2223
+ restarted = []
2224
+ for k in ("search", "torrent"):
2225
+ proc_attr = f"process_{k}_loop"
2226
+ p = getattr(arr, proc_attr, None)
2227
+ if p is not None:
2228
+ try:
2229
+ p.kill()
2230
+ except Exception:
2231
+ pass
2232
+ try:
2233
+ p.terminate()
2234
+ except Exception:
2235
+ pass
2236
+ try:
2237
+ self.manager.child_processes.remove(p)
2238
+ except Exception:
2239
+ pass
2240
+ import pathos
2241
+
2242
+ target = getattr(arr, f"run_{k}_loop", None)
2243
+ if target is None:
2244
+ continue
2245
+ new_p = pathos.helpers.mp.Process(target=target, daemon=False)
2246
+ setattr(arr, proc_attr, new_p)
2247
+ self.manager.child_processes.append(new_p)
2248
+ new_p.start()
2249
+ restarted.append(k)
2250
+ return jsonify({"status": "ok", "restarted": restarted})
2251
+
2252
+ @app.get("/api/config")
2253
+ def api_get_config():
2254
+ if (resp := require_token()) is not None:
2255
+ return resp
2256
+ try:
2257
+ # Reload config from disk to reflect latest file
2258
+ try:
2259
+ CONFIG.load()
2260
+ except Exception:
2261
+ pass
2262
+ # Render current config as a JSON-able dict via tomlkit
2263
+ data = _toml_to_jsonable(CONFIG.config)
2264
+ return jsonify(data)
2265
+ except Exception as e:
2266
+ return jsonify({"error": str(e)}), 500
2267
+
2268
+ @app.get("/web/config")
2269
+ def web_get_config():
2270
+ try:
2271
+ try:
2272
+ CONFIG.load()
2273
+ except Exception:
2274
+ pass
2275
+ data = _toml_to_jsonable(CONFIG.config)
2276
+
2277
+ # Check config version and add warning if mismatch
2278
+ from qBitrr.config_version import get_config_version, validate_config_version
2279
+
2280
+ is_valid, validation_result = validate_config_version(CONFIG)
2281
+ if not is_valid:
2282
+ # Add version mismatch warning to response
2283
+ response_data = {
2284
+ "config": data,
2285
+ "warning": {
2286
+ "type": "config_version_mismatch",
2287
+ "message": validation_result,
2288
+ "currentVersion": get_config_version(CONFIG),
2289
+ },
2290
+ }
2291
+ return jsonify(response_data)
2292
+
2293
+ return jsonify(data)
2294
+ except Exception as e:
2295
+ return jsonify({"error": str(e)}), 500
2296
+
2297
+ def _handle_config_update():
2298
+ """Common handler for config updates with intelligent reload detection."""
2299
+ body = request.get_json(silent=True) or {}
2300
+ changes: dict[str, Any] = body.get("changes", {})
2301
+ if not isinstance(changes, dict):
2302
+ return jsonify({"error": "changes must be an object"}), 400
2303
+
2304
+ # Prevent ConfigVersion from being modified by user
2305
+ protected_keys = {"Settings.ConfigVersion"}
2306
+ for key in protected_keys:
2307
+ if key in changes:
2308
+ return (
2309
+ jsonify({"error": f"Cannot modify protected configuration key: {key}"}),
2310
+ 403,
2311
+ )
2312
+
2313
+ # Define key categories
2314
+ frontend_only_keys = {
2315
+ "WebUI.LiveArr",
2316
+ "WebUI.GroupSonarr",
2317
+ "WebUI.GroupLidarr",
2318
+ "WebUI.Theme",
2319
+ }
2320
+ webui_restart_keys = {
2321
+ "WebUI.Host",
2322
+ "WebUI.Port",
2323
+ "WebUI.Token",
2324
+ }
2325
+
2326
+ # Analyze changes to determine reload strategy
2327
+ affected_arr_instances = set()
2328
+ has_global_changes = False
2329
+ has_webui_changes = False
2330
+ has_frontend_only_changes = False
2331
+
2332
+ for key in changes.keys():
2333
+ if key in frontend_only_keys:
2334
+ has_frontend_only_changes = True
2335
+ elif key in webui_restart_keys:
2336
+ has_webui_changes = True
2337
+ elif key.startswith("WebUI."):
2338
+ # Unknown WebUI key, treat as webui change for safety
2339
+ has_webui_changes = True
2340
+ elif match := re.match(
2341
+ r"^(Radarr|Sonarr|Lidarr|Animarr)[^.]*\.(.+)$", key, re.IGNORECASE
2342
+ ):
2343
+ # Arr instance specific change
2344
+ instance_name = key.split(".")[0]
2345
+ affected_arr_instances.add(instance_name)
2346
+ else:
2347
+ # Settings.*, qBit.*, or unknown - requires full reload
2348
+ has_global_changes = True
2349
+
2350
+ # Apply all changes to config
2351
+ for key, val in changes.items():
2352
+ if val is None:
2353
+ _toml_delete(CONFIG.config, key)
2354
+ if key == "WebUI.Token":
2355
+ self.token = ""
2356
+ continue
2357
+ _toml_set(CONFIG.config, key, val)
2358
+ if key == "WebUI.Token":
2359
+ # Update in-memory token immediately
2360
+ self.token = str(val) if val is not None else ""
2361
+
2362
+ # Persist config
2363
+ try:
2364
+ CONFIG.save()
2365
+ except Exception as e:
2366
+ return jsonify({"error": f"Failed to save config: {e}"}), 500
2367
+
2368
+ # Determine reload strategy
2369
+ reload_type = "none"
2370
+ affected_instances_list = []
2371
+
2372
+ if has_global_changes:
2373
+ # Global settings changed - full reload required
2374
+ # This affects ALL instances (qBit settings, loop timers, etc.)
2375
+ reload_type = "full"
2376
+ self.logger.notice("Global settings changed, performing full reload")
2377
+ try:
2378
+ self.manager.configure_auto_update()
2379
+ except Exception:
2380
+ self.logger.exception("Failed to refresh auto update configuration")
2381
+ self._reload_all()
2382
+
2383
+ elif len(affected_arr_instances) >= 1:
2384
+ # One or more Arr instances changed - reload each individually
2385
+ # NEVER trigger global reload for Arr-only changes
2386
+ reload_type = "multi_arr" if len(affected_arr_instances) > 1 else "single_arr"
2387
+ affected_instances_list = sorted(affected_arr_instances)
2388
+
2389
+ self.logger.notice(
2390
+ f"Reloading {len(affected_instances_list)} Arr instance(s): {', '.join(affected_instances_list)}"
2391
+ )
2392
+
2393
+ # Reload each affected instance in sequence
2394
+ for instance_name in affected_instances_list:
2395
+ self._reload_arr_instance(instance_name)
2396
+
2397
+ elif has_webui_changes:
2398
+ # Only WebUI settings changed - restart WebUI
2399
+ reload_type = "webui"
2400
+ self.logger.notice("WebUI settings changed, restarting WebUI server")
2401
+ # Run restart in background thread to avoid blocking response
2402
+ restart_thread = threading.Thread(
2403
+ target=self._restart_webui, name="WebUIRestart", daemon=True
2404
+ )
2405
+ restart_thread.start()
2406
+
2407
+ elif has_frontend_only_changes:
2408
+ # Only frontend settings changed - no reload
2409
+ reload_type = "frontend"
2410
+ self.logger.debug("Frontend-only settings changed, no reload required")
2411
+
2412
+ # Build response
2413
+ response_data = {
2414
+ "status": "ok",
2415
+ "configReloaded": reload_type not in ("none", "frontend"),
2416
+ "reloadType": reload_type,
2417
+ "affectedInstances": affected_instances_list,
2418
+ }
2419
+
2420
+ response = jsonify(response_data)
2421
+
2422
+ # Add headers for cache control
2423
+ response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
2424
+ response.headers["Pragma"] = "no-cache"
2425
+ response.headers["Expires"] = "0"
2426
+
2427
+ # Legacy header for compatibility
2428
+ if reload_type in ("full", "single_arr", "multi_arr", "webui"):
2429
+ response.headers["X-Config-Reloaded"] = "true"
2430
+
2431
+ return response
2432
+
2433
+ @app.post("/api/config")
2434
+ def api_update_config():
2435
+ if (resp := require_token()) is not None:
2436
+ return resp
2437
+ return _handle_config_update()
2438
+
2439
+ @app.post("/web/config")
2440
+ def web_update_config():
2441
+ return _handle_config_update()
2442
+
2443
+ @app.post("/api/arr/test-connection")
2444
+ def api_arr_test_connection():
2445
+ """
2446
+ Test connection to Arr instance without saving config.
2447
+ Accepts temporary URI/APIKey and returns connection status + quality profiles.
2448
+ """
2449
+ if (resp := require_token()) is not None:
2450
+ return resp
2451
+
2452
+ try:
2453
+ data = request.get_json()
2454
+ if not data:
2455
+ return jsonify({"success": False, "message": "Missing request body"}), 400
2456
+
2457
+ arr_type = data.get("arrType") # "radarr" | "sonarr" | "lidarr"
2458
+ uri = data.get("uri")
2459
+ api_key = data.get("apiKey")
2460
+
2461
+ # Validate inputs
2462
+ if not all([arr_type, uri, api_key]):
2463
+ return (
2464
+ jsonify(
2465
+ {
2466
+ "success": False,
2467
+ "message": "Missing required fields: arrType, uri, or apiKey",
2468
+ }
2469
+ ),
2470
+ 400,
2471
+ )
2472
+
2473
+ # Try to find existing Arr instance with matching URI
2474
+ existing_arr = None
2475
+ managed = _managed_objects()
2476
+ for group_name, arr_instance in managed.items():
2477
+ if hasattr(arr_instance, "uri") and hasattr(arr_instance, "apikey"):
2478
+ if arr_instance.uri == uri and arr_instance.apikey == api_key:
2479
+ existing_arr = arr_instance
2480
+ self.logger.info(f"Using existing Arr instance: {group_name}")
2481
+ break
2482
+
2483
+ # Use existing client if available, otherwise create temporary one
2484
+ if existing_arr and hasattr(existing_arr, "client"):
2485
+ client = existing_arr.client
2486
+ self.logger.info(f"Reusing existing client for {existing_arr._name}")
2487
+ else:
2488
+ # Create temporary Arr API client
2489
+ self.logger.info(f"Creating temporary {arr_type} client for {uri}")
2490
+ if arr_type == "radarr":
2491
+ from pyarr import RadarrAPI
2492
+
2493
+ client = RadarrAPI(uri, api_key)
2494
+ elif arr_type == "sonarr":
2495
+ from pyarr import SonarrAPI
2496
+
2497
+ client = SonarrAPI(uri, api_key)
2498
+ elif arr_type == "lidarr":
2499
+ from pyarr import LidarrAPI
2500
+
2501
+ client = LidarrAPI(uri, api_key)
2502
+ else:
2503
+ return (
2504
+ jsonify({"success": False, "message": f"Invalid arrType: {arr_type}"}),
2505
+ 400,
2506
+ )
2507
+
2508
+ # Test connection (no timeout - Flask/Waitress handles this)
2509
+ try:
2510
+ self.logger.info(f"Testing connection to {arr_type} at {uri}")
2511
+
2512
+ # Get system info to verify connection
2513
+ system_info = client.get_system_status()
2514
+ self.logger.info(
2515
+ f"System status retrieved: {system_info.get('version', 'unknown')}"
2516
+ )
2517
+
2518
+ # Fetch quality profiles with retry logic (same as backend)
2519
+ from json import JSONDecodeError
2520
+
2521
+ import requests
2522
+ from pyarr.exceptions import PyarrServerError
2523
+
2524
+ max_retries = 3
2525
+ retry_count = 0
2526
+ quality_profiles = []
2527
+
2528
+ while retry_count < max_retries:
2529
+ try:
2530
+ quality_profiles = client.get_quality_profile()
2531
+ self.logger.info(
2532
+ f"Quality profiles retrieved: {len(quality_profiles)} profiles"
2533
+ )
2534
+ break
2535
+ except (
2536
+ requests.exceptions.ChunkedEncodingError,
2537
+ requests.exceptions.ContentDecodingError,
2538
+ requests.exceptions.ConnectionError,
2539
+ JSONDecodeError,
2540
+ ) as e:
2541
+ retry_count += 1
2542
+ self.logger.warning(
2543
+ f"Transient error fetching quality profiles (attempt {retry_count}/{max_retries}): {e}"
2544
+ )
2545
+ if retry_count >= max_retries:
2546
+ self.logger.error("Failed to fetch quality profiles after retries")
2547
+ quality_profiles = []
2548
+ break
2549
+ time.sleep(1)
2550
+ except PyarrServerError as e:
2551
+ self.logger.error(f"Server error fetching quality profiles: {e}")
2552
+ quality_profiles = []
2553
+ break
2554
+ except Exception as e:
2555
+ self.logger.error(f"Unexpected error fetching quality profiles: {e}")
2556
+ quality_profiles = []
2557
+ break
2558
+
2559
+ # Format response
2560
+ return jsonify(
2561
+ {
2562
+ "success": True,
2563
+ "message": "Connected successfully",
2564
+ "systemInfo": {
2565
+ "version": system_info.get("version", "unknown"),
2566
+ "branch": system_info.get("branch"),
2567
+ },
2568
+ "qualityProfiles": [
2569
+ {"id": p["id"], "name": p["name"]} for p in quality_profiles
2570
+ ],
2571
+ }
2572
+ )
2573
+
2574
+ except Exception as e:
2575
+ # Handle specific error types
2576
+ error_msg = str(e)
2577
+ # Log full error for debugging but sanitize user-facing message
2578
+ self.logger.error(f"Connection test failed: {error_msg}")
2579
+
2580
+ if "401" in error_msg or "Unauthorized" in error_msg:
2581
+ return (
2582
+ jsonify(
2583
+ {"success": False, "message": "Unauthorized: Invalid API key"}
2584
+ ),
2585
+ 401,
2586
+ )
2587
+ elif "404" in error_msg:
2588
+ return (
2589
+ jsonify(
2590
+ {"success": False, "message": f"Not found: Check URI ({uri})"}
2591
+ ),
2592
+ 404,
2593
+ )
2594
+ elif "Connection refused" in error_msg or "ConnectionError" in error_msg:
2595
+ return (
2596
+ jsonify(
2597
+ {
2598
+ "success": False,
2599
+ "message": f"Connection refused: Cannot reach {uri}",
2600
+ }
2601
+ ),
2602
+ 503,
2603
+ )
2604
+ else:
2605
+ # Generic error message - details logged above
2606
+ return (
2607
+ jsonify({"success": False, "message": "Connection test failed"}),
2608
+ 500,
2609
+ )
2610
+
2611
+ except Exception as e:
2612
+ self.logger.error("Test connection error: %s", e)
2613
+ return jsonify({"success": False, "message": "Connection test failed"}), 500
2614
+
2615
+ @app.post("/web/arr/test-connection")
2616
+ def web_arr_test_connection():
2617
+ """
2618
+ Test connection to Arr instance without saving config.
2619
+ Accepts temporary URI/APIKey and returns connection status + quality profiles.
2620
+ Public endpoint (mirrors /api/arr/test-connection).
2621
+ """
2622
+ try:
2623
+ data = request.get_json()
2624
+ if not data:
2625
+ return jsonify({"success": False, "message": "Missing request body"}), 400
2626
+
2627
+ arr_type = data.get("arrType") # "radarr" | "sonarr" | "lidarr"
2628
+ uri = data.get("uri")
2629
+ api_key = data.get("apiKey")
2630
+
2631
+ # Validate inputs
2632
+ if not all([arr_type, uri, api_key]):
2633
+ return (
2634
+ jsonify(
2635
+ {
2636
+ "success": False,
2637
+ "message": "Missing required fields: arrType, uri, or apiKey",
2638
+ }
2639
+ ),
2640
+ 400,
2641
+ )
2642
+
2643
+ # Try to find existing Arr instance with matching URI
2644
+ existing_arr = None
2645
+ managed = _managed_objects()
2646
+ for group_name, arr_instance in managed.items():
2647
+ if hasattr(arr_instance, "uri") and hasattr(arr_instance, "apikey"):
2648
+ if arr_instance.uri == uri and arr_instance.apikey == api_key:
2649
+ existing_arr = arr_instance
2650
+ self.logger.info(f"Using existing Arr instance: {group_name}")
2651
+ break
2652
+
2653
+ # Use existing client if available, otherwise create temporary one
2654
+ if existing_arr and hasattr(existing_arr, "client"):
2655
+ client = existing_arr.client
2656
+ self.logger.info(f"Reusing existing client for {existing_arr._name}")
2657
+ else:
2658
+ # Create temporary Arr API client
2659
+ self.logger.info(f"Creating temporary {arr_type} client for {uri}")
2660
+ if arr_type == "radarr":
2661
+ from pyarr import RadarrAPI
2662
+
2663
+ client = RadarrAPI(uri, api_key)
2664
+ elif arr_type == "sonarr":
2665
+ from pyarr import SonarrAPI
2666
+
2667
+ client = SonarrAPI(uri, api_key)
2668
+ elif arr_type == "lidarr":
2669
+ from pyarr import LidarrAPI
2670
+
2671
+ client = LidarrAPI(uri, api_key)
2672
+ else:
2673
+ return (
2674
+ jsonify({"success": False, "message": f"Invalid arrType: {arr_type}"}),
2675
+ 400,
2676
+ )
2677
+
2678
+ # Test connection (no timeout - Flask/Waitress handles this)
2679
+ try:
2680
+ self.logger.info(f"Testing connection to {arr_type} at {uri}")
2681
+
2682
+ # Get system info to verify connection
2683
+ system_info = client.get_system_status()
2684
+ self.logger.info(
2685
+ f"System status retrieved: {system_info.get('version', 'unknown')}"
2686
+ )
2687
+
2688
+ # Fetch quality profiles with retry logic (same as backend)
2689
+ from json import JSONDecodeError
2690
+
2691
+ import requests
2692
+ from pyarr.exceptions import PyarrServerError
2693
+
2694
+ max_retries = 3
2695
+ retry_count = 0
2696
+ quality_profiles = []
2697
+
2698
+ while retry_count < max_retries:
2699
+ try:
2700
+ quality_profiles = client.get_quality_profile()
2701
+ self.logger.info(
2702
+ f"Quality profiles retrieved: {len(quality_profiles)} profiles"
2703
+ )
2704
+ break
2705
+ except (
2706
+ requests.exceptions.ChunkedEncodingError,
2707
+ requests.exceptions.ContentDecodingError,
2708
+ requests.exceptions.ConnectionError,
2709
+ JSONDecodeError,
2710
+ ) as e:
2711
+ retry_count += 1
2712
+ self.logger.warning(
2713
+ f"Transient error fetching quality profiles (attempt {retry_count}/{max_retries}): {e}"
2714
+ )
2715
+ if retry_count >= max_retries:
2716
+ self.logger.error("Failed to fetch quality profiles after retries")
2717
+ quality_profiles = []
2718
+ break
2719
+ time.sleep(1)
2720
+ except PyarrServerError as e:
2721
+ self.logger.error(f"Server error fetching quality profiles: {e}")
2722
+ quality_profiles = []
2723
+ break
2724
+ except Exception as e:
2725
+ self.logger.error(f"Unexpected error fetching quality profiles: {e}")
2726
+ quality_profiles = []
2727
+ break
2728
+
2729
+ # Format response
2730
+ return jsonify(
2731
+ {
2732
+ "success": True,
2733
+ "message": "Connected successfully",
2734
+ "systemInfo": {
2735
+ "version": system_info.get("version", "unknown"),
2736
+ "branch": system_info.get("branch"),
2737
+ },
2738
+ "qualityProfiles": [
2739
+ {"id": p["id"], "name": p["name"]} for p in quality_profiles
2740
+ ],
2741
+ }
2742
+ )
2743
+
2744
+ except Exception as e:
2745
+ # Handle specific error types
2746
+ error_msg = str(e)
2747
+ # Log full error for debugging but sanitize user-facing message
2748
+ self.logger.error(f"Connection test failed: {error_msg}")
2749
+
2750
+ if "401" in error_msg or "Unauthorized" in error_msg:
2751
+ return (
2752
+ jsonify(
2753
+ {"success": False, "message": "Unauthorized: Invalid API key"}
2754
+ ),
2755
+ 401,
2756
+ )
2757
+ elif "404" in error_msg:
2758
+ return (
2759
+ jsonify(
2760
+ {"success": False, "message": f"Not found: Check URI ({uri})"}
2761
+ ),
2762
+ 404,
2763
+ )
2764
+ elif "Connection refused" in error_msg or "ConnectionError" in error_msg:
2765
+ return (
2766
+ jsonify(
2767
+ {
2768
+ "success": False,
2769
+ "message": f"Connection refused: Cannot reach {uri}",
2770
+ }
2771
+ ),
2772
+ 503,
2773
+ )
2774
+ else:
2775
+ # Generic error message - details logged above
2776
+ return (
2777
+ jsonify({"success": False, "message": "Connection test failed"}),
2778
+ 500,
2779
+ )
2780
+
2781
+ except Exception as e:
2782
+ self.logger.error("Test connection error: %s", e)
2783
+ return jsonify({"success": False, "message": "Connection test failed"}), 500
2784
+
2785
    def _reload_all(self):
        """Tear down all Arr child processes, delete their search databases,
        and rebuild every Arr instance from the current configuration.

        ``self._rebuilding_arrs`` is set for the duration so other threads can
        detect the rebuild window; it is always cleared in the ``finally``.
        """
        # Set rebuilding flag
        self._rebuilding_arrs = True
        try:
            # Stop current processes.
            # NOTE(review): kill() is attempted before terminate(); once
            # kill() succeeds the subsequent terminate() is a no-op.
            # Presumably intentional best-effort shutdown — confirm.
            for p in list(self.manager.child_processes):
                try:
                    p.kill()
                except Exception:
                    pass
                try:
                    p.terminate()
                except Exception:
                    pass
            self.manager.child_processes.clear()

            # Delete database files for all arr instances before rebuilding
            if hasattr(self.manager, "arr_manager") and self.manager.arr_manager:
                for arr in self.manager.arr_manager.managed_objects.values():
                    try:
                        if hasattr(arr, "search_db_file") and arr.search_db_file:
                            # Delete main database file
                            if arr.search_db_file.exists():
                                self.logger.info(f"Deleting database file: {arr.search_db_file}")
                                arr.search_db_file.unlink()
                                self.logger.success(f"Deleted database file for {arr._name}")
                            # Delete WAL file (Write-Ahead Log)
                            wal_file = arr.search_db_file.with_suffix(".db-wal")
                            if wal_file.exists():
                                self.logger.info(f"Deleting WAL file: {wal_file}")
                                wal_file.unlink()
                            # Delete SHM file (Shared Memory)
                            shm_file = arr.search_db_file.with_suffix(".db-shm")
                            if shm_file.exists():
                                self.logger.info(f"Deleting SHM file: {shm_file}")
                                shm_file.unlink()
                    except Exception as e:
                        # Best-effort: a failed delete for one instance must not
                        # abort the rebuild of the others.
                        self.logger.warning(
                            f"Failed to delete database files for {arr._name}: {e}"
                        )

            # Rebuild arr manager from config and spawn fresh
            from qBitrr.arss import ArrManager

            self.manager.arr_manager = ArrManager(self.manager).build_arr_instances()
            self.manager.configure_auto_update()
            # Spawn and start new processes
            for arr in self.manager.arr_manager.managed_objects.values():
                _, procs = arr.spawn_child_processes()
                for p in procs:
                    try:
                        p.start()
                    except Exception:
                        pass
        finally:
            # Clear rebuilding flag
            self._rebuilding_arrs = False
+
2843
+ def _restart_webui(self):
2844
+ """
2845
+ Gracefully restart the WebUI server without affecting Arr processes.
2846
+ This is used when WebUI.Host, WebUI.Port, or WebUI.Token changes.
2847
+ """
2848
+ self.logger.notice("WebUI restart requested (config changed)")
2849
+
2850
+ # Reload config values
2851
+ try:
2852
+ CONFIG.load()
2853
+ except Exception as e:
2854
+ self.logger.warning(f"Failed to reload config: {e}")
2855
+
2856
+ # Update in-memory values
2857
+ new_host = CONFIG.get("WebUI.Host", fallback="0.0.0.0")
2858
+ new_port = CONFIG.get("WebUI.Port", fallback=6969)
2859
+ new_token = CONFIG.get("WebUI.Token", fallback=None)
2860
+
2861
+ # Check if restart is actually needed
2862
+ needs_restart = new_host != self.host or new_port != self.port
2863
+
2864
+ # Token can be updated without restart
2865
+ if new_token != self.token:
2866
+ self.token = new_token
2867
+ self.logger.info("WebUI token updated")
2868
+
2869
+ if not needs_restart:
2870
+ self.logger.info("WebUI Host/Port unchanged, restart not required")
2871
+ return
2872
+
2873
+ # Update host/port
2874
+ self.host = new_host
2875
+ self.port = new_port
2876
+
2877
+ # Signal restart
2878
+ self._restart_requested = True
2879
+ self._shutdown_event.set()
2880
+
2881
+ self.logger.info(f"WebUI will restart on {self.host}:{self.port}")
2882
+
2883
    def _stop_arr_instance(self, arr, category: str):
        """Stop and cleanup a single Arr instance.

        Kills the instance's search/torrent loop processes, removes its
        search database (plus SQLite ``-wal``/``-shm`` sidecar files), and
        drops it from the ArrManager's registries.
        """
        self.logger.info(f"Stopping Arr instance: {category}")

        # Stop processes
        for loop_kind in ("search", "torrent"):
            proc_attr = f"process_{loop_kind}_loop"
            process = getattr(arr, proc_attr, None)
            if process is not None:
                # NOTE(review): kill() is attempted before terminate(); once
                # kill() succeeds the later terminate() is a no-op — confirm
                # this ordering is intentional.
                try:
                    process.kill()
                except Exception:
                    pass
                try:
                    process.terminate()
                except Exception:
                    pass
                # Best-effort removal; the process may not be tracked.
                try:
                    self.manager.child_processes.remove(process)
                except Exception:
                    pass
                self.logger.debug(f"Stopped {loop_kind} process for {category}")

        # Delete database files
        try:
            if hasattr(arr, "search_db_file") and arr.search_db_file:
                if arr.search_db_file.exists():
                    self.logger.info(f"Deleting database file: {arr.search_db_file}")
                    arr.search_db_file.unlink()
                    self.logger.success(
                        f"Deleted database file for {getattr(arr, '_name', category)}"
                    )
                # Delete WAL and SHM files
                for suffix in (".db-wal", ".db-shm"):
                    aux_file = arr.search_db_file.with_suffix(suffix)
                    if aux_file.exists():
                        self.logger.debug(f"Deleting auxiliary file: {aux_file}")
                        aux_file.unlink()
        except Exception as e:
            # Non-fatal: deregistration below still proceeds.
            self.logger.warning(
                f"Failed to delete database files for {getattr(arr, '_name', category)}: {e}"
            )

        # Remove from managed_objects and the manager's bookkeeping sets.
        self.manager.arr_manager.managed_objects.pop(category, None)
        self.manager.arr_manager.groups.discard(getattr(arr, "_name", ""))
        self.manager.arr_manager.uris.discard(getattr(arr, "uri", ""))
        self.manager.arr_manager.arr_categories.discard(category)

        self.logger.success(f"Stopped and cleaned up Arr instance: {category}")
+
2934
+ def _start_arr_instance(self, instance_name: str):
2935
+ """Create and start a single Arr instance."""
2936
+ self.logger.info(f"Starting Arr instance: {instance_name}")
2937
+
2938
+ # Check if instance is managed
2939
+ if not CONFIG.get(f"{instance_name}.Managed", fallback=False):
2940
+ self.logger.info(f"Instance {instance_name} is not managed, skipping")
2941
+ return
2942
+
2943
+ # Determine client class based on name
2944
+ client_cls = None
2945
+ if re.match(r"^(Rad|rad)arr", instance_name):
2946
+ from pyarr import RadarrAPI
2947
+
2948
+ client_cls = RadarrAPI
2949
+ elif re.match(r"^(Son|son|Anim|anim)arr", instance_name):
2950
+ from pyarr import SonarrAPI
2951
+
2952
+ client_cls = SonarrAPI
2953
+ elif re.match(r"^(Lid|lid)arr", instance_name):
2954
+ from pyarr import LidarrAPI
2955
+
2956
+ client_cls = LidarrAPI
2957
+ else:
2958
+ self.logger.error(f"Unknown Arr type for instance: {instance_name}")
2959
+ return
2960
+
2961
+ try:
2962
+ # Create new Arr instance
2963
+ from qBitrr.arss import Arr
2964
+ from qBitrr.errors import SkipException
2965
+
2966
+ new_arr = Arr(instance_name, self.manager.arr_manager, client_cls=client_cls)
2967
+
2968
+ # Register in manager
2969
+ self.manager.arr_manager.groups.add(instance_name)
2970
+ self.manager.arr_manager.uris.add(new_arr.uri)
2971
+ self.manager.arr_manager.managed_objects[new_arr.category] = new_arr
2972
+ self.manager.arr_manager.arr_categories.add(new_arr.category)
2973
+
2974
+ # Spawn and start processes
2975
+ _, procs = new_arr.spawn_child_processes()
2976
+ for p in procs:
2977
+ try:
2978
+ p.start()
2979
+ self.logger.debug(f"Started process (PID: {p.pid}) for {instance_name}")
2980
+ except Exception as e:
2981
+ self.logger.error(f"Failed to start process for {instance_name}: {e}")
2982
+
2983
+ self.logger.success(
2984
+ f"Started Arr instance: {instance_name} (category: {new_arr.category})"
2985
+ )
2986
+
2987
+ except SkipException:
2988
+ self.logger.info(f"Instance {instance_name} skipped (not managed or disabled)")
2989
+ except Exception as e:
2990
+ self.logger.error(f"Failed to start Arr instance {instance_name}: {e}", exc_info=True)
2991
+
2992
+ def _reload_arr_instance(self, instance_name: str):
2993
+ """Reload a single Arr instance without affecting others."""
2994
+ self.logger.notice(f"Reloading Arr instance: {instance_name}")
2995
+
2996
+ if not hasattr(self.manager, "arr_manager") or not self.manager.arr_manager:
2997
+ self.logger.warning("Cannot reload Arr instance: ArrManager not initialized")
2998
+ return
2999
+
3000
+ managed_objects = self.manager.arr_manager.managed_objects
3001
+
3002
+ # Find the instance by name (key is category, so search by _name attribute)
3003
+ old_arr = None
3004
+ old_category = None
3005
+ for category, arr in list(managed_objects.items()):
3006
+ if getattr(arr, "_name", None) == instance_name:
3007
+ old_arr = arr
3008
+ old_category = category
3009
+ break
3010
+
3011
+ # Check if instance exists in config
3012
+ instance_exists_in_config = instance_name in CONFIG.sections()
3013
+
3014
+ # Handle deletion case
3015
+ if not instance_exists_in_config:
3016
+ if old_arr:
3017
+ self.logger.info(f"Instance {instance_name} removed from config, stopping...")
3018
+ self._stop_arr_instance(old_arr, old_category)
3019
+ else:
3020
+ self.logger.debug(f"Instance {instance_name} not found in config or memory")
3021
+ return
3022
+
3023
+ # Handle update/addition
3024
+ if old_arr:
3025
+ # Update existing - stop old processes first
3026
+ self.logger.info(f"Updating existing Arr instance: {instance_name}")
3027
+ self._stop_arr_instance(old_arr, old_category)
3028
+ else:
3029
+ self.logger.info(f"Adding new Arr instance: {instance_name}")
3030
+
3031
+ # Small delay to ensure cleanup completes
3032
+ time.sleep(0.5)
3033
+
3034
+ # Create new instance
3035
+ self._start_arr_instance(instance_name)
3036
+
3037
+ self.logger.success(f"Successfully reloaded Arr instance: {instance_name}")
3038
+
3039
+ def start(self):
3040
+ if self._thread and self._thread.is_alive():
3041
+ self.logger.debug("WebUI already running on %s:%s", self.host, self.port)
3042
+ return
3043
+ self.logger.notice("Starting WebUI on %s:%s", self.host, self.port)
3044
+ self._thread = threading.Thread(target=self._serve, name="WebUI", daemon=True)
3045
+ self._thread.start()
3046
+ self.logger.success("WebUI thread started (name=%s)", self._thread.name)
3047
+
3048
+ def _serve(self):
3049
+ try:
3050
+ # Reset shutdown event at start
3051
+ self._shutdown_event.clear()
3052
+
3053
+ if self._should_use_dev_server():
3054
+ self.logger.info("Using Flask development server for WebUI")
3055
+ # Flask dev server - will exit on KeyboardInterrupt
3056
+ try:
3057
+ self.app.run(
3058
+ host=self.host,
3059
+ port=self.port,
3060
+ debug=False,
3061
+ use_reloader=False,
3062
+ threaded=True,
3063
+ )
3064
+ except (KeyboardInterrupt, SystemExit):
3065
+ pass
3066
+ return
3067
+
3068
+ try:
3069
+ from waitress import serve as waitress_serve
3070
+ except Exception:
3071
+ self.logger.warning(
3072
+ "Waitress is unavailable; falling back to Flask development server. "
3073
+ "Install the 'waitress' extra or set QBITRR_USE_DEV_SERVER=1 to silence this message."
3074
+ )
3075
+ self.app.run(host=self.host, port=self.port, debug=False, use_reloader=False)
3076
+ return
3077
+
3078
+ self.logger.info("Using Waitress WSGI server for WebUI")
3079
+
3080
+ # For graceful restart capability, we need to use waitress_serve with channels
3081
+ # However, for now we'll use the simpler approach and just run the server
3082
+ # Restart capability will require stopping the entire process
3083
+ # Use poll() instead of select() to avoid file descriptor limit issues
3084
+ waitress_serve(
3085
+ self.app,
3086
+ host=self.host,
3087
+ port=self.port,
3088
+ ident="qBitrr-WebUI",
3089
+ asyncore_use_poll=True,
3090
+ )
3091
+
3092
+ except KeyboardInterrupt:
3093
+ self.logger.info("WebUI interrupted")
3094
+ except Exception:
3095
+ self.logger.exception("WebUI server terminated unexpectedly")
3096
+ finally:
3097
+ self._server = None
3098
+
3099
+ # If restart was requested, start a new server
3100
+ if self._restart_requested:
3101
+ self._restart_requested = False
3102
+ self.logger.info("Restarting WebUI server...")
3103
+ time.sleep(0.5) # Brief pause
3104
+ self.start() # Restart
3105
+
3106
+ def _should_use_dev_server(self) -> bool:
3107
+ if self._use_dev_server is not None:
3108
+ return self._use_dev_server
3109
+ override = os.environ.get("QBITRR_USE_DEV_SERVER", "")
3110
+ if override:
3111
+ self._use_dev_server = override.strip().lower() not in {"0", "false", "no", "off"}
3112
+ return self._use_dev_server
3113
+ self._use_dev_server = False
3114
+ return self._use_dev_server