qBitrr2 4.10.15__py3-none-any.whl → 5.4.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. qBitrr/arss.py +2127 -850
  2. qBitrr/auto_update.py +382 -0
  3. qBitrr/bundled_data.py +3 -2
  4. qBitrr/config.py +20 -3
  5. qBitrr/db_lock.py +79 -0
  6. qBitrr/env_config.py +19 -7
  7. qBitrr/gen_config.py +286 -26
  8. qBitrr/logger.py +87 -3
  9. qBitrr/main.py +453 -101
  10. qBitrr/search_activity_store.py +88 -0
  11. qBitrr/static/assets/ArrView.js +2 -0
  12. qBitrr/static/assets/ArrView.js.map +1 -0
  13. qBitrr/static/assets/ConfigView.js +4 -0
  14. qBitrr/static/assets/ConfigView.js.map +1 -0
  15. qBitrr/static/assets/LogsView.js +230 -0
  16. qBitrr/static/assets/LogsView.js.map +1 -0
  17. qBitrr/static/assets/ProcessesView.js +2 -0
  18. qBitrr/static/assets/ProcessesView.js.map +1 -0
  19. qBitrr/static/assets/app.css +1 -0
  20. qBitrr/static/assets/app.js +11 -0
  21. qBitrr/static/assets/app.js.map +1 -0
  22. qBitrr/static/assets/build.svg +3 -0
  23. qBitrr/static/assets/check-mark.svg +5 -0
  24. qBitrr/static/assets/close.svg +4 -0
  25. qBitrr/static/assets/download.svg +5 -0
  26. qBitrr/static/assets/gear.svg +5 -0
  27. qBitrr/static/assets/lidarr.svg +1 -0
  28. qBitrr/static/assets/live-streaming.svg +8 -0
  29. qBitrr/static/assets/log.svg +3 -0
  30. qBitrr/static/assets/plus.svg +4 -0
  31. qBitrr/static/assets/process.svg +15 -0
  32. qBitrr/static/assets/react-select.esm.js +14 -0
  33. qBitrr/static/assets/react-select.esm.js.map +1 -0
  34. qBitrr/static/assets/refresh-arrow.svg +3 -0
  35. qBitrr/static/assets/table.js +23 -0
  36. qBitrr/static/assets/table.js.map +1 -0
  37. qBitrr/static/assets/trash.svg +8 -0
  38. qBitrr/static/assets/up-arrow.svg +3 -0
  39. qBitrr/static/assets/useInterval.js +2 -0
  40. qBitrr/static/assets/useInterval.js.map +1 -0
  41. qBitrr/static/assets/vendor.js +33 -0
  42. qBitrr/static/assets/vendor.js.map +1 -0
  43. qBitrr/static/assets/visibility.svg +9 -0
  44. qBitrr/static/index.html +47 -0
  45. qBitrr/static/manifest.json +23 -0
  46. qBitrr/static/sw.js +105 -0
  47. qBitrr/static/vite.svg +1 -0
  48. qBitrr/tables.py +44 -0
  49. qBitrr/utils.py +82 -15
  50. qBitrr/versioning.py +136 -0
  51. qBitrr/webui.py +2612 -0
  52. qbitrr2-5.4.5.dist-info/METADATA +1116 -0
  53. qbitrr2-5.4.5.dist-info/RECORD +61 -0
  54. {qbitrr2-4.10.15.dist-info → qbitrr2-5.4.5.dist-info}/WHEEL +1 -1
  55. qbitrr2-4.10.15.dist-info/METADATA +0 -239
  56. qbitrr2-4.10.15.dist-info/RECORD +0 -19
  57. {qbitrr2-4.10.15.dist-info → qbitrr2-5.4.5.dist-info}/entry_points.txt +0 -0
  58. {qbitrr2-4.10.15.dist-info → qbitrr2-5.4.5.dist-info/licenses}/LICENSE +0 -0
  59. {qbitrr2-4.10.15.dist-info → qbitrr2-5.4.5.dist-info}/top_level.txt +0 -0
qBitrr/arss.py CHANGED
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import atexit
3
4
  import contextlib
4
5
  import itertools
5
6
  import logging
@@ -19,9 +20,9 @@ import qbittorrentapi
19
20
  import qbittorrentapi.exceptions
20
21
  import requests
21
22
  from packaging import version as version_parser
22
- from peewee import SqliteDatabase
23
- from pyarr import RadarrAPI, SonarrAPI
24
- from pyarr.exceptions import PyarrResourceNotFound
23
+ from peewee import Model, SqliteDatabase
24
+ from pyarr import LidarrAPI, RadarrAPI, SonarrAPI
25
+ from pyarr.exceptions import PyarrResourceNotFound, PyarrServerError
25
26
  from pyarr.types import JsonObject
26
27
  from qbittorrentapi import TorrentDictionary, TorrentStates
27
28
  from ujson import JSONDecodeError
@@ -31,7 +32,6 @@ from qBitrr.config import (
31
32
  AUTO_PAUSE_RESUME,
32
33
  COMPLETED_DOWNLOAD_FOLDER,
33
34
  CONFIG,
34
- ENABLE_LOGS,
35
35
  FAILED_CATEGORY,
36
36
  FREE_SPACE,
37
37
  FREE_SPACE_FOLDER,
@@ -51,9 +51,16 @@ from qBitrr.errors import (
51
51
  SkipException,
52
52
  UnhandledError,
53
53
  )
54
- from qBitrr.home_path import HOME_PATH
55
54
  from qBitrr.logger import run_logs
55
+ from qBitrr.search_activity_store import (
56
+ clear_search_activity,
57
+ fetch_search_activities,
58
+ record_search_activity,
59
+ )
56
60
  from qBitrr.tables import (
61
+ AlbumFilesModel,
62
+ AlbumQueueModel,
63
+ ArtistFilesModel,
57
64
  EpisodeFilesModel,
58
65
  EpisodeQueueModel,
59
66
  FilesQueued,
@@ -61,22 +68,65 @@ from qBitrr.tables import (
61
68
  MoviesFilesModel,
62
69
  SeriesFilesModel,
63
70
  TorrentLibrary,
71
+ TrackFilesModel,
64
72
  )
65
73
  from qBitrr.utils import (
66
74
  ExpiringSet,
67
75
  absolute_file_paths,
76
+ format_bytes,
68
77
  has_internet,
69
78
  parse_size,
70
79
  validate_and_return_torrent_file,
80
+ with_retry,
71
81
  )
72
82
 
83
+
84
+ def _mask_secret(secret: str | None) -> str:
85
+ if not secret:
86
+ return ""
87
+ return "[redacted]"
88
+
89
+
90
+ def _normalize_media_status(value: int | str | None) -> str:
91
+ """Normalise Overseerr media status values across API versions."""
92
+ int_mapping = {
93
+ 1: "UNKNOWN",
94
+ 2: "PENDING",
95
+ 3: "PROCESSING",
96
+ 4: "PARTIALLY_AVAILABLE",
97
+ 5: "AVAILABLE",
98
+ 6: "DELETED",
99
+ }
100
+ if value is None:
101
+ return "UNKNOWN"
102
+ if isinstance(value, str):
103
+ token = value.strip().upper().replace("-", "_").replace(" ", "_")
104
+ # Newer Overseerr builds can return strings such as "PARTIALLY_AVAILABLE"
105
+ return token or "UNKNOWN"
106
+ try:
107
+ return int_mapping.get(int(value), "UNKNOWN")
108
+ except (TypeError, ValueError):
109
+ return "UNKNOWN"
110
+
111
+
112
+ def _is_media_available(status: str) -> bool:
113
+ return status in {"AVAILABLE", "DELETED"}
114
+
115
+
116
+ def _is_media_processing(status: str) -> bool:
117
+ return status in {"PROCESSING", "PARTIALLY_AVAILABLE"}
118
+
119
+
73
120
  if TYPE_CHECKING:
74
121
  from qBitrr.main import qBitManager
75
122
 
76
123
 
77
124
  class Arr:
78
125
  def __init__(
79
- self, name: str, manager: ArrManager, client_cls: type[Callable | RadarrAPI | SonarrAPI]
126
+ self,
127
+ name: str,
128
+ manager: ArrManager,
129
+ client_cls: type[Callable | RadarrAPI | SonarrAPI | LidarrAPI],
80
130
  ):
81
131
  if name in manager.groups:
82
132
  raise OSError(f"Group '{name}' has already been registered.")
@@ -94,19 +144,7 @@ class Arr:
94
144
  self.manager = manager
95
145
  self._LOG_LEVEL = self.manager.qbit_manager.logger.level
96
146
  self.logger = logging.getLogger(f"qBitrr.{self._name}")
97
- if ENABLE_LOGS:
98
- logs_folder = HOME_PATH.joinpath("logs")
99
- logs_folder.mkdir(parents=True, exist_ok=True)
100
- logs_folder.chmod(mode=0o777)
101
- logfile = logs_folder.joinpath(self._name + ".log")
102
- if pathlib.Path(logfile).is_file():
103
- logold = logs_folder.joinpath(self._name + ".log.old")
104
- if pathlib.Path(logold).exists():
105
- logold.unlink()
106
- logfile.rename(logold)
107
- fh = logging.FileHandler(logfile)
108
- self.logger.addHandler(fh)
109
- run_logs(self.logger)
147
+ run_logs(self.logger, self._name)
110
148
 
111
149
  if not QBIT_DISABLED:
112
150
  categories = self.manager.qbit_manager.client.torrent_categories.categories
@@ -191,19 +229,33 @@ class Arr:
191
229
  self.seeding_mode_global_bad_tracker_msg = CONFIG.get(
192
230
  f"{name}.Torrent.SeedingMode.RemoveTrackerWithMessage", fallback=[]
193
231
  )
232
+ if isinstance(self.seeding_mode_global_bad_tracker_msg, str):
233
+ self.seeding_mode_global_bad_tracker_msg = [self.seeding_mode_global_bad_tracker_msg]
234
+ else:
235
+ self.seeding_mode_global_bad_tracker_msg = list(
236
+ self.seeding_mode_global_bad_tracker_msg
237
+ )
194
238
 
195
239
  self.monitored_trackers = CONFIG.get(f"{name}.Torrent.Trackers", fallback=[])
196
240
  self._remove_trackers_if_exists: set[str] = {
197
- i.get("URI") for i in self.monitored_trackers if i.get("RemoveIfExists") is True
241
+ uri
242
+ for i in self.monitored_trackers
243
+ if i.get("RemoveIfExists") is True and (uri := (i.get("URI") or "").strip())
198
244
  }
199
245
  self._monitored_tracker_urls: set[str] = {
200
- r
246
+ uri
201
247
  for i in self.monitored_trackers
202
- if not (r := i.get("URI")) not in self._remove_trackers_if_exists
248
+ if (uri := (i.get("URI") or "").strip()) and uri not in self._remove_trackers_if_exists
203
249
  }
204
250
  self._add_trackers_if_missing: set[str] = {
205
- i.get("URI") for i in self.monitored_trackers if i.get("AddTrackerIfMissing") is True
251
+ uri
252
+ for i in self.monitored_trackers
253
+ if i.get("AddTrackerIfMissing") is True and (uri := (i.get("URI") or "").strip())
206
254
  }
255
+ self._normalized_bad_tracker_msgs: set[str] = {
256
+ msg.lower() for msg in self.seeding_mode_global_bad_tracker_msg if isinstance(msg, str)
257
+ }
258
+
207
259
  if (
208
260
  self.auto_delete is True
209
261
  and not self.completed_folder.parent.exists()
@@ -250,7 +302,7 @@ class Arr:
250
302
 
251
303
  self.do_not_remove_slow = CONFIG.get(f"{name}.Torrent.DoNotRemoveSlow", fallback=False)
252
304
  self.re_search_stalled = CONFIG.get(f"{name}.Torrent.ReSearchStalled", fallback=False)
253
- self.stalled_delay = CONFIG.get(f"{name}.Torrent.StalledDelay", fallback=0)
305
+ self.stalled_delay = CONFIG.get(f"{name}.Torrent.StalledDelay", fallback=15)
254
306
  self.allowed_stalled = True if self.stalled_delay != -1 else False
255
307
 
256
308
  self.search_current_year = None
@@ -258,6 +310,7 @@ class Arr:
258
310
  self._delta = 1
259
311
  else:
260
312
  self._delta = -1
313
+
261
314
  self._app_data_folder = APPDATA_FOLDER
262
315
  self.search_db_file = self._app_data_folder.joinpath(f"{self._name}.db")
263
316
 
@@ -267,7 +320,14 @@ class Arr:
267
320
  self.overseerr_requests = CONFIG.get(
268
321
  f"{name}.EntrySearch.Overseerr.SearchOverseerrRequests", fallback=False
269
322
  )
270
- self.series_search = CONFIG.get(f"{name}.EntrySearch.SearchBySeries", fallback=False)
323
+ # SearchBySeries can be: True (always series), False (always episode), or "smart" (automatic)
324
+ series_search_config = CONFIG.get(f"{name}.EntrySearch.SearchBySeries", fallback=False)
325
+ if isinstance(series_search_config, str) and series_search_config.lower() == "smart":
326
+ self.series_search = "smart"
327
+ elif series_search_config in (True, "true", "True", "TRUE", 1):
328
+ self.series_search = True
329
+ else:
330
+ self.series_search = False
271
331
  if self.ombi_search_requests:
272
332
  self.ombi_uri = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiURI")
273
333
  self.ombi_api_key = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiAPIKey")
@@ -339,6 +399,18 @@ class Arr:
339
399
  self.type = "sonarr"
340
400
  elif isinstance(self.client, RadarrAPI):
341
401
  self.type = "radarr"
402
+ elif isinstance(self.client, LidarrAPI):
403
+ self.type = "lidarr"
404
+
405
+ # Disable unsupported features for Lidarr
406
+ if self.type == "lidarr":
407
+ self.search_by_year = False
408
+ self.ombi_search_requests = False
409
+ self.overseerr_requests = False
410
+ self.ombi_uri = None
411
+ self.ombi_api_key = None
412
+ self.overseerr_uri = None
413
+ self.overseerr_api_key = None
342
414
 
343
415
  try:
344
416
  version_info = self.client.get_update()
@@ -368,6 +440,10 @@ class Arr:
368
440
  if self.use_temp_for_missing:
369
441
  self.temp_quality_profile_ids = self.parse_quality_profiles()
370
442
 
443
+ # Cache for valid quality profile IDs to avoid repeated API calls and warnings
444
+ self._quality_profile_cache: dict[int, dict] = {}
445
+ self._invalid_quality_profiles: set[int] = set()
446
+
371
447
  if self.rss_sync_timer > 0:
372
448
  self.rss_sync_timer_last_checked = datetime(1970, 1, 1)
373
449
  else:
@@ -399,7 +475,12 @@ class Arr:
399
475
  self.missing_files_post_delete = set()
400
476
  self.downloads_with_bad_error_message_blocklist = set()
401
477
  self.needs_cleanup = False
402
- self.recently_queue = {}
478
+
479
+ self.last_search_description: str | None = None
480
+ self.last_search_timestamp: str | None = None
481
+ self.queue_active_count: int = 0
482
+ self.category_torrent_count: int = 0
483
+ self.free_space_tagged_count: int = 0
403
484
 
404
485
  self.timed_ignore_cache = ExpiringSet(max_age_seconds=self.ignore_torrents_younger_than)
405
486
  self.timed_ignore_cache_2 = ExpiringSet(
@@ -413,6 +494,7 @@ class Arr:
413
494
  self.cleaned_torrents = set()
414
495
  self.search_api_command = None
415
496
 
497
+ self._webui_db_loaded = False
416
498
  self.manager.completed_folders.add(self.completed_folder)
417
499
  self.manager.category_allowlist.add(self.category)
418
500
 
@@ -432,7 +514,7 @@ class Arr:
432
514
  self.re_search,
433
515
  self.category,
434
516
  self.uri,
435
- self.apikey,
517
+ _mask_secret(self.apikey),
436
518
  self.refresh_downloads_timer,
437
519
  self.rss_sync_timer,
438
520
  )
@@ -478,24 +560,34 @@ class Arr:
478
560
  self.logger.debug("Script Config: SearchOmbiRequests=%s", self.ombi_search_requests)
479
561
  if self.ombi_search_requests:
480
562
  self.logger.debug("Script Config: OmbiURI=%s", self.ombi_uri)
481
- self.logger.debug("Script Config: OmbiAPIKey=%s", self.ombi_api_key)
563
+ self.logger.debug("Script Config: OmbiAPIKey=%s", _mask_secret(self.ombi_api_key))
482
564
  self.logger.debug("Script Config: ApprovedOnly=%s", self.ombi_approved_only)
483
565
  self.logger.debug(
484
566
  "Script Config: SearchOverseerrRequests=%s", self.overseerr_requests
485
567
  )
486
568
  if self.overseerr_requests:
487
569
  self.logger.debug("Script Config: OverseerrURI=%s", self.overseerr_uri)
488
- self.logger.debug("Script Config: OverseerrAPIKey=%s", self.overseerr_api_key)
570
+ self.logger.debug(
571
+ "Script Config: OverseerrAPIKey=%s", _mask_secret(self.overseerr_api_key)
572
+ )
489
573
  if self.ombi_search_requests or self.overseerr_requests:
490
574
  self.logger.debug(
491
575
  "Script Config: SearchRequestsEvery=%s", self.search_requests_every_x_seconds
492
576
  )
493
577
 
494
- if self.type == "sonarr":
495
- if self.quality_unmet_search or self.do_upgrade_search:
496
- self.search_api_command = "SeriesSearch"
497
- else:
498
- self.search_api_command = "MissingEpisodeSearch"
578
+ if self.type == "sonarr":
579
+ if (
580
+ self.quality_unmet_search
581
+ or self.do_upgrade_search
582
+ or self.custom_format_unmet_search
583
+ or self.series_search == True
584
+ ):
585
+ self.search_api_command = "SeriesSearch"
586
+ elif self.series_search == "smart":
587
+ # In smart mode, the command will be determined dynamically
588
+ self.search_api_command = "SeriesSearch" # Default, will be overridden per search
589
+ else:
590
+ self.search_api_command = "MissingEpisodeSearch"
499
591
 
500
592
  if not QBIT_DISABLED and not TAGLESS:
501
593
  self.manager.qbit_manager.client.torrents_create_tags(
@@ -509,13 +601,83 @@ class Arr:
509
601
  elif not QBIT_DISABLED and TAGLESS:
510
602
  self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-ignored"])
511
603
  self.search_setup_completed = False
512
- self.model_file: EpisodeFilesModel | MoviesFilesModel = None
513
- self.series_file_model: SeriesFilesModel = None
514
- self.model_queue: EpisodeQueueModel | MovieQueueModel = None
515
- self.persistent_queue: FilesQueued = None
516
- self.torrents: TorrentLibrary = None
604
+ self.model_file: Model | None = None
605
+ self.series_file_model: Model | None = None
606
+ self.model_queue: Model | None = None
607
+ self.persistent_queue: Model | None = None
608
+ self.track_file_model: Model | None = None
609
+ self.torrents: TorrentLibrary | None = None
610
+ self.torrent_db: SqliteDatabase | None = None
611
+ self.db: SqliteDatabase | None = None
612
+ # Initialize search mode (and torrent tag-emulation DB in TAGLESS)
613
+ # early and fail fast if it cannot be set up.
614
+ self.register_search_mode()
615
+ atexit.register(
616
+ lambda: (
617
+ hasattr(self, "db") and self.db and not self.db.is_closed() and self.db.close()
618
+ )
619
+ )
620
+ atexit.register(
621
+ lambda: (
622
+ hasattr(self, "torrent_db")
623
+ and self.torrent_db
624
+ and not self.torrent_db.is_closed()
625
+ and self.torrent_db.close()
626
+ )
627
+ )
517
628
  self.logger.hnotice("Starting %s monitor", self._name)
518
629
 
630
+ @staticmethod
631
+ def _humanize_request_tag(tag: str) -> str | None:
632
+ if not tag:
633
+ return None
634
+ cleaned = tag.strip().strip(": ")
635
+ cleaned = cleaned.strip("[]")
636
+ upper = cleaned.upper()
637
+ if "OVERSEERR" in upper:
638
+ return "Overseerr request"
639
+ if "OMBI" in upper:
640
+ return "Ombi request"
641
+ if "PRIORITY SEARCH - TODAY" in upper:
642
+ return "Today's releases"
643
+ return cleaned or None
644
+
645
+ def _record_search_activity(
646
+ self,
647
+ description: str | None,
648
+ *,
649
+ context: str | None = None,
650
+ detail: str | None = None,
651
+ ) -> None:
652
+ self.last_search_description = description
653
+ self.last_search_timestamp = datetime.now(timezone.utc).isoformat()
654
+ if detail == "loop-complete":
655
+ detail = "Searches completed, waiting till next loop"
656
+ elif detail == "no-pending-searches":
657
+ detail = "No pending searches"
658
+ self.last_search_description = None if description is None else description
659
+ segments = [
660
+ segment for segment in (context, self.last_search_description, detail) if segment
661
+ ]
662
+ if segments and segments.count("No pending searches") > 1:
663
+ seen = set()
664
+ deduped = []
665
+ for segment in segments:
666
+ key = segment.strip().lower()
667
+ if key == "no pending searches" and key in seen:
668
+ continue
669
+ seen.add(key)
670
+ deduped.append(segment)
671
+ segments = deduped
672
+ if not segments:
673
+ return
674
+ self.last_search_description = " · ".join(segments)
675
+ record_search_activity(
676
+ str(self.category),
677
+ self.last_search_description,
678
+ self.last_search_timestamp,
679
+ )
680
+
519
681
  @property
520
682
  def is_alive(self) -> bool:
521
683
  try:
@@ -580,17 +742,29 @@ class Arr:
580
742
  if tag == "qBitrr-ignored":
581
743
  return_value = "qBitrr-ignored" in torrent.tags
582
744
  else:
583
- condition = (
584
- self.torrents.Hash == torrent.hash & self.torrents.Category == torrent.category
745
+ query = (
746
+ self.torrents.select()
747
+ .where(
748
+ (self.torrents.Hash == torrent.hash)
749
+ & (self.torrents.Category == torrent.category)
750
+ )
751
+ .execute()
752
+ )
753
+ if not query:
754
+ self.torrents.insert(
755
+ Hash=torrent.hash, Category=torrent.category
756
+ ).on_conflict_ignore().execute()
757
+ condition = (self.torrents.Hash == torrent.hash) & (
758
+ self.torrents.Category == torrent.category
585
759
  )
586
760
  if tag == "qBitrr-allowed_seeding":
587
- condition &= self.torrents.AllowedSeeding is True
761
+ condition &= self.torrents.AllowedSeeding == True
588
762
  elif tag == "qBitrr-imported":
589
- condition &= self.torrents.Imported is True
763
+ condition &= self.torrents.Imported == True
590
764
  elif tag == "qBitrr-allowed_stalled":
591
- condition &= self.torrents.AllowedStalled is True
765
+ condition &= self.torrents.AllowedStalled == True
592
766
  elif tag == "qBitrr-free_space_paused":
593
- condition &= self.torrents.FreeSpacePaused is True
767
+ condition &= self.torrents.FreeSpacePaused == True
594
768
  query = self.torrents.select().where(condition).execute()
595
769
  if query:
596
770
  return_value = True
@@ -612,13 +786,13 @@ class Arr:
612
786
  def remove_tags(self, torrent: TorrentDictionary, tags: list) -> None:
613
787
  for tag in tags:
614
788
  self.logger.trace("Removing tag %s from %s", tag, torrent.name)
615
- if TAGLESS:
789
+ if TAGLESS:
790
+ for tag in tags:
616
791
  query = (
617
792
  self.torrents.select()
618
793
  .where(
619
- self.torrents.Hash
620
- == torrent.hash & self.torrents.Category
621
- == torrent.category
794
+ (self.torrents.Hash == torrent.hash)
795
+ & (self.torrents.Category == torrent.category)
622
796
  )
623
797
  .execute()
624
798
  )
@@ -628,48 +802,48 @@ class Arr:
628
802
  ).on_conflict_ignore().execute()
629
803
  if tag == "qBitrr-allowed_seeding":
630
804
  self.torrents.update(AllowedSeeding=False).where(
631
- self.torrents.Hash
632
- == torrent.hash & self.torrents.Category
633
- == torrent.category
634
- )
805
+ (self.torrents.Hash == torrent.hash)
806
+ & (self.torrents.Category == torrent.category)
807
+ ).execute()
635
808
  elif tag == "qBitrr-imported":
636
809
  self.torrents.update(Imported=False).where(
637
- self.torrents.Hash
638
- == torrent.hash & self.torrents.Category
639
- == torrent.category
640
- )
810
+ (self.torrents.Hash == torrent.hash)
811
+ & (self.torrents.Category == torrent.category)
812
+ ).execute()
641
813
  elif tag == "qBitrr-allowed_stalled":
642
814
  self.torrents.update(AllowedStalled=False).where(
643
- self.torrents.Hash
644
- == torrent.hash & self.torrents.Category
645
- == torrent.category
646
- )
815
+ (self.torrents.Hash == torrent.hash)
816
+ & (self.torrents.Category == torrent.category)
817
+ ).execute()
647
818
  elif tag == "qBitrr-free_space_paused":
648
819
  self.torrents.update(FreeSpacePaused=False).where(
649
- self.torrents.Hash
650
- == torrent.hash & self.torrents.Category
651
- == torrent.category
652
- )
653
- else:
654
- if tag == "qBitrr-allowed_seeding":
655
- torrent.remove_tags(["qBitrr-allowed_seeding"])
656
- elif tag == "qBitrr-imported":
657
- torrent.remove_tags(["qBitrr-imported"])
658
- elif tag == "qBitrr-allowed_stalled":
659
- torrent.remove_tags(["qBitrr-allowed_stalled"])
660
- elif tag == "qBitrr-free_space_paused":
661
- torrent.remove_tags(["qBitrr-free_space_paused"])
820
+ (self.torrents.Hash == torrent.hash)
821
+ & (self.torrents.Category == torrent.category)
822
+ ).execute()
823
+ else:
824
+ with contextlib.suppress(Exception):
825
+ with_retry(
826
+ lambda: torrent.remove_tags(tags),
827
+ retries=3,
828
+ backoff=0.5,
829
+ max_backoff=3,
830
+ exceptions=(
831
+ qbittorrentapi.exceptions.APIError,
832
+ qbittorrentapi.exceptions.APIConnectionError,
833
+ requests.exceptions.RequestException,
834
+ ),
835
+ )
662
836
 
663
837
  def add_tags(self, torrent: TorrentDictionary, tags: list) -> None:
664
838
  for tag in tags:
665
839
  self.logger.trace("Adding tag %s from %s", tag, torrent.name)
666
- if TAGLESS:
840
+ if TAGLESS:
841
+ for tag in tags:
667
842
  query = (
668
843
  self.torrents.select()
669
844
  .where(
670
- self.torrents.Hash
671
- == torrent.hash & self.torrents.Category
672
- == torrent.category
845
+ (self.torrents.Hash == torrent.hash)
846
+ & (self.torrents.Category == torrent.category)
673
847
  )
674
848
  .execute()
675
849
  )
@@ -679,137 +853,138 @@ class Arr:
679
853
  ).on_conflict_ignore().execute()
680
854
  if tag == "qBitrr-allowed_seeding":
681
855
  self.torrents.update(AllowedSeeding=True).where(
682
- self.torrents.Hash
683
- == torrent.hash & self.torrents.Category
684
- == torrent.category
685
- )
856
+ (self.torrents.Hash == torrent.hash)
857
+ & (self.torrents.Category == torrent.category)
858
+ ).execute()
686
859
  elif tag == "qBitrr-imported":
687
860
  self.torrents.update(Imported=True).where(
688
- self.torrents.Hash
689
- == torrent.hash & self.torrents.Category
690
- == torrent.category
691
- )
861
+ (self.torrents.Hash == torrent.hash)
862
+ & (self.torrents.Category == torrent.category)
863
+ ).execute()
692
864
  elif tag == "qBitrr-allowed_stalled":
693
865
  self.torrents.update(AllowedStalled=True).where(
694
- self.torrents.Hash
695
- == torrent.hash & self.torrents.Category
696
- == torrent.category
697
- )
866
+ (self.torrents.Hash == torrent.hash)
867
+ & (self.torrents.Category == torrent.category)
868
+ ).execute()
698
869
  elif tag == "qBitrr-free_space_paused":
699
870
  self.torrents.update(FreeSpacePaused=True).where(
700
- self.torrents.Hash
701
- == torrent.hash & self.torrents.Category
702
- == torrent.category
703
- )
704
- else:
705
- if tag == "qBitrr-allowed_seeding":
706
- torrent.add_tags(["qBitrr-allowed_seeding"])
707
- elif tag == "qBitrr-imported":
708
- torrent.add_tags(["qBitrr-imported"])
709
- elif tag == "qBitrr-allowed_stalled":
710
- torrent.add_tags(["qBitrr-allowed_stalled"])
711
- elif tag == "qBitrr-free_space_paused":
712
- torrent.add_tags(["qBitrr-free_space_paused"])
713
-
714
- def _get_models(
715
- self,
716
- ) -> tuple[
717
- type[EpisodeFilesModel] | type[MoviesFilesModel],
718
- type[EpisodeQueueModel] | type[MovieQueueModel],
719
- type[SeriesFilesModel] | None,
720
- type[TorrentLibrary] | None,
721
- ]:
722
- if self.type == "sonarr":
723
- if self.series_search:
724
- return (
725
- EpisodeFilesModel,
726
- EpisodeQueueModel,
727
- SeriesFilesModel,
728
- TorrentLibrary if TAGLESS else None,
729
- )
730
- return EpisodeFilesModel, EpisodeQueueModel, None, TorrentLibrary if TAGLESS else None
731
- elif self.type == "radarr":
732
- return MoviesFilesModel, MovieQueueModel, None, TorrentLibrary if TAGLESS else None
871
+ (self.torrents.Hash == torrent.hash)
872
+ & (self.torrents.Category == torrent.category)
873
+ ).execute()
733
874
  else:
734
- raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
875
+ with contextlib.suppress(Exception):
876
+ with_retry(
877
+ lambda: torrent.add_tags(tags),
878
+ retries=3,
879
+ backoff=0.5,
880
+ max_backoff=3,
881
+ exceptions=(
882
+ qbittorrentapi.exceptions.APIError,
883
+ qbittorrentapi.exceptions.APIConnectionError,
884
+ requests.exceptions.RequestException,
885
+ ),
886
+ )
735
887
 
736
888
  def _get_oversee_requests_all(self) -> dict[str, set]:
737
889
  try:
738
- key = "approved" if self.overseerr_approved_only else "unavailable"
739
890
  data = defaultdict(set)
740
- response = self.session.get(
741
- url=f"{self.overseerr_uri}/api/v1/request",
742
- headers={"X-Api-Key": self.overseerr_api_key},
743
- params={"take": 100, "skip": 0, "sort": "added", "filter": key},
744
- timeout=2,
745
- )
746
- response = response.json().get("results", [])
891
+ key = "approved" if self.overseerr_approved_only else "unavailable"
892
+ take = 100
893
+ skip = 0
747
894
  type_ = None
748
895
  if self.type == "radarr":
749
896
  type_ = "movie"
750
897
  elif self.type == "sonarr":
751
898
  type_ = "tv"
752
899
  _now = datetime.now()
753
- for entry in response:
754
- type__ = entry.get("type")
755
- if type__ == "movie":
756
- id__ = entry.get("media", {}).get("tmdbId")
757
- elif type__ == "tv":
758
- id__ = entry.get("media", {}).get("tvdbId")
759
- if type_ != type__:
760
- continue
761
- if self.overseerr_is_4k and entry.get("is4k"):
762
- if self.overseerr_approved_only:
763
- if entry.get("media", {}).get("status4k") != 3:
900
+ while True:
901
+ response = self.session.get(
902
+ url=f"{self.overseerr_uri}/api/v1/request",
903
+ headers={"X-Api-Key": self.overseerr_api_key},
904
+ params={"take": take, "skip": skip, "sort": "added", "filter": key},
905
+ timeout=5,
906
+ )
907
+ response.raise_for_status()
908
+ payload = response.json()
909
+ results = []
910
+ if isinstance(payload, list):
911
+ results = payload
912
+ elif isinstance(payload, dict):
913
+ if isinstance(payload.get("results"), list):
914
+ results = payload["results"]
915
+ elif isinstance(payload.get("data"), list):
916
+ results = payload["data"]
917
+ if not results:
918
+ break
919
+ for entry in results:
920
+ type__ = entry.get("type")
921
+ if type__ == "movie":
922
+ id__ = entry.get("media", {}).get("tmdbId")
923
+ elif type__ == "tv":
924
+ id__ = entry.get("media", {}).get("tvdbId")
925
+ else:
926
+ id__ = None
927
+ if not id__ or type_ != type__:
928
+ continue
929
+ media = entry.get("media") or {}
930
+ status_key = "status4k" if entry.get("is4k") else "status"
931
+ status_value = _normalize_media_status(media.get(status_key))
932
+ if entry.get("is4k"):
933
+ if not self.overseerr_is_4k:
764
934
  continue
765
- elif entry.get("media", {}).get("status4k") == 5:
935
+ elif self.overseerr_is_4k:
766
936
  continue
767
- elif not self.overseerr_is_4k and not entry.get("is4k"):
768
937
  if self.overseerr_approved_only:
769
- if entry.get("media", {}).get("status") != 3:
938
+ if not _is_media_processing(status_value):
770
939
  continue
771
- elif entry.get("media", {}).get("status") == 5:
772
- continue
773
- else:
774
- continue
775
- if id__ in self.overseerr_requests_release_cache:
776
- date = self.overseerr_requests_release_cache[id__]
777
- else:
778
- date = datetime(day=1, month=1, year=1970)
779
- date_string_backup = f"{_now.year}-{_now.month:02}-{_now.day:02}"
780
- date_string = None
781
- try:
782
- if type_ == "movie":
783
- _entry_data = self.session.get(
784
- url=f"{self.overseerr_uri}/api/v1/movies/{id__}",
785
- headers={"X-Api-Key": self.overseerr_api_key},
786
- timeout=2,
787
- )
788
- date_string = _entry_data.json().get("releaseDate")
789
- elif type__ == "tv":
790
- _entry_data = self.session.get(
791
- url=f"{self.overseerr_uri}/api/v1/tv/{id__}",
792
- headers={"X-Api-Key": self.overseerr_api_key},
793
- timeout=2,
794
- )
795
- # We don't do granular (episode/season) searched here so no need to
796
- # suppose them
797
- date_string = _entry_data.json().get("firstAirDate")
798
- if not date_string:
799
- date_string = date_string_backup
800
- date = datetime.strptime(date_string, "%Y-%m-%d")
801
- if date > _now:
940
+ else:
941
+ if _is_media_available(status_value):
802
942
  continue
803
- self.overseerr_requests_release_cache[id__] = date
804
- except Exception as e:
805
- self.logger.warning("Failed to query release date from Overseerr: %s", e)
806
- if media := entry.get("media"):
807
- if imdbId := media.get("imdbId"):
808
- data["ImdbId"].add(imdbId)
809
- if self.type == "sonarr" and (tvdbId := media.get("tvdbId")):
810
- data["TvdbId"].add(tvdbId)
811
- elif self.type == "radarr" and (tmdbId := media.get("tmdbId")):
812
- data["TmdbId"].add(tmdbId)
943
+ if id__ in self.overseerr_requests_release_cache:
944
+ date = self.overseerr_requests_release_cache[id__]
945
+ else:
946
+ date = datetime(day=1, month=1, year=1970)
947
+ date_string_backup = f"{_now.year}-{_now.month:02}-{_now.day:02}"
948
+ date_string = None
949
+ try:
950
+ if type_ == "movie":
951
+ _entry = self.session.get(
952
+ url=f"{self.overseerr_uri}/api/v1/movies/{id__}",
953
+ headers={"X-Api-Key": self.overseerr_api_key},
954
+ timeout=5,
955
+ )
956
+ _entry.raise_for_status()
957
+ date_string = _entry.json().get("releaseDate")
958
+ elif type__ == "tv":
959
+ _entry = self.session.get(
960
+ url=f"{self.overseerr_uri}/api/v1/tv/{id__}",
961
+ headers={"X-Api-Key": self.overseerr_api_key},
962
+ timeout=5,
963
+ )
964
+ _entry.raise_for_status()
965
+ # We don't do granular (episode/season) searched here so no need to
966
+ # suppose them
967
+ date_string = _entry.json().get("firstAirDate")
968
+ if not date_string:
969
+ date_string = date_string_backup
970
+ date = datetime.strptime(date_string[:10], "%Y-%m-%d")
971
+ if date > _now:
972
+ continue
973
+ self.overseerr_requests_release_cache[id__] = date
974
+ except Exception as e:
975
+ self.logger.warning(
976
+ "Failed to query release date from Overseerr: %s", e
977
+ )
978
+ if media:
979
+ if imdbId := media.get("imdbId"):
980
+ data["ImdbId"].add(imdbId)
981
+ if self.type == "sonarr" and (tvdbId := media.get("tvdbId")):
982
+ data["TvdbId"].add(tvdbId)
983
+ elif self.type == "radarr" and (tmdbId := media.get("tmdbId")):
984
+ data["TmdbId"].add(tmdbId)
985
+ if len(results) < take:
986
+ break
987
+ skip += take
813
988
  self._temp_overseer_request_cache = data
814
989
  except requests.exceptions.ConnectionError:
815
990
  self.logger.warning("Couldn't connect to Overseerr")
@@ -847,15 +1022,24 @@ class Arr:
847
1022
  extras = "/api/v1/Request/movie/total"
848
1023
  else:
849
1024
  raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
1025
+ total = 0
850
1026
  try:
851
1027
  response = self.session.get(
852
- url=f"{self.ombi_uri}{extras}", headers={"ApiKey": self.ombi_api_key}
1028
+ url=f"{self.ombi_uri}{extras}", headers={"ApiKey": self.ombi_api_key}, timeout=5
853
1029
  )
1030
+ response.raise_for_status()
1031
+ payload = response.json()
1032
+ if isinstance(payload, dict):
1033
+ for key in ("total", "count", "totalCount", "totalRecords", "pending", "value"):
1034
+ value = payload.get(key)
1035
+ if isinstance(value, int):
1036
+ total = value
1037
+ break
1038
+ elif isinstance(payload, list):
1039
+ total = len(payload)
854
1040
  except Exception as e:
855
1041
  self.logger.exception(e, exc_info=sys.exc_info())
856
- return 0
857
- else:
858
- return response.json()
1042
+ return total
859
1043
 
860
1044
  def _get_ombi_requests(self) -> list[dict]:
861
1045
  if self.type == "sonarr":
@@ -866,9 +1050,18 @@ class Arr:
866
1050
  raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
867
1051
  try:
868
1052
  response = self.session.get(
869
- url=f"{self.ombi_uri}{extras}", headers={"ApiKey": self.ombi_api_key}
1053
+ url=f"{self.ombi_uri}{extras}", headers={"ApiKey": self.ombi_api_key}, timeout=5
870
1054
  )
871
- return response.json()
1055
+ response.raise_for_status()
1056
+ payload = response.json()
1057
+ if isinstance(payload, list):
1058
+ return payload
1059
+ if isinstance(payload, dict):
1060
+ for key in ("result", "results", "requests", "data", "items"):
1061
+ value = payload.get(key)
1062
+ if isinstance(value, list):
1063
+ return value
1064
+ return []
872
1065
  except Exception as e:
873
1066
  self.logger.exception(e, exc_info=sys.exc_info())
874
1067
  return []
@@ -900,7 +1093,18 @@ class Arr:
900
1093
  self.logger.debug(
901
1094
  "Pausing %s (%s)", i, self.manager.qbit_manager.name_cache.get(i)
902
1095
  )
903
- self.manager.qbit.torrents_pause(torrent_hashes=self.pause)
1096
+ with contextlib.suppress(Exception):
1097
+ with_retry(
1098
+ lambda: self.manager.qbit.torrents_pause(torrent_hashes=self.pause),
1099
+ retries=3,
1100
+ backoff=0.5,
1101
+ max_backoff=3,
1102
+ exceptions=(
1103
+ qbittorrentapi.exceptions.APIError,
1104
+ qbittorrentapi.exceptions.APIConnectionError,
1105
+ requests.exceptions.RequestException,
1106
+ ),
1107
+ )
904
1108
  self.pause.clear()
905
1109
 
906
1110
  def _process_imports(self) -> None:
@@ -925,41 +1129,65 @@ class Arr:
925
1129
  self.sent_to_scan_hashes.add(torrent.hash)
926
1130
  try:
927
1131
  if self.type == "sonarr":
928
- while True:
929
- try:
930
- self.client.post_command(
931
- "DownloadedEpisodesScan",
932
- path=str(path),
933
- downloadClientId=torrent.hash.upper(),
934
- importMode=self.import_mode,
935
- )
936
- break
937
- except (
1132
+ with_retry(
1133
+ lambda: self.client.post_command(
1134
+ "DownloadedEpisodesScan",
1135
+ path=str(path),
1136
+ downloadClientId=torrent.hash.upper(),
1137
+ importMode=self.import_mode,
1138
+ ),
1139
+ retries=3,
1140
+ backoff=0.5,
1141
+ max_backoff=3,
1142
+ exceptions=(
938
1143
  requests.exceptions.ChunkedEncodingError,
939
1144
  requests.exceptions.ContentDecodingError,
940
1145
  requests.exceptions.ConnectionError,
941
1146
  JSONDecodeError,
942
- ):
943
- continue
1147
+ requests.exceptions.RequestException,
1148
+ ),
1149
+ )
944
1150
  self.logger.success("DownloadedEpisodesScan: %s", path)
945
1151
  elif self.type == "radarr":
946
- while True:
947
- try:
948
- self.client.post_command(
949
- "DownloadedMoviesScan",
950
- path=str(path),
951
- downloadClientId=torrent.hash.upper(),
952
- importMode=self.import_mode,
953
- )
954
- break
955
- except (
1152
+ with_retry(
1153
+ lambda: self.client.post_command(
1154
+ "DownloadedMoviesScan",
1155
+ path=str(path),
1156
+ downloadClientId=torrent.hash.upper(),
1157
+ importMode=self.import_mode,
1158
+ ),
1159
+ retries=3,
1160
+ backoff=0.5,
1161
+ max_backoff=3,
1162
+ exceptions=(
956
1163
  requests.exceptions.ChunkedEncodingError,
957
1164
  requests.exceptions.ContentDecodingError,
958
1165
  requests.exceptions.ConnectionError,
959
1166
  JSONDecodeError,
960
- ):
961
- continue
1167
+ requests.exceptions.RequestException,
1168
+ ),
1169
+ )
962
1170
  self.logger.success("DownloadedMoviesScan: %s", path)
1171
+ elif self.type == "lidarr":
1172
+ with_retry(
1173
+ lambda: self.client.post_command(
1174
+ "DownloadedAlbumsScan",
1175
+ path=str(path),
1176
+ downloadClientId=torrent.hash.upper(),
1177
+ importMode=self.import_mode,
1178
+ ),
1179
+ retries=3,
1180
+ backoff=0.5,
1181
+ max_backoff=3,
1182
+ exceptions=(
1183
+ requests.exceptions.ChunkedEncodingError,
1184
+ requests.exceptions.ContentDecodingError,
1185
+ requests.exceptions.ConnectionError,
1186
+ JSONDecodeError,
1187
+ requests.exceptions.RequestException,
1188
+ ),
1189
+ )
1190
+ self.logger.success("DownloadedAlbumsScan: %s", path)
963
1191
  except Exception as ex:
964
1192
  self.logger.error(
965
1193
  "Downloaded scan error: [%s][%s][%s][%s]",
@@ -990,8 +1218,6 @@ class Arr:
990
1218
  self.delete_from_queue(
991
1219
  id_=entry, remove_from_client=remove_from_client, blacklist=False
992
1220
  )
993
- if hash_ in self.recently_queue:
994
- del self.recently_queue[hash_]
995
1221
  object_id = self.requeue_cache.get(entry)
996
1222
  if self.re_search and object_id:
997
1223
  if self.type == "sonarr":
@@ -1141,6 +1367,48 @@ class Arr:
1141
1367
  continue
1142
1368
  if self.persistent_queue:
1143
1369
  self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
1370
+ elif self.type == "lidarr":
1371
+ self.logger.trace("Requeue cache entry: %s", object_id)
1372
+ while True:
1373
+ try:
1374
+ data = self.client.get_album(object_id)
1375
+ name = data.get("title")
1376
+ if name:
1377
+ artist_title = data.get("artist", {}).get("artistName", "")
1378
+ foreign_album_id = data.get("foreignAlbumId", "")
1379
+ self.logger.notice(
1380
+ "Re-Searching album: %s - %s | [foreignAlbumId=%s|id=%s]",
1381
+ artist_title,
1382
+ name,
1383
+ foreign_album_id,
1384
+ object_id,
1385
+ )
1386
+ else:
1387
+ self.logger.notice("Re-Searching album: %s", object_id)
1388
+ break
1389
+ except (
1390
+ requests.exceptions.ChunkedEncodingError,
1391
+ requests.exceptions.ContentDecodingError,
1392
+ requests.exceptions.ConnectionError,
1393
+ JSONDecodeError,
1394
+ AttributeError,
1395
+ ):
1396
+ continue
1397
+ if object_id in self.queue_file_ids:
1398
+ self.queue_file_ids.remove(object_id)
1399
+ while True:
1400
+ try:
1401
+ self.client.post_command("AlbumSearch", albumIds=[object_id])
1402
+ break
1403
+ except (
1404
+ requests.exceptions.ChunkedEncodingError,
1405
+ requests.exceptions.ContentDecodingError,
1406
+ requests.exceptions.ConnectionError,
1407
+ JSONDecodeError,
1408
+ ):
1409
+ continue
1410
+ if self.persistent_queue:
1411
+ self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
1144
1412
 
1145
1413
  def _process_errored(self) -> None:
1146
1414
  # Recheck all torrents marked for rechecking.
@@ -1158,10 +1426,6 @@ class Arr:
1158
1426
  to_delete_all = self.delete.union(
1159
1427
  self.missing_files_post_delete, self.downloads_with_bad_error_message_blocklist
1160
1428
  )
1161
- if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
1162
- delete_ = True
1163
- else:
1164
- delete_ = False
1165
1429
  skip_blacklist = {
1166
1430
  i.upper() for i in self.skip_blacklist.union(self.missing_files_post_delete)
1167
1431
  }
@@ -1190,7 +1454,7 @@ class Arr:
1190
1454
  del self.manager.qbit_manager.name_cache[h]
1191
1455
  if h in self.manager.qbit_manager.cache:
1192
1456
  del self.manager.qbit_manager.cache[h]
1193
- if delete_:
1457
+ if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
1194
1458
  self.missing_files_post_delete.clear()
1195
1459
  self.downloads_with_bad_error_message_blocklist.clear()
1196
1460
  self.skip_blacklist.clear()
@@ -1247,17 +1511,19 @@ class Arr:
1247
1511
  self.rss_sync_timer_last_checked is not None
1248
1512
  and self.rss_sync_timer_last_checked < now - timedelta(minutes=self.rss_sync_timer)
1249
1513
  ):
1250
- while True:
1251
- try:
1252
- self.client.post_command("RssSync")
1253
- break
1254
- except (
1514
+ with_retry(
1515
+ lambda: self.client.post_command("RssSync"),
1516
+ retries=3,
1517
+ backoff=0.5,
1518
+ max_backoff=3,
1519
+ exceptions=(
1255
1520
  requests.exceptions.ChunkedEncodingError,
1256
1521
  requests.exceptions.ContentDecodingError,
1257
1522
  requests.exceptions.ConnectionError,
1258
1523
  JSONDecodeError,
1259
- ):
1260
- continue
1524
+ requests.exceptions.RequestException,
1525
+ ),
1526
+ )
1261
1527
  self.rss_sync_timer_last_checked = now
1262
1528
 
1263
1529
  if (
@@ -1265,22 +1531,24 @@ class Arr:
1265
1531
  and self.refresh_downloads_timer_last_checked
1266
1532
  < now - timedelta(minutes=self.refresh_downloads_timer)
1267
1533
  ):
1268
- while True:
1269
- try:
1270
- self.client.post_command("RefreshMonitoredDownloads")
1271
- break
1272
- except (
1534
+ with_retry(
1535
+ lambda: self.client.post_command("RefreshMonitoredDownloads"),
1536
+ retries=3,
1537
+ backoff=0.5,
1538
+ max_backoff=3,
1539
+ exceptions=(
1273
1540
  requests.exceptions.ChunkedEncodingError,
1274
1541
  requests.exceptions.ContentDecodingError,
1275
1542
  requests.exceptions.ConnectionError,
1276
1543
  JSONDecodeError,
1277
- ):
1278
- continue
1544
+ requests.exceptions.RequestException,
1545
+ ),
1546
+ )
1279
1547
  self.refresh_downloads_timer_last_checked = now
1280
1548
 
1281
1549
  def arr_db_query_commands_count(self) -> int:
1282
1550
  search_commands = 0
1283
- if not self.search_missing:
1551
+ if not (self.search_missing or self.do_upgrade_search):
1284
1552
  return 0
1285
1553
  while True:
1286
1554
  try:
@@ -1324,11 +1592,55 @@ class Arr:
1324
1592
  ) -> Iterable[
1325
1593
  tuple[MoviesFilesModel | EpisodeFilesModel | SeriesFilesModel, bool, bool, bool, int]
1326
1594
  ]:
1327
- if self.type == "sonarr" and self.series_search:
1595
+ if self.type == "sonarr" and self.series_search == True:
1328
1596
  serieslist = self.db_get_files_series()
1329
1597
  for series in serieslist:
1330
1598
  yield series[0], series[1], series[2], series[2] is not True, len(serieslist)
1331
- elif self.type == "sonarr" and not self.series_search:
1599
+ elif self.type == "sonarr" and self.series_search == "smart":
1600
+ # Smart mode: decide dynamically based on what needs to be searched
1601
+ episodelist = self.db_get_files_episodes()
1602
+ if episodelist:
1603
+ # Group episodes by series to determine if we should search by series or episode
1604
+ series_episodes_map = {}
1605
+ for episode_entry in episodelist:
1606
+ episode = episode_entry[0]
1607
+ series_id = episode.SeriesId
1608
+ if series_id not in series_episodes_map:
1609
+ series_episodes_map[series_id] = []
1610
+ series_episodes_map[series_id].append(episode_entry)
1611
+
1612
+ # Process each series
1613
+ for series_id, episodes in series_episodes_map.items():
1614
+ if len(episodes) > 1:
1615
+ # Multiple episodes from same series - use series search (smart decision)
1616
+ self.logger.info(
1617
+ "[SMART MODE] Using series search for %s episodes from series ID %s",
1618
+ len(episodes),
1619
+ series_id,
1620
+ )
1621
+ # Create a series entry for searching
1622
+ series_model = (
1623
+ self.series_file_model.select()
1624
+ .where(self.series_file_model.EntryId == series_id)
1625
+ .first()
1626
+ )
1627
+ if series_model:
1628
+ yield series_model, episodes[0][1], episodes[0][2], True, len(
1629
+ episodelist
1630
+ )
1631
+ else:
1632
+ # Single episode - use episode search (smart decision)
1633
+ episode = episodes[0][0]
1634
+ self.logger.info(
1635
+ "[SMART MODE] Using episode search for single episode: %s S%02dE%03d",
1636
+ episode.SeriesTitle,
1637
+ episode.SeasonNumber,
1638
+ episode.EpisodeNumber,
1639
+ )
1640
+ yield episodes[0][0], episodes[0][1], episodes[0][2], False, len(
1641
+ episodelist
1642
+ )
1643
+ elif self.type == "sonarr" and self.series_search == False:
1332
1644
  episodelist = self.db_get_files_episodes()
1333
1645
  for episodes in episodelist:
1334
1646
  yield episodes[0], episodes[1], episodes[2], False, len(episodelist)
@@ -1336,6 +1648,10 @@ class Arr:
1336
1648
  movielist = self.db_get_files_movies()
1337
1649
  for movies in movielist:
1338
1650
  yield movies[0], movies[1], movies[2], False, len(movielist)
1651
+ elif self.type == "lidarr":
1652
+ albumlist = self.db_get_files_movies() # This calls the lidarr section we added
1653
+ for albums in albumlist:
1654
+ yield albums[0], albums[1], albums[2], False, len(albumlist)
1339
1655
 
1340
1656
  def db_maybe_reset_entry_searched_state(self):
1341
1657
  if self.type == "sonarr":
@@ -1343,6 +1659,8 @@ class Arr:
1343
1659
  self.db_reset__episode_searched_state()
1344
1660
  elif self.type == "radarr":
1345
1661
  self.db_reset__movie_searched_state()
1662
+ elif self.type == "lidarr":
1663
+ self.db_reset__album_searched_state()
1346
1664
  self.loop_completed = False
1347
1665
 
1348
1666
  def db_reset__series_searched_state(self):
@@ -1353,7 +1671,7 @@ class Arr:
1353
1671
  self.loop_completed and self.reset_on_completion and self.series_search
1354
1672
  ): # Only wipe if a loop completed was tagged
1355
1673
  self.series_file_model.update(Searched=False, Upgrade=False).where(
1356
- self.series_file_model.Searched is True
1674
+ self.series_file_model.Searched == True
1357
1675
  ).execute()
1358
1676
  while True:
1359
1677
  try:
@@ -1380,7 +1698,7 @@ class Arr:
1380
1698
  self.loop_completed is True and self.reset_on_completion
1381
1699
  ): # Only wipe if a loop completed was tagged
1382
1700
  self.model_file.update(Searched=False, Upgrade=False).where(
1383
- self.model_file.Searched is True
1701
+ self.model_file.Searched == True
1384
1702
  ).execute()
1385
1703
  while True:
1386
1704
  try:
@@ -1407,7 +1725,7 @@ class Arr:
1407
1725
  self.loop_completed is True and self.reset_on_completion
1408
1726
  ): # Only wipe if a loop completed was tagged
1409
1727
  self.model_file.update(Searched=False, Upgrade=False).where(
1410
- self.model_file.Searched is True
1728
+ self.model_file.Searched == True
1411
1729
  ).execute()
1412
1730
  while True:
1413
1731
  try:
@@ -1425,9 +1743,36 @@ class Arr:
1425
1743
  self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
1426
1744
  self.loop_completed = False
1427
1745
 
1746
+ def db_reset__album_searched_state(self):
1747
+ ids = []
1748
+ self.model_file: AlbumFilesModel
1749
+ if (
1750
+ self.loop_completed is True and self.reset_on_completion
1751
+ ): # Only wipe if a loop completed was tagged
1752
+ self.model_file.update(Searched=False, Upgrade=False).where(
1753
+ self.model_file.Searched == True
1754
+ ).execute()
1755
+ while True:
1756
+ try:
1757
+ artists = self.client.get_artist()
1758
+ for artist in artists:
1759
+ albums = self.client.get_album(artistId=artist["id"])
1760
+ for album in albums:
1761
+ ids.append(album["id"])
1762
+ break
1763
+ except (
1764
+ requests.exceptions.ChunkedEncodingError,
1765
+ requests.exceptions.ContentDecodingError,
1766
+ requests.exceptions.ConnectionError,
1767
+ JSONDecodeError,
1768
+ ):
1769
+ continue
1770
+ self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
1771
+ self.loop_completed = False
1772
+
1428
1773
  def db_get_files_series(self) -> list[list[SeriesFilesModel, bool, bool]] | None:
1429
1774
  entries = []
1430
- if not self.search_missing:
1775
+ if not (self.search_missing or self.do_upgrade_search):
1431
1776
  return None
1432
1777
  elif not self.series_search:
1433
1778
  return None
@@ -1439,21 +1784,18 @@ class Arr:
1439
1784
  condition &= self.model_file.Upgrade == False
1440
1785
  else:
1441
1786
  if self.quality_unmet_search and not self.custom_format_unmet_search:
1442
- condition &= (
1443
- self.model_file.Searched == False | self.model_file.QualityMet == False
1787
+ condition &= (self.model_file.Searched == False) | (
1788
+ self.model_file.QualityMet == False
1444
1789
  )
1445
1790
  elif not self.quality_unmet_search and self.custom_format_unmet_search:
1446
- condition &= (
1447
- self.model_file.Searched
1448
- == False | self.model_file.CustomFormatMet
1449
- == False
1791
+ condition &= (self.model_file.Searched == False) | (
1792
+ self.model_file.CustomFormatMet == False
1450
1793
  )
1451
1794
  elif self.quality_unmet_search and self.custom_format_unmet_search:
1452
1795
  condition &= (
1453
- self.model_file.Searched
1454
- == False | self.model_file.QualityMet
1455
- == False | self.model_file.CustomFormatMet
1456
- == False
1796
+ (self.model_file.Searched == False)
1797
+ | (self.model_file.QualityMet == False)
1798
+ | (self.model_file.CustomFormatMet == False)
1457
1799
  )
1458
1800
  else:
1459
1801
  condition &= self.model_file.EpisodeFileId == 0
@@ -1496,7 +1838,7 @@ class Arr:
1496
1838
 
1497
1839
  def db_get_files_episodes(self) -> list[list[EpisodeFilesModel, bool, bool]] | None:
1498
1840
  entries = []
1499
- if not self.search_missing:
1841
+ if not (self.search_missing or self.do_upgrade_search):
1500
1842
  return None
1501
1843
  elif self.type == "sonarr":
1502
1844
  condition = self.model_file.AirDateUtc.is_null(False)
@@ -1506,21 +1848,18 @@ class Arr:
1506
1848
  condition &= self.model_file.Upgrade == False
1507
1849
  else:
1508
1850
  if self.quality_unmet_search and not self.custom_format_unmet_search:
1509
- condition &= (
1510
- self.model_file.Searched == False | self.model_file.QualityMet == False
1851
+ condition &= (self.model_file.Searched == False) | (
1852
+ self.model_file.QualityMet == False
1511
1853
  )
1512
1854
  elif not self.quality_unmet_search and self.custom_format_unmet_search:
1513
- condition &= (
1514
- self.model_file.Searched
1515
- == False | self.model_file.CustomFormatMet
1516
- == False
1855
+ condition &= (self.model_file.Searched == False) | (
1856
+ self.model_file.CustomFormatMet == False
1517
1857
  )
1518
1858
  elif self.quality_unmet_search and self.custom_format_unmet_search:
1519
1859
  condition &= (
1520
- self.model_file.Searched
1521
- == False | self.model_file.QualityMet
1522
- == False | self.model_file.CustomFormatMet
1523
- == False
1860
+ (self.model_file.Searched == False)
1861
+ | (self.model_file.QualityMet == False)
1862
+ | (self.model_file.CustomFormatMet == False)
1524
1863
  )
1525
1864
  else:
1526
1865
  condition &= self.model_file.EpisodeFileId == 0
@@ -1564,7 +1903,7 @@ class Arr:
1564
1903
 
1565
1904
  def db_get_files_movies(self) -> list[list[MoviesFilesModel, bool, bool]] | None:
1566
1905
  entries = []
1567
- if not self.search_missing:
1906
+ if not (self.search_missing or self.do_upgrade_search):
1568
1907
  return None
1569
1908
  if self.type == "radarr":
1570
1909
  condition = self.model_file.Year.is_null(False)
@@ -1572,21 +1911,18 @@ class Arr:
1572
1911
  condition &= self.model_file.Upgrade == False
1573
1912
  else:
1574
1913
  if self.quality_unmet_search and not self.custom_format_unmet_search:
1575
- condition &= (
1576
- self.model_file.Searched == False | self.model_file.QualityMet == False
1914
+ condition &= (self.model_file.Searched == False) | (
1915
+ self.model_file.QualityMet == False
1577
1916
  )
1578
1917
  elif not self.quality_unmet_search and self.custom_format_unmet_search:
1579
- condition &= (
1580
- self.model_file.Searched
1581
- == False | self.model_file.CustomFormatMet
1582
- == False
1918
+ condition &= (self.model_file.Searched == False) | (
1919
+ self.model_file.CustomFormatMet == False
1583
1920
  )
1584
1921
  elif self.quality_unmet_search and self.custom_format_unmet_search:
1585
1922
  condition &= (
1586
- self.model_file.Searched
1587
- == False | self.model_file.QualityMet
1588
- == False | self.model_file.CustomFormatMet
1589
- == False
1923
+ (self.model_file.Searched == False)
1924
+ | (self.model_file.QualityMet == False)
1925
+ | (self.model_file.CustomFormatMet == False)
1590
1926
  )
1591
1927
  else:
1592
1928
  condition &= self.model_file.MovieFileId == 0
@@ -1601,24 +1937,54 @@ class Arr:
1601
1937
  ):
1602
1938
  entries.append([entry, False, False])
1603
1939
  return entries
1604
-
1605
- def db_get_request_files(self) -> Iterable[tuple[MoviesFilesModel | EpisodeFilesModel, int]]:
1606
- entries = []
1607
- self.logger.trace("Getting request files")
1608
- if self.type == "sonarr":
1609
- condition = self.model_file.IsRequest == True
1610
- condition &= self.model_file.AirDateUtc.is_null(False)
1611
- condition &= self.model_file.EpisodeFileId == 0
1612
- condition &= self.model_file.Searched == False
1613
- condition &= self.model_file.AirDateUtc < (
1614
- datetime.now(timezone.utc) - timedelta(days=1)
1615
- )
1616
- entries = list(
1617
- self.model_file.select()
1618
- .where(condition)
1619
- .order_by(
1620
- self.model_file.SeriesTitle,
1621
- self.model_file.SeasonNumber.desc(),
1940
+ elif self.type == "lidarr":
1941
+ condition = True # Placeholder, will be refined
1942
+ if self.do_upgrade_search:
1943
+ condition &= self.model_file.Upgrade == False
1944
+ else:
1945
+ if self.quality_unmet_search and not self.custom_format_unmet_search:
1946
+ condition &= (self.model_file.Searched == False) | (
1947
+ self.model_file.QualityMet == False
1948
+ )
1949
+ elif not self.quality_unmet_search and self.custom_format_unmet_search:
1950
+ condition &= (self.model_file.Searched == False) | (
1951
+ self.model_file.CustomFormatMet == False
1952
+ )
1953
+ elif self.quality_unmet_search and self.custom_format_unmet_search:
1954
+ condition &= (
1955
+ (self.model_file.Searched == False)
1956
+ | (self.model_file.QualityMet == False)
1957
+ | (self.model_file.CustomFormatMet == False)
1958
+ )
1959
+ else:
1960
+ condition &= self.model_file.AlbumFileId == 0
1961
+ condition &= self.model_file.Searched == False
1962
+ for entry in (
1963
+ self.model_file.select()
1964
+ .where(condition)
1965
+ .order_by(self.model_file.AlbumFileId.asc())
1966
+ .execute()
1967
+ ):
1968
+ entries.append([entry, False, False])
1969
+ return entries
1970
+
1971
+ def db_get_request_files(self) -> Iterable[tuple[MoviesFilesModel | EpisodeFilesModel, int]]:
1972
+ entries = []
1973
+ self.logger.trace("Getting request files")
1974
+ if self.type == "sonarr":
1975
+ condition = self.model_file.IsRequest == True
1976
+ condition &= self.model_file.AirDateUtc.is_null(False)
1977
+ condition &= self.model_file.EpisodeFileId == 0
1978
+ condition &= self.model_file.Searched == False
1979
+ condition &= self.model_file.AirDateUtc < (
1980
+ datetime.now(timezone.utc) - timedelta(days=1)
1981
+ )
1982
+ entries = list(
1983
+ self.model_file.select()
1984
+ .where(condition)
1985
+ .order_by(
1986
+ self.model_file.SeriesTitle,
1987
+ self.model_file.SeasonNumber.desc(),
1622
1988
  self.model_file.AirDateUtc.desc(),
1623
1989
  )
1624
1990
  .execute()
@@ -1781,17 +2147,28 @@ class Arr:
1781
2147
  self.logger.debug("No episode releases found for today")
1782
2148
 
1783
2149
  def db_update(self):
1784
- if not self.search_missing:
1785
- return
1786
- self.db_update_todays_releases()
1787
- if self.db_update_processed and not self.search_by_year:
2150
+ if not (
2151
+ self.search_missing
2152
+ or self.do_upgrade_search
2153
+ or self.quality_unmet_search
2154
+ or self.custom_format_unmet_search
2155
+ ):
1788
2156
  return
1789
- if self.search_by_year:
1790
- self.logger.info("Started updating database for %s", self.search_current_year)
1791
- else:
2157
+ placeholder_summary = "Updating database"
2158
+ placeholder_set = False
2159
+ try:
2160
+ self._webui_db_loaded = False
2161
+ try:
2162
+ self._record_search_activity(placeholder_summary)
2163
+ placeholder_set = True
2164
+ except Exception:
2165
+ pass
2166
+ self.db_update_todays_releases()
2167
+ if self.db_update_processed:
2168
+ return
1792
2169
  self.logger.info("Started updating database")
1793
- if self.type == "sonarr":
1794
- if not self.series_search:
2170
+ if self.type == "sonarr":
2171
+ # Always fetch series list for both episode and series-level tracking
1795
2172
  while True:
1796
2173
  try:
1797
2174
  series = self.client.get_series()
@@ -1803,62 +2180,35 @@ class Arr:
1803
2180
  JSONDecodeError,
1804
2181
  ):
1805
2182
  continue
1806
- if self.search_by_year:
1807
- for s in series:
1808
- if isinstance(s, str):
1809
- continue
1810
- episodes = self.client.get_episode(s["id"], True)
1811
- for e in episodes:
1812
- if isinstance(e, str):
1813
- continue
1814
- if "airDateUtc" in e:
1815
- if datetime.strptime(
1816
- e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
1817
- ).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
1818
- continue
1819
- if (
1820
- datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
1821
- .replace(tzinfo=timezone.utc)
1822
- .date()
1823
- < datetime(
1824
- month=1, day=1, year=int(self.search_current_year)
1825
- ).date()
1826
- ):
1827
- continue
1828
- if (
1829
- datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
1830
- .replace(tzinfo=timezone.utc)
1831
- .date()
1832
- > datetime(
1833
- month=12, day=31, year=int(self.search_current_year)
1834
- ).date()
1835
- ):
1836
- continue
1837
- if not self.search_specials and e["seasonNumber"] == 0:
1838
- continue
1839
- self.db_update_single_series(db_entry=e)
1840
2183
 
1841
- else:
1842
- for s in series:
1843
- if isinstance(s, str):
2184
+ # Process episodes for episode-level tracking (all episodes)
2185
+ for s in series:
2186
+ if isinstance(s, str):
2187
+ continue
2188
+ episodes = self.client.get_episode(s["id"], True)
2189
+ for e in episodes:
2190
+ if isinstance(e, str):
1844
2191
  continue
1845
- episodes = self.client.get_episode(s["id"], True)
1846
- for e in episodes:
1847
- if isinstance(e, str):
2192
+ if "airDateUtc" in e:
2193
+ if datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ").replace(
2194
+ tzinfo=timezone.utc
2195
+ ) > datetime.now(timezone.utc):
1848
2196
  continue
1849
- if "airDateUtc" in e:
1850
- if datetime.strptime(
1851
- e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
1852
- ).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
1853
- continue
1854
- if not self.search_specials and e["seasonNumber"] == 0:
1855
- continue
1856
- self.db_update_single_series(db_entry=e)
2197
+ if not self.search_specials and e["seasonNumber"] == 0:
2198
+ continue
2199
+ self.db_update_single_series(db_entry=e, series=False)
2200
+
2201
+ # Process series for series-level tracking (all series)
2202
+ for s in series:
2203
+ if isinstance(s, str):
2204
+ continue
2205
+ self.db_update_single_series(db_entry=s, series=True)
2206
+
1857
2207
  self.db_update_processed = True
1858
- else:
2208
+ elif self.type == "radarr":
1859
2209
  while True:
1860
2210
  try:
1861
- series = self.client.get_series()
2211
+ movies = self.client.get_movie()
1862
2212
  break
1863
2213
  except (
1864
2214
  requests.exceptions.ChunkedEncodingError,
@@ -1867,49 +2217,70 @@ class Arr:
1867
2217
  JSONDecodeError,
1868
2218
  ):
1869
2219
  continue
1870
- if self.search_by_year:
1871
- for s in series:
1872
- if isinstance(s, str):
1873
- continue
1874
- if s["year"] < self.search_current_year:
1875
- continue
1876
- if s["year"] > self.search_current_year:
1877
- continue
1878
- self.db_update_single_series(db_entry=s, series=True)
1879
- else:
1880
- for s in series:
1881
- if isinstance(s, str):
1882
- continue
1883
- self.db_update_single_series(db_entry=s, series=True)
1884
- self.db_update_processed = True
1885
- elif self.type == "radarr":
1886
- while True:
1887
- try:
1888
- movies = self.client.get_movie()
1889
- break
1890
- except (
1891
- requests.exceptions.ChunkedEncodingError,
1892
- requests.exceptions.ContentDecodingError,
1893
- requests.exceptions.ConnectionError,
1894
- JSONDecodeError,
1895
- ):
1896
- continue
1897
- if self.search_by_year:
2220
+ # Process all movies
1898
2221
  for m in movies:
1899
2222
  if isinstance(m, str):
1900
2223
  continue
1901
- if m["year"] < self.search_current_year:
2224
+ self.db_update_single_series(db_entry=m)
2225
+ self.db_update_processed = True
2226
+ elif self.type == "lidarr":
2227
+ while True:
2228
+ try:
2229
+ artists = self.client.get_artist()
2230
+ break
2231
+ except (
2232
+ requests.exceptions.ChunkedEncodingError,
2233
+ requests.exceptions.ContentDecodingError,
2234
+ requests.exceptions.ConnectionError,
2235
+ JSONDecodeError,
2236
+ ):
1902
2237
  continue
1903
- if m["year"] > self.search_current_year:
2238
+ for artist in artists:
2239
+ if isinstance(artist, str):
1904
2240
  continue
1905
- self.db_update_single_series(db_entry=m)
1906
- else:
1907
- for m in movies:
1908
- if isinstance(m, str):
2241
+ while True:
2242
+ try:
2243
+ # allArtistAlbums=True includes full album data with media/tracks
2244
+ albums = self.client.get_album(
2245
+ artistId=artist["id"], allArtistAlbums=True
2246
+ )
2247
+ break
2248
+ except (
2249
+ requests.exceptions.ChunkedEncodingError,
2250
+ requests.exceptions.ContentDecodingError,
2251
+ requests.exceptions.ConnectionError,
2252
+ JSONDecodeError,
2253
+ ):
2254
+ continue
2255
+ for album in albums:
2256
+ if isinstance(album, str):
2257
+ continue
2258
+ # For Lidarr, we don't have a specific releaseDate field
2259
+ # Check if album has been released
2260
+ if "releaseDate" in album:
2261
+ release_date = datetime.strptime(
2262
+ album["releaseDate"], "%Y-%m-%dT%H:%M:%SZ"
2263
+ )
2264
+ if release_date > datetime.now():
2265
+ continue
2266
+ self.db_update_single_series(db_entry=album)
2267
+ # Process artists for artist-level tracking
2268
+ for artist in artists:
2269
+ if isinstance(artist, str):
1909
2270
  continue
1910
- self.db_update_single_series(db_entry=m)
1911
- self.db_update_processed = True
1912
- self.logger.trace("Finished updating database")
2271
+ self.db_update_single_series(db_entry=artist, artist=True)
2272
+ self.db_update_processed = True
2273
+ self.logger.trace("Finished updating database")
2274
+ finally:
2275
+ if placeholder_set:
2276
+ try:
2277
+ activities = fetch_search_activities()
2278
+ entry = activities.get(str(self.category))
2279
+ if entry and entry.get("summary") == placeholder_summary:
2280
+ clear_search_activity(str(self.category))
2281
+ except Exception:
2282
+ pass
2283
+ self._webui_db_loaded = True
1913
2284
 
1914
2285
  def minimum_availability_check(self, db_entry: JsonObject) -> bool:
1915
2286
  inCinemas = (
@@ -2144,9 +2515,18 @@ class Arr:
2144
2515
  return False
2145
2516
 
2146
2517
  def db_update_single_series(
2147
- self, db_entry: JsonObject = None, request: bool = False, series: bool = False
2518
+ self,
2519
+ db_entry: JsonObject = None,
2520
+ request: bool = False,
2521
+ series: bool = False,
2522
+ artist: bool = False,
2148
2523
  ):
2149
- if not self.search_missing:
2524
+ if not (
2525
+ self.search_missing
2526
+ or self.do_upgrade_search
2527
+ or self.quality_unmet_search
2528
+ or self.custom_format_unmet_search
2529
+ ):
2150
2530
  return
2151
2531
  try:
2152
2532
  searched = False
@@ -2167,38 +2547,60 @@ class Arr:
2167
2547
  JSONDecodeError,
2168
2548
  ):
2169
2549
  continue
2170
- if episode["monitored"] or self.search_unmonitored:
2550
+ if episode.get("monitored", True) or self.search_unmonitored:
2171
2551
  while True:
2172
2552
  try:
2173
- if episodeData:
2174
- if not episodeData.MinCustomFormatScore:
2175
- minCustomFormat = self.client.get_quality_profile(
2176
- episode["series"]["qualityProfileId"]
2177
- )["minFormatScore"]
2178
- else:
2179
- minCustomFormat = episodeData.MinCustomFormatScore
2180
- if episode["hasFile"]:
2181
- if (
2182
- episode["episodeFile"]["id"]
2183
- != episodeData.EpisodeFileId
2184
- ):
2185
- customFormat = self.client.get_episode_file(
2186
- episode["episodeFile"]["id"]
2187
- )["customFormatScore"]
2188
- else:
2189
- customFormat = 0
2553
+ series_info = episode.get("series") or {}
2554
+ if isinstance(series_info, dict):
2555
+ quality_profile_id = series_info.get("qualityProfileId")
2556
+ else:
2557
+ quality_profile_id = getattr(
2558
+ series_info, "qualityProfileId", None
2559
+ )
2560
+ if not quality_profile_id:
2561
+ quality_profile_id = db_entry.get("qualityProfileId")
2562
+ minCustomFormat = (
2563
+ getattr(episodeData, "MinCustomFormatScore", 0)
2564
+ if episodeData
2565
+ else 0
2566
+ )
2567
+ if not minCustomFormat:
2568
+ if quality_profile_id:
2569
+ profile = (
2570
+ self.client.get_quality_profile(quality_profile_id)
2571
+ or {}
2572
+ )
2573
+ minCustomFormat = profile.get("minFormatScore") or 0
2190
2574
  else:
2191
- customFormat = 0
2575
+ self.logger.warning(
2576
+ "Episode %s missing qualityProfileId; defaulting custom format threshold to 0",
2577
+ episode.get("id"),
2578
+ )
2579
+ minCustomFormat = 0
2580
+ episode_file = episode.get("episodeFile") or {}
2581
+ if isinstance(episode_file, dict):
2582
+ episode_file_id = episode_file.get("id")
2192
2583
  else:
2193
- minCustomFormat = self.client.get_quality_profile(
2194
- episode["series"]["qualityProfileId"]
2195
- )["minFormatScore"]
2196
- if episode["hasFile"]:
2197
- customFormat = self.client.get_episode_file(
2198
- episode["episodeFile"]["id"]
2199
- )["customFormatScore"]
2584
+ episode_file_id = getattr(episode_file, "id", None)
2585
+ has_file = bool(episode.get("hasFile"))
2586
+ episode_data_file_id = (
2587
+ getattr(episodeData, "EpisodeFileId", None)
2588
+ if episodeData
2589
+ else None
2590
+ )
2591
+ if has_file and episode_file_id:
2592
+ if (
2593
+ episode_data_file_id
2594
+ and episode_file_id == episode_data_file_id
2595
+ ):
2596
+ customFormat = getattr(episodeData, "CustomFormatScore", 0)
2200
2597
  else:
2201
- customFormat = 0
2598
+ file_info = (
2599
+ self.client.get_episode_file(episode_file_id) or {}
2600
+ )
2601
+ customFormat = file_info.get("customFormatScore") or 0
2602
+ else:
2603
+ customFormat = 0
2202
2604
  break
2203
2605
  except (
2204
2606
  requests.exceptions.ChunkedEncodingError,
@@ -2207,9 +2609,6 @@ class Arr:
2207
2609
  JSONDecodeError,
2208
2610
  ):
2209
2611
  continue
2210
- except KeyError:
2211
- self.logger.warning("Key Error [%s]", db_entry["id"])
2212
- continue
2213
2612
 
2214
2613
  QualityUnmet = (
2215
2614
  episode["episodeFile"]["qualityCutoffNotMet"]
@@ -2229,72 +2628,60 @@ class Arr:
2229
2628
  ).execute()
2230
2629
 
2231
2630
  if self.use_temp_for_missing:
2232
- try:
2233
- self.logger.trace(
2234
- "Temp quality profile [%s][%s]",
2235
- searched,
2236
- db_entry["qualityProfileId"],
2237
- )
2238
- if (
2239
- searched
2240
- and db_entry["qualityProfileId"]
2241
- in self.temp_quality_profile_ids.values()
2242
- and not self.keep_temp_profile
2243
- ):
2244
- data: JsonObject = {
2245
- "qualityProfileId": list(
2246
- self.temp_quality_profile_ids.keys()
2247
- )[
2248
- list(self.temp_quality_profile_ids.values()).index(
2249
- db_entry["qualityProfileId"]
2250
- )
2251
- ]
2252
- }
2253
- self.logger.debug(
2254
- "Upgrading quality profile for %s to %s",
2255
- db_entry["title"],
2256
- list(self.temp_quality_profile_ids.keys())[
2257
- list(self.temp_quality_profile_ids.values()).index(
2258
- db_entry["qualityProfileId"]
2259
- )
2260
- ],
2261
- )
2262
- elif (
2263
- not searched
2264
- and db_entry["qualityProfileId"]
2265
- in self.temp_quality_profile_ids.keys()
2266
- ):
2267
- data: JsonObject = {
2268
- "qualityProfileId": self.temp_quality_profile_ids[
2269
- db_entry["qualityProfileId"]
2270
- ]
2271
- }
2272
- self.logger.debug(
2273
- "Downgrading quality profile for %s to %s",
2274
- db_entry["title"],
2275
- self.temp_quality_profile_ids[
2631
+ data = None
2632
+ quality_profile_id = db_entry.get("qualityProfileId")
2633
+ self.logger.trace(
2634
+ "Temp quality profile [%s][%s]",
2635
+ searched,
2636
+ quality_profile_id,
2637
+ )
2638
+ if (
2639
+ searched
2640
+ and quality_profile_id in self.temp_quality_profile_ids.values()
2641
+ and not self.keep_temp_profile
2642
+ ):
2643
+ data: JsonObject = {
2644
+ "qualityProfileId": list(self.temp_quality_profile_ids.keys())[
2645
+ list(self.temp_quality_profile_ids.values()).index(
2646
+ quality_profile_id
2647
+ )
2648
+ ]
2649
+ }
2650
+ self.logger.debug(
2651
+ "Upgrading quality profile for %s to %s",
2652
+ db_entry["title"],
2653
+ list(self.temp_quality_profile_ids.keys())[
2654
+ list(self.temp_quality_profile_ids.values()).index(
2276
2655
  db_entry["qualityProfileId"]
2277
- ],
2278
- )
2279
- except KeyError:
2280
- self.logger.warning(
2281
- "Check quality profile settings for %s", db_entry["title"]
2656
+ )
2657
+ ],
2282
2658
  )
2283
- try:
2284
- if data:
2285
- while True:
2286
- try:
2287
- self.client.upd_episode(episode["id"], data)
2288
- break
2289
- except (
2290
- requests.exceptions.ChunkedEncodingError,
2291
- requests.exceptions.ContentDecodingError,
2292
- requests.exceptions.ConnectionError,
2293
- JSONDecodeError,
2294
- ):
2295
- continue
2296
- except UnboundLocalError:
2297
- pass
2659
+ elif (
2660
+ not searched
2661
+ and quality_profile_id in self.temp_quality_profile_ids.keys()
2662
+ ):
2663
+ data: JsonObject = {
2664
+ "qualityProfileId": self.temp_quality_profile_ids[
2665
+ quality_profile_id
2666
+ ]
2667
+ }
2668
+ self.logger.debug(
2669
+ "Downgrading quality profile for %s to %s",
2670
+ db_entry["title"],
2671
+ self.temp_quality_profile_ids[quality_profile_id],
2672
+ )
2673
+ if data:
2674
+ while True:
2675
+ try:
2676
+ self.client.upd_episode(episode["id"], data)
2677
+ break
2678
+ except (
2679
+ requests.exceptions.ChunkedEncodingError,
2680
+ requests.exceptions.ContentDecodingError,
2681
+ requests.exceptions.ConnectionError,
2682
+ JSONDecodeError,
2683
+ ):
2684
+ continue
2298
2685
 
2299
2686
  EntryId = episode["id"]
2300
2687
  SeriesTitle = episode.get("series", {}).get("title")
@@ -2314,11 +2701,12 @@ class Arr:
2314
2701
  else None
2315
2702
  )
2316
2703
  AirDateUtc = episode["airDateUtc"]
2317
- Monitored = episode["monitored"]
2704
+ Monitored = episode.get("monitored", True)
2318
2705
  QualityMet = not QualityUnmet if db_entry["hasFile"] else False
2319
- customFormatMet = customFormat > minCustomFormat
2706
+ customFormatMet = customFormat >= minCustomFormat
2320
2707
 
2321
2708
  if not episode["hasFile"]:
2709
+ # Episode is missing a file - always mark as Missing
2322
2710
  reason = "Missing"
2323
2711
  elif self.quality_unmet_search and QualityUnmet:
2324
2712
  reason = "Quality"
@@ -2326,8 +2714,11 @@ class Arr:
2326
2714
  reason = "CustomFormat"
2327
2715
  elif self.do_upgrade_search:
2328
2716
  reason = "Upgrade"
2717
+ elif searched:
2718
+ # Episode has file and search is complete
2719
+ reason = "Not being searched"
2329
2720
  else:
2330
- reason = None
2721
+ reason = "Not being searched"
2331
2722
 
2332
2723
  to_update = {
2333
2724
  self.model_file.Monitored: Monitored,
@@ -2393,17 +2784,39 @@ class Arr:
2393
2784
  else:
2394
2785
  self.series_file_model: SeriesFilesModel
2395
2786
  EntryId = db_entry["id"]
2396
- seriesData = self.model_file.get_or_none(self.model_file.EntryId == EntryId)
2787
+ seriesData = self.series_file_model.get_or_none(
2788
+ self.series_file_model.EntryId == EntryId
2789
+ )
2397
2790
  if db_entry["monitored"] or self.search_unmonitored:
2398
2791
  while True:
2399
2792
  try:
2400
- seriesMetadata = self.client.get_series(id_=EntryId)
2793
+ seriesMetadata = self.client.get_series(id_=EntryId) or {}
2794
+ quality_profile_id = None
2795
+ if isinstance(seriesMetadata, dict):
2796
+ quality_profile_id = seriesMetadata.get("qualityProfileId")
2797
+ else:
2798
+ quality_profile_id = getattr(
2799
+ seriesMetadata, "qualityProfileId", None
2800
+ )
2401
2801
  if not seriesData:
2402
- minCustomFormat = self.client.get_quality_profile(
2403
- seriesMetadata["qualityProfileId"]
2404
- )["minFormatScore"]
2802
+ if quality_profile_id:
2803
+ profile = (
2804
+ self.client.get_quality_profile(quality_profile_id)
2805
+ or {}
2806
+ )
2807
+ minCustomFormat = profile.get("minFormatScore") or 0
2808
+ else:
2809
+ self.logger.warning(
2810
+ "Series %s (%s) missing qualityProfileId; "
2811
+ "defaulting custom format score to 0",
2812
+ db_entry.get("title"),
2813
+ EntryId,
2814
+ )
2815
+ minCustomFormat = 0
2405
2816
  else:
2406
- minCustomFormat = seriesMetadata.MinCustomFormatScore
2817
+ minCustomFormat = getattr(
2818
+ seriesData, "MinCustomFormatScore", 0
2819
+ )
2407
2820
  break
2408
2821
  except (
2409
2822
  requests.exceptions.ChunkedEncodingError,
@@ -2412,11 +2825,6 @@ class Arr:
2412
2825
  JSONDecodeError,
2413
2826
  ):
2414
2827
  continue
2415
- except KeyError:
2416
- self.logger.warning(
2417
- "Key Error [%s][%s]", db_entry["id"], seriesMetadata
2418
- )
2419
- continue
2420
2828
  episodeCount = 0
2421
2829
  episodeFileCount = 0
2422
2830
  totalEpisodeCount = 0
@@ -2450,9 +2858,10 @@ class Arr:
2450
2858
  searched = (episodeCount + monitoredEpisodeCount) == episodeFileCount
2451
2859
  if self.use_temp_for_missing:
2452
2860
  try:
2861
+ quality_profile_id = db_entry.get("qualityProfileId")
2453
2862
  if (
2454
2863
  searched
2455
- and db_entry["qualityProfileId"]
2864
+ and quality_profile_id
2456
2865
  in self.temp_quality_profile_ids.values()
2457
2866
  and not self.keep_temp_profile
2458
2867
  ):
@@ -2460,7 +2869,7 @@ class Arr:
2460
2869
  self.temp_quality_profile_ids.keys()
2461
2870
  )[
2462
2871
  list(self.temp_quality_profile_ids.values()).index(
2463
- db_entry["qualityProfileId"]
2872
+ quality_profile_id
2464
2873
  )
2465
2874
  ]
2466
2875
  self.logger.debug(
@@ -2470,11 +2879,10 @@ class Arr:
2470
2879
  )
2471
2880
  elif (
2472
2881
  not searched
2473
- and db_entry["qualityProfileId"]
2474
- in self.temp_quality_profile_ids.keys()
2882
+ and quality_profile_id in self.temp_quality_profile_ids.keys()
2475
2883
  ):
2476
2884
  db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
2477
- db_entry["qualityProfileId"]
2885
+ quality_profile_id
2478
2886
  ]
2479
2887
  self.logger.debug(
2480
2888
  "Updating quality profile for %s to %s",
@@ -2528,6 +2936,9 @@ class Arr:
2528
2936
  conflict_target=[self.series_file_model.EntryId], update=to_update
2529
2937
  )
2530
2938
  db_commands.execute()
2939
+
2940
+ # Note: Episodes are now handled separately in db_update()
2941
+ # No need to recursively process episodes here to avoid duplication
2531
2942
  else:
2532
2943
  db_commands = self.series_file_model.delete().where(
2533
2944
  self.series_file_model.EntryId == EntryId
@@ -2556,7 +2967,7 @@ class Arr:
2556
2967
  db_entry["movieFile"]["id"]
2557
2968
  )["customFormatScore"]
2558
2969
  else:
2559
- customFormat = 0
2970
+ customFormat = movieData.CustomFormatScore
2560
2971
  else:
2561
2972
  customFormat = 0
2562
2973
  else:
@@ -2575,14 +2986,11 @@ class Arr:
2575
2986
  requests.exceptions.ContentDecodingError,
2576
2987
  requests.exceptions.ConnectionError,
2577
2988
  JSONDecodeError,
2578
- KeyError,
2579
2989
  ):
2580
2990
  continue
2581
- # except KeyError:
2582
- # self.logger.warning("Key Error [%s]", db_entry["id"])
2583
2991
  QualityUnmet = (
2584
- db_entry["episodeFile"]["qualityCutoffNotMet"]
2585
- if "episodeFile" in db_entry
2992
+ db_entry["movieFile"]["qualityCutoffNotMet"]
2993
+ if "movieFile" in db_entry
2586
2994
  else False
2587
2995
  )
2588
2996
  if (
@@ -2598,41 +3006,35 @@ class Arr:
2598
3006
  ).execute()
2599
3007
 
2600
3008
  if self.use_temp_for_missing:
2601
- try:
2602
- if (
2603
- searched
2604
- and db_entry["qualityProfileId"]
2605
- in self.temp_quality_profile_ids.values()
2606
- and not self.keep_temp_profile
2607
- ):
2608
- db_entry["qualityProfileId"] = list(
2609
- self.temp_quality_profile_ids.keys()
2610
- )[
2611
- list(self.temp_quality_profile_ids.values()).index(
2612
- db_entry["qualityProfileId"]
2613
- )
2614
- ]
2615
- self.logger.debug(
2616
- "Updating quality profile for %s to %s",
2617
- db_entry["title"],
2618
- db_entry["qualityProfileId"],
2619
- )
2620
- elif (
2621
- not searched
2622
- and db_entry["qualityProfileId"]
2623
- in self.temp_quality_profile_ids.keys()
2624
- ):
2625
- db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
2626
- db_entry["qualityProfileId"]
2627
- ]
2628
- self.logger.debug(
2629
- "Updating quality profile for %s to %s",
2630
- db_entry["title"],
2631
- self.temp_quality_profile_ids[db_entry["qualityProfileId"]],
3009
+ quality_profile_id = db_entry.get("qualityProfileId")
3010
+ if (
3011
+ searched
3012
+ and quality_profile_id in self.temp_quality_profile_ids.values()
3013
+ and not self.keep_temp_profile
3014
+ ):
3015
+ db_entry["qualityProfileId"] = list(
3016
+ self.temp_quality_profile_ids.keys()
3017
+ )[
3018
+ list(self.temp_quality_profile_ids.values()).index(
3019
+ quality_profile_id
2632
3020
  )
2633
- except KeyError:
2634
- self.logger.warning(
2635
- "Check quality profile settings for %s", db_entry["title"]
3021
+ ]
3022
+ self.logger.debug(
3023
+ "Updating quality profile for %s to %s",
3024
+ db_entry["title"],
3025
+ db_entry["qualityProfileId"],
3026
+ )
3027
+ elif (
3028
+ not searched
3029
+ and quality_profile_id in self.temp_quality_profile_ids.keys()
3030
+ ):
3031
+ db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
3032
+ quality_profile_id
3033
+ ]
3034
+ self.logger.debug(
3035
+ "Updating quality profile for %s to %s",
3036
+ db_entry["title"],
3037
+ db_entry["qualityProfileId"],
2636
3038
  )
2637
3039
  while True:
2638
3040
  try:
@@ -2653,9 +3055,10 @@ class Arr:
2653
3055
  entryId = db_entry["id"]
2654
3056
  movieFileId = db_entry["movieFileId"]
2655
3057
  qualityMet = not QualityUnmet if db_entry["hasFile"] else False
2656
- customFormatMet = customFormat > minCustomFormat
3058
+ customFormatMet = customFormat >= minCustomFormat
2657
3059
 
2658
3060
  if not db_entry["hasFile"]:
3061
+ # Movie is missing a file - always mark as Missing
2659
3062
  reason = "Missing"
2660
3063
  elif self.quality_unmet_search and QualityUnmet:
2661
3064
  reason = "Quality"
@@ -2663,8 +3066,11 @@ class Arr:
2663
3066
  reason = "CustomFormat"
2664
3067
  elif self.do_upgrade_search:
2665
3068
  reason = "Upgrade"
3069
+ elif searched:
3070
+ # Movie has file and search is complete
3071
+ reason = "Not being searched"
2666
3072
  else:
2667
- reason = None
3073
+ reason = "Not being searched"
2668
3074
 
2669
3075
  to_update = {
2670
3076
  self.model_file.MovieFileId: movieFileId,
@@ -2712,10 +3118,472 @@ class Arr:
2712
3118
  self.model_file.EntryId == db_entry["id"]
2713
3119
  )
2714
3120
  db_commands.execute()
2715
-
2716
- except requests.exceptions.ConnectionError as e:
2717
- self.logger.debug(
2718
- "Max retries exceeded for %s [%s][%s]",
3121
+ elif self.type == "lidarr":
3122
+ if not artist:
3123
+ # Album handling
3124
+ self.model_file: AlbumFilesModel
3125
+ searched = False
3126
+ albumData = self.model_file.get_or_none(
3127
+ self.model_file.EntryId == db_entry["id"]
3128
+ )
3129
+ if db_entry["monitored"] or self.search_unmonitored:
3130
+ while True:
3131
+ try:
3132
+ if albumData:
3133
+ if not albumData.MinCustomFormatScore:
3134
+ try:
3135
+ profile_id = db_entry["profileId"]
3136
+ # Check if this profile ID is known to be invalid
3137
+ if profile_id in self._invalid_quality_profiles:
3138
+ minCustomFormat = 0
3139
+ # Check cache first
3140
+ elif profile_id in self._quality_profile_cache:
3141
+ minCustomFormat = self._quality_profile_cache[
3142
+ profile_id
3143
+ ].get("minFormatScore", 0)
3144
+ else:
3145
+ # Fetch from API and cache
3146
+ try:
3147
+ profile = self.client.get_quality_profile(
3148
+ profile_id
3149
+ )
3150
+ self._quality_profile_cache[profile_id] = (
3151
+ profile
3152
+ )
3153
+ minCustomFormat = profile.get(
3154
+ "minFormatScore", 0
3155
+ )
3156
+ except PyarrResourceNotFound:
3157
+ # Mark as invalid to avoid repeated warnings
3158
+ self._invalid_quality_profiles.add(profile_id)
3159
+ self.logger.warning(
3160
+ "Quality profile %s not found for album %s, defaulting to 0",
3161
+ db_entry.get("profileId"),
3162
+ db_entry.get("title", "Unknown"),
3163
+ )
3164
+ minCustomFormat = 0
3165
+ except Exception:
3166
+ minCustomFormat = 0
3167
+ else:
3168
+ minCustomFormat = albumData.MinCustomFormatScore
3169
+ if (
3170
+ db_entry.get("statistics", {}).get("percentOfTracks", 0)
3171
+ == 100
3172
+ ):
3173
+ # Album has files
3174
+ albumFileId = db_entry.get("statistics", {}).get(
3175
+ "sizeOnDisk", 0
3176
+ )
3177
+ if albumFileId != albumData.AlbumFileId:
3178
+ # Get custom format score from album files
3179
+ customFormat = (
3180
+ 0 # Lidarr may not have customFormatScore
3181
+ )
3182
+ else:
3183
+ customFormat = albumData.CustomFormatScore
3184
+ else:
3185
+ customFormat = 0
3186
+ else:
3187
+ try:
3188
+ profile_id = db_entry["profileId"]
3189
+ # Check if this profile ID is known to be invalid
3190
+ if profile_id in self._invalid_quality_profiles:
3191
+ minCustomFormat = 0
3192
+ # Check cache first
3193
+ elif profile_id in self._quality_profile_cache:
3194
+ minCustomFormat = self._quality_profile_cache[
3195
+ profile_id
3196
+ ].get("minFormatScore", 0)
3197
+ else:
3198
+ # Fetch from API and cache
3199
+ try:
3200
+ profile = self.client.get_quality_profile(
3201
+ profile_id
3202
+ )
3203
+ self._quality_profile_cache[profile_id] = profile
3204
+ minCustomFormat = profile.get("minFormatScore", 0)
3205
+ except PyarrResourceNotFound:
3206
+ # Mark as invalid to avoid repeated warnings
3207
+ self._invalid_quality_profiles.add(profile_id)
3208
+ self.logger.warning(
3209
+ "Quality profile %s not found for album %s, defaulting to 0",
3210
+ db_entry.get("profileId"),
3211
+ db_entry.get("title", "Unknown"),
3212
+ )
3213
+ minCustomFormat = 0
3214
+ except Exception:
3215
+ minCustomFormat = 0
3216
+ if (
3217
+ db_entry.get("statistics", {}).get("percentOfTracks", 0)
3218
+ == 100
3219
+ ):
3220
+ customFormat = 0 # Lidarr may not have customFormatScore
3221
+ else:
3222
+ customFormat = 0
3223
+ break
3224
+ except (
3225
+ requests.exceptions.ChunkedEncodingError,
3226
+ requests.exceptions.ContentDecodingError,
3227
+ requests.exceptions.ConnectionError,
3228
+ JSONDecodeError,
3229
+ ):
3230
+ continue
3231
+
3232
+ # Determine if album has all tracks
3233
+ hasAllTracks = (
3234
+ db_entry.get("statistics", {}).get("percentOfTracks", 0) == 100
3235
+ )
3236
+
3237
+ # Check if quality cutoff is met for Lidarr
3238
+ # Unlike Sonarr/Radarr which have a qualityCutoffNotMet boolean field,
3239
+ # Lidarr requires us to check the track file quality against the profile cutoff
3240
+ QualityUnmet = False
3241
+ if hasAllTracks:
3242
+ try:
3243
+ # Get the artist's quality profile to find the cutoff
3244
+ artist_id = db_entry.get("artistId")
3245
+ artist_data = self.client.get_artist(artist_id)
3246
+ profile_id = artist_data.get("qualityProfileId")
3247
+
3248
+ if profile_id:
3249
+ # Get or use cached profile
3250
+ if profile_id in self._quality_profile_cache:
3251
+ profile = self._quality_profile_cache[profile_id]
3252
+ else:
3253
+ profile = self.client.get_quality_profile(profile_id)
3254
+ self._quality_profile_cache[profile_id] = profile
3255
+
3256
+ cutoff_quality_id = profile.get("cutoff")
3257
+ upgrade_allowed = profile.get("upgradeAllowed", False)
3258
+
3259
+ if cutoff_quality_id and upgrade_allowed:
3260
+ # Get track files for this album to check their quality
3261
+ album_id = db_entry.get("id")
3262
+ track_files = self.client.get_track_file(
3263
+ albumId=[album_id]
3264
+ )
3265
+
3266
+ if track_files:
3267
+ # Check if any track file's quality is below the cutoff
3268
+ for track_file in track_files:
3269
+ file_quality = track_file.get("quality", {}).get(
3270
+ "quality", {}
3271
+ )
3272
+ file_quality_id = file_quality.get("id", 0)
3273
+
3274
+ if file_quality_id < cutoff_quality_id:
3275
+ QualityUnmet = True
3276
+ self.logger.trace(
3277
+ "Album '%s' has quality below cutoff: %s (ID: %d) < cutoff (ID: %d)",
3278
+ db_entry.get("title", "Unknown"),
3279
+ file_quality.get("name", "Unknown"),
3280
+ file_quality_id,
3281
+ cutoff_quality_id,
3282
+ )
3283
+ break
3284
+ except Exception as e:
3285
+ self.logger.trace(
3286
+ "Could not determine quality cutoff status for album '%s': %s",
3287
+ db_entry.get("title", "Unknown"),
3288
+ str(e),
3289
+ )
3290
+ # Default to False if we can't determine
3291
+ QualityUnmet = False
3292
+
3293
+ if (
3294
+ hasAllTracks
3295
+ and not (self.quality_unmet_search and QualityUnmet)
3296
+ and not (
3297
+ self.custom_format_unmet_search and customFormat < minCustomFormat
3298
+ )
3299
+ ):
3300
+ searched = True
3301
+ self.model_queue.update(Completed=True).where(
3302
+ self.model_queue.EntryId == db_entry["id"]
3303
+ ).execute()
3304
+
3305
+ if self.use_temp_for_missing:
3306
+ quality_profile_id = db_entry.get("qualityProfileId")
3307
+ if (
3308
+ searched
3309
+ and quality_profile_id in self.temp_quality_profile_ids.values()
3310
+ and not self.keep_temp_profile
3311
+ ):
3312
+ db_entry["qualityProfileId"] = list(
3313
+ self.temp_quality_profile_ids.keys()
3314
+ )[
3315
+ list(self.temp_quality_profile_ids.values()).index(
3316
+ quality_profile_id
3317
+ )
3318
+ ]
3319
+ self.logger.debug(
3320
+ "Updating quality profile for %s to %s",
3321
+ db_entry["title"],
3322
+ db_entry["qualityProfileId"],
3323
+ )
3324
+ elif (
3325
+ not searched
3326
+ and quality_profile_id in self.temp_quality_profile_ids.keys()
3327
+ ):
3328
+ db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
3329
+ quality_profile_id
3330
+ ]
3331
+ self.logger.debug(
3332
+ "Updating quality profile for %s to %s",
3333
+ db_entry["title"],
3334
+ db_entry["qualityProfileId"],
3335
+ )
3336
+ while True:
3337
+ try:
3338
+ self.client.upd_album(db_entry)
3339
+ break
3340
+ except (
3341
+ requests.exceptions.ChunkedEncodingError,
3342
+ requests.exceptions.ContentDecodingError,
3343
+ requests.exceptions.ConnectionError,
3344
+ JSONDecodeError,
3345
+ ):
3346
+ continue
3347
+
3348
+ title = db_entry.get("title", "Unknown Album")
3349
+ monitored = db_entry.get("monitored", False)
3350
+ # Handle artist field which can be an object or might not exist
3351
+ artist_obj = db_entry.get("artist", {})
3352
+ if isinstance(artist_obj, dict):
3353
+ # Try multiple possible field names for artist name
3354
+ artistName = (
3355
+ artist_obj.get("artistName")
3356
+ or artist_obj.get("name")
3357
+ or artist_obj.get("title")
3358
+ or "Unknown Artist"
3359
+ )
3360
+ else:
3361
+ artistName = "Unknown Artist"
3362
+ artistId = db_entry.get("artistId", 0)
3363
+ foreignAlbumId = db_entry.get("foreignAlbumId", "")
3364
+ releaseDate = db_entry.get("releaseDate")
3365
+ entryId = db_entry.get("id", 0)
3366
+ albumFileId = 1 if hasAllTracks else 0 # Use 1/0 to indicate presence
3367
+ qualityMet = not QualityUnmet if hasAllTracks else False
3368
+ customFormatMet = customFormat >= minCustomFormat
3369
+
3370
+ if not hasAllTracks:
3371
+ # Album is missing tracks - always mark as Missing
3372
+ reason = "Missing"
3373
+ elif self.quality_unmet_search and QualityUnmet:
3374
+ reason = "Quality"
3375
+ elif self.custom_format_unmet_search and not customFormatMet:
3376
+ reason = "CustomFormat"
3377
+ elif self.do_upgrade_search:
3378
+ reason = "Upgrade"
3379
+ elif searched:
3380
+ # Album is complete and not being searched
3381
+ reason = "Not being searched"
3382
+ else:
3383
+ reason = "Not being searched"
3384
+
3385
+ to_update = {
3386
+ self.model_file.AlbumFileId: albumFileId,
3387
+ self.model_file.Monitored: monitored,
3388
+ self.model_file.QualityMet: qualityMet,
3389
+ self.model_file.Searched: searched,
3390
+ self.model_file.Upgrade: False,
3391
+ self.model_file.MinCustomFormatScore: minCustomFormat,
3392
+ self.model_file.CustomFormatScore: customFormat,
3393
+ self.model_file.CustomFormatMet: customFormatMet,
3394
+ self.model_file.Reason: reason,
3395
+ self.model_file.ArtistTitle: artistName,
3396
+ self.model_file.ArtistId: artistId,
3397
+ self.model_file.ForeignAlbumId: foreignAlbumId,
3398
+ self.model_file.ReleaseDate: releaseDate,
3399
+ }
3400
+
3401
+ if request:
3402
+ to_update[self.model_file.IsRequest] = request
3403
+
3404
+ self.logger.debug(
3405
+ "Updating database entry | %s - %s [Searched:%s][Upgrade:%s][QualityMet:%s][CustomFormatMet:%s]",
3406
+ artistName.ljust(30, "."),
3407
+ title.ljust(30, "."),
3408
+ str(searched).ljust(5),
3409
+ str(False).ljust(5),
3410
+ str(qualityMet).ljust(5),
3411
+ str(customFormatMet).ljust(5),
3412
+ )
3413
+
3414
+ db_commands = self.model_file.insert(
3415
+ Title=title,
3416
+ Monitored=monitored,
3417
+ ArtistTitle=artistName,
3418
+ ArtistId=artistId,
3419
+ ForeignAlbumId=foreignAlbumId,
3420
+ ReleaseDate=releaseDate,
3421
+ EntryId=entryId,
3422
+ Searched=searched,
3423
+ AlbumFileId=albumFileId,
3424
+ IsRequest=request,
3425
+ QualityMet=qualityMet,
3426
+ Upgrade=False,
3427
+ MinCustomFormatScore=minCustomFormat,
3428
+ CustomFormatScore=customFormat,
3429
+ CustomFormatMet=customFormatMet,
3430
+ Reason=reason,
3431
+ ).on_conflict(conflict_target=[self.model_file.EntryId], update=to_update)
3432
+ db_commands.execute()
3433
+
3434
+ # Store tracks for this album (Lidarr only)
3435
+ if self.track_file_model:
3436
+ try:
3437
+ # Fetch tracks for this album via the track API
3438
+ # Tracks are NOT in the media field, they're a separate endpoint
3439
+ tracks = self.client.get_tracks(albumId=entryId)
3440
+ self.logger.debug(
3441
+ f"Fetched {len(tracks) if isinstance(tracks, list) else 0} tracks for album {entryId}"
3442
+ )
3443
+
3444
+ if tracks and isinstance(tracks, list):
3445
+ # First, delete existing tracks for this album
3446
+ self.track_file_model.delete().where(
3447
+ self.track_file_model.AlbumId == entryId
3448
+ ).execute()
3449
+
3450
+ # Insert new tracks
3451
+ track_insert_count = 0
3452
+ for track in tracks:
3453
+ # Get monitored status from track or default to album's monitored status
3454
+ track_monitored = track.get(
3455
+ "monitored", db_entry.get("monitored", False)
3456
+ )
3457
+
3458
+ self.track_file_model.insert(
3459
+ EntryId=track.get("id"),
3460
+ AlbumId=entryId,
3461
+ TrackNumber=track.get("trackNumber", ""),
3462
+ Title=track.get("title", ""),
3463
+ Duration=track.get("duration", 0),
3464
+ HasFile=track.get("hasFile", False),
3465
+ TrackFileId=track.get("trackFileId", 0),
3466
+ Monitored=track_monitored,
3467
+ ).execute()
3468
+ track_insert_count += 1
3469
+
3470
+ if track_insert_count > 0:
3471
+ self.logger.info(
3472
+ f"Stored {track_insert_count} tracks for album {entryId} ({title})"
3473
+ )
3474
+ else:
3475
+ self.logger.debug(
3476
+ f"No tracks found for album {entryId} ({title})"
3477
+ )
3478
+ except Exception as e:
3479
+ self.logger.warning(
3480
+ f"Could not fetch tracks for album {entryId} ({title}): {e}"
3481
+ )
3482
+ else:
3483
+ db_commands = self.model_file.delete().where(
3484
+ self.model_file.EntryId == db_entry["id"]
3485
+ )
3486
+ db_commands.execute()
3487
+ # Also delete tracks for this album (Lidarr only)
3488
+ if self.track_file_model:
3489
+ self.track_file_model.delete().where(
3490
+ self.track_file_model.AlbumId == db_entry["id"]
3491
+ ).execute()
3492
+ else:
3493
+ # Artist handling
3494
+ self.artists_file_model: ArtistFilesModel
3495
+ EntryId = db_entry["id"]
3496
+ artistData = self.artists_file_model.get_or_none(
3497
+ self.artists_file_model.EntryId == EntryId
3498
+ )
3499
+ if db_entry["monitored"] or self.search_unmonitored:
3500
+ while True:
3501
+ try:
3502
+ artistMetadata = self.client.get_artist(id_=EntryId) or {}
3503
+ quality_profile_id = None
3504
+ if isinstance(artistMetadata, dict):
3505
+ quality_profile_id = artistMetadata.get("qualityProfileId")
3506
+ else:
3507
+ quality_profile_id = getattr(
3508
+ artistMetadata, "qualityProfileId", None
3509
+ )
3510
+ if not artistData:
3511
+ if quality_profile_id:
3512
+ profile = (
3513
+ self.client.get_quality_profile(quality_profile_id)
3514
+ or {}
3515
+ )
3516
+ minCustomFormat = profile.get("minFormatScore") or 0
3517
+ else:
3518
+ self.logger.warning(
3519
+ "Artist %s (%s) missing qualityProfileId; "
3520
+ "defaulting custom format score to 0",
3521
+ db_entry.get("artistName"),
3522
+ EntryId,
3523
+ )
3524
+ minCustomFormat = 0
3525
+ else:
3526
+ minCustomFormat = getattr(
3527
+ artistData, "MinCustomFormatScore", 0
3528
+ )
3529
+ break
3530
+ except (
3531
+ requests.exceptions.ChunkedEncodingError,
3532
+ requests.exceptions.ContentDecodingError,
3533
+ requests.exceptions.ConnectionError,
3534
+ JSONDecodeError,
3535
+ ):
3536
+ continue
3537
+ # Calculate if artist is fully searched based on album statistics
3538
+ statistics = artistMetadata.get("statistics", {})
3539
+ albumCount = statistics.get("albumCount", 0)
3540
+ statistics.get("totalAlbumCount", 0)
3541
+ # Check if there's any album with files (sizeOnDisk > 0)
3542
+ sizeOnDisk = statistics.get("sizeOnDisk", 0)
3543
+ # Artist is considered searched if it has albums and at least some have files
3544
+ searched = albumCount > 0 and sizeOnDisk > 0
3545
+
3546
+ Title = artistMetadata.get("artistName")
3547
+ Monitored = db_entry["monitored"]
3548
+
3549
+ to_update = {
3550
+ self.artists_file_model.Monitored: Monitored,
3551
+ self.artists_file_model.Title: Title,
3552
+ self.artists_file_model.Searched: searched,
3553
+ self.artists_file_model.Upgrade: False,
3554
+ self.artists_file_model.MinCustomFormatScore: minCustomFormat,
3555
+ }
3556
+
3557
+ self.logger.debug(
3558
+ "Updating database entry | %s [Searched:%s][Upgrade:%s]",
3559
+ Title.ljust(60, "."),
3560
+ str(searched).ljust(5),
3561
+ str(False).ljust(5),
3562
+ )
3563
+
3564
+ db_commands = self.artists_file_model.insert(
3565
+ EntryId=EntryId,
3566
+ Title=Title,
3567
+ Searched=searched,
3568
+ Monitored=Monitored,
3569
+ Upgrade=False,
3570
+ MinCustomFormatScore=minCustomFormat,
3571
+ ).on_conflict(
3572
+ conflict_target=[self.artists_file_model.EntryId], update=to_update
3573
+ )
3574
+ db_commands.execute()
3575
+
3576
+ # Note: Albums are now handled separately in db_update()
3577
+ # No need to recursively process albums here to avoid duplication
3578
+ else:
3579
+ db_commands = self.artists_file_model.delete().where(
3580
+ self.artists_file_model.EntryId == EntryId
3581
+ )
3582
+ db_commands.execute()
3583
+
3584
+ except requests.exceptions.ConnectionError as e:
3585
+ self.logger.debug(
3586
+ "Max retries exceeded for %s [%s][%s]",
2719
3587
  self._name,
2720
3588
  db_entry["id"],
2721
3589
  db_entry["title"],
@@ -2743,10 +3611,11 @@ class Arr:
2743
3611
  try:
2744
3612
  while True:
2745
3613
  try:
2746
- res = self.client._delete(
2747
- f"queue/{id_}?removeFromClient={remove_from_client}&blocklist={blacklist}",
2748
- self.client.ver_uri,
2749
- )
3614
+ res = self.client.del_queue(id_, remove_from_client, blacklist)
3615
+ # res = self.client._delete(
3616
+ # f"queue/{id_}?removeFromClient={remove_from_client}&blocklist={blacklist}",
3617
+ # self.client.ver_uri,
3618
+ # )
2750
3619
  break
2751
3620
  except (
2752
3621
  requests.exceptions.ChunkedEncodingError,
@@ -2755,8 +3624,8 @@ class Arr:
2755
3624
  JSONDecodeError,
2756
3625
  ):
2757
3626
  continue
2758
- except PyarrResourceNotFound:
2759
- self.logger.error("Connection Error")
3627
+ except PyarrResourceNotFound as e:
3628
+ self.logger.error("Connection Error: " + e.message)
2760
3629
  raise DelayLoopException(length=300, type=self._name)
2761
3630
  return res
2762
3631
 
@@ -2770,6 +3639,9 @@ class Arr:
2770
3639
  if file.is_dir():
2771
3640
  self.logger.trace("Not probeable: File is a directory: %s", file)
2772
3641
  return False
3642
+ if file.name.endswith(".!qB"):
3643
+ self.logger.trace("Not probeable: File is still downloading: %s", file)
3644
+ return False
2773
3645
  output = ffmpeg.probe(
2774
3646
  str(file.absolute()), cmd=self.manager.qbit_manager.ffprobe_downloader.probe_path
2775
3647
  )
@@ -2901,16 +3773,26 @@ class Arr:
2901
3773
  request_tag = (
2902
3774
  "[OVERSEERR REQUEST]: "
2903
3775
  if request and self.overseerr_requests
2904
- else "[OMBI REQUEST]: "
2905
- if request and self.ombi_search_requests
2906
- else "[PRIORITY SEARCH - TODAY]: "
2907
- if todays
2908
- else ""
3776
+ else (
3777
+ "[OMBI REQUEST]: "
3778
+ if request and self.ombi_search_requests
3779
+ else "[PRIORITY SEARCH - TODAY]: " if todays else ""
3780
+ )
2909
3781
  )
2910
3782
  self.refresh_download_queue()
2911
3783
  if request or todays:
2912
3784
  bypass_limit = True
2913
- if (not self.search_missing) or (file_model is None):
3785
+ if file_model is None:
3786
+ return None
3787
+ features_enabled = (
3788
+ self.search_missing
3789
+ or self.do_upgrade_search
3790
+ or self.quality_unmet_search
3791
+ or self.custom_format_unmet_search
3792
+ or self.ombi_search_requests
3793
+ or self.overseerr_requests
3794
+ )
3795
+ if not features_enabled and not (request or todays):
2914
3796
  return None
2915
3797
  elif not self.is_alive:
2916
3798
  raise NoConnectionrException(f"Could not connect to {self.uri}", type="arr")
@@ -2984,7 +3866,8 @@ class Arr:
2984
3866
  self.model_file.update(Searched=True, Upgrade=True).where(
2985
3867
  file_model.EntryId == file_model.EntryId
2986
3868
  ).execute()
2987
- if file_model.Reason:
3869
+ reason_text = getattr(file_model, "Reason", None) or None
3870
+ if reason_text:
2988
3871
  self.logger.hnotice(
2989
3872
  "%sSearching for: %s | S%02dE%03d | %s | [id=%s|AirDateUTC=%s][%s]",
2990
3873
  request_tag,
@@ -2994,7 +3877,7 @@ class Arr:
2994
3877
  file_model.Title,
2995
3878
  file_model.EntryId,
2996
3879
  file_model.AirDateUtc,
2997
- file_model.Reason,
3880
+ reason_text,
2998
3881
  )
2999
3882
  else:
3000
3883
  self.logger.hnotice(
@@ -3007,6 +3890,15 @@ class Arr:
3007
3890
  file_model.EntryId,
3008
3891
  file_model.AirDateUtc,
3009
3892
  )
3893
+ description = f"{file_model.SeriesTitle} S{file_model.SeasonNumber:02d}E{file_model.EpisodeNumber:02d}"
3894
+ if getattr(file_model, "Title", None):
3895
+ description = f"{description} · {file_model.Title}"
3896
+ context_label = self._humanize_request_tag(request_tag)
3897
+ self._record_search_activity(
3898
+ description,
3899
+ context=context_label,
3900
+ detail=str(reason_text) if reason_text else None,
3901
+ )
3010
3902
  return True
3011
3903
  else:
3012
3904
  file_model: SeriesFilesModel
@@ -3048,12 +3940,22 @@ class Arr:
3048
3940
  self.logger.hnotice(
3049
3941
  "%sSearching for: %s | %s | [id=%s]",
3050
3942
  request_tag,
3051
- "Missing episodes in"
3052
- if "Missing" in self.search_api_command
3053
- else "All episodes in",
3943
+ (
3944
+ "Missing episodes in"
3945
+ if "Missing" in self.search_api_command
3946
+ else "All episodes in"
3947
+ ),
3054
3948
  file_model.Title,
3055
3949
  file_model.EntryId,
3056
3950
  )
3951
+ context_label = self._humanize_request_tag(request_tag)
3952
+ scope = (
3953
+ "Missing episodes in"
3954
+ if "Missing" in self.search_api_command
3955
+ else "All episodes in"
3956
+ )
3957
+ description = f"{scope} {file_model.Title}"
3958
+ self._record_search_activity(description, context=context_label)
3057
3959
  return True
3058
3960
  elif self.type == "radarr":
3059
3961
  file_model: MoviesFilesModel
@@ -3105,7 +4007,8 @@ class Arr:
3105
4007
  self.model_file.update(Searched=True, Upgrade=True).where(
3106
4008
  file_model.EntryId == file_model.EntryId
3107
4009
  ).execute()
3108
- if file_model.Reason:
4010
+ reason_text = getattr(file_model, "Reason", None)
4011
+ if reason_text:
3109
4012
  self.logger.hnotice(
3110
4013
  "%sSearching for: %s (%s) [tmdbId=%s|id=%s][%s]",
3111
4014
  request_tag,
@@ -3113,7 +4016,7 @@ class Arr:
3113
4016
  file_model.Year,
3114
4017
  file_model.TmdbId,
3115
4018
  file_model.EntryId,
3116
- file_model.Reason,
4019
+ reason_text,
3117
4020
  )
3118
4021
  else:
3119
4022
  self.logger.hnotice(
@@ -3124,6 +4027,97 @@ class Arr:
3124
4027
  file_model.TmdbId,
3125
4028
  file_model.EntryId,
3126
4029
  )
4030
+ context_label = self._humanize_request_tag(request_tag)
4031
+ description = (
4032
+ f"{file_model.Title} ({file_model.Year})"
4033
+ if getattr(file_model, "Year", None)
4034
+ else f"{file_model.Title}"
4035
+ )
4036
+ self._record_search_activity(
4037
+ description,
4038
+ context=context_label,
4039
+ detail=str(reason_text) if reason_text else None,
4040
+ )
4041
+ return True
4042
+ elif self.type == "lidarr":
4043
+ file_model: AlbumFilesModel
4044
+ if not (request or todays):
4045
+ (
4046
+ self.model_queue.select(self.model_queue.Completed)
4047
+ .where(self.model_queue.EntryId == file_model.EntryId)
4048
+ .execute()
4049
+ )
4050
+ else:
4051
+ pass
4052
+ if file_model.EntryId in self.queue_file_ids:
4053
+ self.logger.debug(
4054
+ "%sSkipping: Already Searched: %s - %s (%s)",
4055
+ request_tag,
4056
+ file_model.ArtistTitle,
4057
+ file_model.Title,
4058
+ file_model.EntryId,
4059
+ )
4060
+ self.model_file.update(Searched=True, Upgrade=True).where(
4061
+ file_model.EntryId == file_model.EntryId
4062
+ ).execute()
4063
+ return True
4064
+ active_commands = self.arr_db_query_commands_count()
4065
+ self.logger.info("%s active search commands, %s remaining", active_commands, commands)
4066
+ if not bypass_limit and active_commands >= self.search_command_limit:
4067
+ self.logger.trace(
4068
+ "Idle: Too many commands in queue: %s - %s | [id=%s]",
4069
+ file_model.ArtistTitle,
4070
+ file_model.Title,
4071
+ file_model.EntryId,
4072
+ )
4073
+ return False
4074
+ self.persistent_queue.insert(EntryId=file_model.EntryId).on_conflict_ignore().execute()
4075
+
4076
+ self.model_queue.insert(
4077
+ Completed=False, EntryId=file_model.EntryId
4078
+ ).on_conflict_replace().execute()
4079
+ if file_model.EntryId:
4080
+ while True:
4081
+ try:
4082
+ self.client.post_command("AlbumSearch", albumIds=[file_model.EntryId])
4083
+ break
4084
+ except (
4085
+ requests.exceptions.ChunkedEncodingError,
4086
+ requests.exceptions.ContentDecodingError,
4087
+ requests.exceptions.ConnectionError,
4088
+ JSONDecodeError,
4089
+ ):
4090
+ continue
4091
+ self.model_file.update(Searched=True, Upgrade=True).where(
4092
+ file_model.EntryId == file_model.EntryId
4093
+ ).execute()
4094
+ reason_text = getattr(file_model, "Reason", None)
4095
+ if reason_text:
4096
+ self.logger.hnotice(
4097
+ "%sSearching for: %s - %s [foreignAlbumId=%s|id=%s][%s]",
4098
+ request_tag,
4099
+ file_model.ArtistTitle,
4100
+ file_model.Title,
4101
+ file_model.ForeignAlbumId,
4102
+ file_model.EntryId,
4103
+ reason_text,
4104
+ )
4105
+ else:
4106
+ self.logger.hnotice(
4107
+ "%sSearching for: %s - %s [foreignAlbumId=%s|id=%s]",
4108
+ request_tag,
4109
+ file_model.ArtistTitle,
4110
+ file_model.Title,
4111
+ file_model.ForeignAlbumId,
4112
+ file_model.EntryId,
4113
+ )
4114
+ context_label = self._humanize_request_tag(request_tag)
4115
+ description = f"{file_model.ArtistTitle} - {file_model.Title}"
4116
+ self._record_search_activity(
4117
+ description,
4118
+ context=context_label,
4119
+ detail=str(reason_text) if reason_text else None,
4120
+ )
3127
4121
  return True
3128
4122
 
3129
4123
  def process(self):
@@ -3162,6 +4156,7 @@ class Arr:
3162
4156
  else:
3163
4157
  raise qbittorrentapi.exceptions.APIError
3164
4158
  torrents = [t for t in torrents if hasattr(t, "category")]
4159
+ self.category_torrent_count = len(torrents)
3165
4160
  if not len(torrents):
3166
4161
  raise DelayLoopException(length=LOOP_SLEEP_TIMER, type="no_downloads")
3167
4162
  if not has_internet(self.manager.qbit_manager.client):
@@ -3206,7 +4201,7 @@ class Arr:
3206
4201
  "[Last active: %s] "
3207
4202
  "| [%s] | %s (%s)",
3208
4203
  round(torrent.progress * 100, 2),
3209
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4204
+ datetime.fromtimestamp(torrent.added_on),
3210
4205
  round(torrent.availability * 100, 2),
3211
4206
  timedelta(seconds=torrent.eta),
3212
4207
  datetime.fromtimestamp(torrent.last_activity),
@@ -3224,7 +4219,7 @@ class Arr:
3224
4219
  "[Last active: %s] "
3225
4220
  "| [%s] | %s (%s)",
3226
4221
  round(torrent.progress * 100, 2),
3227
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4222
+ datetime.fromtimestamp(torrent.added_on),
3228
4223
  round(torrent.availability * 100, 2),
3229
4224
  timedelta(seconds=torrent.eta),
3230
4225
  datetime.fromtimestamp(torrent.last_activity),
@@ -3243,7 +4238,7 @@ class Arr:
3243
4238
  "[Last active: %s] "
3244
4239
  "| [%s] | %s (%s)",
3245
4240
  round(torrent.progress * 100, 2),
3246
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4241
+ datetime.fromtimestamp(torrent.added_on),
3247
4242
  round(torrent.availability * 100, 2),
3248
4243
  timedelta(seconds=torrent.eta),
3249
4244
  datetime.fromtimestamp(torrent.last_activity),
@@ -3251,8 +4246,6 @@ class Arr:
3251
4246
  torrent.name,
3252
4247
  torrent.hash,
3253
4248
  )
3254
- if torrent.state_enum == TorrentStates.QUEUED_DOWNLOAD:
3255
- self.recently_queue[torrent.hash] = time.time()
3256
4249
 
3257
4250
  def _process_single_torrent_added_to_ignore_cache(
3258
4251
  self, torrent: qbittorrentapi.TorrentDictionary
@@ -3264,7 +4257,7 @@ class Arr:
3264
4257
  "[Last active: %s] "
3265
4258
  "| [%s] | %s (%s)",
3266
4259
  round(torrent.progress * 100, 2),
3267
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4260
+ datetime.fromtimestamp(torrent.added_on),
3268
4261
  round(torrent.availability * 100, 2),
3269
4262
  timedelta(seconds=torrent.eta),
3270
4263
  datetime.fromtimestamp(torrent.last_activity),
@@ -3284,7 +4277,7 @@ class Arr:
3284
4277
  "[Last active: %s] "
3285
4278
  "| [%s] | %s (%s)",
3286
4279
  round(torrent.progress * 100, 2),
3287
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4280
+ datetime.fromtimestamp(torrent.added_on),
3288
4281
  round(torrent.availability * 100, 2),
3289
4282
  timedelta(seconds=torrent.eta),
3290
4283
  datetime.fromtimestamp(torrent.last_activity),
@@ -3301,7 +4294,7 @@ class Arr:
3301
4294
  "[Last active: %s] "
3302
4295
  "| [%s] | %s (%s)",
3303
4296
  round(torrent.progress * 100, 2),
3304
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4297
+ datetime.fromtimestamp(torrent.added_on),
3305
4298
  round(torrent.availability * 100, 2),
3306
4299
  timedelta(seconds=torrent.eta),
3307
4300
  datetime.fromtimestamp(torrent.last_activity),
@@ -3316,10 +4309,9 @@ class Arr:
3316
4309
  # Process torrents who have stalled at this point, only mark for
3317
4310
  # deletion if they have been added more than "IgnoreTorrentsYoungerThan"
3318
4311
  # seconds ago
3319
- if self.recently_queue.get(
3320
- torrent.hash, torrent.added_on
3321
- ) < time.time() - self.ignore_torrents_younger_than and torrent.last_activity < (
3322
- time.time() - self.ignore_torrents_younger_than
4312
+ if (
4313
+ torrent.added_on < time.time() - self.ignore_torrents_younger_than
4314
+ and torrent.last_activity < (time.time() - self.ignore_torrents_younger_than)
3323
4315
  ):
3324
4316
  self.logger.info(
3325
4317
  "Deleting Stale torrent: %s | "
@@ -3329,7 +4321,7 @@ class Arr:
3329
4321
  "| [%s] | %s (%s)",
3330
4322
  extra,
3331
4323
  round(torrent.progress * 100, 2),
3332
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4324
+ datetime.fromtimestamp(torrent.added_on),
3333
4325
  round(torrent.availability * 100, 2),
3334
4326
  timedelta(seconds=torrent.eta),
3335
4327
  datetime.fromtimestamp(torrent.last_activity),
@@ -3346,7 +4338,7 @@ class Arr:
3346
4338
  "[Last active: %s] "
3347
4339
  "| [%s] | %s (%s)",
3348
4340
  round(torrent.progress * 100, 2),
3349
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4341
+ datetime.fromtimestamp(torrent.added_on),
3350
4342
  round(torrent.availability * 100, 2),
3351
4343
  timedelta(seconds=torrent.eta),
3352
4344
  datetime.fromtimestamp(torrent.last_activity),
@@ -3376,7 +4368,7 @@ class Arr:
3376
4368
  "[Last active: %s] "
3377
4369
  "| [%s] | %s (%s)",
3378
4370
  round(torrent.progress * 100, 2),
3379
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4371
+ datetime.fromtimestamp(torrent.added_on),
3380
4372
  round(torrent.availability * 100, 2),
3381
4373
  timedelta(seconds=torrent.eta),
3382
4374
  datetime.fromtimestamp(torrent.last_activity),
@@ -3394,7 +4386,7 @@ class Arr:
3394
4386
  "[Last active: %s] "
3395
4387
  "| [%s] | %s (%s)",
3396
4388
  round(torrent.progress * 100, 2),
3397
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4389
+ datetime.fromtimestamp(torrent.added_on),
3398
4390
  round(torrent.availability * 100, 2),
3399
4391
  timedelta(seconds=torrent.eta),
3400
4392
  datetime.fromtimestamp(torrent.last_activity),
@@ -3413,7 +4405,7 @@ class Arr:
3413
4405
  "[Last active: %s] "
3414
4406
  "| [%s] | %s (%s)",
3415
4407
  round(torrent.progress * 100, 2),
3416
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4408
+ datetime.fromtimestamp(torrent.added_on),
3417
4409
  round(torrent.availability * 100, 2),
3418
4410
  timedelta(seconds=torrent.eta),
3419
4411
  datetime.fromtimestamp(torrent.last_activity),
@@ -3432,7 +4424,7 @@ class Arr:
3432
4424
  "[Last active: %s] "
3433
4425
  "| [%s] | %s (%s)",
3434
4426
  round(torrent.progress * 100, 2),
3435
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4427
+ datetime.fromtimestamp(torrent.added_on),
3436
4428
  round(torrent.availability * 100, 2),
3437
4429
  timedelta(seconds=torrent.eta),
3438
4430
  datetime.fromtimestamp(torrent.last_activity),
@@ -3449,7 +4441,7 @@ class Arr:
3449
4441
  "[Last active: %s] "
3450
4442
  "| [%s] | %s (%s)",
3451
4443
  round(torrent.progress * 100, 2),
3452
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4444
+ datetime.fromtimestamp(torrent.added_on),
3453
4445
  round(torrent.availability * 100, 2),
3454
4446
  timedelta(seconds=torrent.eta),
3455
4447
  datetime.fromtimestamp(torrent.last_activity),
@@ -3470,7 +4462,7 @@ class Arr:
3470
4462
  "[Last active: %s] "
3471
4463
  "| [%s] | %s (%s)",
3472
4464
  round(torrent.progress * 100, 2),
3473
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4465
+ datetime.fromtimestamp(torrent.added_on),
3474
4466
  round(torrent.availability * 100, 2),
3475
4467
  timedelta(seconds=torrent.eta),
3476
4468
  datetime.fromtimestamp(torrent.last_activity),
@@ -3486,7 +4478,7 @@ class Arr:
3486
4478
  "[Last active: %s] "
3487
4479
  "| [%s] | %s (%s)",
3488
4480
  round(torrent.progress * 100, 2),
3489
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4481
+ datetime.fromtimestamp(torrent.added_on),
3490
4482
  round(torrent.availability * 100, 2),
3491
4483
  timedelta(seconds=torrent.eta),
3492
4484
  datetime.fromtimestamp(torrent.last_activity),
@@ -3517,7 +4509,7 @@ class Arr:
3517
4509
  "[Last active: %s] "
3518
4510
  "| [%s] | %s (%s)",
3519
4511
  round(torrent.progress * 100, 2),
3520
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4512
+ datetime.fromtimestamp(torrent.added_on),
3521
4513
  round(torrent.availability * 100, 2),
3522
4514
  timedelta(seconds=torrent.eta),
3523
4515
  datetime.fromtimestamp(torrent.last_activity),
@@ -3539,7 +4531,7 @@ class Arr:
3539
4531
  "[Last active: %s] "
3540
4532
  "| [%s] | %s (%s)",
3541
4533
  round(torrent.progress * 100, 2),
3542
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4534
+ datetime.fromtimestamp(torrent.added_on),
3543
4535
  round(torrent.availability * 100, 2),
3544
4536
  timedelta(seconds=torrent.eta),
3545
4537
  datetime.fromtimestamp(torrent.last_activity),
@@ -3555,7 +4547,7 @@ class Arr:
3555
4547
  "[Last active: %s] "
3556
4548
  "| [%s] | %s (%s)",
3557
4549
  round(torrent.progress * 100, 2),
3558
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4550
+ datetime.fromtimestamp(torrent.added_on),
3559
4551
  round(torrent.availability * 100, 2),
3560
4552
  timedelta(seconds=torrent.eta),
3561
4553
  datetime.fromtimestamp(torrent.last_activity),
@@ -3575,7 +4567,7 @@ class Arr:
3575
4567
  "[Last active: %s] "
3576
4568
  "| [%s] | %s (%s)",
3577
4569
  round(torrent.progress * 100, 2),
3578
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4570
+ datetime.fromtimestamp(torrent.added_on),
3579
4571
  round(torrent.availability * 100, 2),
3580
4572
  timedelta(seconds=torrent.eta),
3581
4573
  datetime.fromtimestamp(torrent.last_activity),
@@ -3592,7 +4584,7 @@ class Arr:
3592
4584
  "[Last active: %s] "
3593
4585
  "| [%s] | %s (%s)",
3594
4586
  round(torrent.progress * 100, 2),
3595
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4587
+ datetime.fromtimestamp(torrent.added_on),
3596
4588
  round(torrent.availability * 100, 2),
3597
4589
  timedelta(seconds=torrent.eta),
3598
4590
  datetime.fromtimestamp(torrent.last_activity),
@@ -3610,7 +4602,7 @@ class Arr:
3610
4602
  "[Last active: %s] "
3611
4603
  "| [%s] | %s (%s)",
3612
4604
  round(torrent.progress * 100, 2),
3613
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4605
+ datetime.fromtimestamp(torrent.added_on),
3614
4606
  torrent.ratio,
3615
4607
  timedelta(seconds=torrent.seeding_time),
3616
4608
  datetime.fromtimestamp(torrent.last_activity),
@@ -3628,7 +4620,7 @@ class Arr:
3628
4620
  "[Last active: %s] "
3629
4621
  "| [%s] | %s (%s)",
3630
4622
  round(torrent.progress * 100, 2),
3631
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4623
+ datetime.fromtimestamp(torrent.added_on),
3632
4624
  torrent.ratio,
3633
4625
  timedelta(seconds=torrent.seeding_time),
3634
4626
  datetime.fromtimestamp(torrent.last_activity),
@@ -3709,9 +4701,7 @@ class Arr:
3709
4701
  "[Last active: %s] "
3710
4702
  "| [%s] | %s (%s)",
3711
4703
  round(torrent.progress * 100, 2),
3712
- datetime.fromtimestamp(
3713
- self.recently_queue.get(torrent.hash, torrent.added_on)
3714
- ),
4704
+ datetime.fromtimestamp(torrent.added_on),
3715
4705
  round(torrent.availability * 100, 2),
3716
4706
  timedelta(seconds=torrent.eta),
3717
4707
  datetime.fromtimestamp(torrent.last_activity),
@@ -3740,7 +4730,7 @@ class Arr:
3740
4730
  "[Last active: %s] "
3741
4731
  "| [%s] | %s (%s)",
3742
4732
  round(torrent.progress * 100, 2),
3743
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4733
+ datetime.fromtimestamp(torrent.added_on),
3744
4734
  round(torrent.availability * 100, 2),
3745
4735
  timedelta(seconds=torrent.eta),
3746
4736
  datetime.fromtimestamp(torrent.last_activity),
@@ -3756,7 +4746,7 @@ class Arr:
3756
4746
  "[Last active: %s] "
3757
4747
  "| [%s] | %s (%s)",
3758
4748
  round(torrent.progress * 100, 2),
3759
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4749
+ datetime.fromtimestamp(torrent.added_on),
3760
4750
  round(torrent.availability * 100, 2),
3761
4751
  timedelta(seconds=torrent.eta),
3762
4752
  datetime.fromtimestamp(torrent.last_activity),
@@ -3773,7 +4763,7 @@ class Arr:
3773
4763
  "[Last active: %s] "
3774
4764
  "| [%s] | %s (%s)",
3775
4765
  round(torrent.progress * 100, 2),
3776
- datetime.fromtimestamp(self.recently_queue.get(torrent.hash, torrent.added_on)),
4766
+ datetime.fromtimestamp(torrent.added_on),
3777
4767
  round(torrent.availability * 100, 2),
3778
4768
  timedelta(seconds=torrent.eta),
3779
4769
  datetime.fromtimestamp(torrent.last_activity),
@@ -3819,38 +4809,46 @@ class Arr:
3819
4809
  )
3820
4810
 
3821
4811
  data_settings = {
3822
- "ratio_limit": r
3823
- if (
3824
- r := most_important_tracker.get(
3825
- "MaxUploadRatio", self.seeding_mode_global_max_upload_ratio
4812
+ "ratio_limit": (
4813
+ r
4814
+ if (
4815
+ r := most_important_tracker.get(
4816
+ "MaxUploadRatio", self.seeding_mode_global_max_upload_ratio
4817
+ )
3826
4818
  )
3827
- )
3828
- > 0
3829
- else -5,
3830
- "seeding_time_limit": r
3831
- if (
3832
- r := most_important_tracker.get(
3833
- "MaxSeedingTime", self.seeding_mode_global_max_seeding_time
4819
+ > 0
4820
+ else -5
4821
+ ),
4822
+ "seeding_time_limit": (
4823
+ r
4824
+ if (
4825
+ r := most_important_tracker.get(
4826
+ "MaxSeedingTime", self.seeding_mode_global_max_seeding_time
4827
+ )
3834
4828
  )
3835
- )
3836
- > 0
3837
- else -5,
3838
- "dl_limit": r
3839
- if (
3840
- r := most_important_tracker.get(
3841
- "DownloadRateLimit", self.seeding_mode_global_download_limit
4829
+ > 0
4830
+ else -5
4831
+ ),
4832
+ "dl_limit": (
4833
+ r
4834
+ if (
4835
+ r := most_important_tracker.get(
4836
+ "DownloadRateLimit", self.seeding_mode_global_download_limit
4837
+ )
3842
4838
  )
3843
- )
3844
- > 0
3845
- else -5,
3846
- "up_limit": r
3847
- if (
3848
- r := most_important_tracker.get(
3849
- "UploadRateLimit", self.seeding_mode_global_upload_limit
4839
+ > 0
4840
+ else -5
4841
+ ),
4842
+ "up_limit": (
4843
+ r
4844
+ if (
4845
+ r := most_important_tracker.get(
4846
+ "UploadRateLimit", self.seeding_mode_global_upload_limit
4847
+ )
3850
4848
  )
3851
- )
3852
- > 0
3853
- else -5,
4849
+ > 0
4850
+ else -5
4851
+ ),
3854
4852
  "super_seeding": most_important_tracker.get("SuperSeedMode", torrent.super_seeding),
3855
4853
  "max_eta": most_important_tracker.get("MaximumETA", self.maximum_eta),
3856
4854
  }
@@ -3921,13 +4919,19 @@ class Arr:
3921
4919
  torrent.add_trackers(need_to_be_added)
3922
4920
  with contextlib.suppress(BaseException):
3923
4921
  for tracker in torrent.trackers:
3924
- if (
4922
+ tracker_url = getattr(tracker, "url", None)
4923
+ message_text = (getattr(tracker, "msg", "") or "").lower()
4924
+ remove_for_message = (
3925
4925
  self.remove_dead_trackers
3926
- and (
3927
- any(tracker.msg == m for m in self.seeding_mode_global_bad_tracker_msg)
3928
- ) # TODO: Add more messages
3929
- ) or tracker.url in self._remove_trackers_if_exists:
3930
- _remove_urls.add(tracker.url)
4926
+ and self._normalized_bad_tracker_msgs
4927
+ and any(
4928
+ keyword in message_text for keyword in self._normalized_bad_tracker_msgs
4929
+ )
4930
+ )
4931
+ if not tracker_url:
4932
+ continue
4933
+ if remove_for_message or tracker_url in self._remove_trackers_if_exists:
4934
+ _remove_urls.add(tracker_url)
3931
4935
  if _remove_urls:
3932
4936
  self.logger.trace(
3933
4937
  "Removing trackers from torrent: %s (%s) - %s",
@@ -3944,22 +4948,26 @@ class Arr:
3944
4948
  # Only use globals if there is not a configured equivalent value on the
3945
4949
  # highest priority tracker
3946
4950
  data = {
3947
- "ratio_limit": r
3948
- if (
3949
- r := most_important_tracker.get(
3950
- "MaxUploadRatio", self.seeding_mode_global_max_upload_ratio
4951
+ "ratio_limit": (
4952
+ r
4953
+ if (
4954
+ r := most_important_tracker.get(
4955
+ "MaxUploadRatio", self.seeding_mode_global_max_upload_ratio
4956
+ )
3951
4957
  )
3952
- )
3953
- > 0
3954
- else None,
3955
- "seeding_time_limit": r
3956
- if (
3957
- r := most_important_tracker.get(
3958
- "MaxSeedingTime", self.seeding_mode_global_max_seeding_time
4958
+ > 0
4959
+ else None
4960
+ ),
4961
+ "seeding_time_limit": (
4962
+ r
4963
+ if (
4964
+ r := most_important_tracker.get(
4965
+ "MaxSeedingTime", self.seeding_mode_global_max_seeding_time
4966
+ )
3959
4967
  )
3960
- )
3961
- > 0
3962
- else None,
4968
+ > 0
4969
+ else None
4970
+ ),
3963
4971
  }
3964
4972
  if any(r is not None for r in data):
3965
4973
  if (
@@ -4016,9 +5024,9 @@ class Arr:
4016
5024
  else:
4017
5025
  data = {
4018
5026
  "ratio_limit": r if (r := self.seeding_mode_global_max_upload_ratio) > 0 else None,
4019
- "seeding_time_limit": r
4020
- if (r := self.seeding_mode_global_max_seeding_time) > 0
4021
- else None,
5027
+ "seeding_time_limit": (
5028
+ r if (r := self.seeding_mode_global_max_seeding_time) > 0 else None
5029
+ ),
4022
5030
  }
4023
5031
  if any(r is not None for r in data):
4024
5032
  if (
@@ -4065,12 +5073,9 @@ class Arr:
4065
5073
  if not self.allowed_stalled:
4066
5074
  self.logger.trace("Stalled check: Stalled delay disabled")
4067
5075
  return False
4068
- if (
4069
- self.recently_queue.get(torrent.hash, torrent.added_on)
4070
- < time_now - self.ignore_torrents_younger_than
4071
- ):
5076
+ if time_now < torrent.added_on + self.ignore_torrents_younger_than:
4072
5077
  self.logger.trace(
4073
- "Stalled check: In recent queue %s [Current:%s][Added:%s][Limit:%s]",
5078
+ "Stalled check: In recent queue %s [Current:%s][Added:%s][Starting:%s]",
4074
5079
  torrent.name,
4075
5080
  datetime.fromtimestamp(time_now),
4076
5081
  datetime.fromtimestamp(torrent.added_on),
@@ -4081,19 +5086,19 @@ class Arr:
4081
5086
  return True
4082
5087
  if self.stalled_delay == 0:
4083
5088
  self.logger.trace(
4084
- "Stalled check: %s [Current:%s][Added:%s][Limit:No Limit]",
5089
+ "Stalled check: %s [Current:%s][Last Activity:%s][Limit:No Limit]",
4085
5090
  torrent.name,
4086
5091
  datetime.fromtimestamp(time_now),
4087
- datetime.fromtimestamp(torrent.added_on),
5092
+ datetime.fromtimestamp(torrent.last_activity),
4088
5093
  )
4089
5094
  else:
4090
5095
  self.logger.trace(
4091
- "Stalled check: %s [Current:%s][Added:%s][Limit:%s]",
5096
+ "Stalled check: %s [Current:%s][Last Activity:%s][Limit:%s]",
4092
5097
  torrent.name,
4093
5098
  datetime.fromtimestamp(time_now),
4094
- datetime.fromtimestamp(torrent.added_on),
5099
+ datetime.fromtimestamp(torrent.last_activity),
4095
5100
  datetime.fromtimestamp(
4096
- torrent.added_on + timedelta(minutes=self.stalled_delay).seconds
5101
+ torrent.last_activity + timedelta(minutes=self.stalled_delay).seconds
4097
5102
  ),
4098
5103
  )
4099
5104
  if (
@@ -4104,11 +5109,7 @@ class Arr:
4104
5109
  and not self.in_tags(torrent, "qBitrr-free_space_paused")
4105
5110
  )
4106
5111
  or (
4107
- (
4108
- self.recently_queue.get(torrent.hash, torrent.added_on)
4109
- < time_now - self.ignore_torrents_younger_than
4110
- and torrent.availability < 1
4111
- )
5112
+ torrent.availability < 1
4112
5113
  and torrent.hash in self.cleaned_torrents
4113
5114
  and torrent.state_enum in (TorrentStates.DOWNLOADING)
4114
5115
  and not self.in_tags(torrent, "qBitrr-ignored")
@@ -4117,7 +5118,8 @@ class Arr:
4117
5118
  ) and self.allowed_stalled:
4118
5119
  if (
4119
5120
  self.stalled_delay > 0
4120
- and time_now >= torrent.added_on + timedelta(minutes=self.stalled_delay).seconds
5121
+ and time_now
5122
+ >= torrent.last_activity + timedelta(minutes=self.stalled_delay).seconds
4121
5123
  ):
4122
5124
  stalled_ignore = False
4123
5125
  self.logger.trace("Process stalled, delay expired: %s", torrent.name)
@@ -4140,7 +5142,15 @@ class Arr:
4140
5142
  else:
4141
5143
  self.logger.trace("Stalled, adding tag: %s", torrent.name)
4142
5144
  elif self.in_tags(torrent, "qBitrr-allowed_stalled"):
4143
- self.logger.trace("Stalled: %s", torrent.name)
5145
+ self.logger.trace(
5146
+ "Stalled: %s [Current:%s][Last Activity:%s][Limit:%s]",
5147
+ torrent.name,
5148
+ datetime.fromtimestamp(time_now),
5149
+ datetime.fromtimestamp(torrent.last_activity),
5150
+ datetime.fromtimestamp(
5151
+ torrent.last_activity + timedelta(minutes=self.stalled_delay).seconds
5152
+ ),
5153
+ )
4144
5154
 
4145
5155
  elif self.in_tags(torrent, "qBitrr-allowed_stalled"):
4146
5156
  self.remove_tags(torrent, ["qBitrr-allowed_stalled"])
@@ -4167,7 +5177,14 @@ class Arr:
4167
5177
  )
4168
5178
  maximum_eta = _tracker_max_eta
4169
5179
 
4170
- stalled_ignore = self._stalled_check(torrent, time_now)
5180
+ if torrent.state_enum in (
5181
+ TorrentStates.METADATA_DOWNLOAD,
5182
+ TorrentStates.STALLED_DOWNLOAD,
5183
+ TorrentStates.DOWNLOADING,
5184
+ ):
5185
+ stalled_ignore = self._stalled_check(torrent, time_now)
5186
+ else:
5187
+ stalled_ignore = False
4171
5188
 
4172
5189
  if self.in_tags(torrent, "qBitrr-ignored"):
4173
5190
  self.remove_tags(torrent, ["qBitrr-allowed_seeding", "qBitrr-free_space_paused"])
@@ -4209,6 +5226,14 @@ class Arr:
4209
5226
  self._process_single_torrent_added_to_ignore_cache(torrent)
4210
5227
  elif torrent.state_enum == TorrentStates.QUEUED_UPLOAD:
4211
5228
  self._process_single_torrent_queued_upload(torrent, leave_alone)
5229
+ # Resume monitored downloads which have been paused.
5230
+ elif (
5231
+ torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD
5232
+ and torrent.amount_left != 0
5233
+ and not self.in_tags(torrent, "qBitrr-free_space_paused")
5234
+ and not self.in_tags(torrent, "qBitrr-ignored")
5235
+ ):
5236
+ self._process_single_torrent_paused(torrent)
4212
5237
  elif (
4213
5238
  torrent.progress <= self.maximum_deletable_percentage
4214
5239
  and not self.is_complete_state(torrent)
@@ -4217,13 +5242,6 @@ class Arr:
4217
5242
  and not stalled_ignore
4218
5243
  ) and torrent.hash in self.cleaned_torrents:
4219
5244
  self._process_single_torrent_percentage_threshold(torrent, maximum_eta)
4220
- # Resume monitored downloads which have been paused.
4221
- elif (
4222
- torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD
4223
- and torrent.amount_left != 0
4224
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
4225
- ):
4226
- self._process_single_torrent_paused(torrent)
4227
5245
  # Ignore torrents which have been submitted to their respective Arr
4228
5246
  # instance for import.
4229
5247
  elif (
@@ -4264,8 +5282,7 @@ class Arr:
4264
5282
  elif (
4265
5283
  torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD
4266
5284
  and torrent.state_enum.is_downloading
4267
- and self.recently_queue.get(torrent.hash, torrent.added_on)
4268
- < time_now - self.ignore_torrents_younger_than
5285
+ and time_now > torrent.added_on + self.ignore_torrents_younger_than
4269
5286
  and 0 < maximum_eta < torrent.eta
4270
5287
  and not self.do_not_remove_slow
4271
5288
  and not self.in_tags(torrent, "qBitrr-ignored")
@@ -4279,8 +5296,7 @@ class Arr:
4279
5296
  # "IgnoreTorrentsYoungerThan" variable, mark it for deletion.
4280
5297
  if (
4281
5298
  (
4282
- self.recently_queue.get(torrent.hash, torrent.added_on)
4283
- < time_now - self.ignore_torrents_younger_than
5299
+ time_now > torrent.added_on + self.ignore_torrents_younger_than
4284
5300
  and torrent.availability < 1
4285
5301
  )
4286
5302
  and torrent.hash in self.cleaned_torrents
@@ -4340,6 +5356,9 @@ class Arr:
4340
5356
  elif self.type == "radarr":
4341
5357
  entry_id_field = "movieId"
4342
5358
  file_id_field = "MovieFileId"
5359
+ elif self.type == "lidarr":
5360
+ entry_id_field = "albumId"
5361
+ file_id_field = "AlbumFileId"
4343
5362
  else:
4344
5363
  return False # Unknown type
4345
5364
 
@@ -4400,7 +5419,9 @@ class Arr:
4400
5419
  return False
4401
5420
 
4402
5421
  def refresh_download_queue(self):
4403
- self.queue = self.get_queue()
5422
+ self.queue = self.get_queue() or []
5423
+ self.queue_active_count = len(self.queue)
5424
+ self.category_torrent_count = 0
4404
5425
  self.requeue_cache = defaultdict(set)
4405
5426
  if self.queue:
4406
5427
  self.cache = {
@@ -4440,23 +5461,36 @@ class Arr:
4440
5461
  self.model_queue.delete().where(
4441
5462
  self.model_queue.EntryId.not_in(list(self.queue_file_ids))
4442
5463
  ).execute()
5464
+ elif self.type == "lidarr":
5465
+ self.requeue_cache = {
5466
+ entry["id"]: entry["albumId"] for entry in self.queue if entry.get("albumId")
5467
+ }
5468
+ self.queue_file_ids = {
5469
+ entry["albumId"] for entry in self.queue if entry.get("albumId")
5470
+ }
5471
+ if self.model_queue:
5472
+ self.model_queue.delete().where(
5473
+ self.model_queue.EntryId.not_in(list(self.queue_file_ids))
5474
+ ).execute()
4443
5475
 
4444
5476
  self._update_bad_queue_items()
4445
5477
 
4446
5478
  def get_queue(self, page=1, page_size=1000, sort_direction="ascending", sort_key="timeLeft"):
4447
- while True:
4448
- try:
4449
- res = self.client.get_queue(
4450
- page=page, page_size=page_size, sort_key=sort_key, sort_dir=sort_direction
4451
- )
4452
- break
4453
- except (
5479
+ res = with_retry(
5480
+ lambda: self.client.get_queue(
5481
+ page=page, page_size=page_size, sort_key=sort_key, sort_dir=sort_direction
5482
+ ),
5483
+ retries=3,
5484
+ backoff=0.5,
5485
+ max_backoff=3,
5486
+ exceptions=(
4454
5487
  requests.exceptions.ChunkedEncodingError,
4455
5488
  requests.exceptions.ContentDecodingError,
4456
5489
  requests.exceptions.ConnectionError,
4457
5490
  JSONDecodeError,
4458
- ):
4459
- continue
5491
+ requests.exceptions.RequestException,
5492
+ ),
5493
+ )
4460
5494
  try:
4461
5495
  res = res.get("records", [])
4462
5496
  except AttributeError:
@@ -4493,7 +5527,7 @@ class Arr:
4493
5527
  self.files_to_explicitly_delete = iter(_path_filter.copy())
4494
5528
 
4495
5529
  def parse_quality_profiles(self) -> dict[int, int]:
4496
- temp_quality_profile_ids = {}
5530
+ temp_quality_profile_ids: dict[int, int] = {}
4497
5531
 
4498
5532
  while True:
4499
5533
  try:
@@ -4505,7 +5539,23 @@ class Arr:
4505
5539
  requests.exceptions.ConnectionError,
4506
5540
  JSONDecodeError,
4507
5541
  ):
5542
+ # transient network/encoding issues; retry
5543
+ continue
5544
+ except PyarrServerError as e:
5545
+ # Server-side error (e.g., Radarr DB disk I/O). Log and wait 5 minutes before retrying.
5546
+ self.logger.error(
5547
+ "Failed to get quality profiles (server error): %s -- retrying in 5 minutes", e
5548
+ )
5549
+ try:
5550
+ time.sleep(300)
5551
+ except Exception:
5552
+ pass
4508
5553
  continue
5554
+ except Exception as e:
5555
+ # Unexpected error; log and continue without profiles.
5556
+ self.logger.error("Unexpected error getting quality profiles: %s", e)
5557
+ profiles = []
5558
+ break
4509
5559
 
4510
5560
  for n in self.main_quality_profiles:
4511
5561
  pair = [n, self.temp_quality_profiles[self.main_quality_profiles.index(n)]]
@@ -4524,24 +5574,55 @@ class Arr:
4524
5574
  def register_search_mode(self):
4525
5575
  if self.search_setup_completed:
4526
5576
  return
4527
- if not self.search_missing:
5577
+
5578
+ db1, db2, db3, db4, db5 = self._get_models()
5579
+
5580
+ if not (
5581
+ self.search_missing
5582
+ or self.do_upgrade_search
5583
+ or self.quality_unmet_search
5584
+ or self.custom_format_unmet_search
5585
+ or self.ombi_search_requests
5586
+ or self.overseerr_requests
5587
+ ):
5588
+ if db5 and getattr(self, "torrents", None) is None:
5589
+ self.torrent_db = SqliteDatabase(None)
5590
+ self.torrent_db.init(
5591
+ str(self._app_data_folder.joinpath("Torrents.db")),
5592
+ pragmas={
5593
+ "journal_mode": "wal",
5594
+ "cache_size": -64_000,
5595
+ "foreign_keys": 1,
5596
+ "ignore_check_constraints": 0,
5597
+ "synchronous": 0,
5598
+ },
5599
+ timeout=15,
5600
+ )
5601
+
5602
+ class Torrents(db5):
5603
+ class Meta:
5604
+ database = self.torrent_db
5605
+
5606
+ self.torrent_db.connect()
5607
+ self.torrent_db.create_tables([Torrents])
5608
+ self.torrents = Torrents
4528
5609
  self.search_setup_completed = True
4529
5610
  return
4530
5611
 
5612
+ self.search_db_file.parent.mkdir(parents=True, exist_ok=True)
4531
5613
  self.db = SqliteDatabase(None)
4532
5614
  self.db.init(
4533
5615
  str(self.search_db_file),
4534
5616
  pragmas={
4535
5617
  "journal_mode": "wal",
4536
- "cache_size": -1 * 64000, # 64MB
5618
+ "cache_size": -64_000,
4537
5619
  "foreign_keys": 1,
4538
5620
  "ignore_check_constraints": 0,
4539
5621
  "synchronous": 0,
4540
5622
  },
5623
+ timeout=15,
4541
5624
  )
4542
5625
 
4543
- db1, db2, db3, db4 = self._get_models()
4544
-
4545
5626
  class Files(db1):
4546
5627
  class Meta:
4547
5628
  database = self.db
@@ -4555,7 +5636,18 @@ class Arr:
4555
5636
  database = self.db
4556
5637
 
4557
5638
  self.db.connect()
4558
- if db3:
5639
+
5640
+ if db4:
5641
+
5642
+ class Tracks(db4):
5643
+ class Meta:
5644
+ database = self.db
5645
+
5646
+ self.track_file_model = Tracks
5647
+ else:
5648
+ self.track_file_model = None
5649
+
5650
+ if db3 and self.type == "sonarr":
4559
5651
 
4560
5652
  class Series(db3):
4561
5653
  class Meta:
@@ -4563,35 +5655,86 @@ class Arr:
4563
5655
 
4564
5656
  self.db.create_tables([Files, Queue, PersistingQueue, Series])
4565
5657
  self.series_file_model = Series
5658
+ self.artists_file_model = None
5659
+ elif db3 and self.type == "lidarr":
5660
+
5661
+ class Artists(db3):
5662
+ class Meta:
5663
+ database = self.db
5664
+
5665
+ self.db.create_tables([Files, Queue, PersistingQueue, Artists, Tracks])
5666
+ self.artists_file_model = Artists
5667
+ self.series_file_model = None # Lidarr uses artists, not series
4566
5668
  else:
5669
+ # Radarr or any type without db3/db4 (series/artists/tracks models)
4567
5670
  self.db.create_tables([Files, Queue, PersistingQueue])
5671
+ self.artists_file_model = None
5672
+ self.series_file_model = None
4568
5673
 
4569
- if db4:
5674
+ if db5:
4570
5675
  self.torrent_db = SqliteDatabase(None)
4571
5676
  self.torrent_db.init(
4572
5677
  str(self._app_data_folder.joinpath("Torrents.db")),
4573
5678
  pragmas={
4574
5679
  "journal_mode": "wal",
4575
- "cache_size": -1 * 64000, # 64MB
5680
+ "cache_size": -64_000,
4576
5681
  "foreign_keys": 1,
4577
5682
  "ignore_check_constraints": 0,
4578
5683
  "synchronous": 0,
4579
5684
  },
5685
+ timeout=15,
4580
5686
  )
4581
5687
 
4582
- class Torrents(db4):
5688
+ class Torrents(db5):
4583
5689
  class Meta:
4584
5690
  database = self.torrent_db
4585
5691
 
4586
5692
  self.torrent_db.connect()
4587
5693
  self.torrent_db.create_tables([Torrents])
4588
5694
  self.torrents = Torrents
5695
+ else:
5696
+ self.torrents = None
4589
5697
 
4590
5698
  self.model_file = Files
4591
5699
  self.model_queue = Queue
4592
5700
  self.persistent_queue = PersistingQueue
4593
5701
  self.search_setup_completed = True
4594
5702
 
5703
+ def _get_models(
5704
+ self,
5705
+ ) -> tuple[
5706
+ type[EpisodeFilesModel] | type[MoviesFilesModel] | type[AlbumFilesModel],
5707
+ type[EpisodeQueueModel] | type[MovieQueueModel] | type[AlbumQueueModel],
5708
+ type[SeriesFilesModel] | type[ArtistFilesModel] | None,
5709
+ type[TrackFilesModel] | None,
5710
+ type[TorrentLibrary] | None,
5711
+ ]:
5712
+ if self.type == "sonarr":
5713
+ return (
5714
+ EpisodeFilesModel,
5715
+ EpisodeQueueModel,
5716
+ SeriesFilesModel,
5717
+ None,
5718
+ TorrentLibrary if TAGLESS else None,
5719
+ )
5720
+ if self.type == "radarr":
5721
+ return (
5722
+ MoviesFilesModel,
5723
+ MovieQueueModel,
5724
+ None,
5725
+ None,
5726
+ TorrentLibrary if TAGLESS else None,
5727
+ )
5728
+ if self.type == "lidarr":
5729
+ return (
5730
+ AlbumFilesModel,
5731
+ AlbumQueueModel,
5732
+ ArtistFilesModel,
5733
+ TrackFilesModel,
5734
+ TorrentLibrary if TAGLESS else None,
5735
+ )
5736
+ raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
5737
+
4595
5738
  def run_request_search(self):
4596
5739
  if (
4597
5740
  (
@@ -4602,13 +5745,13 @@ class Arr:
4602
5745
  or (self.request_search_timer > time.time() - self.search_requests_every_x_seconds)
4603
5746
  ):
4604
5747
  return None
4605
- self.register_search_mode()
4606
5748
  totcommands = -1
4607
5749
  if SEARCH_LOOP_DELAY == -1:
4608
5750
  loop_delay = 30
4609
5751
  else:
4610
5752
  loop_delay = SEARCH_LOOP_DELAY
4611
5753
  try:
5754
+ event = self.manager.qbit_manager.shutdown_event
4612
5755
  self.db_request_update()
4613
5756
  try:
4614
5757
  for entry, commands in self.db_get_request_files():
@@ -4621,15 +5764,17 @@ class Arr:
4621
5764
  loop_delay = 30
4622
5765
  else:
4623
5766
  loop_delay = SEARCH_LOOP_DELAY
4624
- while not self.maybe_do_search(
4625
- entry,
4626
- request=True,
4627
- commands=totcommands,
5767
+ while (not event.is_set()) and (
5768
+ not self.maybe_do_search(
5769
+ entry,
5770
+ request=True,
5771
+ commands=totcommands,
5772
+ )
4628
5773
  ):
4629
5774
  self.logger.debug("Waiting for active request search commands")
4630
- time.sleep(loop_delay)
5775
+ event.wait(loop_delay)
4631
5776
  self.logger.info("Delaying request search loop by %s seconds", loop_delay)
4632
- time.sleep(loop_delay)
5777
+ event.wait(loop_delay)
4633
5778
  if totcommands == 0:
4634
5779
  self.logger.info("All request searches completed")
4635
5780
  else:
@@ -4669,23 +5814,26 @@ class Arr:
4669
5814
  self.logger.debug(
4670
5815
  "No downloads in category, sleeping for %s", timedelta(seconds=e.length)
4671
5816
  )
4672
- time.sleep(e.length)
5817
+ # Respect shutdown signal
5818
+ self.manager.qbit_manager.shutdown_event.wait(e.length)
4673
5819
 
4674
5820
  def get_year_search(self) -> tuple[list[int], int]:
4675
5821
  years_list = set()
4676
5822
  years = []
4677
5823
  if self.type == "radarr":
4678
- while True:
4679
- try:
4680
- movies = self.client.get_movie()
4681
- break
4682
- except (
5824
+ movies = with_retry(
5825
+ lambda: self.client.get_movie(),
5826
+ retries=3,
5827
+ backoff=0.5,
5828
+ max_backoff=3,
5829
+ exceptions=(
4683
5830
  requests.exceptions.ChunkedEncodingError,
4684
5831
  requests.exceptions.ContentDecodingError,
4685
5832
  requests.exceptions.ConnectionError,
4686
5833
  JSONDecodeError,
4687
- ):
4688
- continue
5834
+ requests.exceptions.RequestException,
5835
+ ),
5836
+ )
4689
5837
 
4690
5838
  for m in movies:
4691
5839
  if not m["monitored"]:
@@ -4694,20 +5842,34 @@ class Arr:
4694
5842
  years_list.add(m["year"])
4695
5843
 
4696
5844
  elif self.type == "sonarr":
4697
- while True:
4698
- try:
4699
- series = self.client.get_series()
4700
- break
4701
- except (
5845
+ series = with_retry(
5846
+ lambda: self.client.get_series(),
5847
+ retries=3,
5848
+ backoff=0.5,
5849
+ max_backoff=3,
5850
+ exceptions=(
4702
5851
  requests.exceptions.ChunkedEncodingError,
4703
5852
  requests.exceptions.ContentDecodingError,
4704
5853
  requests.exceptions.ConnectionError,
4705
5854
  JSONDecodeError,
4706
- ):
4707
- continue
5855
+ requests.exceptions.RequestException,
5856
+ ),
5857
+ )
4708
5858
 
4709
5859
  for s in series:
4710
- episodes = self.client.get_episode(s["id"], True)
5860
+ episodes = with_retry(
5861
+ lambda: self.client.get_episode(s["id"], True),
5862
+ retries=3,
5863
+ backoff=0.5,
5864
+ max_backoff=3,
5865
+ exceptions=(
5866
+ requests.exceptions.ChunkedEncodingError,
5867
+ requests.exceptions.ContentDecodingError,
5868
+ requests.exceptions.ConnectionError,
5869
+ JSONDecodeError,
5870
+ requests.exceptions.RequestException,
5871
+ ),
5872
+ )
4711
5873
  for e in episodes:
4712
5874
  if "airDateUtc" in e:
4713
5875
  if not self.search_specials and e["seasonNumber"] == 0:
@@ -4736,15 +5898,22 @@ class Arr:
4736
5898
  def run_search_loop(self) -> NoReturn:
4737
5899
  run_logs(self.logger)
4738
5900
  try:
4739
- self.register_search_mode()
4740
- if not self.search_missing:
5901
+ if not (
5902
+ self.search_missing
5903
+ or self.do_upgrade_search
5904
+ or self.quality_unmet_search
5905
+ or self.custom_format_unmet_search
5906
+ or self.ombi_search_requests
5907
+ or self.overseerr_requests
5908
+ ):
4741
5909
  return None
4742
5910
  loop_timer = timedelta(minutes=15)
4743
5911
  timer = datetime.now()
4744
5912
  years_index = 0
4745
5913
  totcommands = -1
4746
5914
  self.db_update_processed = False
4747
- while True:
5915
+ event = self.manager.qbit_manager.shutdown_event
5916
+ while not event.is_set():
4748
5917
  if self.loop_completed:
4749
5918
  years_index = 0
4750
5919
  totcommands = -1
@@ -4770,13 +5939,22 @@ class Arr:
4770
5939
  self.search_current_year = years[years_index]
4771
5940
  elif datetime.now() >= (timer + loop_timer):
4772
5941
  self.refresh_download_queue()
4773
- time.sleep(((timer + loop_timer) - datetime.now()).total_seconds())
5942
+ event.wait(((timer + loop_timer) - datetime.now()).total_seconds())
4774
5943
  self.logger.trace("Restarting loop testing")
5944
+ try:
5945
+ self._record_search_activity(None, detail="loop-complete")
5946
+ except Exception:
5947
+ pass
4775
5948
  raise RestartLoopException
4776
5949
  elif datetime.now() >= (timer + loop_timer):
4777
5950
  self.refresh_download_queue()
4778
5951
  self.logger.trace("Restarting loop testing")
5952
+ try:
5953
+ self._record_search_activity(None, detail="loop-complete")
5954
+ except Exception:
5955
+ pass
4779
5956
  raise RestartLoopException
5957
+ any_commands = False
4780
5958
  for (
4781
5959
  entry,
4782
5960
  todays,
@@ -4784,6 +5962,7 @@ class Arr:
4784
5962
  series_search,
4785
5963
  commands,
4786
5964
  ) in self.db_get_files():
5965
+ any_commands = True
4787
5966
  if totcommands == -1:
4788
5967
  totcommands = commands
4789
5968
  self.logger.info("Starting search for %s items", totcommands)
@@ -4791,25 +5970,39 @@ class Arr:
4791
5970
  loop_delay = 30
4792
5971
  else:
4793
5972
  loop_delay = SEARCH_LOOP_DELAY
4794
- while not self.maybe_do_search(
4795
- entry,
4796
- todays=todays,
4797
- bypass_limit=limit_bypass,
4798
- series_search=series_search,
4799
- commands=totcommands,
5973
+ while (not event.is_set()) and (
5974
+ not self.maybe_do_search(
5975
+ entry,
5976
+ todays=todays,
5977
+ bypass_limit=limit_bypass,
5978
+ series_search=series_search,
5979
+ commands=totcommands,
5980
+ )
4800
5981
  ):
4801
5982
  self.logger.debug("Waiting for active search commands")
4802
- time.sleep(loop_delay)
5983
+ event.wait(loop_delay)
4803
5984
  totcommands -= 1
4804
5985
  self.logger.info("Delaying search loop by %s seconds", loop_delay)
4805
- time.sleep(loop_delay)
5986
+ event.wait(loop_delay)
4806
5987
  if totcommands == 0:
4807
5988
  self.logger.info("All searches completed")
5989
+ try:
5990
+ self._record_search_activity(
5991
+ None, detail="no-pending-searches"
5992
+ )
5993
+ except Exception:
5994
+ pass
4808
5995
  elif datetime.now() >= (timer + loop_timer):
4809
5996
  timer = datetime.now()
4810
5997
  self.logger.info(
4811
5998
  "Searches not completed, %s remaining", totcommands
4812
5999
  )
6000
+ if not any_commands:
6001
+ self.logger.debug("No pending searches for %s", self._name)
6002
+ try:
6003
+ self._record_search_activity(None, detail="no-pending-searches")
6004
+ except Exception:
6005
+ pass
4813
6006
  except RestartLoopException:
4814
6007
  self.loop_completed = True
4815
6008
  self.db_update_processed = False
@@ -4828,7 +6021,7 @@ class Arr:
4828
6021
  raise DelayLoopException(length=300, type="qbit")
4829
6022
  except Exception as e:
4830
6023
  self.logger.exception(e, exc_info=sys.exc_info())
4831
- time.sleep(LOOP_SLEEP_TIMER)
6024
+ event.wait(LOOP_SLEEP_TIMER)
4832
6025
  except DelayLoopException as e:
4833
6026
  if e.type == "qbit":
4834
6027
  self.logger.critical(
@@ -4851,22 +6044,22 @@ class Arr:
4851
6044
  "sleeping for %s",
4852
6045
  timedelta(seconds=e.length),
4853
6046
  )
4854
- time.sleep(e.length)
6047
+ event.wait(e.length)
4855
6048
  self.manager.qbit_manager.should_delay_torrent_scan = False
4856
6049
  except KeyboardInterrupt:
4857
6050
  self.logger.hnotice("Detected Ctrl+C - Terminating process")
4858
6051
  sys.exit(0)
4859
6052
  else:
4860
- time.sleep(5)
6053
+ event.wait(5)
4861
6054
  except KeyboardInterrupt:
4862
6055
  self.logger.hnotice("Detected Ctrl+C - Terminating process")
4863
6056
  sys.exit(0)
4864
6057
 
4865
6058
  def run_torrent_loop(self) -> NoReturn:
4866
6059
  run_logs(self.logger)
4867
- self.register_search_mode()
4868
6060
  self.logger.hnotice("Starting torrent monitoring for %s", self._name)
4869
- while True:
6061
+ event = self.manager.qbit_manager.shutdown_event
6062
+ while not event.is_set():
4870
6063
  try:
4871
6064
  try:
4872
6065
  try:
@@ -4896,7 +6089,7 @@ class Arr:
4896
6089
  sys.exit(0)
4897
6090
  except Exception as e:
4898
6091
  self.logger.error(e, exc_info=sys.exc_info())
4899
- time.sleep(LOOP_SLEEP_TIMER)
6092
+ event.wait(LOOP_SLEEP_TIMER)
4900
6093
  except DelayLoopException as e:
4901
6094
  if e.type == "qbit":
4902
6095
  self.logger.critical(
@@ -4924,7 +6117,7 @@ class Arr:
4924
6117
  "No downloads in category, sleeping for %s",
4925
6118
  timedelta(seconds=e.length),
4926
6119
  )
4927
- time.sleep(e.length)
6120
+ event.wait(e.length)
4928
6121
  self.manager.qbit_manager.should_delay_torrent_scan = False
4929
6122
  except KeyboardInterrupt:
4930
6123
  self.logger.hnotice("Detected Ctrl+C - Terminating process")
@@ -4937,13 +6130,13 @@ class Arr:
4937
6130
  _temp = []
4938
6131
  if self.search_missing:
4939
6132
  self.process_search_loop = pathos.helpers.mp.Process(
4940
- target=self.run_search_loop, daemon=True
6133
+ target=self.run_search_loop, daemon=False
4941
6134
  )
4942
6135
  self.manager.qbit_manager.child_processes.append(self.process_search_loop)
4943
6136
  _temp.append(self.process_search_loop)
4944
- if not any([QBIT_DISABLED, SEARCH_ONLY]):
6137
+ if not (QBIT_DISABLED or SEARCH_ONLY):
4945
6138
  self.process_torrent_loop = pathos.helpers.mp.Process(
4946
- target=self.run_torrent_loop, daemon=True
6139
+ target=self.run_torrent_loop, daemon=False
4947
6140
  )
4948
6141
  self.manager.qbit_manager.child_processes.append(self.process_torrent_loop)
4949
6142
  _temp.append(self.process_torrent_loop)
@@ -4961,7 +6154,6 @@ class PlaceHolderArr(Arr):
4961
6154
  self.queue = []
4962
6155
  self.cache = {}
4963
6156
  self.requeue_cache = {}
4964
- self.recently_queue = {}
4965
6157
  self.sent_to_scan = set()
4966
6158
  self.sent_to_scan_hashes = set()
4967
6159
  self.files_probed = set()
@@ -4982,22 +6174,15 @@ class PlaceHolderArr(Arr):
4982
6174
  self.tracker_delay = ExpiringSet(max_age_seconds=600)
4983
6175
  self._LOG_LEVEL = self.manager.qbit_manager.logger.level
4984
6176
  self.logger = logging.getLogger(f"qBitrr.{self._name}")
4985
- if ENABLE_LOGS:
4986
- logs_folder = HOME_PATH.joinpath("logs")
4987
- logs_folder.mkdir(parents=True, exist_ok=True)
4988
- logs_folder.chmod(mode=0o777)
4989
- logfile = logs_folder.joinpath(self._name + ".log")
4990
- if pathlib.Path(logfile).is_file():
4991
- logold = logs_folder.joinpath(self._name + ".log.old")
4992
- if pathlib.Path(logold).exists():
4993
- logold.unlink()
4994
- logfile.rename(logold)
4995
- fh = logging.FileHandler(logfile)
4996
- self.logger.addHandler(fh)
4997
- run_logs(self.logger)
6177
+ run_logs(self.logger, self._name)
4998
6178
  self.search_missing = False
4999
6179
  self.session = None
5000
6180
  self.search_setup_completed = False
6181
+ self.last_search_description: str | None = None
6182
+ self.last_search_timestamp: str | None = None
6183
+ self.queue_active_count: int = 0
6184
+ self.category_torrent_count: int = 0
6185
+ self.free_space_tagged_count: int = 0
5001
6186
  self.logger.hnotice("Starting %s monitor", self._name)
5002
6187
 
5003
6188
  def _process_errored(self):
@@ -5010,9 +6195,31 @@ class PlaceHolderArr(Arr):
5010
6195
  updated_recheck.append(h)
5011
6196
  if c := self.manager.qbit_manager.cache.get(h):
5012
6197
  temp[c].append(h)
5013
- self.manager.qbit.torrents_recheck(torrent_hashes=updated_recheck)
6198
+ with contextlib.suppress(Exception):
6199
+ with_retry(
6200
+ lambda: self.manager.qbit.torrents_recheck(torrent_hashes=updated_recheck),
6201
+ retries=3,
6202
+ backoff=0.5,
6203
+ max_backoff=3,
6204
+ exceptions=(
6205
+ qbittorrentapi.exceptions.APIError,
6206
+ qbittorrentapi.exceptions.APIConnectionError,
6207
+ requests.exceptions.RequestException,
6208
+ ),
6209
+ )
5014
6210
  for k, v in temp.items():
5015
- self.manager.qbit.torrents_set_category(torrent_hashes=v, category=k)
6211
+ with contextlib.suppress(Exception):
6212
+ with_retry(
6213
+ lambda: self.manager.qbit.torrents_set_category(torrent_hashes=v, category=k),
6214
+ retries=3,
6215
+ backoff=0.5,
6216
+ max_backoff=3,
6217
+ exceptions=(
6218
+ qbittorrentapi.exceptions.APIError,
6219
+ qbittorrentapi.exceptions.APIConnectionError,
6220
+ requests.exceptions.RequestException,
6221
+ ),
6222
+ )
5016
6223
 
5017
6224
  for k in updated_recheck:
5018
6225
  self.timed_ignore_cache.add(k)
@@ -5035,10 +6242,36 @@ class PlaceHolderArr(Arr):
5035
6242
  # Remove all bad torrents from the Client.
5036
6243
  temp_to_delete = set()
5037
6244
  if to_delete_all:
5038
- self.manager.qbit.torrents_delete(hashes=to_delete_all, delete_files=True)
6245
+ with contextlib.suppress(Exception):
6246
+ with_retry(
6247
+ lambda: self.manager.qbit.torrents_delete(
6248
+ hashes=to_delete_all, delete_files=True
6249
+ ),
6250
+ retries=3,
6251
+ backoff=0.5,
6252
+ max_backoff=3,
6253
+ exceptions=(
6254
+ qbittorrentapi.exceptions.APIError,
6255
+ qbittorrentapi.exceptions.APIConnectionError,
6256
+ requests.exceptions.RequestException,
6257
+ ),
6258
+ )
5039
6259
  if self.remove_from_qbit or self.skip_blacklist:
5040
6260
  temp_to_delete = self.remove_from_qbit.union(self.skip_blacklist)
5041
- self.manager.qbit.torrents_delete(hashes=temp_to_delete, delete_files=True)
6261
+ with contextlib.suppress(Exception):
6262
+ with_retry(
6263
+ lambda: self.manager.qbit.torrents_delete(
6264
+ hashes=temp_to_delete, delete_files=True
6265
+ ),
6266
+ retries=3,
6267
+ backoff=0.5,
6268
+ max_backoff=3,
6269
+ exceptions=(
6270
+ qbittorrentapi.exceptions.APIError,
6271
+ qbittorrentapi.exceptions.APIConnectionError,
6272
+ requests.exceptions.RequestException,
6273
+ ),
6274
+ )
5042
6275
  to_delete_all = to_delete_all.union(temp_to_delete)
5043
6276
  for h in to_delete_all:
5044
6277
  if h in self.manager.qbit_manager.name_cache:
@@ -5058,16 +6291,27 @@ class PlaceHolderArr(Arr):
5058
6291
  try:
5059
6292
  while True:
5060
6293
  try:
5061
- torrents = self.manager.qbit_manager.client.torrents.info(
5062
- status_filter="all",
5063
- category=self.category,
5064
- sort="added_on",
5065
- reverse=False,
6294
+ torrents = with_retry(
6295
+ lambda: self.manager.qbit_manager.client.torrents.info(
6296
+ status_filter="all",
6297
+ category=self.category,
6298
+ sort="added_on",
6299
+ reverse=False,
6300
+ ),
6301
+ retries=3,
6302
+ backoff=0.5,
6303
+ max_backoff=3,
6304
+ exceptions=(
6305
+ qbittorrentapi.exceptions.APIError,
6306
+ qbittorrentapi.exceptions.APIConnectionError,
6307
+ requests.exceptions.RequestException,
6308
+ ),
5066
6309
  )
5067
6310
  break
5068
6311
  except qbittorrentapi.exceptions.APIError:
5069
6312
  continue
5070
6313
  torrents = [t for t in torrents if hasattr(t, "category")]
6314
+ self.category_torrent_count = len(torrents)
5071
6315
  if not len(torrents):
5072
6316
  raise DelayLoopException(length=LOOP_SLEEP_TIMER, type="no_downloads")
5073
6317
  if not has_internet(self.manager.qbit_manager):
@@ -5108,29 +6352,15 @@ class PlaceHolderArr(Arr):
5108
6352
  except DelayLoopException:
5109
6353
  raise
5110
6354
 
5111
- def run_search_loop(self):
5112
- return
5113
-
5114
6355
 
5115
6356
  class FreeSpaceManager(Arr):
5116
6357
  def __init__(self, categories: set[str], manager: ArrManager):
5117
6358
  self._name = "FreeSpaceManager"
6359
+ self.type = "FreeSpaceManager"
5118
6360
  self.manager = manager
5119
6361
  self.logger = logging.getLogger(f"qBitrr.{self._name}")
5120
6362
  self._LOG_LEVEL = self.manager.qbit_manager.logger.level
5121
- if ENABLE_LOGS:
5122
- logs_folder = HOME_PATH.joinpath("logs")
5123
- logs_folder.mkdir(parents=True, exist_ok=True)
5124
- logs_folder.chmod(mode=0o777)
5125
- logfile = logs_folder.joinpath(self._name + ".log")
5126
- if pathlib.Path(logfile).is_file():
5127
- logold = logs_folder.joinpath(self._name + ".log.old")
5128
- if pathlib.Path(logold).exists():
5129
- logold.unlink()
5130
- logfile.rename(logold)
5131
- fh = logging.FileHandler(logfile)
5132
- self.logger.addHandler(fh)
5133
- run_logs(self.logger)
6363
+ run_logs(self.logger, self._name)
5134
6364
  self.categories = categories
5135
6365
  self.logger.trace("Categories: %s", self.categories)
5136
6366
  self.pause = set()
@@ -5141,6 +6371,9 @@ class FreeSpaceManager(Arr):
5141
6371
  )
5142
6372
  self.timed_ignore_cache = ExpiringSet(max_age_seconds=self.ignore_torrents_younger_than)
5143
6373
  self.needs_cleanup = False
6374
+ self._app_data_folder = APPDATA_FOLDER
6375
+ # Track search setup state to cooperate with Arr.register_search_mode
6376
+ self.search_setup_completed = False
5144
6377
  if FREE_SPACE_FOLDER == "CHANGE_ME":
5145
6378
  self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
5146
6379
  next(iter(self.categories))
@@ -5148,53 +6381,68 @@ class FreeSpaceManager(Arr):
5148
6381
  else:
5149
6382
  self.completed_folder = pathlib.Path(FREE_SPACE_FOLDER)
5150
6383
  self.min_free_space = FREE_SPACE
5151
- self.current_free_space = shutil.disk_usage(self.completed_folder).free - parse_size(
5152
- self.min_free_space
6384
+ # Parse once to avoid repeated conversions
6385
+ self._min_free_space_bytes = (
6386
+ parse_size(self.min_free_space) if self.min_free_space != "-1" else 0
6387
+ )
6388
+ self.current_free_space = (
6389
+ shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
6390
+ )
6391
+ self.logger.trace(
6392
+ "Free space monitor initialized | Available: %s | Threshold: %s",
6393
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6394
+ format_bytes(self._min_free_space_bytes),
5153
6395
  )
5154
- self.logger.trace("Current free space: %s", self.current_free_space)
5155
6396
  self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-free_space_paused"])
5156
6397
  self.search_missing = False
6398
+ self.do_upgrade_search = False
6399
+ self.quality_unmet_search = False
6400
+ self.custom_format_unmet_search = False
6401
+ self.ombi_search_requests = False
6402
+ self.overseerr_requests = False
5157
6403
  self.session = None
5158
- self.register_torrent_database()
6404
+ # Ensure torrent tag-emulation tables exist when needed.
6405
+ self.torrents = None
6406
+ self.torrent_db: SqliteDatabase | None = None
6407
+ self.last_search_description: str | None = None
6408
+ self.last_search_timestamp: str | None = None
6409
+ self.queue_active_count: int = 0
6410
+ self.category_torrent_count: int = 0
6411
+ self.free_space_tagged_count: int = 0
6412
+ self.register_search_mode()
5159
6413
  self.logger.hnotice("Starting %s monitor", self._name)
5160
- self.search_setup_completed = False
5161
-
5162
- def register_torrent_database(self):
5163
- self.torrent_db = SqliteDatabase(None)
5164
- self.torrent_db.init(
5165
- str(APPDATA_FOLDER.joinpath("Torrents.db")),
5166
- pragmas={
5167
- "journal_mode": "wal",
5168
- "cache_size": -1 * 64000, # 64MB
5169
- "foreign_keys": 1,
5170
- "ignore_check_constraints": 0,
5171
- "synchronous": 0,
5172
- },
6414
+ atexit.register(
6415
+ lambda: (
6416
+ hasattr(self, "torrent_db")
6417
+ and self.torrent_db
6418
+ and not self.torrent_db.is_closed()
6419
+ and self.torrent_db.close()
6420
+ )
5173
6421
  )
5174
6422
 
5175
- class Torrents(TorrentLibrary):
5176
- class Meta:
5177
- database = self.torrent_db
5178
-
5179
- self.torrent_db.connect()
5180
- self.torrent_db.create_tables([Torrents])
5181
- self.torrents = Torrents
6423
+ def _get_models(
6424
+ self,
6425
+ ) -> tuple[
6426
+ None,
6427
+ None,
6428
+ None,
6429
+ None,
6430
+ type[TorrentLibrary] | None,
6431
+ ]:
6432
+ return None, None, None, None, (TorrentLibrary if TAGLESS else None)
5182
6433
 
5183
6434
  def _process_single_torrent_pause_disk_space(self, torrent: qbittorrentapi.TorrentDictionary):
5184
6435
  self.logger.info(
5185
- "Pausing torrent for disk space: "
5186
- "[Progress: %s%%][Added On: %s]"
5187
- "[Availability: %s%%][Time Left: %s]"
5188
- "[Last active: %s] "
5189
- "| [%s] | %s (%s)",
6436
+ "Pausing torrent due to insufficient disk space | "
6437
+ "Name: %s | Progress: %s%% | Size remaining: %s | "
6438
+ "Availability: %s%% | ETA: %s | State: %s | Hash: %s",
6439
+ torrent.name,
5190
6440
  round(torrent.progress * 100, 2),
5191
- datetime.fromtimestamp(torrent.added_on),
6441
+ format_bytes(torrent.amount_left),
5192
6442
  round(torrent.availability * 100, 2),
5193
6443
  timedelta(seconds=torrent.eta),
5194
- datetime.fromtimestamp(torrent.last_activity),
5195
6444
  torrent.state_enum,
5196
- torrent.name,
5197
- torrent.hash,
6445
+ torrent.hash[:8], # Shortened hash for readability
5198
6446
  )
5199
6447
  self.pause.add(torrent.hash)
5200
6448
 
@@ -5203,45 +6451,48 @@ class FreeSpaceManager(Arr):
5203
6451
  free_space_test = self.current_free_space
5204
6452
  free_space_test -= torrent["amount_left"]
5205
6453
  self.logger.trace(
5206
- "Result [%s]: Free space %s -> %s",
6454
+ "Evaluating torrent: %s | Current space: %s | Space after download: %s | Remaining: %s",
5207
6455
  torrent.name,
5208
- self.current_free_space,
5209
- free_space_test,
6456
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6457
+ format_bytes(free_space_test + self._min_free_space_bytes),
6458
+ format_bytes(torrent.amount_left),
5210
6459
  )
5211
6460
  if torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
5212
6461
  self.logger.info(
5213
- "Pause download [%s]: Free space %s -> %s",
6462
+ "Pausing download (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
5214
6463
  torrent.name,
5215
- self.current_free_space,
5216
- free_space_test,
6464
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6465
+ format_bytes(torrent.amount_left),
6466
+ format_bytes(-free_space_test),
5217
6467
  )
5218
6468
  self.add_tags(torrent, ["qBitrr-free_space_paused"])
5219
6469
  self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
5220
6470
  self._process_single_torrent_pause_disk_space(torrent)
5221
6471
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
5222
6472
  self.logger.info(
5223
- "Leave paused [%s]: Free space %s -> %s",
6473
+ "Keeping paused (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
5224
6474
  torrent.name,
5225
- self.current_free_space,
5226
- free_space_test,
6475
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6476
+ format_bytes(torrent.amount_left),
6477
+ format_bytes(-free_space_test),
5227
6478
  )
5228
6479
  self.add_tags(torrent, ["qBitrr-free_space_paused"])
5229
6480
  self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
5230
6481
  elif torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
5231
6482
  self.logger.info(
5232
- "Continue downloading [%s]: Free space %s -> %s",
6483
+ "Continuing download (sufficient space) | Torrent: %s | Available: %s | Space after: %s",
5233
6484
  torrent.name,
5234
- self.current_free_space,
5235
- free_space_test,
6485
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6486
+ format_bytes(free_space_test + self._min_free_space_bytes),
5236
6487
  )
5237
6488
  self.current_free_space = free_space_test
5238
6489
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
5239
6490
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
5240
6491
  self.logger.info(
5241
- "Unpause download [%s]: Free space %s -> %s",
6492
+ "Resuming download (space available) | Torrent: %s | Available: %s | Space after: %s",
5242
6493
  torrent.name,
5243
- self.current_free_space,
5244
- free_space_test,
6494
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6495
+ format_bytes(free_space_test + self._min_free_space_bytes),
5245
6496
  )
5246
6497
  self.current_free_space = free_space_test
5247
6498
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
@@ -5249,10 +6500,9 @@ class FreeSpaceManager(Arr):
5249
6500
  torrent, "qBitrr-free_space_paused"
5250
6501
  ):
5251
6502
  self.logger.info(
5252
- "Removing tag [%s] for completed torrent[%s]: Free space %s",
5253
- "qBitrr-free_space_paused",
6503
+ "Torrent completed, removing free space tag | Torrent: %s | Available: %s",
5254
6504
  torrent.name,
5255
- self.current_free_space,
6505
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
5256
6506
  )
5257
6507
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
5258
6508
 
@@ -5264,15 +6514,28 @@ class FreeSpaceManager(Arr):
5264
6514
  try:
5265
6515
  while True:
5266
6516
  try:
5267
- torrents = self.manager.qbit_manager.client.torrents.info(
5268
- status_filter="all", sort="added_on", reverse=False
5269
- )
6517
+ # Fetch per category to reduce client-side filtering
6518
+ torrents = []
6519
+ for cat in self.categories:
6520
+ with contextlib.suppress(qbittorrentapi.exceptions.APIError):
6521
+ torrents.extend(
6522
+ self.manager.qbit_manager.client.torrents.info(
6523
+ status_filter="all",
6524
+ category=cat,
6525
+ sort="added_on",
6526
+ reverse=False,
6527
+ )
6528
+ )
5270
6529
  break
5271
6530
  except qbittorrentapi.exceptions.APIError:
5272
6531
  continue
5273
6532
  torrents = [t for t in torrents if hasattr(t, "category")]
5274
6533
  torrents = [t for t in torrents if t.category in self.categories]
5275
6534
  torrents = [t for t in torrents if "qBitrr-ignored" not in t.tags]
6535
+ self.category_torrent_count = len(torrents)
6536
+ self.free_space_tagged_count = sum(
6537
+ 1 for t in torrents if self.in_tags(t, "qBitrr-free_space_paused")
6538
+ )
5276
6539
  if not len(torrents):
5277
6540
  raise DelayLoopException(length=LOOP_SLEEP_TIMER, type="no_downloads")
5278
6541
  if not has_internet(self.manager.qbit_manager):
@@ -5280,10 +6543,17 @@ class FreeSpaceManager(Arr):
5280
6543
  raise DelayLoopException(length=NO_INTERNET_SLEEP_TIMER, type="internet")
5281
6544
  if self.manager.qbit_manager.should_delay_torrent_scan:
5282
6545
  raise DelayLoopException(length=NO_INTERNET_SLEEP_TIMER, type="delay")
5283
- self.current_free_space = shutil.disk_usage(
5284
- self.completed_folder
5285
- ).free - parse_size(self.min_free_space)
5286
- self.logger.trace("Current free space: %s", self.current_free_space)
6546
+ self.current_free_space = (
6547
+ shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
6548
+ )
6549
+ self.logger.trace(
6550
+ "Processing torrents | Available: %s | Threshold: %s | Usable: %s | Torrents: %d | Paused for space: %d",
6551
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6552
+ format_bytes(self._min_free_space_bytes),
6553
+ format_bytes(self.current_free_space),
6554
+ self.category_torrent_count,
6555
+ self.free_space_tagged_count,
6556
+ )
5287
6557
  sorted_torrents = sorted(torrents, key=lambda t: t["priority"])
5288
6558
  for torrent in sorted_torrents:
5289
6559
  with contextlib.suppress(qbittorrentapi.NotFound404Error):
@@ -5331,7 +6601,7 @@ class ArrManager:
5331
6601
  self.ffprobe_available: bool = self.qbit_manager.ffprobe_downloader.probe_path.exists()
5332
6602
  self.logger = logging.getLogger("qBitrr.ArrManager")
5333
6603
  run_logs(self.logger)
5334
- if not self.ffprobe_available and not any([QBIT_DISABLED, SEARCH_ONLY]):
6604
+ if not self.ffprobe_available and not (QBIT_DISABLED or SEARCH_ONLY):
5335
6605
  self.logger.error(
5336
6606
  "'%s' was not found, disabling all functionality dependant on it",
5337
6607
  self.qbit_manager.ffprobe_downloader.probe_path,
@@ -5339,7 +6609,7 @@ class ArrManager:
5339
6609
 
5340
6610
  def build_arr_instances(self):
5341
6611
  for key in CONFIG.sections():
5342
- if search := re.match("(rad|son|anim)arr.*", key, re.IGNORECASE):
6612
+ if search := re.match("(rad|son|anim|lid)arr.*", key, re.IGNORECASE):
5343
6613
  name = search.group(0)
5344
6614
  match = search.group(1)
5345
6615
  if match.lower() == "son":
@@ -5348,6 +6618,8 @@ class ArrManager:
5348
6618
  call_cls = SonarrAPI
5349
6619
  elif match.lower() == "rad":
5350
6620
  call_cls = RadarrAPI
6621
+ elif match.lower() == "lid":
6622
+ call_cls = LidarrAPI
5351
6623
  else:
5352
6624
  call_cls = None
5353
6625
  try:
@@ -5362,7 +6634,12 @@ class ArrManager:
5362
6634
  continue
5363
6635
  except (OSError, TypeError) as e:
5364
6636
  self.logger.exception(e)
5365
- if FREE_SPACE != "-1" and AUTO_PAUSE_RESUME:
6637
+ if (
6638
+ FREE_SPACE != "-1"
6639
+ and AUTO_PAUSE_RESUME
6640
+ and not QBIT_DISABLED
6641
+ and len(self.arr_categories) > 0
6642
+ ):
5366
6643
  managed_object = FreeSpaceManager(self.arr_categories, self)
5367
6644
  self.managed_objects["FreeSpaceManager"] = managed_object
5368
6645
  for cat in self.special_categories: