qBitrr2 5.6.2-py3-none-any.whl → 5.7.1-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
qBitrr/arss.py CHANGED
@@ -147,36 +147,36 @@ class Arr:
147
147
  self.logger = logging.getLogger(f"qBitrr.{self._name}")
148
148
  run_logs(self.logger, self._name)
149
149
 
150
+ # Set completed_folder path (used for category creation and file monitoring)
150
151
  if not QBIT_DISABLED:
151
152
  try:
153
+ # Check default instance for existing category configuration
152
154
  categories = self.manager.qbit_manager.client.torrent_categories.categories
153
- try:
154
- categ = categories[self.category]
155
- path = categ["savePath"]
156
- if path:
157
- self.logger.trace("Category exists with save path [%s]", path)
158
- self.completed_folder = pathlib.Path(path)
159
- else:
160
- self.logger.trace("Category exists without save path")
161
- self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
162
- self.category
163
- )
164
- except KeyError:
155
+ categ = categories.get(self.category)
156
+ if categ and categ.get("savePath"):
157
+ self.logger.trace("Category exists with save path [%s]", categ["savePath"])
158
+ self.completed_folder = pathlib.Path(categ["savePath"])
159
+ else:
160
+ self.logger.trace("Category does not exist or lacks save path")
165
161
  self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
166
162
  self.category
167
163
  )
168
- self.manager.qbit_manager.client.torrent_categories.create_category(
169
- self.category, save_path=self.completed_folder
170
- )
171
164
  except Exception as e:
172
165
  self.logger.warning(
173
- "Could not connect to qBittorrent during initialization for %s: %s. Will retry when process starts.",
166
+ "Could not connect to qBittorrent during initialization for %s: %s. Using default path.",
174
167
  self._name,
175
- str(e).split("\n")[0] if "\n" in str(e) else str(e), # First line only
168
+ str(e).split("\n")[0] if "\n" in str(e) else str(e),
176
169
  )
177
170
  self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
178
171
  self.category
179
172
  )
173
+ # Ensure category exists on ALL instances (deferred to avoid __init__ failures)
174
+ try:
175
+ self._ensure_category_on_all_instances()
176
+ except Exception as e:
177
+ self.logger.warning(
178
+ "Could not ensure category on all instances during init: %s", e
179
+ )
180
180
  else:
181
181
  self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(self.category)
182
182
 
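The hunk above replaces the KeyError-driven category lookup with a tolerant categories.get() check before falling back to the default completed-downloads path. A minimal standalone sketch of that fallback (the dict shape mirrors qBittorrent's torrent_categories.categories; the data and helper name here are illustrative, not taken from qBitrr):

    import pathlib

    def resolve_completed_folder(categories: dict, category: str, default_root: str) -> pathlib.Path:
        # Prefer the category's configured savePath; otherwise fall back to <default_root>/<category>.
        entry = categories.get(category)
        if entry and entry.get("savePath"):
            return pathlib.Path(entry["savePath"])
        return pathlib.Path(default_root).joinpath(category)

    cats = {"radarr": {"name": "radarr", "savePath": "/downloads/radarr"}}
    assert resolve_completed_folder(cats, "radarr", "/completed") == pathlib.Path("/downloads/radarr")
    assert resolve_completed_folder(cats, "sonarr", "/completed") == pathlib.Path("/completed/sonarr")
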
@@ -365,7 +365,7 @@ class Arr:
365
365
  f"{name}.EntrySearch.Overseerr.ApprovedOnly", fallback=True
366
366
  )
367
367
  self.search_requests_every_x_seconds = CONFIG.get(
368
- f"{name}.EntrySearch.SearchRequestsEvery", fallback=1800
368
+ f"{name}.EntrySearch.SearchRequestsEvery", fallback=300
369
369
  )
370
370
  self._temp_overseer_request_cache: dict[str, set[int | str]] = defaultdict(set)
371
371
  if self.ombi_search_requests or self.overseerr_requests:
@@ -465,10 +465,32 @@ class Arr:
465
465
  self.keep_temp_profile,
466
466
  )
467
467
  self.temp_quality_profile_ids = self.parse_quality_profiles()
468
+ # Create reverse mapping (temp_id → main_id) for O(1) lookups
469
+ self.main_quality_profile_ids = {
470
+ v: k for k, v in self.temp_quality_profile_ids.items()
471
+ }
472
+ self.profile_switch_retry_attempts = CONFIG.get(
473
+ f"{name}.EntrySearch.ProfileSwitchRetryAttempts", fallback=3
474
+ )
475
+ self.temp_profile_timeout_minutes = CONFIG.get(
476
+ f"{name}.EntrySearch.TempProfileResetTimeoutMinutes", fallback=0
477
+ )
468
478
  self.logger.info(
469
479
  "Parsed quality profile mappings: %s",
470
480
  {f"{k}→{v}": f"(main→temp)" for k, v in self.temp_quality_profile_ids.items()},
471
481
  )
482
+ if self.temp_profile_timeout_minutes > 0:
483
+ self.logger.info(
484
+ f"Temp profile timeout enabled: {self.temp_profile_timeout_minutes} minutes"
485
+ )
486
+
487
+ # Check if we should reset all temp profiles on startup
488
+ force_reset = CONFIG.get(f"{name}.EntrySearch.ForceResetTempProfiles", fallback=False)
489
+ if force_reset:
490
+ self.logger.info(
491
+ "ForceResetTempProfiles enabled - resetting all temp profiles on startup"
492
+ )
493
+ self._reset_all_temp_profiles()
472
494
 
473
495
  # Cache for valid quality profile IDs to avoid repeated API calls and warnings
474
496
  self._quality_profile_cache: dict[int, dict] = {}
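For reference, a small sketch of the reverse profile map introduced here and why it replaces the list(...).index(...) round-trip that 5.6.2 used at every switch (profile IDs below are made up):

    # Parsed from config: main profile ID -> temp profile ID (IDs are hypothetical)
    temp_quality_profile_ids = {2: 7, 4: 7, 5: 9}

    # New in 5.7.1: reverse map (temp ID -> main ID), built once for O(1) lookups
    main_quality_profile_ids = {v: k for k, v in temp_quality_profile_ids.items()}

    # Old pattern (linear scan on every lookup):
    # main_id = list(temp_quality_profile_ids.keys())[
    #     list(temp_quality_profile_ids.values()).index(7)]
    # New pattern:
    main_id = main_quality_profile_ids.get(7)   # -> 4
    # Note: when two main profiles share one temp profile, the reversal keeps the last
    # mapping written ({7: 4} above), so items on that shared temp revert to that main ID.
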
@@ -673,6 +695,57 @@ class Arr:
673
695
  )
674
696
  self.logger.hnotice("Starting %s monitor", self._name)
675
697
 
698
+ def _ensure_category_on_all_instances(self) -> None:
699
+ """
700
+ Ensure the Arr category exists on ALL qBittorrent instances.
701
+
702
+ Creates the category with the completed_folder save path on each instance.
703
+ Logs errors but continues if individual instances fail.
704
+ """
705
+ if QBIT_DISABLED:
706
+ return
707
+
708
+ qbit_manager = self.manager.qbit_manager
709
+ all_instances = qbit_manager.get_all_instances()
710
+
711
+ self.logger.debug(
712
+ "Ensuring category '%s' exists on %d qBit instance(s)",
713
+ self.category,
714
+ len(all_instances),
715
+ )
716
+
717
+ for instance_name in all_instances:
718
+ try:
719
+ client = qbit_manager.get_client(instance_name)
720
+ if client is None:
721
+ self.logger.warning(
722
+ "Skipping category creation on instance '%s' (client unavailable)",
723
+ instance_name,
724
+ )
725
+ continue
726
+
727
+ categories = client.torrent_categories.categories
728
+ if self.category not in categories:
729
+ client.torrent_categories.create_category(
730
+ self.category, save_path=str(self.completed_folder)
731
+ )
732
+ self.logger.info(
733
+ "Created category '%s' on instance '%s'", self.category, instance_name
734
+ )
735
+ else:
736
+ self.logger.debug(
737
+ "Category '%s' already exists on instance '%s'",
738
+ self.category,
739
+ instance_name,
740
+ )
741
+ except Exception as e:
742
+ self.logger.error(
743
+ "Failed to ensure category '%s' on instance '%s': %s",
744
+ self.category,
745
+ instance_name,
746
+ str(e).split("\n")[0] if "\n" in str(e) else str(e),
747
+ )
748
+
676
749
  @staticmethod
677
750
  def _humanize_request_tag(tag: str) -> str | None:
678
751
  if not tag:
@@ -785,7 +858,9 @@ class Arr:
785
858
  """Returns True if the State is categorized as Downloading."""
786
859
  return torrent.state_enum in (TorrentStates.DOWNLOADING, TorrentStates.PAUSED_DOWNLOAD)
787
860
 
788
- def in_tags(self, torrent: TorrentDictionary, tag: str) -> bool:
861
+ def in_tags(
862
+ self, torrent: TorrentDictionary, tag: str, instance_name: str = "default"
863
+ ) -> bool:
789
864
  return_value = False
790
865
  if TAGLESS:
791
866
  if tag == "qBitrr-ignored":
@@ -796,15 +871,20 @@ class Arr:
796
871
  .where(
797
872
  (self.torrents.Hash == torrent.hash)
798
873
  & (self.torrents.Category == torrent.category)
874
+ & (self.torrents.QbitInstance == instance_name)
799
875
  )
800
876
  .execute()
801
877
  )
802
878
  if not query:
803
879
  self.torrents.insert(
804
- Hash=torrent.hash, Category=torrent.category
880
+ Hash=torrent.hash,
881
+ Category=torrent.category,
882
+ QbitInstance=instance_name,
805
883
  ).on_conflict_ignore().execute()
806
- condition = (self.torrents.Hash == torrent.hash) & (
807
- self.torrents.Category == torrent.category
884
+ condition = (
885
+ (self.torrents.Hash == torrent.hash)
886
+ & (self.torrents.Category == torrent.category)
887
+ & (self.torrents.QbitInstance == instance_name)
808
888
  )
809
889
  if tag == "qBitrr-allowed_seeding":
810
890
  condition &= self.torrents.AllowedSeeding == True
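The TAGLESS bookkeeping above now scopes every row to a qBittorrent instance. A self-contained sketch of the composite (Hash, Category, QbitInstance) key with a hypothetical peewee model (the real table lives elsewhere in qBitrr; this one is only illustrative):

    from peewee import BooleanField, CharField, Model, SqliteDatabase

    db = SqliteDatabase(":memory:")

    class Torrents(Model):
        Hash = CharField()
        Category = CharField()
        QbitInstance = CharField(default="default")   # column added for multi-instance support
        AllowedSeeding = BooleanField(default=False)

        class Meta:
            database = db
            indexes = ((("Hash", "Category", "QbitInstance"), True),)  # unique per instance

    db.create_tables([Torrents])
    Torrents.insert(Hash="abc", Category="radarr", QbitInstance="qbit-2").on_conflict_ignore().execute()

    # The same hash/category pair on a different instance is a distinct row,
    # so tag state no longer bleeds between instances.
    condition = (
        (Torrents.Hash == "abc")
        & (Torrents.Category == "radarr")
        & (Torrents.QbitInstance == "qbit-2")
    )
    print(Torrents.select().where(condition).count())  # 1
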
@@ -832,7 +912,9 @@ class Arr:
832
912
  self.logger.trace("Tag %s not in %s", tag, torrent.name)
833
913
  return False
834
914
 
835
- def remove_tags(self, torrent: TorrentDictionary, tags: list) -> None:
915
+ def remove_tags(
916
+ self, torrent: TorrentDictionary, tags: list, instance_name: str = "default"
917
+ ) -> None:
836
918
  for tag in tags:
837
919
  self.logger.trace("Removing tag %s from %s", tag, torrent.name)
838
920
  if TAGLESS:
@@ -842,32 +924,39 @@ class Arr:
842
924
  .where(
843
925
  (self.torrents.Hash == torrent.hash)
844
926
  & (self.torrents.Category == torrent.category)
927
+ & (self.torrents.QbitInstance == instance_name)
845
928
  )
846
929
  .execute()
847
930
  )
848
931
  if not query:
849
932
  self.torrents.insert(
850
- Hash=torrent.hash, Category=torrent.category
933
+ Hash=torrent.hash,
934
+ Category=torrent.category,
935
+ QbitInstance=instance_name,
851
936
  ).on_conflict_ignore().execute()
852
937
  if tag == "qBitrr-allowed_seeding":
853
938
  self.torrents.update(AllowedSeeding=False).where(
854
939
  (self.torrents.Hash == torrent.hash)
855
940
  & (self.torrents.Category == torrent.category)
941
+ & (self.torrents.QbitInstance == instance_name)
856
942
  ).execute()
857
943
  elif tag == "qBitrr-imported":
858
944
  self.torrents.update(Imported=False).where(
859
945
  (self.torrents.Hash == torrent.hash)
860
946
  & (self.torrents.Category == torrent.category)
947
+ & (self.torrents.QbitInstance == instance_name)
861
948
  ).execute()
862
949
  elif tag == "qBitrr-allowed_stalled":
863
950
  self.torrents.update(AllowedStalled=False).where(
864
951
  (self.torrents.Hash == torrent.hash)
865
952
  & (self.torrents.Category == torrent.category)
953
+ & (self.torrents.QbitInstance == instance_name)
866
954
  ).execute()
867
955
  elif tag == "qBitrr-free_space_paused":
868
956
  self.torrents.update(FreeSpacePaused=False).where(
869
957
  (self.torrents.Hash == torrent.hash)
870
958
  & (self.torrents.Category == torrent.category)
959
+ & (self.torrents.QbitInstance == instance_name)
871
960
  ).execute()
872
961
  else:
873
962
  with contextlib.suppress(Exception):
@@ -883,7 +972,9 @@ class Arr:
883
972
  ),
884
973
  )
885
974
 
886
- def add_tags(self, torrent: TorrentDictionary, tags: list) -> None:
975
+ def add_tags(
976
+ self, torrent: TorrentDictionary, tags: list, instance_name: str = "default"
977
+ ) -> None:
887
978
  for tag in tags:
888
979
  self.logger.trace("Adding tag %s from %s", tag, torrent.name)
889
980
  if TAGLESS:
@@ -893,32 +984,39 @@ class Arr:
893
984
  .where(
894
985
  (self.torrents.Hash == torrent.hash)
895
986
  & (self.torrents.Category == torrent.category)
987
+ & (self.torrents.QbitInstance == instance_name)
896
988
  )
897
989
  .execute()
898
990
  )
899
991
  if not query:
900
992
  self.torrents.insert(
901
- Hash=torrent.hash, Category=torrent.category
993
+ Hash=torrent.hash,
994
+ Category=torrent.category,
995
+ QbitInstance=instance_name,
902
996
  ).on_conflict_ignore().execute()
903
997
  if tag == "qBitrr-allowed_seeding":
904
998
  self.torrents.update(AllowedSeeding=True).where(
905
999
  (self.torrents.Hash == torrent.hash)
906
1000
  & (self.torrents.Category == torrent.category)
1001
+ & (self.torrents.QbitInstance == instance_name)
907
1002
  ).execute()
908
1003
  elif tag == "qBitrr-imported":
909
1004
  self.torrents.update(Imported=True).where(
910
1005
  (self.torrents.Hash == torrent.hash)
911
1006
  & (self.torrents.Category == torrent.category)
1007
+ & (self.torrents.QbitInstance == instance_name)
912
1008
  ).execute()
913
1009
  elif tag == "qBitrr-allowed_stalled":
914
1010
  self.torrents.update(AllowedStalled=True).where(
915
1011
  (self.torrents.Hash == torrent.hash)
916
1012
  & (self.torrents.Category == torrent.category)
1013
+ & (self.torrents.QbitInstance == instance_name)
917
1014
  ).execute()
918
1015
  elif tag == "qBitrr-free_space_paused":
919
1016
  self.torrents.update(FreeSpacePaused=True).where(
920
1017
  (self.torrents.Hash == torrent.hash)
921
1018
  & (self.torrents.Category == torrent.category)
1019
+ & (self.torrents.QbitInstance == instance_name)
922
1020
  ).execute()
923
1021
  else:
924
1022
  with contextlib.suppress(Exception):
@@ -966,7 +1064,16 @@ class Arr:
966
1064
  if not results:
967
1065
  break
968
1066
  for entry in results:
1067
+ # NOTE: 'type' field is not documented in official Overseerr API spec
1068
+ # but exists in practice. May break if Overseerr changes API.
969
1069
  type__ = entry.get("type")
1070
+ if not type__:
1071
+ self.logger.debug(
1072
+ "Overseerr request missing 'type' field (entry ID: %s). "
1073
+ "This may indicate an API change.",
1074
+ entry.get("id", "unknown"),
1075
+ )
1076
+ continue
970
1077
  if type__ == "movie":
971
1078
  id__ = entry.get("media", {}).get("tmdbId")
972
1079
  elif type__ == "tv":
@@ -976,6 +1083,8 @@ class Arr:
976
1083
  if not id__ or type_ != type__:
977
1084
  continue
978
1085
  media = entry.get("media") or {}
1086
+ # NOTE: 'status4k' field is not documented in official Overseerr API spec
1087
+ # but exists for 4K request tracking. Falls back to 'status' for non-4K.
979
1088
  status_key = "status4k" if entry.get("is4k") else "status"
980
1089
  status_value = _normalize_media_status(media.get(status_key))
981
1090
  if entry.get("is4k"):
@@ -998,7 +1107,7 @@ class Arr:
998
1107
  try:
999
1108
  if type_ == "movie":
1000
1109
  _entry = self.session.get(
1001
- url=f"{self.overseerr_uri}/api/v1/movies/{id__}",
1110
+ url=f"{self.overseerr_uri}/api/v1/movie/{id__}",
1002
1111
  headers={"X-Api-Key": self.overseerr_api_key},
1003
1112
  timeout=5,
1004
1113
  )
@@ -1159,7 +1268,7 @@ class Arr:
1159
1268
  def _process_imports(self) -> None:
1160
1269
  if self.import_torrents:
1161
1270
  self.needs_cleanup = True
1162
- for torrent in self.import_torrents:
1271
+ for torrent, instance_name in self.import_torrents:
1163
1272
  if torrent.hash in self.sent_to_scan:
1164
1273
  continue
1165
1274
  path = validate_and_return_torrent_file(torrent.content_path)
@@ -1245,7 +1354,7 @@ class Arr:
1245
1354
  self.import_mode,
1246
1355
  ex,
1247
1356
  )
1248
- self.add_tags(torrent, ["qBitrr-imported"])
1357
+ self.add_tags(torrent, ["qBitrr-imported"], instance_name)
1249
1358
  self.sent_to_scan.add(path)
1250
1359
  self.import_torrents.clear()
1251
1360
 
@@ -2842,6 +2951,10 @@ class Arr:
2842
2951
  # Only apply temp profiles for truly missing content (no file)
2843
2952
  # Do NOT apply for quality/custom format unmet or upgrade searches
2844
2953
  has_file = episode.get("hasFile", False)
2954
+ profile_switch_timestamp = None
2955
+ original_profile_for_db = None
2956
+ current_profile_for_db = None
2957
+
2845
2958
  self.logger.trace(
2846
2959
  "Temp quality profile check for '%s': searched=%s, has_file=%s, current_profile_id=%s, keep_temp=%s",
2847
2960
  db_entry.get("title", "Unknown"),
@@ -2852,22 +2965,31 @@ class Arr:
2852
2965
  )
2853
2966
  if (
2854
2967
  searched
2855
- and quality_profile_id in self.temp_quality_profile_ids.values()
2968
+ and quality_profile_id in self.main_quality_profile_ids.keys()
2856
2969
  and not self.keep_temp_profile
2857
2970
  ):
2858
- new_profile_id = list(self.temp_quality_profile_ids.keys())[
2859
- list(self.temp_quality_profile_ids.values()).index(
2860
- quality_profile_id
2861
- )
2862
- ]
2863
- data: JsonObject = {"qualityProfileId": new_profile_id}
2864
- self.logger.info(
2865
- "Upgrading quality profile for '%s': %s (ID:%s) → main profile (ID:%s) [Episode searched, reverting to main]",
2866
- db_entry.get("title", "Unknown"),
2867
- quality_profile_id,
2868
- quality_profile_id,
2869
- new_profile_id,
2971
+ new_profile_id = self.main_quality_profile_ids.get(
2972
+ quality_profile_id
2870
2973
  )
2974
+ if new_profile_id is None:
2975
+ self.logger.warning(
2976
+ f"Profile ID {quality_profile_id} not found in current temp→main mappings. "
2977
+ "Config may have changed. Skipping profile upgrade."
2978
+ )
2979
+ else:
2980
+ data: JsonObject = {"qualityProfileId": new_profile_id}
2981
+ self.logger.info(
2982
+ "Upgrading quality profile for '%s': temp profile (ID:%s) → main profile (ID:%s) [Episode searched, reverting to main]",
2983
+ db_entry.get("title", "Unknown"),
2984
+ quality_profile_id,
2985
+ new_profile_id,
2986
+ )
2987
+ # Reverting to main - clear tracking fields
2988
+ from datetime import datetime
2989
+
2990
+ profile_switch_timestamp = datetime.now()
2991
+ original_profile_for_db = None
2992
+ current_profile_for_db = None
2871
2993
  elif (
2872
2994
  not searched
2873
2995
  and not has_file
@@ -2881,6 +3003,12 @@ class Arr:
2881
3003
  quality_profile_id,
2882
3004
  new_profile_id,
2883
3005
  )
3006
+ # Downgrading to temp - track original and switch time
3007
+ from datetime import datetime
3008
+
3009
+ profile_switch_timestamp = datetime.now()
3010
+ original_profile_for_db = quality_profile_id
3011
+ current_profile_for_db = new_profile_id
2884
3012
  else:
2885
3013
  self.logger.trace(
2886
3014
  "No quality profile change for '%s': searched=%s, profile_id=%s (in_temps=%s, in_mains=%s)",
@@ -2891,18 +3019,34 @@ class Arr:
2891
3019
  quality_profile_id in self.temp_quality_profile_ids.keys(),
2892
3020
  )
2893
3021
  if data:
2894
- while True:
3022
+ profile_update_success = False
3023
+ for attempt in range(self.profile_switch_retry_attempts):
2895
3024
  try:
2896
3025
  self.client.upd_episode(episode["id"], data)
3026
+ profile_update_success = True
2897
3027
  break
2898
3028
  except (
2899
3029
  requests.exceptions.ChunkedEncodingError,
2900
3030
  requests.exceptions.ContentDecodingError,
2901
3031
  requests.exceptions.ConnectionError,
2902
3032
  JSONDecodeError,
2903
- ):
3033
+ ) as e:
3034
+ if attempt == self.profile_switch_retry_attempts - 1:
3035
+ self.logger.error(
3036
+ "Failed to update episode profile after %d attempts: %s",
3037
+ self.profile_switch_retry_attempts,
3038
+ e,
3039
+ )
3040
+ break
3041
+ time.sleep(1)
2904
3042
  continue
2905
3043
 
3044
+ # If profile update failed, don't track the change
3045
+ if not profile_update_success:
3046
+ profile_switch_timestamp = None
3047
+ original_profile_for_db = None
3048
+ current_profile_for_db = None
3049
+
2906
3050
  EntryId = episode.get("id")
2907
3051
  SeriesTitle = episode.get("series", {}).get("title")
2908
3052
  SeasonNumber = episode.get("seasonNumber")
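The unbounded `while True` retry around upd_episode is now capped by ProfileSwitchRetryAttempts, and the success flag decides whether the profile switch gets recorded. A generic sketch of that pattern with the update call and attempt count passed in (helper name and signature are illustrative):

    import time
    import requests

    def update_with_retries(update_fn, payload, attempts: int = 3, logger=None) -> bool:
        # Try the Arr API call up to `attempts` times, sleeping 1s between tries.
        # Returning False lets the caller skip recording a profile switch that never happened.
        for attempt in range(attempts):
            try:
                update_fn(payload)
                return True
            except (
                requests.exceptions.ChunkedEncodingError,
                requests.exceptions.ContentDecodingError,
                requests.exceptions.ConnectionError,
            ) as exc:
                if attempt == attempts - 1:
                    if logger:
                        logger.error("Update failed after %d attempts: %s", attempts, exc)
                    return False
                time.sleep(1)
        return False
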
@@ -2960,6 +3104,14 @@ class Arr:
2960
3104
  self.model_file.Reason: reason,
2961
3105
  }
2962
3106
 
3107
+ # Add profile tracking fields if temp profile feature is enabled
3108
+ if self.use_temp_for_missing and profile_switch_timestamp is not None:
3109
+ to_update[self.model_file.LastProfileSwitchTime] = (
3110
+ profile_switch_timestamp
3111
+ )
3112
+ to_update[self.model_file.OriginalProfileId] = original_profile_for_db
3113
+ to_update[self.model_file.CurrentProfileId] = current_profile_for_db
3114
+
2963
3115
  self.logger.debug(
2964
3116
  "Updating database entry | %s | S%02dE%03d [Searched:%s][Upgrade:%s][QualityMet:%s][CustomFormatMet:%s]",
2965
3117
  SeriesTitle.ljust(60, "."),
@@ -3076,46 +3228,42 @@ class Arr:
3076
3228
  searched = totalEpisodeCount == episodeFileCount
3077
3229
  else:
3078
3230
  searched = (episodeCount + monitoredEpisodeCount) == episodeFileCount
3231
+ # Sonarr series-level temp profile logic
3232
+ # NOTE: Sonarr only supports quality profiles at the series level (not episode level).
3233
+ # Individual episodes inherit the series profile. This is intentional and correct.
3234
+ # If ANY episodes are missing, the entire series uses temp profile to maximize
3235
+ # the chance of finding missing content (priority #1).
3079
3236
  if self.use_temp_for_missing:
3080
3237
  try:
3081
3238
  quality_profile_id = db_entry.get("qualityProfileId")
3082
3239
  if (
3083
3240
  searched
3084
- and quality_profile_id
3085
- in self.temp_quality_profile_ids.values()
3241
+ and quality_profile_id in self.main_quality_profile_ids.keys()
3086
3242
  and not self.keep_temp_profile
3087
3243
  ):
3088
- db_entry["qualityProfileId"] = list(
3089
- self.temp_quality_profile_ids.keys()
3090
- )[
3091
- list(self.temp_quality_profile_ids.values()).index(
3092
- quality_profile_id
3093
- )
3094
- ]
3244
+ new_main_id = self.main_quality_profile_ids[quality_profile_id]
3245
+ db_entry["qualityProfileId"] = new_main_id
3095
3246
  self.logger.debug(
3096
3247
  "Updating quality profile for %s to %s",
3097
3248
  db_entry["title"],
3098
- db_entry["qualityProfileId"],
3249
+ new_main_id,
3099
3250
  )
3100
3251
  elif (
3101
3252
  not searched
3102
3253
  and quality_profile_id in self.temp_quality_profile_ids.keys()
3103
3254
  ):
3104
- db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
3105
- quality_profile_id
3106
- ]
3255
+ new_temp_id = self.temp_quality_profile_ids[quality_profile_id]
3256
+ db_entry["qualityProfileId"] = new_temp_id
3107
3257
  self.logger.debug(
3108
3258
  "Updating quality profile for %s to %s",
3109
3259
  db_entry["title"],
3110
- self.temp_quality_profile_ids[
3111
- db_entry["qualityProfileId"]
3112
- ],
3260
+ new_temp_id,
3113
3261
  )
3114
3262
  except KeyError:
3115
3263
  self.logger.warning(
3116
3264
  "Check quality profile settings for %s", db_entry["title"]
3117
3265
  )
3118
- while True:
3266
+ for attempt in range(self.profile_switch_retry_attempts):
3119
3267
  try:
3120
3268
  self.client.upd_series(db_entry)
3121
3269
  break
@@ -3124,7 +3272,15 @@ class Arr:
3124
3272
  requests.exceptions.ContentDecodingError,
3125
3273
  requests.exceptions.ConnectionError,
3126
3274
  JSONDecodeError,
3127
- ):
3275
+ ) as e:
3276
+ if attempt == self.profile_switch_retry_attempts - 1:
3277
+ self.logger.error(
3278
+ "Failed to update series profile after %d attempts: %s",
3279
+ self.profile_switch_retry_attempts,
3280
+ e,
3281
+ )
3282
+ break
3283
+ time.sleep(1)
3128
3284
  continue
3129
3285
 
3130
3286
  Title = seriesMetadata.get("title")
@@ -3248,6 +3404,10 @@ class Arr:
3248
3404
  self.model_queue.EntryId == db_entry["id"]
3249
3405
  ).execute()
3250
3406
 
3407
+ profile_switch_timestamp = None
3408
+ original_profile_for_db = None
3409
+ current_profile_for_db = None
3410
+
3251
3411
  if self.use_temp_for_missing:
3252
3412
  quality_profile_id = db_entry.get("qualityProfileId")
3253
3413
  # Only apply temp profiles for truly missing content (no file)
@@ -3255,46 +3415,69 @@ class Arr:
3255
3415
  has_file = db_entry.get("hasFile", False)
3256
3416
  if (
3257
3417
  searched
3258
- and quality_profile_id in self.temp_quality_profile_ids.values()
3418
+ and quality_profile_id in self.main_quality_profile_ids.keys()
3259
3419
  and not self.keep_temp_profile
3260
3420
  ):
3261
- db_entry["qualityProfileId"] = list(
3262
- self.temp_quality_profile_ids.keys()
3263
- )[
3264
- list(self.temp_quality_profile_ids.values()).index(
3265
- quality_profile_id
3266
- )
3267
- ]
3421
+ new_main_id = self.main_quality_profile_ids[quality_profile_id]
3422
+ db_entry["qualityProfileId"] = new_main_id
3268
3423
  self.logger.debug(
3269
3424
  "Updating quality profile for %s to %s",
3270
3425
  db_entry["title"],
3271
- db_entry["qualityProfileId"],
3426
+ new_main_id,
3272
3427
  )
3428
+ # Reverting to main - clear tracking fields
3429
+ from datetime import datetime
3430
+
3431
+ profile_switch_timestamp = datetime.now()
3432
+ original_profile_for_db = None
3433
+ current_profile_for_db = None
3273
3434
  elif (
3274
3435
  not searched
3275
3436
  and not has_file
3276
3437
  and quality_profile_id in self.temp_quality_profile_ids.keys()
3277
3438
  ):
3278
- db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
3279
- quality_profile_id
3280
- ]
3439
+ new_temp_id = self.temp_quality_profile_ids[quality_profile_id]
3440
+ db_entry["qualityProfileId"] = new_temp_id
3281
3441
  self.logger.debug(
3282
3442
  "Updating quality profile for %s to %s",
3283
3443
  db_entry["title"],
3284
- db_entry["qualityProfileId"],
3444
+ new_temp_id,
3285
3445
  )
3286
- while True:
3446
+ # Downgrading to temp - track original and switch time
3447
+ from datetime import datetime
3448
+
3449
+ profile_switch_timestamp = datetime.now()
3450
+ original_profile_for_db = quality_profile_id
3451
+ current_profile_for_db = new_temp_id
3452
+
3453
+ profile_update_success = False
3454
+ for attempt in range(self.profile_switch_retry_attempts):
3287
3455
  try:
3288
3456
  self.client.upd_movie(db_entry)
3457
+ profile_update_success = True
3289
3458
  break
3290
3459
  except (
3291
3460
  requests.exceptions.ChunkedEncodingError,
3292
3461
  requests.exceptions.ContentDecodingError,
3293
3462
  requests.exceptions.ConnectionError,
3294
3463
  JSONDecodeError,
3295
- ):
3464
+ ) as e:
3465
+ if attempt == self.profile_switch_retry_attempts - 1:
3466
+ self.logger.error(
3467
+ "Failed to update movie profile after %d attempts: %s",
3468
+ self.profile_switch_retry_attempts,
3469
+ e,
3470
+ )
3471
+ break
3472
+ time.sleep(1)
3296
3473
  continue
3297
3474
 
3475
+ # If profile update failed, don't track the change
3476
+ if not profile_update_success:
3477
+ profile_switch_timestamp = None
3478
+ original_profile_for_db = None
3479
+ current_profile_for_db = None
3480
+
3298
3481
  title = db_entry["title"]
3299
3482
  monitored = db_entry["monitored"]
3300
3483
  tmdbId = db_entry["tmdbId"]
@@ -3347,6 +3530,12 @@ class Arr:
3347
3530
  self.model_file.QualityProfileName: qualityProfileName,
3348
3531
  }
3349
3532
 
3533
+ # Add profile tracking fields if temp profile feature is enabled
3534
+ if self.use_temp_for_missing and profile_switch_timestamp is not None:
3535
+ to_update[self.model_file.LastProfileSwitchTime] = profile_switch_timestamp
3536
+ to_update[self.model_file.OriginalProfileId] = original_profile_for_db
3537
+ to_update[self.model_file.CurrentProfileId] = current_profile_for_db
3538
+
3350
3539
  if request:
3351
3540
  to_update[self.model_file.IsRequest] = request
3352
3541
 
@@ -3794,18 +3983,17 @@ class Arr:
3794
3983
 
3795
3984
  # Temp profile management for Lidarr artists
3796
3985
  # Quality profiles in Lidarr are set at artist level, not album level
3986
+ # NOTE: Lidarr uses sizeOnDisk instead of hasFile because the Lidarr API
3987
+ # doesn't provide a hasFile boolean at artist level. sizeOnDisk > 0 is
3988
+ # equivalent to hasFile=True for Lidarr.
3797
3989
  if self.use_temp_for_missing and quality_profile_id:
3798
3990
  if (
3799
3991
  searched
3800
- and quality_profile_id in self.temp_quality_profile_ids.values()
3992
+ and quality_profile_id in self.main_quality_profile_ids.keys()
3801
3993
  and not self.keep_temp_profile
3802
3994
  ):
3803
3995
  # Artist has files, switch from temp back to main profile
3804
- main_profile_id = list(self.temp_quality_profile_ids.keys())[
3805
- list(self.temp_quality_profile_ids.values()).index(
3806
- quality_profile_id
3807
- )
3808
- ]
3996
+ main_profile_id = self.main_quality_profile_ids[quality_profile_id]
3809
3997
  artistMetadata["qualityProfileId"] = main_profile_id
3810
3998
  self.client.upd_artist(artistMetadata)
3811
3999
  quality_profile_id = main_profile_id
@@ -4448,27 +4636,85 @@ class Arr:
4448
4636
 
4449
4637
  return payload
4450
4638
 
4639
+ def _get_torrents_from_all_instances(
4640
+ self,
4641
+ ) -> list[tuple[str, qbittorrentapi.TorrentDictionary]]:
4642
+ """
4643
+ Get torrents from ALL qBittorrent instances for this Arr's category.
4644
+
4645
+ Returns:
4646
+ list[tuple[str, TorrentDictionary]]: List of (instance_name, torrent) tuples
4647
+ """
4648
+ all_torrents = []
4649
+ qbit_manager = self.manager.qbit_manager
4650
+
4651
+ for instance_name in qbit_manager.get_all_instances():
4652
+ if not qbit_manager.is_instance_alive(instance_name):
4653
+ self.logger.debug(
4654
+ "Skipping unhealthy instance '%s' during torrent scan", instance_name
4655
+ )
4656
+ continue
4657
+
4658
+ client = qbit_manager.get_client(instance_name)
4659
+ if client is None:
4660
+ continue
4661
+
4662
+ try:
4663
+ torrents = client.torrents.info(
4664
+ status_filter="all",
4665
+ category=self.category,
4666
+ sort="added_on",
4667
+ reverse=False,
4668
+ )
4669
+ # Tag each torrent with its instance name
4670
+ for torrent in torrents:
4671
+ if hasattr(torrent, "category"):
4672
+ all_torrents.append((instance_name, torrent))
4673
+
4674
+ self.logger.trace(
4675
+ "Retrieved %d torrents from instance '%s' for category '%s'",
4676
+ len(torrents),
4677
+ instance_name,
4678
+ self.category,
4679
+ )
4680
+ except (qbittorrentapi.exceptions.APIError, JSONDecodeError) as e:
4681
+ self.logger.warning(
4682
+ "Failed to get torrents from instance '%s': %s", instance_name, e
4683
+ )
4684
+ continue
4685
+
4686
+ self.logger.debug(
4687
+ "Total torrents across %d instances: %d",
4688
+ len(qbit_manager.get_all_instances()),
4689
+ len(all_torrents),
4690
+ )
4691
+ return all_torrents
4692
+
4451
4693
  def process_torrents(self):
4452
4694
  try:
4453
4695
  try:
4454
4696
  while True:
4455
4697
  try:
4456
- torrents = self.manager.qbit_manager.client.torrents.info(
4457
- status_filter="all",
4458
- category=self.category,
4459
- sort="added_on",
4460
- reverse=False,
4461
- )
4698
+ # Multi-instance: Scan all qBit instances for category-matching torrents
4699
+ torrents_with_instances = self._get_torrents_from_all_instances()
4462
4700
  break
4463
4701
  except (qbittorrentapi.exceptions.APIError, JSONDecodeError) as e:
4464
4702
  if "JSONDecodeError" in str(e):
4465
4703
  continue
4466
4704
  else:
4467
4705
  raise qbittorrentapi.exceptions.APIError
4468
- torrents = [t for t in torrents if hasattr(t, "category")]
4469
- self.category_torrent_count = len(torrents)
4470
- if not len(torrents):
4706
+
4707
+ # Filter torrents that have category attribute
4708
+ torrents_with_instances = [
4709
+ (instance, t)
4710
+ for instance, t in torrents_with_instances
4711
+ if hasattr(t, "category")
4712
+ ]
4713
+ self.category_torrent_count = len(torrents_with_instances)
4714
+ if not len(torrents_with_instances):
4471
4715
  raise DelayLoopException(length=LOOP_SLEEP_TIMER, type="no_downloads")
4716
+
4717
+ # Internet check: Use default instance for backward compatibility
4472
4718
  if not has_internet(self.manager.qbit_manager.client):
4473
4719
  self.manager.qbit_manager.should_delay_torrent_scan = True
4474
4720
  raise DelayLoopException(length=NO_INTERNET_SLEEP_TIMER, type="internet")
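process_torrents now fans out over every configured qBittorrent instance and carries the instance name alongside each torrent. A condensed sketch of that fan-out/consume shape (the qbit_manager helpers match the hunk above; the processing callback is a placeholder):

    def scan_all_instances(qbit_manager, category: str, process_one) -> int:
        # Collect (instance_name, torrent) pairs from every healthy instance,
        # then hand each pair to the per-torrent processor.
        pairs = []
        for name in qbit_manager.get_all_instances():
            if not qbit_manager.is_instance_alive(name):
                continue
            client = qbit_manager.get_client(name)
            if client is None:
                continue
            for torrent in client.torrents.info(status_filter="all", category=category):
                pairs.append((name, torrent))
        for name, torrent in pairs:
            process_one(torrent, instance_name=name)
        return len(pairs)
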
@@ -4507,9 +4753,10 @@ class Arr:
4507
4753
 
4508
4754
  self.api_calls()
4509
4755
  self.refresh_download_queue()
4510
- for torrent in torrents:
4756
+ # Multi-instance: Process torrents from all instances
4757
+ for instance_name, torrent in torrents_with_instances:
4511
4758
  with contextlib.suppress(qbittorrentapi.NotFound404Error):
4512
- self._process_single_torrent(torrent)
4759
+ self._process_single_torrent(torrent, instance_name=instance_name)
4513
4760
  self.process()
4514
4761
  except NoConnectionrException as e:
4515
4762
  self.logger.error(e.message)
@@ -4527,42 +4774,70 @@ class Arr:
4527
4774
  current_time = time.time()
4528
4775
 
4529
4776
  # Track consecutive database errors for exponential backoff
4777
+ # Initialize tracking on first error ever
4778
+ if not hasattr(self, "_db_first_error_time"):
4779
+ self._db_first_error_time = current_time
4780
+
4781
+ # Reset if >5min since last error (new error sequence)
4530
4782
  if (
4531
4783
  current_time - self._db_last_error_time > 300
4532
4784
  ): # Reset if >5min since last error
4533
4785
  self._db_error_count = 0
4786
+ self._db_first_error_time = current_time
4787
+
4534
4788
  self._db_error_count += 1
4535
4789
  self._db_last_error_time = current_time
4536
4790
 
4791
+ # Check if errors have persisted for more than 5 minutes
4792
+ time_since_first_error = current_time - self._db_first_error_time
4793
+ if time_since_first_error > 300: # 5 minutes
4794
+ self.logger.critical(
4795
+ "Database errors have persisted for %.1f minutes. "
4796
+ "Signaling coordinated restart of ALL processes for database recovery...",
4797
+ time_since_first_error / 60,
4798
+ )
4799
+ # Signal all processes to restart (shared database affects everyone)
4800
+ self.manager.qbit_manager.database_restart_event.set()
4801
+ # Exit this process - main will restart all
4802
+ sys.exit(1)
4803
+
4537
4804
  # Calculate exponential backoff: 2min, 5min, 10min, 20min, 30min (max)
4538
4805
  delay_seconds = min(120 * (2 ** (self._db_error_count - 1)), 1800)
4539
4806
 
4540
4807
  # Log detailed error information based on error type
4808
+ # Use escalating severity: WARNING (1-2 errors), ERROR (3-4), CRITICAL (5+)
4809
+ if self._db_error_count <= 2:
4810
+ log_func = self.logger.warning
4811
+ elif self._db_error_count <= 4:
4812
+ log_func = self.logger.error
4813
+ else:
4814
+ log_func = self.logger.critical
4815
+
4541
4816
  if "disk i/o error" in error_msg:
4542
- self.logger.critical(
4543
- "Persistent database I/O error detected (consecutive error #%d). "
4544
- "This indicates disk issues, filesystem corruption, or resource exhaustion. "
4817
+ log_func(
4818
+ "Database I/O error detected (consecutive error #%d). "
4819
+ "This may indicate disk issues, filesystem corruption, or resource exhaustion. "
4545
4820
  "Attempting automatic recovery and retrying in %d seconds...",
4546
4821
  self._db_error_count,
4547
4822
  delay_seconds,
4548
4823
  )
4549
4824
  elif "database is locked" in error_msg:
4550
- self.logger.error(
4825
+ log_func(
4551
4826
  "Database locked error (consecutive error #%d). "
4552
4827
  "Retrying in %d seconds...",
4553
4828
  self._db_error_count,
4554
4829
  delay_seconds,
4555
4830
  )
4556
4831
  elif "disk image is malformed" in error_msg:
4557
- self.logger.critical(
4832
+ log_func(
4558
4833
  "Database corruption detected (consecutive error #%d). "
4559
4834
  "Attempting automatic recovery and retrying in %d seconds...",
4560
4835
  self._db_error_count,
4561
4836
  delay_seconds,
4562
4837
  )
4563
4838
  else:
4564
- self.logger.error(
4565
- "Database error (consecutive error #%d): %s. " "Retrying in %d seconds...",
4839
+ log_func(
4840
+ "Database error (consecutive error #%d): %s. Retrying in %d seconds...",
4566
4841
  self._db_error_count,
4567
4842
  error_msg,
4568
4843
  delay_seconds,
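The backoff math above grows the retry delay per consecutive database error and escalates log severity as errors pile up; a worked sketch of both, assuming the same constants as the hunk (120 s base, 1800 s cap):

    def backoff_delay(error_count: int) -> int:
        # 1st error -> 120s, then doubling, capped at 1800s (30 min)
        return min(120 * (2 ** (error_count - 1)), 1800)

    def severity(error_count: int) -> str:
        # Escalate: WARNING for errors 1-2, ERROR for 3-4, CRITICAL from the 5th on
        if error_count <= 2:
            return "WARNING"
        if error_count <= 4:
            return "ERROR"
        return "CRITICAL"

    print([backoff_delay(n) for n in range(1, 7)])   # [120, 240, 480, 960, 1800, 1800]
    print([severity(n) for n in range(1, 7)])        # ['WARNING', 'WARNING', 'ERROR', 'ERROR', 'CRITICAL', 'CRITICAL']
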
@@ -4977,7 +5252,10 @@ class Arr:
4977
5252
  self.recheck.add(torrent.hash)
4978
5253
 
4979
5254
  def _process_single_torrent_fully_completed_torrent(
4980
- self, torrent: qbittorrentapi.TorrentDictionary, leave_alone: bool
5255
+ self,
5256
+ torrent: qbittorrentapi.TorrentDictionary,
5257
+ leave_alone: bool,
5258
+ instance_name: str = "default",
4981
5259
  ):
4982
5260
  if leave_alone or torrent.state_enum == TorrentStates.FORCED_UPLOAD:
4983
5261
  self.logger.trace(
@@ -4995,7 +5273,7 @@ class Arr:
4995
5273
  torrent.name,
4996
5274
  torrent.hash,
4997
5275
  )
4998
- elif not self.in_tags(torrent, "qBitrr-imported"):
5276
+ elif not self.in_tags(torrent, "qBitrr-imported", instance_name):
4999
5277
  self.logger.info(
5000
5278
  "Importing Completed torrent: "
5001
5279
  "[Progress: %s%%][Added On: %s]"
@@ -5020,7 +5298,7 @@ class Arr:
5020
5298
  else:
5021
5299
  torrent_folder = content_path
5022
5300
  self.files_to_cleanup.add((torrent.hash, torrent_folder))
5023
- self.import_torrents.append(torrent)
5301
+ self.import_torrents.append((torrent, instance_name))
5024
5302
 
5025
5303
  def _process_single_torrent_missing_files(self, torrent: qbittorrentapi.TorrentDictionary):
5026
5304
  # Sometimes Sonarr/Radarr does not automatically remove the
@@ -5388,7 +5666,7 @@ class Arr:
5388
5666
  return data_settings, data_torrent
5389
5667
 
5390
5668
  def _should_leave_alone(
5391
- self, torrent: qbittorrentapi.TorrentDictionary
5669
+ self, torrent: qbittorrentapi.TorrentDictionary, instance_name: str = "default"
5392
5670
  ) -> tuple[bool, int, bool]:
5393
5671
  return_value = True
5394
5672
  remove_torrent = False
@@ -5414,18 +5692,18 @@ class Arr:
5414
5692
  return_value = not self.torrent_limit_check(torrent, seeding_time_limit, ratio_limit)
5415
5693
  if data_settings.get("super_seeding", False) or data_torrent.get("super_seeding", False):
5416
5694
  return_value = True
5417
- if self.in_tags(torrent, "qBitrr-free_space_paused"):
5695
+ if self.in_tags(torrent, "qBitrr-free_space_paused", instance_name):
5418
5696
  return_value = True
5419
5697
  if (
5420
5698
  return_value
5421
- and not self.in_tags(torrent, "qBitrr-allowed_seeding")
5422
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
5699
+ and not self.in_tags(torrent, "qBitrr-allowed_seeding", instance_name)
5700
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5423
5701
  ):
5424
- self.add_tags(torrent, ["qBitrr-allowed_seeding"])
5702
+ self.add_tags(torrent, ["qBitrr-allowed_seeding"], instance_name)
5425
5703
  elif (
5426
- not return_value and self.in_tags(torrent, "qBitrr-allowed_seeding")
5427
- ) or self.in_tags(torrent, "qBitrr-free_space_paused"):
5428
- self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
5704
+ not return_value and self.in_tags(torrent, "qBitrr-allowed_seeding", instance_name)
5705
+ ) or self.in_tags(torrent, "qBitrr-free_space_paused", instance_name):
5706
+ self.remove_tags(torrent, ["qBitrr-allowed_seeding"], instance_name)
5429
5707
 
5430
5708
  self.logger.trace("Config Settings returned [%s]: %r", torrent.name, data_settings)
5431
5709
  return (
@@ -5591,9 +5869,14 @@ class Arr:
5591
5869
  current_tags = set(torrent.tags.split(", "))
5592
5870
  add_tags = unique_tags.difference(current_tags)
5593
5871
  if add_tags:
5594
- self.add_tags(torrent, add_tags)
5872
+ self.add_tags(torrent, add_tags, instance_name)
5595
5873
 
5596
- def _stalled_check(self, torrent: qbittorrentapi.TorrentDictionary, time_now: float) -> bool:
5874
+ def _stalled_check(
5875
+ self,
5876
+ torrent: qbittorrentapi.TorrentDictionary,
5877
+ time_now: float,
5878
+ instance_name: str = "default",
5879
+ ) -> bool:
5597
5880
  stalled_ignore = True
5598
5881
  if not self.allowed_stalled:
5599
5882
  self.logger.trace("Stalled check: Stalled delay disabled")
@@ -5630,15 +5913,15 @@ class Arr:
5630
5913
  (
5631
5914
  torrent.state_enum
5632
5915
  in (TorrentStates.METADATA_DOWNLOAD, TorrentStates.STALLED_DOWNLOAD)
5633
- and not self.in_tags(torrent, "qBitrr-ignored")
5634
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
5916
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
5917
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5635
5918
  )
5636
5919
  or (
5637
5920
  torrent.availability < 1
5638
5921
  and torrent.hash in self.cleaned_torrents
5639
5922
  and torrent.state_enum in (TorrentStates.DOWNLOADING)
5640
- and not self.in_tags(torrent, "qBitrr-ignored")
5641
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
5923
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
5924
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5642
5925
  )
5643
5926
  ) and self.allowed_stalled:
5644
5927
  if (
@@ -5648,8 +5931,8 @@ class Arr:
5648
5931
  ):
5649
5932
  stalled_ignore = False
5650
5933
  self.logger.trace("Process stalled, delay expired: %s", torrent.name)
5651
- elif not self.in_tags(torrent, "qBitrr-allowed_stalled"):
5652
- self.add_tags(torrent, ["qBitrr-allowed_stalled"])
5934
+ elif not self.in_tags(torrent, "qBitrr-allowed_stalled", instance_name):
5935
+ self.add_tags(torrent, ["qBitrr-allowed_stalled"], instance_name)
5653
5936
  if self.re_search_stalled:
5654
5937
  self.logger.trace(
5655
5938
  "Stalled, adding tag, blocklosting and re-searching: %s", torrent.name
@@ -5666,7 +5949,7 @@ class Arr:
5666
5949
  )
5667
5950
  else:
5668
5951
  self.logger.trace("Stalled, adding tag: %s", torrent.name)
5669
- elif self.in_tags(torrent, "qBitrr-allowed_stalled"):
5952
+ elif self.in_tags(torrent, "qBitrr-allowed_stalled", instance_name):
5670
5953
  self.logger.trace(
5671
5954
  "Stalled: %s [Current:%s][Last Activity:%s][Limit:%s]",
5672
5955
  torrent.name,
@@ -5677,8 +5960,8 @@ class Arr:
5677
5960
  ),
5678
5961
  )
5679
5962
 
5680
- elif self.in_tags(torrent, "qBitrr-allowed_stalled"):
5681
- self.remove_tags(torrent, ["qBitrr-allowed_stalled"])
5963
+ elif self.in_tags(torrent, "qBitrr-allowed_stalled", instance_name):
5964
+ self.remove_tags(torrent, ["qBitrr-allowed_stalled"], instance_name)
5682
5965
  stalled_ignore = False
5683
5966
  self.logger.trace("Not stalled, removing tag: %s", torrent.name)
5684
5967
  else:
@@ -5686,13 +5969,17 @@ class Arr:
5686
5969
  self.logger.trace("Not stalled: %s", torrent.name)
5687
5970
  return stalled_ignore
5688
5971
 
5689
- def _process_single_torrent(self, torrent: qbittorrentapi.TorrentDictionary):
5972
+ def _process_single_torrent(
5973
+ self, torrent: qbittorrentapi.TorrentDictionary, instance_name: str = "default"
5974
+ ):
5690
5975
  if torrent.category != RECHECK_CATEGORY:
5691
5976
  self.manager.qbit_manager.cache[torrent.hash] = torrent.category
5692
5977
  self._process_single_torrent_trackers(torrent)
5693
5978
  self.manager.qbit_manager.name_cache[torrent.hash] = torrent.name
5694
5979
  time_now = time.time()
5695
- leave_alone, _tracker_max_eta, remove_torrent = self._should_leave_alone(torrent)
5980
+ leave_alone, _tracker_max_eta, remove_torrent = self._should_leave_alone(
5981
+ torrent, instance_name
5982
+ )
5696
5983
  self.logger.trace(
5697
5984
  "Torrent [%s]: Leave Alone (allow seeding): %s, Max ETA: %s, State[%s]",
5698
5985
  torrent.name,
@@ -5707,20 +5994,22 @@ class Arr:
5707
5994
  TorrentStates.STALLED_DOWNLOAD,
5708
5995
  TorrentStates.DOWNLOADING,
5709
5996
  ):
5710
- stalled_ignore = self._stalled_check(torrent, time_now)
5997
+ stalled_ignore = self._stalled_check(torrent, time_now, instance_name)
5711
5998
  else:
5712
5999
  stalled_ignore = False
5713
6000
 
5714
- if self.in_tags(torrent, "qBitrr-ignored"):
5715
- self.remove_tags(torrent, ["qBitrr-allowed_seeding", "qBitrr-free_space_paused"])
6001
+ if self.in_tags(torrent, "qBitrr-ignored", instance_name):
6002
+ self.remove_tags(
6003
+ torrent, ["qBitrr-allowed_seeding", "qBitrr-free_space_paused"], instance_name
6004
+ )
5716
6005
 
5717
6006
  if (
5718
6007
  self.custom_format_unmet_search
5719
6008
  and self.custom_format_unmet_check(torrent)
5720
- and not self.in_tags(torrent, "qBitrr-ignored")
5721
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
6009
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
6010
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5722
6011
  ):
5723
- self._process_single_torrent_delete_cfunmet(torrent)
6012
+ self._process_single_torrent_delete_cfunmet(torrent, instance_name)
5724
6013
  elif remove_torrent and not leave_alone and torrent.amount_left == 0:
5725
6014
  self._process_single_torrent_delete_ratio_seed(torrent)
5726
6015
  elif torrent.category == FAILED_CATEGORY:
@@ -5733,8 +6022,8 @@ class Arr:
5733
6022
  self._process_single_torrent_ignored(torrent)
5734
6023
  elif (
5735
6024
  torrent.state_enum in (TorrentStates.METADATA_DOWNLOAD, TorrentStates.STALLED_DOWNLOAD)
5736
- and not self.in_tags(torrent, "qBitrr-ignored")
5737
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
6025
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
6026
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5738
6027
  and not stalled_ignore
5739
6028
  ):
5740
6029
  self._process_single_torrent_stalled_torrent(torrent, "Stalled State")
@@ -5755,15 +6044,15 @@ class Arr:
5755
6044
  elif (
5756
6045
  torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD
5757
6046
  and torrent.amount_left != 0
5758
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
5759
- and not self.in_tags(torrent, "qBitrr-ignored")
6047
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
6048
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
5760
6049
  ):
5761
6050
  self._process_single_torrent_paused(torrent)
5762
6051
  elif (
5763
6052
  torrent.progress <= self.maximum_deletable_percentage
5764
6053
  and not self.is_complete_state(torrent)
5765
- and not self.in_tags(torrent, "qBitrr-ignored")
5766
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
6054
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
6055
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5767
6056
  and not stalled_ignore
5768
6057
  ) and torrent.hash in self.cleaned_torrents:
5769
6058
  self._process_single_torrent_percentage_threshold(torrent, maximum_eta)
@@ -5810,8 +6099,8 @@ class Arr:
5810
6099
  and time_now > torrent.added_on + self.ignore_torrents_younger_than
5811
6100
  and 0 < maximum_eta < torrent.eta
5812
6101
  and not self.do_not_remove_slow
5813
- and not self.in_tags(torrent, "qBitrr-ignored")
5814
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
6102
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
6103
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5815
6104
  and not stalled_ignore
5816
6105
  ):
5817
6106
  self._process_single_torrent_delete_slow(torrent)
@@ -5826,8 +6115,8 @@ class Arr:
5826
6115
  )
5827
6116
  and torrent.hash in self.cleaned_torrents
5828
6117
  and self.is_downloading_state(torrent)
5829
- and not self.in_tags(torrent, "qBitrr-ignored")
5830
- and not self.in_tags(torrent, "qBitrr-free_space_paused")
6118
+ and not self.in_tags(torrent, "qBitrr-ignored", instance_name)
6119
+ and not self.in_tags(torrent, "qBitrr-free_space_paused", instance_name)
5831
6120
  and not stalled_ignore
5832
6121
  ):
5833
6122
  self._process_single_torrent_stalled_torrent(torrent, "Unavailable")
@@ -5919,27 +6208,30 @@ class Arr:
5919
6208
  def torrent_limit_check(
5920
6209
  self, torrent: qbittorrentapi.TorrentDictionary, seeding_time_limit, ratio_limit
5921
6210
  ) -> bool:
5922
- if (
6211
+ # -1 = Never remove (regardless of ratio/time limits)
6212
+ if self.seeding_mode_global_remove_torrent == -1:
6213
+ return False
6214
+ # 4 = AND (remove when BOTH ratio AND time limits met)
6215
+ elif (
5923
6216
  self.seeding_mode_global_remove_torrent == 4
5924
6217
  and torrent.ratio >= ratio_limit
5925
6218
  and torrent.seeding_time >= seeding_time_limit
5926
6219
  ):
5927
6220
  return True
5928
- if self.seeding_mode_global_remove_torrent == 3 and (
6221
+ # 3 = OR (remove when EITHER ratio OR time limit met)
6222
+ elif self.seeding_mode_global_remove_torrent == 3 and (
5929
6223
  torrent.ratio >= ratio_limit or torrent.seeding_time >= seeding_time_limit
5930
6224
  ):
5931
6225
  return True
6226
+ # 2 = Time only (remove when seeding time limit met)
5932
6227
  elif (
5933
6228
  self.seeding_mode_global_remove_torrent == 2
5934
6229
  and torrent.seeding_time >= seeding_time_limit
5935
6230
  ):
5936
6231
  return True
6232
+ # 1 = Ratio only (remove when upload ratio limit met)
5937
6233
  elif self.seeding_mode_global_remove_torrent == 1 and torrent.ratio >= ratio_limit:
5938
6234
  return True
5939
- elif self.seeding_mode_global_remove_torrent == -1 and (
5940
- torrent.ratio >= ratio_limit and torrent.seeding_time >= seeding_time_limit
5941
- ):
5942
- return True
5943
6235
  else:
5944
6236
  return False
5945
6237
 
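A compact restatement of the SeedingMode removal values handled above; note that -1 changes meaning in this release, from the 5.6.2 ratio-AND-time check to "never remove". The mapping comes from the hunk, the function form is only illustrative:

    def should_remove(mode: int, ratio: float, ratio_limit: float,
                      seed_time: float, time_limit: float) -> bool:
        # -1 never remove, 1 ratio only, 2 time only, 3 either limit, 4 both limits
        if mode == -1:
            return False
        if mode == 4:
            return ratio >= ratio_limit and seed_time >= time_limit
        if mode == 3:
            return ratio >= ratio_limit or seed_time >= time_limit
        if mode == 2:
            return seed_time >= time_limit
        if mode == 1:
            return ratio >= ratio_limit
        return False

    print(should_remove(3, ratio=2.0, ratio_limit=1.5, seed_time=10, time_limit=3600))  # True
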
@@ -6064,6 +6356,15 @@ class Arr:
6064
6356
  self.files_to_explicitly_delete = iter(_path_filter.copy())
6065
6357
 
6066
6358
  def parse_quality_profiles(self) -> dict[int, int]:
6359
+ """
6360
+ Parse quality profile name mappings into ID mappings.
6361
+
6362
+ Converts the configured profile name mappings (e.g., {"HD-1080p": "SD"})
6363
+ into ID mappings (e.g., {2: 1}) for faster lookups during profile switching.
6364
+
6365
+ Returns:
6366
+ dict[int, int]: Mapping of main_profile_id → temp_profile_id
6367
+ """
6067
6368
  temp_quality_profile_ids: dict[int, int] = {}
6068
6369
 
6069
6370
  self.logger.debug(
@@ -6148,6 +6449,171 @@ class Arr:
6148
6449
 
6149
6450
  return temp_quality_profile_ids
6150
6451
 
6452
+ def _reset_all_temp_profiles(self):
6453
+ """Reset all items using temp profiles back to their original main profiles on startup."""
6454
+ reset_count = 0
6455
+
6456
+ try:
6457
+ # Get all items from Arr instance
6458
+ if self._name.lower().startswith("radarr"):
6459
+ items = self.client.get_movie()
6460
+ item_type = "movie"
6461
+ elif self._name.lower().startswith("sonarr") or self._name.lower().startswith(
6462
+ "animarr"
6463
+ ):
6464
+ items = self.client.get_series()
6465
+ item_type = "series"
6466
+ elif self._name.lower().startswith("lidarr"):
6467
+ items = self.client.get_artist()
6468
+ item_type = "artist"
6469
+ else:
6470
+ self.logger.warning(f"Unknown Arr type for temp profile reset: {self._name}")
6471
+ return
6472
+
6473
+ self.logger.info(f"Checking {len(items)} {item_type}s for temp profile resets...")
6474
+
6475
+ for item in items:
6476
+ profile_id = item.get("qualityProfileId")
6477
+
6478
+ # Check if item is currently using a temp profile
6479
+ if profile_id in self.main_quality_profile_ids.keys():
6480
+ # This is a temp profile - get the original main profile
6481
+ original_id = self.main_quality_profile_ids[profile_id]
6482
+ item["qualityProfileId"] = original_id
6483
+
6484
+ # Update via API with retry logic
6485
+ for attempt in range(self.profile_switch_retry_attempts):
6486
+ try:
6487
+ if item_type == "movie":
6488
+ self.client.upd_movie(item)
6489
+ elif item_type == "series":
6490
+ self.client.upd_series(item)
6491
+ elif item_type == "artist":
6492
+ self.client.upd_artist(item)
6493
+
6494
+ reset_count += 1
6495
+ self.logger.info(
6496
+ f"Reset {item_type} '{item.get('title', item.get('artistName', 'Unknown'))}' "
6497
+ f"from temp profile (ID:{profile_id}) to main profile (ID:{original_id})"
6498
+ )
6499
+ break
6500
+ except (
6501
+ requests.exceptions.ChunkedEncodingError,
6502
+ requests.exceptions.ContentDecodingError,
6503
+ requests.exceptions.ConnectionError,
6504
+ JSONDecodeError,
6505
+ ) as e:
6506
+ if attempt == self.profile_switch_retry_attempts - 1:
6507
+ self.logger.error(
6508
+ f"Failed to reset {item_type} profile after {self.profile_switch_retry_attempts} attempts: {e}"
6509
+ )
6510
+ else:
6511
+ time.sleep(1)
6512
+ continue
6513
+
6514
+ if reset_count > 0:
6515
+ self.logger.info(
6516
+ f"ForceResetTempProfiles: Reset {reset_count} {item_type}s from temp to main profiles"
6517
+ )
6518
+ else:
6519
+ self.logger.info(
6520
+ f"ForceResetTempProfiles: No {item_type}s found using temp profiles"
6521
+ )
6522
+
6523
+ except Exception as e:
6524
+ self.logger.error(f"Error during temp profile reset: {e}", exc_info=True)
6525
+
6526
+ def _check_temp_profile_timeouts(self):
6527
+ """Check for items with temp profiles that have exceeded the timeout and reset them."""
6528
+ if self.temp_profile_timeout_minutes == 0:
6529
+ return # Feature disabled
6530
+
6531
+ from datetime import timedelta
6532
+
6533
+ timeout_threshold = datetime.now() - timedelta(minutes=self.temp_profile_timeout_minutes)
6534
+ reset_count = 0
6535
+
6536
+ try:
6537
+ # Query database for items with expired temp profiles
6538
+ db1, db2, db3, db4, db5 = self._get_models()
6539
+
6540
+ # Determine which model to use
6541
+ if self._name.lower().startswith("radarr"):
6542
+ model = self.movies_file_model
6543
+ item_type = "movie"
6544
+ elif self._name.lower().startswith("sonarr") or self._name.lower().startswith(
6545
+ "animarr"
6546
+ ):
6547
+ model = self.model_file # episodes
6548
+ item_type = "episode"
6549
+ elif self._name.lower().startswith("lidarr"):
6550
+ model = self.artists_file_model
6551
+ item_type = "artist"
6552
+ else:
6553
+ return
6554
+
6555
+ # Find items with temp profiles that have exceeded timeout
6556
+ expired_items = model.select().where(
6557
+ (model.LastProfileSwitchTime.is_null(False))
6558
+ & (model.LastProfileSwitchTime < timeout_threshold)
6559
+ & (model.CurrentProfileId.is_null(False))
6560
+ & (model.OriginalProfileId.is_null(False))
6561
+ )
6562
+
6563
+ for db_item in expired_items:
6564
+ entry_id = db_item.EntryId
6565
+ current_profile = db_item.CurrentProfileId
6566
+ original_profile = db_item.OriginalProfileId
6567
+
6568
+ # Verify current profile is still a temp profile in our mappings
6569
+ if current_profile not in self.main_quality_profile_ids.keys():
6570
+ # Not a temp profile anymore, clear tracking
6571
+ model.update(
6572
+ LastProfileSwitchTime=None, CurrentProfileId=None, OriginalProfileId=None
6573
+ ).where(model.EntryId == entry_id).execute()
6574
+ continue
6575
+
6576
+ # Reset to original profile via Arr API
6577
+ try:
6578
+ if item_type == "movie":
6579
+ item = self.client.get_movie(entry_id)
6580
+ item["qualityProfileId"] = original_profile
6581
+ self.client.upd_movie(item)
6582
+ elif item_type == "episode":
6583
+ # For episodes, we need to update the series
6584
+ series_id = db_item.SeriesId
6585
+ series = self.client.get_series(series_id)
6586
+ series["qualityProfileId"] = original_profile
6587
+ self.client.upd_series(series)
6588
+ elif item_type == "artist":
6589
+ artist = self.client.get_artist(entry_id)
6590
+ artist["qualityProfileId"] = original_profile
6591
+ self.client.upd_artist(artist)
6592
+
6593
+ # Clear tracking fields in database
6594
+ model.update(
6595
+ LastProfileSwitchTime=None, CurrentProfileId=None, OriginalProfileId=None
6596
+ ).where(model.EntryId == entry_id).execute()
6597
+
6598
+ reset_count += 1
6599
+ self.logger.info(
6600
+ f"Timeout reset: {item_type} ID {entry_id} from temp profile (ID:{current_profile}) "
6601
+ f"to main profile (ID:{original_profile}) after {self.temp_profile_timeout_minutes} minutes"
6602
+ )
6603
+
6604
+ except Exception as e:
6605
+ self.logger.error(
6606
+ f"Failed to reset {item_type} ID {entry_id} after timeout: {e}"
6607
+ )
6608
+
6609
+ if reset_count > 0:
6610
+ self.logger.info(
6611
+ f"TempProfileTimeout: Reset {reset_count} {item_type}s from temp to main profiles"
6612
+ )
6613
+
6614
+ except Exception as e:
6615
+ self.logger.error(f"Error checking temp profile timeouts: {e}", exc_info=True)
6616
+
6151
6617
  def register_search_mode(self):
6152
6618
  if self.search_setup_completed:
6153
6619
  return
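The timeout sweep in _check_temp_profile_timeouts selects rows whose LastProfileSwitchTime is older than the configured window before reverting them. A minimal sketch of just that threshold query, using a hypothetical peewee model with the same three tracking columns:

    from datetime import datetime, timedelta

    from peewee import DateTimeField, IntegerField, Model, SqliteDatabase

    db = SqliteDatabase(":memory:")

    class MediaFiles(Model):
        EntryId = IntegerField()
        LastProfileSwitchTime = DateTimeField(null=True)
        OriginalProfileId = IntegerField(null=True)
        CurrentProfileId = IntegerField(null=True)

        class Meta:
            database = db

    db.create_tables([MediaFiles])
    MediaFiles.insert(EntryId=1, LastProfileSwitchTime=datetime.now() - timedelta(hours=5),
                      OriginalProfileId=2, CurrentProfileId=7).execute()

    timeout_minutes = 240
    threshold = datetime.now() - timedelta(minutes=timeout_minutes)
    expired = MediaFiles.select().where(
        (MediaFiles.LastProfileSwitchTime.is_null(False))
        & (MediaFiles.LastProfileSwitchTime < threshold)
        & (MediaFiles.CurrentProfileId.is_null(False))
        & (MediaFiles.OriginalProfileId.is_null(False))
    )
    print([row.EntryId for row in expired])   # [1] -> these rows would be reverted to OriginalProfileId
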
@@ -6501,6 +6967,18 @@ class Arr:
6501
6967
 
6502
6968
  def run_search_loop(self) -> NoReturn:
6503
6969
  run_logs(self.logger)
6970
+ self.logger.info(
6971
+ "Search loop starting for %s (SearchMissing=%s, DoUpgradeSearch=%s, "
6972
+ "QualityUnmetSearch=%s, CustomFormatUnmetSearch=%s, "
6973
+ "Overseerr=%s, Ombi=%s)",
6974
+ self._name,
6975
+ self.search_missing,
6976
+ self.do_upgrade_search,
6977
+ self.quality_unmet_search,
6978
+ self.custom_format_unmet_search,
6979
+ self.overseerr_requests,
6980
+ self.ombi_search_requests,
6981
+ )
6504
6982
  try:
6505
6983
  if not (
6506
6984
  self.search_missing
@@ -6517,6 +6995,7 @@ class Arr:
6517
6995
  totcommands = -1
6518
6996
  self.db_update_processed = False
6519
6997
  event = self.manager.qbit_manager.shutdown_event
6998
+ self.logger.info("Search loop initialized successfully, entering main loop")
6520
6999
  while not event.is_set():
6521
7000
  if self.loop_completed:
6522
7001
  years_index = 0
@@ -6535,7 +7014,13 @@ class Arr:
6535
7014
  self.db_maybe_reset_entry_searched_state()
6536
7015
  self.refresh_download_queue()
6537
7016
  self.db_update()
6538
- # self.run_request_search()
7017
+
7018
+ # Check for expired temp profiles if feature is enabled
7019
+ if self.use_temp_for_missing and self.temp_profile_timeout_minutes > 0:
7020
+ self._check_temp_profile_timeouts()
7021
+
7022
+ # Check for new Overseerr/Ombi requests and trigger searches
7023
+ self.run_request_search()
6539
7024
  try:
6540
7025
  if self.search_by_year:
6541
7026
  if years.index(self.search_current_year) != years_count - 1:
@@ -6658,6 +7143,16 @@ class Arr:
6658
7143
  except KeyboardInterrupt:
6659
7144
  self.logger.hnotice("Detected Ctrl+C - Terminating process")
6660
7145
  sys.exit(0)
7146
+ except Exception as e:
7147
+ self.logger.critical(
7148
+ "Search loop crashed unexpectedly for %s: %s",
7149
+ self._name,
7150
+ e,
7151
+ exc_info=True,
7152
+ )
7153
+ raise
7154
+ finally:
7155
+ self.logger.warning("Search loop terminated for %s", self._name)
6661
7156
 
6662
7157
  def run_torrent_loop(self) -> NoReturn:
6663
7158
  run_logs(self.logger)
@@ -7050,7 +7545,7 @@ class FreeSpaceManager(Arr):
7050
7545
  )
7051
7546
  self.pause.add(torrent.hash)
7052
7547
 
7053
- def _process_single_torrent(self, torrent):
7548
+ def _process_single_torrent(self, torrent, instance_name: str = "default"):
7054
7549
  if self.is_downloading_state(torrent):
7055
7550
  free_space_test = self.current_free_space
7056
7551
  free_space_test -= torrent["amount_left"]
@@ -7069,8 +7564,8 @@ class FreeSpaceManager(Arr):
7069
7564
  format_bytes(torrent.amount_left),
7070
7565
  format_bytes(-free_space_test),
7071
7566
  )
7072
- self.add_tags(torrent, ["qBitrr-free_space_paused"])
7073
- self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
7567
+ self.add_tags(torrent, ["qBitrr-free_space_paused"], instance_name)
7568
+ self.remove_tags(torrent, ["qBitrr-allowed_seeding"], instance_name)
7074
7569
  self._process_single_torrent_pause_disk_space(torrent)
7075
7570
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
7076
7571
  self.logger.info(
@@ -7080,8 +7575,8 @@ class FreeSpaceManager(Arr):
7080
7575
  format_bytes(torrent.amount_left),
7081
7576
  format_bytes(-free_space_test),
7082
7577
  )
7083
- self.add_tags(torrent, ["qBitrr-free_space_paused"])
7084
- self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
7578
+ self.add_tags(torrent, ["qBitrr-free_space_paused"], instance_name)
7579
+ self.remove_tags(torrent, ["qBitrr-allowed_seeding"], instance_name)
7085
7580
  elif torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
7086
7581
  self.logger.info(
7087
7582
  "Continuing download (sufficient space) | Torrent: %s | Available: %s | Space after: %s",
@@ -7090,7 +7585,7 @@ class FreeSpaceManager(Arr):
7090
7585
  format_bytes(free_space_test + self._min_free_space_bytes),
7091
7586
  )
7092
7587
  self.current_free_space = free_space_test
7093
- self.remove_tags(torrent, ["qBitrr-free_space_paused"])
7588
+ self.remove_tags(torrent, ["qBitrr-free_space_paused"], instance_name)
7094
7589
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
7095
7590
  self.logger.info(
7096
7591
  "Resuming download (space available) | Torrent: %s | Available: %s | Space after: %s",
@@ -7099,16 +7594,16 @@ class FreeSpaceManager(Arr):
7099
7594
  format_bytes(free_space_test + self._min_free_space_bytes),
7100
7595
  )
7101
7596
  self.current_free_space = free_space_test
7102
- self.remove_tags(torrent, ["qBitrr-free_space_paused"])
7597
+ self.remove_tags(torrent, ["qBitrr-free_space_paused"], instance_name)
7103
7598
  elif not self.is_downloading_state(torrent) and self.in_tags(
7104
- torrent, "qBitrr-free_space_paused"
7599
+ torrent, "qBitrr-free_space_paused", instance_name
7105
7600
  ):
7106
7601
  self.logger.info(
7107
7602
  "Torrent completed, removing free space tag | Torrent: %s | Available: %s",
7108
7603
  torrent.name,
7109
7604
  format_bytes(self.current_free_space + self._min_free_space_bytes),
7110
7605
  )
7111
- self.remove_tags(torrent, ["qBitrr-free_space_paused"])
7606
+ self.remove_tags(torrent, ["qBitrr-free_space_paused"], instance_name)
7112
7607
 
7113
7608
  def process(self):
7114
7609
  self._process_paused()
@@ -7138,7 +7633,7 @@ class FreeSpaceManager(Arr):
7138
7633
  torrents = [t for t in torrents if "qBitrr-ignored" not in t.tags]
7139
7634
  self.category_torrent_count = len(torrents)
7140
7635
  self.free_space_tagged_count = sum(
7141
- 1 for t in torrents if self.in_tags(t, "qBitrr-free_space_paused")
7636
+ 1 for t in torrents if self.in_tags(t, "qBitrr-free_space_paused", "default")
7142
7637
  )
7143
7638
  if not len(torrents):
7144
7639
  raise DelayLoopException(length=LOOP_SLEEP_TIMER, type="no_downloads")