qBitrr2 5.1.1__py3-none-any.whl → 5.3.1__py3-none-any.whl

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
qBitrr/arss.py CHANGED
@@ -21,7 +21,7 @@ import qbittorrentapi.exceptions
21
21
  import requests
22
22
  from packaging import version as version_parser
23
23
  from peewee import Model, SqliteDatabase
24
- from pyarr import RadarrAPI, SonarrAPI
24
+ from pyarr import LidarrAPI, RadarrAPI, SonarrAPI
25
25
  from pyarr.exceptions import PyarrResourceNotFound, PyarrServerError
26
26
  from pyarr.types import JsonObject
27
27
  from qbittorrentapi import TorrentDictionary, TorrentStates
@@ -58,6 +58,9 @@ from qBitrr.search_activity_store import (
58
58
  record_search_activity,
59
59
  )
60
60
  from qBitrr.tables import (
61
+ AlbumFilesModel,
62
+ AlbumQueueModel,
63
+ ArtistFilesModel,
61
64
  EpisodeFilesModel,
62
65
  EpisodeQueueModel,
63
66
  FilesQueued,
@@ -65,10 +68,12 @@ from qBitrr.tables import (
65
68
  MoviesFilesModel,
66
69
  SeriesFilesModel,
67
70
  TorrentLibrary,
71
+ TrackFilesModel,
68
72
  )
69
73
  from qBitrr.utils import (
70
74
  ExpiringSet,
71
75
  absolute_file_paths,
76
+ format_bytes,
72
77
  has_internet,
73
78
  parse_size,
74
79
  validate_and_return_torrent_file,
@@ -118,7 +123,10 @@ if TYPE_CHECKING:
118
123
 
119
124
  class Arr:
120
125
  def __init__(
121
- self, name: str, manager: ArrManager, client_cls: type[Callable | RadarrAPI | SonarrAPI]
126
+ self,
127
+ name: str,
128
+ manager: ArrManager,
129
+ client_cls: type[Callable | RadarrAPI | SonarrAPI | LidarrAPI],
122
130
  ):
123
131
  if name in manager.groups:
124
132
  raise OSError(f"Group '{name}' has already been registered.")
@@ -312,7 +320,14 @@ class Arr:
312
320
  self.overseerr_requests = CONFIG.get(
313
321
  f"{name}.EntrySearch.Overseerr.SearchOverseerrRequests", fallback=False
314
322
  )
315
- self.series_search = CONFIG.get(f"{name}.EntrySearch.SearchBySeries", fallback=False)
323
+ # SearchBySeries can be: True (always series), False (always episode), or "smart" (automatic)
324
+ series_search_config = CONFIG.get(f"{name}.EntrySearch.SearchBySeries", fallback=False)
325
+ if isinstance(series_search_config, str) and series_search_config.lower() == "smart":
326
+ self.series_search = "smart"
327
+ elif series_search_config in (True, "true", "True", "TRUE", 1):
328
+ self.series_search = True
329
+ else:
330
+ self.series_search = False
316
331
  if self.ombi_search_requests:
317
332
  self.ombi_uri = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiURI")
318
333
  self.ombi_api_key = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiAPIKey")
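
The new branch normalizes `EntrySearch.SearchBySeries`, which can now be a boolean or the string "smart". A minimal standalone sketch of the same normalization, assuming the accepted values shown in the diff (the helper name is hypothetical):

def normalize_search_by_series(raw):
    # Mirrors the branch added in __init__: "smart" stays a string, truthy spellings
    # of True collapse to True, everything else falls back to False.
    if isinstance(raw, str) and raw.lower() == "smart":
        return "smart"
    if raw in (True, "true", "True", "TRUE", 1):
        return True
    return False

print(normalize_search_by_series("smart"))  # smart
print(normalize_search_by_series("True"))   # True
print(normalize_search_by_series(0))        # False
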
@@ -384,6 +399,18 @@ class Arr:
384
399
  self.type = "sonarr"
385
400
  elif isinstance(self.client, RadarrAPI):
386
401
  self.type = "radarr"
402
+ elif isinstance(self.client, LidarrAPI):
403
+ self.type = "lidarr"
404
+
405
+ # Disable unsupported features for Lidarr
406
+ if self.type == "lidarr":
407
+ self.search_by_year = False
408
+ self.ombi_search_requests = False
409
+ self.overseerr_requests = False
410
+ self.ombi_uri = None
411
+ self.ombi_api_key = None
412
+ self.overseerr_uri = None
413
+ self.overseerr_api_key = None
387
414
 
388
415
  try:
389
416
  version_info = self.client.get_update()
@@ -413,6 +440,10 @@ class Arr:
413
440
  if self.use_temp_for_missing:
414
441
  self.temp_quality_profile_ids = self.parse_quality_profiles()
415
442
 
443
+ # Cache for valid quality profile IDs to avoid repeated API calls and warnings
444
+ self._quality_profile_cache: dict[int, dict] = {}
445
+ self._invalid_quality_profiles: set[int] = set()
446
+
416
447
  if self.rss_sync_timer > 0:
417
448
  self.rss_sync_timer_last_checked = datetime(1970, 1, 1)
418
449
  else:
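
The two new attributes memoize quality-profile lookups so a profile is fetched once and a missing profile only produces a single warning. A self-contained sketch of the lookup pattern the Lidarr branches use later in this diff (simplified, and assuming the same pyarr client methods the diff calls):

from pyarr import LidarrAPI
from pyarr.exceptions import PyarrResourceNotFound


class QualityProfileCache:
    def __init__(self, client: LidarrAPI):
        self.client = client
        self._quality_profile_cache: dict[int, dict] = {}
        self._invalid_quality_profiles: set[int] = set()

    def min_format_score(self, profile_id: int) -> int:
        # Known-bad ids short-circuit to 0 without another API call or warning.
        if profile_id in self._invalid_quality_profiles:
            return 0
        if profile_id not in self._quality_profile_cache:
            try:
                self._quality_profile_cache[profile_id] = self.client.get_quality_profile(profile_id)
            except PyarrResourceNotFound:
                self._invalid_quality_profiles.add(profile_id)
                return 0
        return self._quality_profile_cache[profile_id].get("minFormatScore", 0)
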
@@ -549,9 +580,12 @@ class Arr:
549
580
  self.quality_unmet_search
550
581
  or self.do_upgrade_search
551
582
  or self.custom_format_unmet_search
552
- or self.series_search
583
+ or self.series_search == True
553
584
  ):
554
585
  self.search_api_command = "SeriesSearch"
586
+ elif self.series_search == "smart":
587
+ # In smart mode, the command will be determined dynamically
588
+ self.search_api_command = "SeriesSearch" # Default, will be overridden per search
555
589
  else:
556
590
  self.search_api_command = "MissingEpisodeSearch"
557
591
 
@@ -571,6 +605,7 @@ class Arr:
571
605
  self.series_file_model: Model | None = None
572
606
  self.model_queue: Model | None = None
573
607
  self.persistent_queue: Model | None = None
608
+ self.track_file_model: Model | None = None
574
609
  self.torrents: TorrentLibrary | None = None
575
610
  self.torrent_db: SqliteDatabase | None = None
576
611
  self.db: SqliteDatabase | None = None
@@ -1133,6 +1168,26 @@ class Arr:
1133
1168
  ),
1134
1169
  )
1135
1170
  self.logger.success("DownloadedMoviesScan: %s", path)
1171
+ elif self.type == "lidarr":
1172
+ with_retry(
1173
+ lambda: self.client.post_command(
1174
+ "DownloadedAlbumsScan",
1175
+ path=str(path),
1176
+ downloadClientId=torrent.hash.upper(),
1177
+ importMode=self.import_mode,
1178
+ ),
1179
+ retries=3,
1180
+ backoff=0.5,
1181
+ max_backoff=3,
1182
+ exceptions=(
1183
+ requests.exceptions.ChunkedEncodingError,
1184
+ requests.exceptions.ContentDecodingError,
1185
+ requests.exceptions.ConnectionError,
1186
+ JSONDecodeError,
1187
+ requests.exceptions.RequestException,
1188
+ ),
1189
+ )
1190
+ self.logger.success("DownloadedAlbumsScan: %s", path)
1136
1191
  except Exception as ex:
1137
1192
  self.logger.error(
1138
1193
  "Downloaded scan error: [%s][%s][%s][%s]",
@@ -1312,6 +1367,48 @@ class Arr:
1312
1367
  continue
1313
1368
  if self.persistent_queue:
1314
1369
  self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
1370
+ elif self.type == "lidarr":
1371
+ self.logger.trace("Requeue cache entry: %s", object_id)
1372
+ while True:
1373
+ try:
1374
+ data = self.client.get_album(object_id)
1375
+ name = data.get("title")
1376
+ if name:
1377
+ artist_title = data.get("artist", {}).get("artistName", "")
1378
+ foreign_album_id = data.get("foreignAlbumId", "")
1379
+ self.logger.notice(
1380
+ "Re-Searching album: %s - %s | [foreignAlbumId=%s|id=%s]",
1381
+ artist_title,
1382
+ name,
1383
+ foreign_album_id,
1384
+ object_id,
1385
+ )
1386
+ else:
1387
+ self.logger.notice("Re-Searching album: %s", object_id)
1388
+ break
1389
+ except (
1390
+ requests.exceptions.ChunkedEncodingError,
1391
+ requests.exceptions.ContentDecodingError,
1392
+ requests.exceptions.ConnectionError,
1393
+ JSONDecodeError,
1394
+ AttributeError,
1395
+ ):
1396
+ continue
1397
+ if object_id in self.queue_file_ids:
1398
+ self.queue_file_ids.remove(object_id)
1399
+ while True:
1400
+ try:
1401
+ self.client.post_command("AlbumSearch", albumIds=[object_id])
1402
+ break
1403
+ except (
1404
+ requests.exceptions.ChunkedEncodingError,
1405
+ requests.exceptions.ContentDecodingError,
1406
+ requests.exceptions.ConnectionError,
1407
+ JSONDecodeError,
1408
+ ):
1409
+ continue
1410
+ if self.persistent_queue:
1411
+ self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
1315
1412
 
1316
1413
  def _process_errored(self) -> None:
1317
1414
  # Recheck all torrents marked for rechecking.
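
The Lidarr requeue path above mirrors the Sonarr/Radarr ones: resolve the album for logging, then re-issue an `AlbumSearch` command. A minimal standalone version of those two pyarr calls, mirroring the calls in the diff (URL, API key and album id are placeholders):

from pyarr import LidarrAPI

lidarr = LidarrAPI("http://localhost:8686", "example-api-key")  # placeholder connection details

album_id = 42  # hypothetical album id taken from the requeue cache
album = lidarr.get_album(album_id)
print(album.get("artist", {}).get("artistName", ""), "-", album.get("title", ""))
lidarr.post_command("AlbumSearch", albumIds=[album_id])
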
@@ -1329,10 +1426,6 @@ class Arr:
1329
1426
  to_delete_all = self.delete.union(
1330
1427
  self.missing_files_post_delete, self.downloads_with_bad_error_message_blocklist
1331
1428
  )
1332
- if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
1333
- delete_ = True
1334
- else:
1335
- delete_ = False
1336
1429
  skip_blacklist = {
1337
1430
  i.upper() for i in self.skip_blacklist.union(self.missing_files_post_delete)
1338
1431
  }
@@ -1361,7 +1454,7 @@ class Arr:
1361
1454
  del self.manager.qbit_manager.name_cache[h]
1362
1455
  if h in self.manager.qbit_manager.cache:
1363
1456
  del self.manager.qbit_manager.cache[h]
1364
- if delete_:
1457
+ if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
1365
1458
  self.missing_files_post_delete.clear()
1366
1459
  self.downloads_with_bad_error_message_blocklist.clear()
1367
1460
  self.skip_blacklist.clear()
@@ -1499,11 +1592,55 @@ class Arr:
1499
1592
  ) -> Iterable[
1500
1593
  tuple[MoviesFilesModel | EpisodeFilesModel | SeriesFilesModel, bool, bool, bool, int]
1501
1594
  ]:
1502
- if self.type == "sonarr" and self.series_search:
1595
+ if self.type == "sonarr" and self.series_search == True:
1503
1596
  serieslist = self.db_get_files_series()
1504
1597
  for series in serieslist:
1505
1598
  yield series[0], series[1], series[2], series[2] is not True, len(serieslist)
1506
- elif self.type == "sonarr" and not self.series_search:
1599
+ elif self.type == "sonarr" and self.series_search == "smart":
1600
+ # Smart mode: decide dynamically based on what needs to be searched
1601
+ episodelist = self.db_get_files_episodes()
1602
+ if episodelist:
1603
+ # Group episodes by series to determine if we should search by series or episode
1604
+ series_episodes_map = {}
1605
+ for episode_entry in episodelist:
1606
+ episode = episode_entry[0]
1607
+ series_id = episode.SeriesId
1608
+ if series_id not in series_episodes_map:
1609
+ series_episodes_map[series_id] = []
1610
+ series_episodes_map[series_id].append(episode_entry)
1611
+
1612
+ # Process each series
1613
+ for series_id, episodes in series_episodes_map.items():
1614
+ if len(episodes) > 1:
1615
+ # Multiple episodes from same series - use series search (smart decision)
1616
+ self.logger.info(
1617
+ "[SMART MODE] Using series search for %s episodes from series ID %s",
1618
+ len(episodes),
1619
+ series_id,
1620
+ )
1621
+ # Create a series entry for searching
1622
+ series_model = (
1623
+ self.series_file_model.select()
1624
+ .where(self.series_file_model.EntryId == series_id)
1625
+ .first()
1626
+ )
1627
+ if series_model:
1628
+ yield series_model, episodes[0][1], episodes[0][2], True, len(
1629
+ episodelist
1630
+ )
1631
+ else:
1632
+ # Single episode - use episode search (smart decision)
1633
+ episode = episodes[0][0]
1634
+ self.logger.info(
1635
+ "[SMART MODE] Using episode search for single episode: %s S%02dE%03d",
1636
+ episode.SeriesTitle,
1637
+ episode.SeasonNumber,
1638
+ episode.EpisodeNumber,
1639
+ )
1640
+ yield episodes[0][0], episodes[0][1], episodes[0][2], False, len(
1641
+ episodelist
1642
+ )
1643
+ elif self.type == "sonarr" and self.series_search == False:
1507
1644
  episodelist = self.db_get_files_episodes()
1508
1645
  for episodes in episodelist:
1509
1646
  yield episodes[0], episodes[1], episodes[2], False, len(episodelist)
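
In smart mode the generator groups outstanding episodes by `SeriesId`: a series with more than one gap is searched as a whole, while a series with a single gap falls back to an episode search. The grouping decision in isolation (plain tuples stand in for the rows yielded by the db helpers):

from collections import defaultdict


def choose_search_mode(episode_entries):
    # episode_entries: iterable of (episode, flag_a, flag_b) tuples, where episode has a SeriesId.
    by_series = defaultdict(list)
    for entry in episode_entries:
        by_series[entry[0].SeriesId].append(entry)
    for series_id, episodes in by_series.items():
        if len(episodes) > 1:
            yield "series", series_id, episodes   # several gaps: one series-wide search
        else:
            yield "episode", series_id, episodes  # single gap: search just that episode
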
@@ -1511,6 +1648,10 @@ class Arr:
1511
1648
  movielist = self.db_get_files_movies()
1512
1649
  for movies in movielist:
1513
1650
  yield movies[0], movies[1], movies[2], False, len(movielist)
1651
+ elif self.type == "lidarr":
1652
+ albumlist = self.db_get_files_movies() # This calls the lidarr section we added
1653
+ for albums in albumlist:
1654
+ yield albums[0], albums[1], albums[2], False, len(albumlist)
1514
1655
 
1515
1656
  def db_maybe_reset_entry_searched_state(self):
1516
1657
  if self.type == "sonarr":
@@ -1518,6 +1659,8 @@ class Arr:
1518
1659
  self.db_reset__episode_searched_state()
1519
1660
  elif self.type == "radarr":
1520
1661
  self.db_reset__movie_searched_state()
1662
+ elif self.type == "lidarr":
1663
+ self.db_reset__album_searched_state()
1521
1664
  self.loop_completed = False
1522
1665
 
1523
1666
  def db_reset__series_searched_state(self):
@@ -1600,6 +1743,33 @@ class Arr:
1600
1743
  self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
1601
1744
  self.loop_completed = False
1602
1745
 
1746
+ def db_reset__album_searched_state(self):
1747
+ ids = []
1748
+ self.model_file: AlbumFilesModel
1749
+ if (
1750
+ self.loop_completed is True and self.reset_on_completion
1751
+ ): # Only wipe if a loop completed was tagged
1752
+ self.model_file.update(Searched=False, Upgrade=False).where(
1753
+ self.model_file.Searched == True
1754
+ ).execute()
1755
+ while True:
1756
+ try:
1757
+ artists = self.client.get_artist()
1758
+ for artist in artists:
1759
+ albums = self.client.get_album(artistId=artist["id"])
1760
+ for album in albums:
1761
+ ids.append(album["id"])
1762
+ break
1763
+ except (
1764
+ requests.exceptions.ChunkedEncodingError,
1765
+ requests.exceptions.ContentDecodingError,
1766
+ requests.exceptions.ConnectionError,
1767
+ JSONDecodeError,
1768
+ ):
1769
+ continue
1770
+ self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
1771
+ self.loop_completed = False
1772
+
1603
1773
  def db_get_files_series(self) -> list[list[SeriesFilesModel, bool, bool]] | None:
1604
1774
  entries = []
1605
1775
  if not (self.search_missing or self.do_upgrade_search):
@@ -1767,6 +1937,36 @@ class Arr:
1767
1937
  ):
1768
1938
  entries.append([entry, False, False])
1769
1939
  return entries
1940
+ elif self.type == "lidarr":
1941
+ condition = True # Placeholder, will be refined
1942
+ if self.do_upgrade_search:
1943
+ condition &= self.model_file.Upgrade == False
1944
+ else:
1945
+ if self.quality_unmet_search and not self.custom_format_unmet_search:
1946
+ condition &= (self.model_file.Searched == False) | (
1947
+ self.model_file.QualityMet == False
1948
+ )
1949
+ elif not self.quality_unmet_search and self.custom_format_unmet_search:
1950
+ condition &= (self.model_file.Searched == False) | (
1951
+ self.model_file.CustomFormatMet == False
1952
+ )
1953
+ elif self.quality_unmet_search and self.custom_format_unmet_search:
1954
+ condition &= (
1955
+ (self.model_file.Searched == False)
1956
+ | (self.model_file.QualityMet == False)
1957
+ | (self.model_file.CustomFormatMet == False)
1958
+ )
1959
+ else:
1960
+ condition &= self.model_file.AlbumFileId == 0
1961
+ condition &= self.model_file.Searched == False
1962
+ for entry in (
1963
+ self.model_file.select()
1964
+ .where(condition)
1965
+ .order_by(self.model_file.AlbumFileId.asc())
1966
+ .execute()
1967
+ ):
1968
+ entries.append([entry, False, False])
1969
+ return entries
1770
1970
 
1771
1971
  def db_get_request_files(self) -> Iterable[tuple[MoviesFilesModel | EpisodeFilesModel, int]]:
1772
1972
  entries = []
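
The Lidarr filter above is built by AND-ing peewee expressions together, starting from a permissive placeholder and narrowing it with the active search options. A self-contained peewee sketch of the same pattern, using an always-true expression in place of the bare `True` placeholder:

from peewee import BooleanField, IntegerField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Album(Model):
    Searched = BooleanField(default=False)
    QualityMet = BooleanField(default=False)
    AlbumFileId = IntegerField(default=0)

    class Meta:
        database = db


db.connect()
db.create_tables([Album])

condition = Album.AlbumFileId >= 0  # always-true starting point
condition &= (Album.Searched == False) | (Album.QualityMet == False)  # noqa: E712
pending = Album.select().where(condition).order_by(Album.AlbumFileId.asc())
print(pending.count())  # 0 rows in this empty in-memory table
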
@@ -1964,104 +2164,47 @@ class Arr:
1964
2164
  except Exception:
1965
2165
  pass
1966
2166
  self.db_update_todays_releases()
1967
- if self.db_update_processed and not self.search_by_year:
2167
+ if self.db_update_processed:
1968
2168
  return
1969
- if self.search_by_year:
1970
- self.logger.info("Started updating database for %s", self.search_current_year)
1971
- else:
1972
- self.logger.info("Started updating database")
2169
+ self.logger.info("Started updating database")
1973
2170
  if self.type == "sonarr":
1974
- if not self.series_search:
1975
- while True:
1976
- try:
1977
- series = self.client.get_series()
1978
- break
1979
- except (
1980
- requests.exceptions.ChunkedEncodingError,
1981
- requests.exceptions.ContentDecodingError,
1982
- requests.exceptions.ConnectionError,
1983
- JSONDecodeError,
1984
- ):
1985
- continue
1986
- if self.search_by_year:
1987
- for s in series:
1988
- if isinstance(s, str):
1989
- continue
1990
- episodes = self.client.get_episode(s["id"], True)
1991
- for e in episodes:
1992
- if isinstance(e, str):
1993
- continue
1994
- if "airDateUtc" in e:
1995
- if datetime.strptime(
1996
- e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
1997
- ).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
1998
- continue
1999
- if (
2000
- datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
2001
- .replace(tzinfo=timezone.utc)
2002
- .date()
2003
- < datetime(
2004
- month=1, day=1, year=int(self.search_current_year)
2005
- ).date()
2006
- ):
2007
- continue
2008
- if (
2009
- datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
2010
- .replace(tzinfo=timezone.utc)
2011
- .date()
2012
- > datetime(
2013
- month=12, day=31, year=int(self.search_current_year)
2014
- ).date()
2015
- ):
2016
- continue
2017
- if not self.search_specials and e["seasonNumber"] == 0:
2018
- continue
2019
- self.db_update_single_series(db_entry=e)
2171
+ # Always fetch series list for both episode and series-level tracking
2172
+ while True:
2173
+ try:
2174
+ series = self.client.get_series()
2175
+ break
2176
+ except (
2177
+ requests.exceptions.ChunkedEncodingError,
2178
+ requests.exceptions.ContentDecodingError,
2179
+ requests.exceptions.ConnectionError,
2180
+ JSONDecodeError,
2181
+ ):
2182
+ continue
2020
2183
 
2021
- else:
2022
- for s in series:
2023
- if isinstance(s, str):
2024
- continue
2025
- episodes = self.client.get_episode(s["id"], True)
2026
- for e in episodes:
2027
- if isinstance(e, str):
2028
- continue
2029
- if "airDateUtc" in e:
2030
- if datetime.strptime(
2031
- e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
2032
- ).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
2033
- continue
2034
- if not self.search_specials and e["seasonNumber"] == 0:
2035
- continue
2036
- self.db_update_single_series(db_entry=e)
2037
- self.db_update_processed = True
2038
- else:
2039
- while True:
2040
- try:
2041
- series = self.client.get_series()
2042
- break
2043
- except (
2044
- requests.exceptions.ChunkedEncodingError,
2045
- requests.exceptions.ContentDecodingError,
2046
- requests.exceptions.ConnectionError,
2047
- JSONDecodeError,
2048
- ):
2184
+ # Process episodes for episode-level tracking (all episodes)
2185
+ for s in series:
2186
+ if isinstance(s, str):
2187
+ continue
2188
+ episodes = self.client.get_episode(s["id"], True)
2189
+ for e in episodes:
2190
+ if isinstance(e, str):
2049
2191
  continue
2050
- if self.search_by_year:
2051
- for s in series:
2052
- if isinstance(s, str):
2053
- continue
2054
- if s["year"] < self.search_current_year:
2055
- continue
2056
- if s["year"] > self.search_current_year:
2192
+ if "airDateUtc" in e:
2193
+ if datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ").replace(
2194
+ tzinfo=timezone.utc
2195
+ ) > datetime.now(timezone.utc):
2057
2196
  continue
2058
- self.db_update_single_series(db_entry=s, series=True)
2059
- else:
2060
- for s in series:
2061
- if isinstance(s, str):
2197
+ if not self.search_specials and e["seasonNumber"] == 0:
2062
2198
  continue
2063
- self.db_update_single_series(db_entry=s, series=True)
2064
- self.db_update_processed = True
2199
+ self.db_update_single_series(db_entry=e, series=False)
2200
+
2201
+ # Process series for series-level tracking (all series)
2202
+ for s in series:
2203
+ if isinstance(s, str):
2204
+ continue
2205
+ self.db_update_single_series(db_entry=s, series=True)
2206
+
2207
+ self.db_update_processed = True
2065
2208
  elif self.type == "radarr":
2066
2209
  while True:
2067
2210
  try:
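
The Sonarr update path no longer branches on `search_by_year`; it always walks every series and every episode, skipping episodes that have not aired yet and, unless specials are enabled, season 0. The skip logic in isolation:

from datetime import datetime, timezone


def should_skip(episode: dict, search_specials: bool = False) -> bool:
    # Skip unaired episodes and, unless specials are searched, season 0 entries.
    if "airDateUtc" in episode:
        aired = datetime.strptime(episode["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ").replace(
            tzinfo=timezone.utc
        )
        if aired > datetime.now(timezone.utc):
            return True
    return not search_specials and episode.get("seasonNumber") == 0


print(should_skip({"airDateUtc": "2020-01-01T00:00:00Z", "seasonNumber": 1}))  # False
print(should_skip({"airDateUtc": "2099-01-01T00:00:00Z", "seasonNumber": 1}))  # True
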
@@ -2074,20 +2217,58 @@ class Arr:
2074
2217
  JSONDecodeError,
2075
2218
  ):
2076
2219
  continue
2077
- if self.search_by_year:
2078
- for m in movies:
2079
- if isinstance(m, str):
2080
- continue
2081
- if m["year"] < self.search_current_year:
2082
- continue
2083
- if m["year"] > self.search_current_year:
2220
+ # Process all movies
2221
+ for m in movies:
2222
+ if isinstance(m, str):
2223
+ continue
2224
+ self.db_update_single_series(db_entry=m)
2225
+ self.db_update_processed = True
2226
+ elif self.type == "lidarr":
2227
+ while True:
2228
+ try:
2229
+ artists = self.client.get_artist()
2230
+ break
2231
+ except (
2232
+ requests.exceptions.ChunkedEncodingError,
2233
+ requests.exceptions.ContentDecodingError,
2234
+ requests.exceptions.ConnectionError,
2235
+ JSONDecodeError,
2236
+ ):
2237
+ continue
2238
+ for artist in artists:
2239
+ if isinstance(artist, str):
2240
+ continue
2241
+ while True:
2242
+ try:
2243
+ # allArtistAlbums=True includes full album data with media/tracks
2244
+ albums = self.client.get_album(
2245
+ artistId=artist["id"], allArtistAlbums=True
2246
+ )
2247
+ break
2248
+ except (
2249
+ requests.exceptions.ChunkedEncodingError,
2250
+ requests.exceptions.ContentDecodingError,
2251
+ requests.exceptions.ConnectionError,
2252
+ JSONDecodeError,
2253
+ ):
2084
2254
  continue
2085
- self.db_update_single_series(db_entry=m)
2086
- else:
2087
- for m in movies:
2088
- if isinstance(m, str):
2255
+ for album in albums:
2256
+ if isinstance(album, str):
2089
2257
  continue
2090
- self.db_update_single_series(db_entry=m)
2258
+ # For Lidarr, we don't have a specific releaseDate field
2259
+ # Check if album has been released
2260
+ if "releaseDate" in album:
2261
+ release_date = datetime.strptime(
2262
+ album["releaseDate"], "%Y-%m-%dT%H:%M:%SZ"
2263
+ )
2264
+ if release_date > datetime.now():
2265
+ continue
2266
+ self.db_update_single_series(db_entry=album)
2267
+ # Process artists for artist-level tracking
2268
+ for artist in artists:
2269
+ if isinstance(artist, str):
2270
+ continue
2271
+ self.db_update_single_series(db_entry=artist, artist=True)
2091
2272
  self.db_update_processed = True
2092
2273
  self.logger.trace("Finished updating database")
2093
2274
  finally:
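
For Lidarr the database update walks every artist, pulls that artist's albums with `allArtistAlbums=True`, skips albums whose `releaseDate` lies in the future, and then records the artists themselves. A condensed standalone version of that walk (connection details are placeholders):

from datetime import datetime

from pyarr import LidarrAPI

lidarr = LidarrAPI("http://localhost:8686", "example-api-key")  # placeholder connection details

for artist in lidarr.get_artist():
    albums = lidarr.get_album(artistId=artist["id"], allArtistAlbums=True)
    for album in albums:
        release = album.get("releaseDate")
        if release and datetime.strptime(release, "%Y-%m-%dT%H:%M:%SZ") > datetime.now():
            continue  # not released yet, mirroring the diff's skip
        print(artist["artistName"], "-", album["title"])
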
@@ -2334,7 +2515,11 @@ class Arr:
2334
2515
  return False
2335
2516
 
2336
2517
  def db_update_single_series(
2337
- self, db_entry: JsonObject = None, request: bool = False, series: bool = False
2518
+ self,
2519
+ db_entry: JsonObject = None,
2520
+ request: bool = False,
2521
+ series: bool = False,
2522
+ artist: bool = False,
2338
2523
  ):
2339
2524
  if not (
2340
2525
  self.search_missing
@@ -2520,7 +2705,10 @@ class Arr:
2520
2705
  QualityMet = not QualityUnmet if db_entry["hasFile"] else False
2521
2706
  customFormatMet = customFormat >= minCustomFormat
2522
2707
 
2523
- if not episode["hasFile"]:
2708
+ if searched:
2709
+ # Episode is complete and not being searched
2710
+ reason = None
2711
+ elif not episode["hasFile"]:
2524
2712
  reason = "Missing"
2525
2713
  elif self.quality_unmet_search and QualityUnmet:
2526
2714
  reason = "Quality"
@@ -2529,7 +2717,7 @@ class Arr:
2529
2717
  elif self.do_upgrade_search:
2530
2718
  reason = "Upgrade"
2531
2719
  else:
2532
- reason = "Scheduled search"
2720
+ reason = None
2533
2721
 
2534
2722
  to_update = {
2535
2723
  self.model_file.Monitored: Monitored,
@@ -2747,6 +2935,9 @@ class Arr:
2747
2935
  conflict_target=[self.series_file_model.EntryId], update=to_update
2748
2936
  )
2749
2937
  db_commands.execute()
2938
+
2939
+ # Note: Episodes are now handled separately in db_update()
2940
+ # No need to recursively process episodes here to avoid duplication
2750
2941
  else:
2751
2942
  db_commands = self.series_file_model.delete().where(
2752
2943
  self.series_file_model.EntryId == EntryId
@@ -2842,7 +3033,7 @@ class Arr:
2842
3033
  self.logger.debug(
2843
3034
  "Updating quality profile for %s to %s",
2844
3035
  db_entry["title"],
2845
- self.temp_quality_profile_ids[db_entry["qualityProfileId"]],
3036
+ db_entry["qualityProfileId"],
2846
3037
  )
2847
3038
  while True:
2848
3039
  try:
@@ -2865,7 +3056,10 @@ class Arr:
2865
3056
  qualityMet = not QualityUnmet if db_entry["hasFile"] else False
2866
3057
  customFormatMet = customFormat >= minCustomFormat
2867
3058
 
2868
- if not db_entry["hasFile"]:
3059
+ if searched:
3060
+ # Movie is complete and not being searched
3061
+ reason = None
3062
+ elif not db_entry["hasFile"]:
2869
3063
  reason = "Missing"
2870
3064
  elif self.quality_unmet_search and QualityUnmet:
2871
3065
  reason = "Quality"
@@ -2874,7 +3068,7 @@ class Arr:
2874
3068
  elif self.do_upgrade_search:
2875
3069
  reason = "Upgrade"
2876
3070
  else:
2877
- reason = "Scheduled search"
3071
+ reason = None
2878
3072
 
2879
3073
  to_update = {
2880
3074
  self.model_file.MovieFileId: movieFileId,
@@ -2922,6 +3116,467 @@ class Arr:
2922
3116
  self.model_file.EntryId == db_entry["id"]
2923
3117
  )
2924
3118
  db_commands.execute()
3119
+ elif self.type == "lidarr":
3120
+ if not artist:
3121
+ # Album handling
3122
+ self.model_file: AlbumFilesModel
3123
+ searched = False
3124
+ albumData = self.model_file.get_or_none(
3125
+ self.model_file.EntryId == db_entry["id"]
3126
+ )
3127
+ if db_entry["monitored"] or self.search_unmonitored:
3128
+ while True:
3129
+ try:
3130
+ if albumData:
3131
+ if not albumData.MinCustomFormatScore:
3132
+ try:
3133
+ profile_id = db_entry["profileId"]
3134
+ # Check if this profile ID is known to be invalid
3135
+ if profile_id in self._invalid_quality_profiles:
3136
+ minCustomFormat = 0
3137
+ # Check cache first
3138
+ elif profile_id in self._quality_profile_cache:
3139
+ minCustomFormat = self._quality_profile_cache[
3140
+ profile_id
3141
+ ].get("minFormatScore", 0)
3142
+ else:
3143
+ # Fetch from API and cache
3144
+ try:
3145
+ profile = self.client.get_quality_profile(
3146
+ profile_id
3147
+ )
3148
+ self._quality_profile_cache[profile_id] = (
3149
+ profile
3150
+ )
3151
+ minCustomFormat = profile.get(
3152
+ "minFormatScore", 0
3153
+ )
3154
+ except PyarrResourceNotFound:
3155
+ # Mark as invalid to avoid repeated warnings
3156
+ self._invalid_quality_profiles.add(profile_id)
3157
+ self.logger.warning(
3158
+ "Quality profile %s not found for album %s, defaulting to 0",
3159
+ db_entry.get("profileId"),
3160
+ db_entry.get("title", "Unknown"),
3161
+ )
3162
+ minCustomFormat = 0
3163
+ except Exception:
3164
+ minCustomFormat = 0
3165
+ else:
3166
+ minCustomFormat = albumData.MinCustomFormatScore
3167
+ if (
3168
+ db_entry.get("statistics", {}).get("percentOfTracks", 0)
3169
+ == 100
3170
+ ):
3171
+ # Album has files
3172
+ albumFileId = db_entry.get("statistics", {}).get(
3173
+ "sizeOnDisk", 0
3174
+ )
3175
+ if albumFileId != albumData.AlbumFileId:
3176
+ # Get custom format score from album files
3177
+ customFormat = (
3178
+ 0 # Lidarr may not have customFormatScore
3179
+ )
3180
+ else:
3181
+ customFormat = albumData.CustomFormatScore
3182
+ else:
3183
+ customFormat = 0
3184
+ else:
3185
+ try:
3186
+ profile_id = db_entry["profileId"]
3187
+ # Check if this profile ID is known to be invalid
3188
+ if profile_id in self._invalid_quality_profiles:
3189
+ minCustomFormat = 0
3190
+ # Check cache first
3191
+ elif profile_id in self._quality_profile_cache:
3192
+ minCustomFormat = self._quality_profile_cache[
3193
+ profile_id
3194
+ ].get("minFormatScore", 0)
3195
+ else:
3196
+ # Fetch from API and cache
3197
+ try:
3198
+ profile = self.client.get_quality_profile(
3199
+ profile_id
3200
+ )
3201
+ self._quality_profile_cache[profile_id] = profile
3202
+ minCustomFormat = profile.get("minFormatScore", 0)
3203
+ except PyarrResourceNotFound:
3204
+ # Mark as invalid to avoid repeated warnings
3205
+ self._invalid_quality_profiles.add(profile_id)
3206
+ self.logger.warning(
3207
+ "Quality profile %s not found for album %s, defaulting to 0",
3208
+ db_entry.get("profileId"),
3209
+ db_entry.get("title", "Unknown"),
3210
+ )
3211
+ minCustomFormat = 0
3212
+ except Exception:
3213
+ minCustomFormat = 0
3214
+ if (
3215
+ db_entry.get("statistics", {}).get("percentOfTracks", 0)
3216
+ == 100
3217
+ ):
3218
+ customFormat = 0 # Lidarr may not have customFormatScore
3219
+ else:
3220
+ customFormat = 0
3221
+ break
3222
+ except (
3223
+ requests.exceptions.ChunkedEncodingError,
3224
+ requests.exceptions.ContentDecodingError,
3225
+ requests.exceptions.ConnectionError,
3226
+ JSONDecodeError,
3227
+ ):
3228
+ continue
3229
+
3230
+ # Determine if album has all tracks
3231
+ hasAllTracks = (
3232
+ db_entry.get("statistics", {}).get("percentOfTracks", 0) == 100
3233
+ )
3234
+
3235
+ # Check if quality cutoff is met for Lidarr
3236
+ # Unlike Sonarr/Radarr which have a qualityCutoffNotMet boolean field,
3237
+ # Lidarr requires us to check the track file quality against the profile cutoff
3238
+ QualityUnmet = False
3239
+ if hasAllTracks:
3240
+ try:
3241
+ # Get the artist's quality profile to find the cutoff
3242
+ artist_id = db_entry.get("artistId")
3243
+ artist_data = self.client.get_artist(artist_id)
3244
+ profile_id = artist_data.get("qualityProfileId")
3245
+
3246
+ if profile_id:
3247
+ # Get or use cached profile
3248
+ if profile_id in self._quality_profile_cache:
3249
+ profile = self._quality_profile_cache[profile_id]
3250
+ else:
3251
+ profile = self.client.get_quality_profile(profile_id)
3252
+ self._quality_profile_cache[profile_id] = profile
3253
+
3254
+ cutoff_quality_id = profile.get("cutoff")
3255
+ upgrade_allowed = profile.get("upgradeAllowed", False)
3256
+
3257
+ if cutoff_quality_id and upgrade_allowed:
3258
+ # Get track files for this album to check their quality
3259
+ album_id = db_entry.get("id")
3260
+ track_files = self.client.get_track_file(
3261
+ albumId=[album_id]
3262
+ )
3263
+
3264
+ if track_files:
3265
+ # Check if any track file's quality is below the cutoff
3266
+ for track_file in track_files:
3267
+ file_quality = track_file.get("quality", {}).get(
3268
+ "quality", {}
3269
+ )
3270
+ file_quality_id = file_quality.get("id", 0)
3271
+
3272
+ if file_quality_id < cutoff_quality_id:
3273
+ QualityUnmet = True
3274
+ self.logger.trace(
3275
+ "Album '%s' has quality below cutoff: %s (ID: %d) < cutoff (ID: %d)",
3276
+ db_entry.get("title", "Unknown"),
3277
+ file_quality.get("name", "Unknown"),
3278
+ file_quality_id,
3279
+ cutoff_quality_id,
3280
+ )
3281
+ break
3282
+ except Exception as e:
3283
+ self.logger.trace(
3284
+ "Could not determine quality cutoff status for album '%s': %s",
3285
+ db_entry.get("title", "Unknown"),
3286
+ str(e),
3287
+ )
3288
+ # Default to False if we can't determine
3289
+ QualityUnmet = False
3290
+
3291
+ if (
3292
+ hasAllTracks
3293
+ and not (self.quality_unmet_search and QualityUnmet)
3294
+ and not (
3295
+ self.custom_format_unmet_search and customFormat < minCustomFormat
3296
+ )
3297
+ ):
3298
+ searched = True
3299
+ self.model_queue.update(Completed=True).where(
3300
+ self.model_queue.EntryId == db_entry["id"]
3301
+ ).execute()
3302
+
3303
+ if self.use_temp_for_missing:
3304
+ quality_profile_id = db_entry.get("qualityProfileId")
3305
+ if (
3306
+ searched
3307
+ and quality_profile_id in self.temp_quality_profile_ids.values()
3308
+ and not self.keep_temp_profile
3309
+ ):
3310
+ db_entry["qualityProfileId"] = list(
3311
+ self.temp_quality_profile_ids.keys()
3312
+ )[
3313
+ list(self.temp_quality_profile_ids.values()).index(
3314
+ quality_profile_id
3315
+ )
3316
+ ]
3317
+ self.logger.debug(
3318
+ "Updating quality profile for %s to %s",
3319
+ db_entry["title"],
3320
+ db_entry["qualityProfileId"],
3321
+ )
3322
+ elif (
3323
+ not searched
3324
+ and quality_profile_id in self.temp_quality_profile_ids.keys()
3325
+ ):
3326
+ db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
3327
+ quality_profile_id
3328
+ ]
3329
+ self.logger.debug(
3330
+ "Updating quality profile for %s to %s",
3331
+ db_entry["title"],
3332
+ db_entry["qualityProfileId"],
3333
+ )
3334
+ while True:
3335
+ try:
3336
+ self.client.upd_album(db_entry)
3337
+ break
3338
+ except (
3339
+ requests.exceptions.ChunkedEncodingError,
3340
+ requests.exceptions.ContentDecodingError,
3341
+ requests.exceptions.ConnectionError,
3342
+ JSONDecodeError,
3343
+ ):
3344
+ continue
3345
+
3346
+ title = db_entry.get("title", "Unknown Album")
3347
+ monitored = db_entry.get("monitored", False)
3348
+ # Handle artist field which can be an object or might not exist
3349
+ artist_obj = db_entry.get("artist", {})
3350
+ if isinstance(artist_obj, dict):
3351
+ # Try multiple possible field names for artist name
3352
+ artistName = (
3353
+ artist_obj.get("artistName")
3354
+ or artist_obj.get("name")
3355
+ or artist_obj.get("title")
3356
+ or "Unknown Artist"
3357
+ )
3358
+ else:
3359
+ artistName = "Unknown Artist"
3360
+ artistId = db_entry.get("artistId", 0)
3361
+ foreignAlbumId = db_entry.get("foreignAlbumId", "")
3362
+ releaseDate = db_entry.get("releaseDate")
3363
+ entryId = db_entry.get("id", 0)
3364
+ albumFileId = 1 if hasAllTracks else 0 # Use 1/0 to indicate presence
3365
+ qualityMet = not QualityUnmet if hasAllTracks else False
3366
+ customFormatMet = customFormat >= minCustomFormat
3367
+
3368
+ if searched:
3369
+ # Album is complete and not being searched
3370
+ reason = None
3371
+ elif not hasAllTracks:
3372
+ reason = "Missing"
3373
+ elif self.quality_unmet_search and QualityUnmet:
3374
+ reason = "Quality"
3375
+ elif self.custom_format_unmet_search and not customFormatMet:
3376
+ reason = "CustomFormat"
3377
+ elif self.do_upgrade_search:
3378
+ reason = "Upgrade"
3379
+ else:
3380
+ reason = None
3381
+
3382
+ to_update = {
3383
+ self.model_file.AlbumFileId: albumFileId,
3384
+ self.model_file.Monitored: monitored,
3385
+ self.model_file.QualityMet: qualityMet,
3386
+ self.model_file.Searched: searched,
3387
+ self.model_file.Upgrade: False,
3388
+ self.model_file.MinCustomFormatScore: minCustomFormat,
3389
+ self.model_file.CustomFormatScore: customFormat,
3390
+ self.model_file.CustomFormatMet: customFormatMet,
3391
+ self.model_file.Reason: reason,
3392
+ self.model_file.ArtistTitle: artistName,
3393
+ self.model_file.ArtistId: artistId,
3394
+ self.model_file.ForeignAlbumId: foreignAlbumId,
3395
+ self.model_file.ReleaseDate: releaseDate,
3396
+ }
3397
+
3398
+ if request:
3399
+ to_update[self.model_file.IsRequest] = request
3400
+
3401
+ self.logger.debug(
3402
+ "Updating database entry | %s - %s [Searched:%s][Upgrade:%s][QualityMet:%s][CustomFormatMet:%s]",
3403
+ artistName.ljust(30, "."),
3404
+ title.ljust(30, "."),
3405
+ str(searched).ljust(5),
3406
+ str(False).ljust(5),
3407
+ str(qualityMet).ljust(5),
3408
+ str(customFormatMet).ljust(5),
3409
+ )
3410
+
3411
+ db_commands = self.model_file.insert(
3412
+ Title=title,
3413
+ Monitored=monitored,
3414
+ ArtistTitle=artistName,
3415
+ ArtistId=artistId,
3416
+ ForeignAlbumId=foreignAlbumId,
3417
+ ReleaseDate=releaseDate,
3418
+ EntryId=entryId,
3419
+ Searched=searched,
3420
+ AlbumFileId=albumFileId,
3421
+ IsRequest=request,
3422
+ QualityMet=qualityMet,
3423
+ Upgrade=False,
3424
+ MinCustomFormatScore=minCustomFormat,
3425
+ CustomFormatScore=customFormat,
3426
+ CustomFormatMet=customFormatMet,
3427
+ Reason=reason,
3428
+ ).on_conflict(conflict_target=[self.model_file.EntryId], update=to_update)
3429
+ db_commands.execute()
3430
+
3431
+ # Store tracks for this album (Lidarr only)
3432
+ if self.track_file_model:
3433
+ try:
3434
+ # Fetch tracks for this album via the track API
3435
+ # Tracks are NOT in the media field, they're a separate endpoint
3436
+ tracks = self.client.get_tracks(albumId=entryId)
3437
+ self.logger.debug(
3438
+ f"Fetched {len(tracks) if isinstance(tracks, list) else 0} tracks for album {entryId}"
3439
+ )
3440
+
3441
+ if tracks and isinstance(tracks, list):
3442
+ # First, delete existing tracks for this album
3443
+ self.track_file_model.delete().where(
3444
+ self.track_file_model.AlbumId == entryId
3445
+ ).execute()
3446
+
3447
+ # Insert new tracks
3448
+ track_insert_count = 0
3449
+ for track in tracks:
3450
+ # Get monitored status from track or default to album's monitored status
3451
+ track_monitored = track.get(
3452
+ "monitored", db_entry.get("monitored", False)
3453
+ )
3454
+
3455
+ self.track_file_model.insert(
3456
+ EntryId=track.get("id"),
3457
+ AlbumId=entryId,
3458
+ TrackNumber=track.get("trackNumber", ""),
3459
+ Title=track.get("title", ""),
3460
+ Duration=track.get("duration", 0),
3461
+ HasFile=track.get("hasFile", False),
3462
+ TrackFileId=track.get("trackFileId", 0),
3463
+ Monitored=track_monitored,
3464
+ ).execute()
3465
+ track_insert_count += 1
3466
+
3467
+ if track_insert_count > 0:
3468
+ self.logger.info(
3469
+ f"Stored {track_insert_count} tracks for album {entryId} ({title})"
3470
+ )
3471
+ else:
3472
+ self.logger.debug(
3473
+ f"No tracks found for album {entryId} ({title})"
3474
+ )
3475
+ except Exception as e:
3476
+ self.logger.warning(
3477
+ f"Could not fetch tracks for album {entryId} ({title}): {e}"
3478
+ )
3479
+ else:
3480
+ db_commands = self.model_file.delete().where(
3481
+ self.model_file.EntryId == db_entry["id"]
3482
+ )
3483
+ db_commands.execute()
3484
+ # Also delete tracks for this album (Lidarr only)
3485
+ if self.track_file_model:
3486
+ self.track_file_model.delete().where(
3487
+ self.track_file_model.AlbumId == db_entry["id"]
3488
+ ).execute()
3489
+ else:
3490
+ # Artist handling
3491
+ self.artists_file_model: ArtistFilesModel
3492
+ EntryId = db_entry["id"]
3493
+ artistData = self.artists_file_model.get_or_none(
3494
+ self.artists_file_model.EntryId == EntryId
3495
+ )
3496
+ if db_entry["monitored"] or self.search_unmonitored:
3497
+ while True:
3498
+ try:
3499
+ artistMetadata = self.client.get_artist(id_=EntryId) or {}
3500
+ quality_profile_id = None
3501
+ if isinstance(artistMetadata, dict):
3502
+ quality_profile_id = artistMetadata.get("qualityProfileId")
3503
+ else:
3504
+ quality_profile_id = getattr(
3505
+ artistMetadata, "qualityProfileId", None
3506
+ )
3507
+ if not artistData:
3508
+ if quality_profile_id:
3509
+ profile = (
3510
+ self.client.get_quality_profile(quality_profile_id)
3511
+ or {}
3512
+ )
3513
+ minCustomFormat = profile.get("minFormatScore") or 0
3514
+ else:
3515
+ self.logger.warning(
3516
+ "Artist %s (%s) missing qualityProfileId; "
3517
+ "defaulting custom format score to 0",
3518
+ db_entry.get("artistName"),
3519
+ EntryId,
3520
+ )
3521
+ minCustomFormat = 0
3522
+ else:
3523
+ minCustomFormat = getattr(
3524
+ artistData, "MinCustomFormatScore", 0
3525
+ )
3526
+ break
3527
+ except (
3528
+ requests.exceptions.ChunkedEncodingError,
3529
+ requests.exceptions.ContentDecodingError,
3530
+ requests.exceptions.ConnectionError,
3531
+ JSONDecodeError,
3532
+ ):
3533
+ continue
3534
+ # Calculate if artist is fully searched based on album statistics
3535
+ statistics = artistMetadata.get("statistics", {})
3536
+ albumCount = statistics.get("albumCount", 0)
3537
+ statistics.get("totalAlbumCount", 0)
3538
+ # Check if there's any album with files (sizeOnDisk > 0)
3539
+ sizeOnDisk = statistics.get("sizeOnDisk", 0)
3540
+ # Artist is considered searched if it has albums and at least some have files
3541
+ searched = albumCount > 0 and sizeOnDisk > 0
3542
+
3543
+ Title = artistMetadata.get("artistName")
3544
+ Monitored = db_entry["monitored"]
3545
+
3546
+ to_update = {
3547
+ self.artists_file_model.Monitored: Monitored,
3548
+ self.artists_file_model.Title: Title,
3549
+ self.artists_file_model.Searched: searched,
3550
+ self.artists_file_model.Upgrade: False,
3551
+ self.artists_file_model.MinCustomFormatScore: minCustomFormat,
3552
+ }
3553
+
3554
+ self.logger.debug(
3555
+ "Updating database entry | %s [Searched:%s][Upgrade:%s]",
3556
+ Title.ljust(60, "."),
3557
+ str(searched).ljust(5),
3558
+ str(False).ljust(5),
3559
+ )
3560
+
3561
+ db_commands = self.artists_file_model.insert(
3562
+ EntryId=EntryId,
3563
+ Title=Title,
3564
+ Searched=searched,
3565
+ Monitored=Monitored,
3566
+ Upgrade=False,
3567
+ MinCustomFormatScore=minCustomFormat,
3568
+ ).on_conflict(
3569
+ conflict_target=[self.artists_file_model.EntryId], update=to_update
3570
+ )
3571
+ db_commands.execute()
3572
+
3573
+ # Note: Albums are now handled separately in db_update()
3574
+ # No need to recursively process albums here to avoid duplication
3575
+ else:
3576
+ db_commands = self.artists_file_model.delete().where(
3577
+ self.artists_file_model.EntryId == EntryId
3578
+ )
3579
+ db_commands.execute()
2925
3580
 
2926
3581
  except requests.exceptions.ConnectionError as e:
2927
3582
  self.logger.debug(
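
The album branch above derives its "complete" state from `statistics.percentOfTracks`, and because Lidarr exposes no `qualityCutoffNotMet` flag it compares each track file's quality id against the profile cutoff instead. Both checks in isolation (plain dicts stand in for the Lidarr payloads):

def album_is_complete(album: dict) -> bool:
    # Lidarr reports completeness via statistics.percentOfTracks rather than a hasFile flag.
    return album.get("statistics", {}).get("percentOfTracks", 0) == 100


def album_below_cutoff(track_files: list, cutoff_quality_id: int) -> bool:
    # True if any track file's quality id sits below the profile's cutoff id.
    for track_file in track_files:
        quality = track_file.get("quality", {}).get("quality", {})
        if quality.get("id", 0) < cutoff_quality_id:
            return True
    return False


print(album_is_complete({"statistics": {"percentOfTracks": 100}}))   # True
print(album_below_cutoff([{"quality": {"quality": {"id": 2}}}], 6))  # True
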
@@ -3208,7 +3863,7 @@ class Arr:
3208
3863
  self.model_file.update(Searched=True, Upgrade=True).where(
3209
3864
  file_model.EntryId == file_model.EntryId
3210
3865
  ).execute()
3211
- reason_text = getattr(file_model, "Reason", None) or "Scheduled search"
3866
+ reason_text = getattr(file_model, "Reason", None) or None
3212
3867
  if reason_text:
3213
3868
  self.logger.hnotice(
3214
3869
  "%sSearching for: %s | S%02dE%03d | %s | [id=%s|AirDateUTC=%s][%s]",
@@ -3381,6 +4036,86 @@ class Arr:
3381
4036
  detail=str(reason_text) if reason_text else None,
3382
4037
  )
3383
4038
  return True
4039
+ elif self.type == "lidarr":
4040
+ file_model: AlbumFilesModel
4041
+ if not (request or todays):
4042
+ (
4043
+ self.model_queue.select(self.model_queue.Completed)
4044
+ .where(self.model_queue.EntryId == file_model.EntryId)
4045
+ .execute()
4046
+ )
4047
+ else:
4048
+ pass
4049
+ if file_model.EntryId in self.queue_file_ids:
4050
+ self.logger.debug(
4051
+ "%sSkipping: Already Searched: %s - %s (%s)",
4052
+ request_tag,
4053
+ file_model.ArtistTitle,
4054
+ file_model.Title,
4055
+ file_model.EntryId,
4056
+ )
4057
+ self.model_file.update(Searched=True, Upgrade=True).where(
4058
+ file_model.EntryId == file_model.EntryId
4059
+ ).execute()
4060
+ return True
4061
+ active_commands = self.arr_db_query_commands_count()
4062
+ self.logger.info("%s active search commands, %s remaining", active_commands, commands)
4063
+ if not bypass_limit and active_commands >= self.search_command_limit:
4064
+ self.logger.trace(
4065
+ "Idle: Too many commands in queue: %s - %s | [id=%s]",
4066
+ file_model.ArtistTitle,
4067
+ file_model.Title,
4068
+ file_model.EntryId,
4069
+ )
4070
+ return False
4071
+ self.persistent_queue.insert(EntryId=file_model.EntryId).on_conflict_ignore().execute()
4072
+
4073
+ self.model_queue.insert(
4074
+ Completed=False, EntryId=file_model.EntryId
4075
+ ).on_conflict_replace().execute()
4076
+ if file_model.EntryId:
4077
+ while True:
4078
+ try:
4079
+ self.client.post_command("AlbumSearch", albumIds=[file_model.EntryId])
4080
+ break
4081
+ except (
4082
+ requests.exceptions.ChunkedEncodingError,
4083
+ requests.exceptions.ContentDecodingError,
4084
+ requests.exceptions.ConnectionError,
4085
+ JSONDecodeError,
4086
+ ):
4087
+ continue
4088
+ self.model_file.update(Searched=True, Upgrade=True).where(
4089
+ file_model.EntryId == file_model.EntryId
4090
+ ).execute()
4091
+ reason_text = getattr(file_model, "Reason", None)
4092
+ if reason_text:
4093
+ self.logger.hnotice(
4094
+ "%sSearching for: %s - %s [foreignAlbumId=%s|id=%s][%s]",
4095
+ request_tag,
4096
+ file_model.ArtistTitle,
4097
+ file_model.Title,
4098
+ file_model.ForeignAlbumId,
4099
+ file_model.EntryId,
4100
+ reason_text,
4101
+ )
4102
+ else:
4103
+ self.logger.hnotice(
4104
+ "%sSearching for: %s - %s [foreignAlbumId=%s|id=%s]",
4105
+ request_tag,
4106
+ file_model.ArtistTitle,
4107
+ file_model.Title,
4108
+ file_model.ForeignAlbumId,
4109
+ file_model.EntryId,
4110
+ )
4111
+ context_label = self._humanize_request_tag(request_tag)
4112
+ description = f"{file_model.ArtistTitle} - {file_model.Title}"
4113
+ self._record_search_activity(
4114
+ description,
4115
+ context=context_label,
4116
+ detail=str(reason_text) if reason_text else None,
4117
+ )
4118
+ return True
3384
4119
 
3385
4120
  def process(self):
3386
4121
  self._process_resume()
@@ -4618,6 +5353,9 @@ class Arr:
4618
5353
  elif self.type == "radarr":
4619
5354
  entry_id_field = "movieId"
4620
5355
  file_id_field = "MovieFileId"
5356
+ elif self.type == "lidarr":
5357
+ entry_id_field = "albumId"
5358
+ file_id_field = "AlbumFileId"
4621
5359
  else:
4622
5360
  return False # Unknown type
4623
5361
 
@@ -4720,6 +5458,17 @@ class Arr:
4720
5458
  self.model_queue.delete().where(
4721
5459
  self.model_queue.EntryId.not_in(list(self.queue_file_ids))
4722
5460
  ).execute()
5461
+ elif self.type == "lidarr":
5462
+ self.requeue_cache = {
5463
+ entry["id"]: entry["albumId"] for entry in self.queue if entry.get("albumId")
5464
+ }
5465
+ self.queue_file_ids = {
5466
+ entry["albumId"] for entry in self.queue if entry.get("albumId")
5467
+ }
5468
+ if self.model_queue:
5469
+ self.model_queue.delete().where(
5470
+ self.model_queue.EntryId.not_in(list(self.queue_file_ids))
5471
+ ).execute()
4723
5472
 
4724
5473
  self._update_bad_queue_items()
4725
5474
 
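
The Lidarr queue bookkeeping mirrors the Sonarr/Radarr branches: map queue-record ids to album ids, collect the album ids currently downloading, and prune stale queue rows. The two comprehensions with plain dicts standing in for the Lidarr queue payload:

queue = [
    {"id": 10, "albumId": 7},
    {"id": 11, "albumId": 9},
    {"id": 12},  # entries without an albumId are ignored
]
requeue_cache = {entry["id"]: entry["albumId"] for entry in queue if entry.get("albumId")}
queue_file_ids = {entry["albumId"] for entry in queue if entry.get("albumId")}
print(requeue_cache)   # {10: 7, 11: 9}
print(queue_file_ids)  # {7, 9}
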
@@ -4823,7 +5572,7 @@ class Arr:
4823
5572
  if self.search_setup_completed:
4824
5573
  return
4825
5574
 
4826
- db1, db2, db3, db4 = self._get_models()
5575
+ db1, db2, db3, db4, db5 = self._get_models()
4827
5576
 
4828
5577
  if not (
4829
5578
  self.search_missing
@@ -4833,7 +5582,7 @@ class Arr:
4833
5582
  or self.ombi_search_requests
4834
5583
  or self.overseerr_requests
4835
5584
  ):
4836
- if db4 and getattr(self, "torrents", None) is None:
5585
+ if db5 and getattr(self, "torrents", None) is None:
4837
5586
  self.torrent_db = SqliteDatabase(None)
4838
5587
  self.torrent_db.init(
4839
5588
  str(self._app_data_folder.joinpath("Torrents.db")),
@@ -4847,7 +5596,7 @@ class Arr:
4847
5596
  timeout=15,
4848
5597
  )
4849
5598
 
4850
- class Torrents(db4):
5599
+ class Torrents(db5):
4851
5600
  class Meta:
4852
5601
  database = self.torrent_db
4853
5602
 
@@ -4884,7 +5633,18 @@ class Arr:
4884
5633
  database = self.db
4885
5634
 
4886
5635
  self.db.connect()
4887
- if db3:
5636
+
5637
+ if db4:
5638
+
5639
+ class Tracks(db4):
5640
+ class Meta:
5641
+ database = self.db
5642
+
5643
+ self.track_file_model = Tracks
5644
+ else:
5645
+ self.track_file_model = None
5646
+
5647
+ if db3 and self.type == "sonarr":
4888
5648
 
4889
5649
  class Series(db3):
4890
5650
  class Meta:
@@ -4892,11 +5652,23 @@ class Arr:
4892
5652
 
4893
5653
  self.db.create_tables([Files, Queue, PersistingQueue, Series])
4894
5654
  self.series_file_model = Series
5655
+ self.artists_file_model = None
5656
+ elif db3 and self.type == "lidarr":
5657
+
5658
+ class Artists(db3):
5659
+ class Meta:
5660
+ database = self.db
5661
+
5662
+ self.db.create_tables([Files, Queue, PersistingQueue, Artists, Tracks])
5663
+ self.artists_file_model = Artists
5664
+ self.series_file_model = None # Lidarr uses artists, not series
4895
5665
  else:
5666
+ # Radarr or any type without db3/db4 (series/artists/tracks models)
4896
5667
  self.db.create_tables([Files, Queue, PersistingQueue])
5668
+ self.artists_file_model = None
4897
5669
  self.series_file_model = None
4898
5670
 
4899
- if db4:
5671
+ if db5:
4900
5672
  self.torrent_db = SqliteDatabase(None)
4901
5673
  self.torrent_db.init(
4902
5674
  str(self._app_data_folder.joinpath("Torrents.db")),
@@ -4910,7 +5682,7 @@ class Arr:
4910
5682
  timeout=15,
4911
5683
  )
4912
5684
 
4913
- class Torrents(db4):
5685
+ class Torrents(db5):
4914
5686
  class Meta:
4915
5687
  database = self.torrent_db
4916
5688
 
@@ -4928,22 +5700,17 @@ class Arr:
4928
5700
  def _get_models(
4929
5701
  self,
4930
5702
  ) -> tuple[
4931
- type[EpisodeFilesModel] | type[MoviesFilesModel],
4932
- type[EpisodeQueueModel] | type[MovieQueueModel],
4933
- type[SeriesFilesModel] | None,
5703
+ type[EpisodeFilesModel] | type[MoviesFilesModel] | type[AlbumFilesModel],
5704
+ type[EpisodeQueueModel] | type[MovieQueueModel] | type[AlbumQueueModel],
5705
+ type[SeriesFilesModel] | type[ArtistFilesModel] | None,
5706
+ type[TrackFilesModel] | None,
4934
5707
  type[TorrentLibrary] | None,
4935
5708
  ]:
4936
5709
  if self.type == "sonarr":
4937
- if self.series_search:
4938
- return (
4939
- EpisodeFilesModel,
4940
- EpisodeQueueModel,
4941
- SeriesFilesModel,
4942
- TorrentLibrary if TAGLESS else None,
4943
- )
4944
5710
  return (
4945
5711
  EpisodeFilesModel,
4946
5712
  EpisodeQueueModel,
5713
+ SeriesFilesModel,
4947
5714
  None,
4948
5715
  TorrentLibrary if TAGLESS else None,
4949
5716
  )
@@ -4952,6 +5719,15 @@ class Arr:
4952
5719
  MoviesFilesModel,
4953
5720
  MovieQueueModel,
4954
5721
  None,
5722
+ None,
5723
+ TorrentLibrary if TAGLESS else None,
5724
+ )
5725
+ if self.type == "lidarr":
5726
+ return (
5727
+ AlbumFilesModel,
5728
+ AlbumQueueModel,
5729
+ ArtistFilesModel,
5730
+ TrackFilesModel,
4955
5731
  TorrentLibrary if TAGLESS else None,
4956
5732
  )
4957
5733
  raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
@@ -5609,7 +6385,11 @@ class FreeSpaceManager(Arr):
5609
6385
  self.current_free_space = (
5610
6386
  shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
5611
6387
  )
5612
- self.logger.trace("Current free space: %s", self.current_free_space)
6388
+ self.logger.trace(
6389
+ "Free space monitor initialized | Available: %s | Threshold: %s",
6390
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6391
+ format_bytes(self._min_free_space_bytes),
6392
+ )
5613
6393
  self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-free_space_paused"])
5614
6394
  self.search_missing = False
5615
6395
  self.do_upgrade_search = False
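
The free-space messages now format sizes with `format_bytes` from qBitrr.utils instead of printing raw byte counts. The shipped helper may format differently; this sketch only shows the kind of human-readable output the new log lines rely on:

def format_bytes(size: float) -> str:
    # Hypothetical sketch: binary units with one decimal place.
    for unit in ("B", "KiB", "MiB", "GiB"):
        if abs(size) < 1024:
            return f"{size:.1f} {unit}"
        size /= 1024
    return f"{size:.1f} TiB"


print(format_bytes(50 * 1024**3))  # 50.0 GiB
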
@@ -5643,25 +6423,23 @@ class FreeSpaceManager(Arr):
5643
6423
  None,
5644
6424
  None,
5645
6425
  None,
6426
+ None,
5646
6427
  type[TorrentLibrary] | None,
5647
6428
  ]:
5648
- return None, None, None, (TorrentLibrary if TAGLESS else None)
6429
+ return None, None, None, None, (TorrentLibrary if TAGLESS else None)
5649
6430
 
5650
6431
  def _process_single_torrent_pause_disk_space(self, torrent: qbittorrentapi.TorrentDictionary):
5651
6432
  self.logger.info(
5652
- "Pausing torrent for disk space: "
5653
- "[Progress: %s%%][Added On: %s]"
5654
- "[Availability: %s%%][Time Left: %s]"
5655
- "[Last active: %s] "
5656
- "| [%s] | %s (%s)",
6433
+ "Pausing torrent due to insufficient disk space | "
6434
+ "Name: %s | Progress: %s%% | Size remaining: %s | "
6435
+ "Availability: %s%% | ETA: %s | State: %s | Hash: %s",
6436
+ torrent.name,
5657
6437
  round(torrent.progress * 100, 2),
5658
- datetime.fromtimestamp(torrent.added_on),
6438
+ format_bytes(torrent.amount_left),
5659
6439
  round(torrent.availability * 100, 2),
5660
6440
  timedelta(seconds=torrent.eta),
5661
- datetime.fromtimestamp(torrent.last_activity),
5662
6441
  torrent.state_enum,
5663
- torrent.name,
5664
- torrent.hash,
6442
+ torrent.hash[:8], # Shortened hash for readability
5665
6443
  )
5666
6444
  self.pause.add(torrent.hash)
5667
6445
 
@@ -5670,45 +6448,48 @@ class FreeSpaceManager(Arr):
5670
6448
  free_space_test = self.current_free_space
5671
6449
  free_space_test -= torrent["amount_left"]
5672
6450
  self.logger.trace(
5673
- "Result [%s]: Free space %s -> %s",
6451
+ "Evaluating torrent: %s | Current space: %s | Space after download: %s | Remaining: %s",
5674
6452
  torrent.name,
5675
- self.current_free_space,
5676
- free_space_test,
6453
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6454
+ format_bytes(free_space_test + self._min_free_space_bytes),
6455
+ format_bytes(torrent.amount_left),
5677
6456
  )
5678
6457
  if torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
5679
6458
  self.logger.info(
5680
- "Pause download [%s]: Free space %s -> %s",
6459
+ "Pausing download (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
5681
6460
  torrent.name,
5682
- self.current_free_space,
5683
- free_space_test,
6461
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6462
+ format_bytes(torrent.amount_left),
6463
+ format_bytes(-free_space_test),
5684
6464
  )
5685
6465
  self.add_tags(torrent, ["qBitrr-free_space_paused"])
5686
6466
  self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
5687
6467
  self._process_single_torrent_pause_disk_space(torrent)
5688
6468
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
5689
6469
  self.logger.info(
5690
- "Leave paused [%s]: Free space %s -> %s",
6470
+ "Keeping paused (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
5691
6471
  torrent.name,
5692
- self.current_free_space,
5693
- free_space_test,
6472
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6473
+ format_bytes(torrent.amount_left),
6474
+ format_bytes(-free_space_test),
5694
6475
  )
5695
6476
  self.add_tags(torrent, ["qBitrr-free_space_paused"])
5696
6477
  self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
5697
6478
  elif torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
5698
6479
  self.logger.info(
5699
- "Continue downloading [%s]: Free space %s -> %s",
6480
+ "Continuing download (sufficient space) | Torrent: %s | Available: %s | Space after: %s",
5700
6481
  torrent.name,
5701
- self.current_free_space,
5702
- free_space_test,
6482
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6483
+ format_bytes(free_space_test + self._min_free_space_bytes),
5703
6484
  )
5704
6485
  self.current_free_space = free_space_test
5705
6486
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
5706
6487
  elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
5707
6488
  self.logger.info(
5708
- "Unpause download [%s]: Free space %s -> %s",
6489
+ "Resuming download (space available) | Torrent: %s | Available: %s | Space after: %s",
5709
6490
  torrent.name,
5710
- self.current_free_space,
5711
- free_space_test,
6491
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6492
+ format_bytes(free_space_test + self._min_free_space_bytes),
5712
6493
  )
5713
6494
  self.current_free_space = free_space_test
5714
6495
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
@@ -5716,10 +6497,9 @@ class FreeSpaceManager(Arr):
5716
6497
  torrent, "qBitrr-free_space_paused"
5717
6498
  ):
5718
6499
  self.logger.info(
5719
- "Removing tag [%s] for completed torrent[%s]: Free space %s",
5720
- "qBitrr-free_space_paused",
6500
+ "Torrent completed, removing free space tag | Torrent: %s | Available: %s",
5721
6501
  torrent.name,
5722
- self.current_free_space,
6502
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
5723
6503
  )
5724
6504
  self.remove_tags(torrent, ["qBitrr-free_space_paused"])
5725
6505
 
@@ -5763,7 +6543,14 @@ class FreeSpaceManager(Arr):
5763
6543
  self.current_free_space = (
5764
6544
  shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
5765
6545
  )
5766
- self.logger.trace("Current free space: %s", self.current_free_space)
6546
+ self.logger.trace(
6547
+ "Processing torrents | Available: %s | Threshold: %s | Usable: %s | Torrents: %d | Paused for space: %d",
6548
+ format_bytes(self.current_free_space + self._min_free_space_bytes),
6549
+ format_bytes(self._min_free_space_bytes),
6550
+ format_bytes(self.current_free_space),
6551
+ self.category_torrent_count,
6552
+ self.free_space_tagged_count,
6553
+ )
5767
6554
  sorted_torrents = sorted(torrents, key=lambda t: t["priority"])
5768
6555
  for torrent in sorted_torrents:
5769
6556
  with contextlib.suppress(qbittorrentapi.NotFound404Error):
@@ -5819,7 +6606,7 @@ class ArrManager:
5819
6606
 
5820
6607
  def build_arr_instances(self):
5821
6608
  for key in CONFIG.sections():
5822
- if search := re.match("(rad|son|anim)arr.*", key, re.IGNORECASE):
6609
+ if search := re.match("(rad|son|anim|lid)arr.*", key, re.IGNORECASE):
5823
6610
  name = search.group(0)
5824
6611
  match = search.group(1)
5825
6612
  if match.lower() == "son":
@@ -5828,6 +6615,8 @@ class ArrManager:
5828
6615
  call_cls = SonarrAPI
5829
6616
  elif match.lower() == "rad":
5830
6617
  call_cls = RadarrAPI
6618
+ elif match.lower() == "lid":
6619
+ call_cls = LidarrAPI
5831
6620
  else:
5832
6621
  call_cls = None
5833
6622
  try:
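
Config section names are matched case-insensitively against the radarr/sonarr/animarr/lidarr prefixes, and a `lid*` section now maps to `LidarrAPI`. The prefix-to-client selection shown in this hunk, reduced to a standalone helper (prefixes not visible in the hunk are left out):

import re


def client_class_name(section: str):
    # Mirrors build_arr_instances(): the section prefix selects the pyarr client class.
    match = re.match("(rad|son|anim|lid)arr.*", section, re.IGNORECASE)
    if not match:
        return None
    prefix = match.group(1).lower()
    return {"son": "SonarrAPI", "rad": "RadarrAPI", "lid": "LidarrAPI"}.get(prefix)


print(client_class_name("Lidarr-Music"))  # LidarrAPI
print(client_class_name("Radarr-4K"))     # RadarrAPI
print(client_class_name("qbit"))          # None
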