qBitrr2 5.1.0-py3-none-any.whl → 5.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qBitrr/arss.py +1155 -225
- qBitrr/bundled_data.py +2 -2
- qBitrr/config.py +5 -1
- qBitrr/gen_config.py +252 -32
- qBitrr/logger.py +82 -17
- qBitrr/main.py +35 -15
- qBitrr/search_activity_store.py +44 -17
- qBitrr/tables.py +50 -323
- qBitrr/utils.py +29 -0
- qBitrr/versioning.py +33 -0
- qBitrr/webui.py +750 -74
- {qbitrr2-5.1.0.dist-info → qbitrr2-5.3.0.dist-info}/METADATA +8 -6
- qbitrr2-5.3.0.dist-info/RECORD +24 -0
- qbitrr2-5.1.0.dist-info/RECORD +0 -24
- {qbitrr2-5.1.0.dist-info → qbitrr2-5.3.0.dist-info}/WHEEL +0 -0
- {qbitrr2-5.1.0.dist-info → qbitrr2-5.3.0.dist-info}/entry_points.txt +0 -0
- {qbitrr2-5.1.0.dist-info → qbitrr2-5.3.0.dist-info}/licenses/LICENSE +0 -0
- {qbitrr2-5.1.0.dist-info → qbitrr2-5.3.0.dist-info}/top_level.txt +0 -0
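The bulk of the changes below are in qBitrr/arss.py, which extends the Arr class to drive Lidarr alongside Sonarr and Radarr, adds per-instance SQLite database handling, and turns the EntrySearch.SearchBySeries setting into a tri-state value (True, False, or "smart"). As a rough sketch of that tri-state normalization (the standalone helper name is an assumption; the accepted values mirror the lines added in the diff):

def normalize_series_search(value):
    # Assumed helper, not part of the package: mirrors the parsing added in Arr.__init__.
    if isinstance(value, str) and value.lower() == "smart":
        return "smart"  # decide per search whether to issue a series or an episode search
    if value in (True, "true", "True", "TRUE", 1):
        return True  # always use SeriesSearch
    return False  # always use episode-level searches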
qBitrr/arss.py
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
+
import atexit
|
|
3
4
|
import contextlib
|
|
4
5
|
import itertools
|
|
5
6
|
import logging
|
|
@@ -19,14 +20,15 @@ import qbittorrentapi
|
|
|
19
20
|
import qbittorrentapi.exceptions
|
|
20
21
|
import requests
|
|
21
22
|
from packaging import version as version_parser
|
|
22
|
-
from peewee import Model
|
|
23
|
-
from pyarr import RadarrAPI, SonarrAPI
|
|
23
|
+
from peewee import Model, SqliteDatabase
|
|
24
|
+
from pyarr import LidarrAPI, RadarrAPI, SonarrAPI
|
|
24
25
|
from pyarr.exceptions import PyarrResourceNotFound, PyarrServerError
|
|
25
26
|
from pyarr.types import JsonObject
|
|
26
27
|
from qbittorrentapi import TorrentDictionary, TorrentStates
|
|
27
28
|
from ujson import JSONDecodeError
|
|
28
29
|
|
|
29
30
|
from qBitrr.config import (
|
|
31
|
+
APPDATA_FOLDER,
|
|
30
32
|
AUTO_PAUSE_RESUME,
|
|
31
33
|
COMPLETED_DOWNLOAD_FOLDER,
|
|
32
34
|
CONFIG,
|
|
@@ -55,10 +57,23 @@ from qBitrr.search_activity_store import (
|
|
|
55
57
|
fetch_search_activities,
|
|
56
58
|
record_search_activity,
|
|
57
59
|
)
|
|
58
|
-
from qBitrr.tables import
|
|
60
|
+
from qBitrr.tables import (
|
|
61
|
+
AlbumFilesModel,
|
|
62
|
+
AlbumQueueModel,
|
|
63
|
+
ArtistFilesModel,
|
|
64
|
+
EpisodeFilesModel,
|
|
65
|
+
EpisodeQueueModel,
|
|
66
|
+
FilesQueued,
|
|
67
|
+
MovieQueueModel,
|
|
68
|
+
MoviesFilesModel,
|
|
69
|
+
SeriesFilesModel,
|
|
70
|
+
TorrentLibrary,
|
|
71
|
+
TrackFilesModel,
|
|
72
|
+
)
|
|
59
73
|
from qBitrr.utils import (
|
|
60
74
|
ExpiringSet,
|
|
61
75
|
absolute_file_paths,
|
|
76
|
+
format_bytes,
|
|
62
77
|
has_internet,
|
|
63
78
|
parse_size,
|
|
64
79
|
validate_and_return_torrent_file,
|
|
@@ -108,7 +123,10 @@ if TYPE_CHECKING:
|
|
|
108
123
|
|
|
109
124
|
class Arr:
|
|
110
125
|
def __init__(
|
|
111
|
-
self,
|
|
126
|
+
self,
|
|
127
|
+
name: str,
|
|
128
|
+
manager: ArrManager,
|
|
129
|
+
client_cls: type[Callable | RadarrAPI | SonarrAPI | LidarrAPI],
|
|
112
130
|
):
|
|
113
131
|
if name in manager.groups:
|
|
114
132
|
raise OSError(f"Group '{name}' has already been registered.")
|
|
@@ -293,13 +311,23 @@ class Arr:
|
|
|
293
311
|
else:
|
|
294
312
|
self._delta = -1
|
|
295
313
|
|
|
314
|
+
self._app_data_folder = APPDATA_FOLDER
|
|
315
|
+
self.search_db_file = self._app_data_folder.joinpath(f"{self._name}.db")
|
|
316
|
+
|
|
296
317
|
self.ombi_search_requests = CONFIG.get(
|
|
297
318
|
f"{name}.EntrySearch.Ombi.SearchOmbiRequests", fallback=False
|
|
298
319
|
)
|
|
299
320
|
self.overseerr_requests = CONFIG.get(
|
|
300
321
|
f"{name}.EntrySearch.Overseerr.SearchOverseerrRequests", fallback=False
|
|
301
322
|
)
|
|
302
|
-
|
|
323
|
+
# SearchBySeries can be: True (always series), False (always episode), or "smart" (automatic)
|
|
324
|
+
series_search_config = CONFIG.get(f"{name}.EntrySearch.SearchBySeries", fallback=False)
|
|
325
|
+
if isinstance(series_search_config, str) and series_search_config.lower() == "smart":
|
|
326
|
+
self.series_search = "smart"
|
|
327
|
+
elif series_search_config in (True, "true", "True", "TRUE", 1):
|
|
328
|
+
self.series_search = True
|
|
329
|
+
else:
|
|
330
|
+
self.series_search = False
|
|
303
331
|
if self.ombi_search_requests:
|
|
304
332
|
self.ombi_uri = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiURI")
|
|
305
333
|
self.ombi_api_key = CONFIG.get_or_raise(f"{name}.EntrySearch.Ombi.OmbiAPIKey")
|
|
@@ -371,6 +399,18 @@ class Arr:
|
|
|
371
399
|
self.type = "sonarr"
|
|
372
400
|
elif isinstance(self.client, RadarrAPI):
|
|
373
401
|
self.type = "radarr"
|
|
402
|
+
elif isinstance(self.client, LidarrAPI):
|
|
403
|
+
self.type = "lidarr"
|
|
404
|
+
|
|
405
|
+
# Disable unsupported features for Lidarr
|
|
406
|
+
if self.type == "lidarr":
|
|
407
|
+
self.search_by_year = False
|
|
408
|
+
self.ombi_search_requests = False
|
|
409
|
+
self.overseerr_requests = False
|
|
410
|
+
self.ombi_uri = None
|
|
411
|
+
self.ombi_api_key = None
|
|
412
|
+
self.overseerr_uri = None
|
|
413
|
+
self.overseerr_api_key = None
|
|
374
414
|
|
|
375
415
|
try:
|
|
376
416
|
version_info = self.client.get_update()
|
|
@@ -400,6 +440,10 @@ class Arr:
|
|
|
400
440
|
if self.use_temp_for_missing:
|
|
401
441
|
self.temp_quality_profile_ids = self.parse_quality_profiles()
|
|
402
442
|
|
|
443
|
+
# Cache for valid quality profile IDs to avoid repeated API calls and warnings
|
|
444
|
+
self._quality_profile_cache: dict[int, dict] = {}
|
|
445
|
+
self._invalid_quality_profiles: set[int] = set()
|
|
446
|
+
|
|
403
447
|
if self.rss_sync_timer > 0:
|
|
404
448
|
self.rss_sync_timer_last_checked = datetime(1970, 1, 1)
|
|
405
449
|
else:
|
|
@@ -536,9 +580,12 @@ class Arr:
|
|
|
536
580
|
self.quality_unmet_search
|
|
537
581
|
or self.do_upgrade_search
|
|
538
582
|
or self.custom_format_unmet_search
|
|
539
|
-
or self.series_search
|
|
583
|
+
or self.series_search == True
|
|
540
584
|
):
|
|
541
585
|
self.search_api_command = "SeriesSearch"
|
|
586
|
+
elif self.series_search == "smart":
|
|
587
|
+
# In smart mode, the command will be determined dynamically
|
|
588
|
+
self.search_api_command = "SeriesSearch" # Default, will be overridden per search
|
|
542
589
|
else:
|
|
543
590
|
self.search_api_command = "MissingEpisodeSearch"
|
|
544
591
|
|
|
@@ -558,10 +605,26 @@ class Arr:
|
|
|
558
605
|
self.series_file_model: Model | None = None
|
|
559
606
|
self.model_queue: Model | None = None
|
|
560
607
|
self.persistent_queue: Model | None = None
|
|
608
|
+
self.track_file_model: Model | None = None
|
|
561
609
|
self.torrents: TorrentLibrary | None = None
|
|
610
|
+
self.torrent_db: SqliteDatabase | None = None
|
|
611
|
+
self.db: SqliteDatabase | None = None
|
|
562
612
|
# Initialize search mode (and torrent tag-emulation DB in TAGLESS)
|
|
563
613
|
# early and fail fast if it cannot be set up.
|
|
564
614
|
self.register_search_mode()
|
|
615
|
+
atexit.register(
|
|
616
|
+
lambda: (
|
|
617
|
+
hasattr(self, "db") and self.db and not self.db.is_closed() and self.db.close()
|
|
618
|
+
)
|
|
619
|
+
)
|
|
620
|
+
atexit.register(
|
|
621
|
+
lambda: (
|
|
622
|
+
hasattr(self, "torrent_db")
|
|
623
|
+
and self.torrent_db
|
|
624
|
+
and not self.torrent_db.is_closed()
|
|
625
|
+
and self.torrent_db.close()
|
|
626
|
+
)
|
|
627
|
+
)
|
|
565
628
|
self.logger.hnotice("Starting %s monitor", self._name)
|
|
566
629
|
|
|
567
630
|
@staticmethod
|
|
@@ -1105,6 +1168,26 @@ class Arr:
|
|
|
1105
1168
|
),
|
|
1106
1169
|
)
|
|
1107
1170
|
self.logger.success("DownloadedMoviesScan: %s", path)
|
|
1171
|
+
elif self.type == "lidarr":
|
|
1172
|
+
with_retry(
|
|
1173
|
+
lambda: self.client.post_command(
|
|
1174
|
+
"DownloadedAlbumsScan",
|
|
1175
|
+
path=str(path),
|
|
1176
|
+
downloadClientId=torrent.hash.upper(),
|
|
1177
|
+
importMode=self.import_mode,
|
|
1178
|
+
),
|
|
1179
|
+
retries=3,
|
|
1180
|
+
backoff=0.5,
|
|
1181
|
+
max_backoff=3,
|
|
1182
|
+
exceptions=(
|
|
1183
|
+
requests.exceptions.ChunkedEncodingError,
|
|
1184
|
+
requests.exceptions.ContentDecodingError,
|
|
1185
|
+
requests.exceptions.ConnectionError,
|
|
1186
|
+
JSONDecodeError,
|
|
1187
|
+
requests.exceptions.RequestException,
|
|
1188
|
+
),
|
|
1189
|
+
)
|
|
1190
|
+
self.logger.success("DownloadedAlbumsScan: %s", path)
|
|
1108
1191
|
except Exception as ex:
|
|
1109
1192
|
self.logger.error(
|
|
1110
1193
|
"Downloaded scan error: [%s][%s][%s][%s]",
|
|
@@ -1284,6 +1367,48 @@ class Arr:
|
|
|
1284
1367
|
continue
|
|
1285
1368
|
if self.persistent_queue:
|
|
1286
1369
|
self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
|
|
1370
|
+
elif self.type == "lidarr":
|
|
1371
|
+
self.logger.trace("Requeue cache entry: %s", object_id)
|
|
1372
|
+
while True:
|
|
1373
|
+
try:
|
|
1374
|
+
data = self.client.get_album(object_id)
|
|
1375
|
+
name = data.get("title")
|
|
1376
|
+
if name:
|
|
1377
|
+
artist_title = data.get("artist", {}).get("artistName", "")
|
|
1378
|
+
foreign_album_id = data.get("foreignAlbumId", "")
|
|
1379
|
+
self.logger.notice(
|
|
1380
|
+
"Re-Searching album: %s - %s | [foreignAlbumId=%s|id=%s]",
|
|
1381
|
+
artist_title,
|
|
1382
|
+
name,
|
|
1383
|
+
foreign_album_id,
|
|
1384
|
+
object_id,
|
|
1385
|
+
)
|
|
1386
|
+
else:
|
|
1387
|
+
self.logger.notice("Re-Searching album: %s", object_id)
|
|
1388
|
+
break
|
|
1389
|
+
except (
|
|
1390
|
+
requests.exceptions.ChunkedEncodingError,
|
|
1391
|
+
requests.exceptions.ContentDecodingError,
|
|
1392
|
+
requests.exceptions.ConnectionError,
|
|
1393
|
+
JSONDecodeError,
|
|
1394
|
+
AttributeError,
|
|
1395
|
+
):
|
|
1396
|
+
continue
|
|
1397
|
+
if object_id in self.queue_file_ids:
|
|
1398
|
+
self.queue_file_ids.remove(object_id)
|
|
1399
|
+
while True:
|
|
1400
|
+
try:
|
|
1401
|
+
self.client.post_command("AlbumSearch", albumIds=[object_id])
|
|
1402
|
+
break
|
|
1403
|
+
except (
|
|
1404
|
+
requests.exceptions.ChunkedEncodingError,
|
|
1405
|
+
requests.exceptions.ContentDecodingError,
|
|
1406
|
+
requests.exceptions.ConnectionError,
|
|
1407
|
+
JSONDecodeError,
|
|
1408
|
+
):
|
|
1409
|
+
continue
|
|
1410
|
+
if self.persistent_queue:
|
|
1411
|
+
self.persistent_queue.insert(EntryId=object_id).on_conflict_ignore()
|
|
1287
1412
|
|
|
1288
1413
|
def _process_errored(self) -> None:
|
|
1289
1414
|
# Recheck all torrents marked for rechecking.
|
|
@@ -1301,10 +1426,6 @@ class Arr:
|
|
|
1301
1426
|
to_delete_all = self.delete.union(
|
|
1302
1427
|
self.missing_files_post_delete, self.downloads_with_bad_error_message_blocklist
|
|
1303
1428
|
)
|
|
1304
|
-
if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
|
|
1305
|
-
delete_ = True
|
|
1306
|
-
else:
|
|
1307
|
-
delete_ = False
|
|
1308
1429
|
skip_blacklist = {
|
|
1309
1430
|
i.upper() for i in self.skip_blacklist.union(self.missing_files_post_delete)
|
|
1310
1431
|
}
|
|
@@ -1333,7 +1454,7 @@ class Arr:
|
|
|
1333
1454
|
del self.manager.qbit_manager.name_cache[h]
|
|
1334
1455
|
if h in self.manager.qbit_manager.cache:
|
|
1335
1456
|
del self.manager.qbit_manager.cache[h]
|
|
1336
|
-
if
|
|
1457
|
+
if self.missing_files_post_delete or self.downloads_with_bad_error_message_blocklist:
|
|
1337
1458
|
self.missing_files_post_delete.clear()
|
|
1338
1459
|
self.downloads_with_bad_error_message_blocklist.clear()
|
|
1339
1460
|
self.skip_blacklist.clear()
|
|
@@ -1471,11 +1592,55 @@ class Arr:
|
|
|
1471
1592
|
) -> Iterable[
|
|
1472
1593
|
tuple[MoviesFilesModel | EpisodeFilesModel | SeriesFilesModel, bool, bool, bool, int]
|
|
1473
1594
|
]:
|
|
1474
|
-
if self.type == "sonarr" and self.series_search:
|
|
1595
|
+
if self.type == "sonarr" and self.series_search == True:
|
|
1475
1596
|
serieslist = self.db_get_files_series()
|
|
1476
1597
|
for series in serieslist:
|
|
1477
1598
|
yield series[0], series[1], series[2], series[2] is not True, len(serieslist)
|
|
1478
|
-
elif self.type == "sonarr" and
|
|
1599
|
+
elif self.type == "sonarr" and self.series_search == "smart":
|
|
1600
|
+
# Smart mode: decide dynamically based on what needs to be searched
|
|
1601
|
+
episodelist = self.db_get_files_episodes()
|
|
1602
|
+
if episodelist:
|
|
1603
|
+
# Group episodes by series to determine if we should search by series or episode
|
|
1604
|
+
series_episodes_map = {}
|
|
1605
|
+
for episode_entry in episodelist:
|
|
1606
|
+
episode = episode_entry[0]
|
|
1607
|
+
series_id = episode.SeriesId
|
|
1608
|
+
if series_id not in series_episodes_map:
|
|
1609
|
+
series_episodes_map[series_id] = []
|
|
1610
|
+
series_episodes_map[series_id].append(episode_entry)
|
|
1611
|
+
|
|
1612
|
+
# Process each series
|
|
1613
|
+
for series_id, episodes in series_episodes_map.items():
|
|
1614
|
+
if len(episodes) > 1:
|
|
1615
|
+
# Multiple episodes from same series - use series search (smart decision)
|
|
1616
|
+
self.logger.info(
|
|
1617
|
+
"[SMART MODE] Using series search for %s episodes from series ID %s",
|
|
1618
|
+
len(episodes),
|
|
1619
|
+
series_id,
|
|
1620
|
+
)
|
|
1621
|
+
# Create a series entry for searching
|
|
1622
|
+
series_model = (
|
|
1623
|
+
self.series_file_model.select()
|
|
1624
|
+
.where(self.series_file_model.EntryId == series_id)
|
|
1625
|
+
.first()
|
|
1626
|
+
)
|
|
1627
|
+
if series_model:
|
|
1628
|
+
yield series_model, episodes[0][1], episodes[0][2], True, len(
|
|
1629
|
+
episodelist
|
|
1630
|
+
)
|
|
1631
|
+
else:
|
|
1632
|
+
# Single episode - use episode search (smart decision)
|
|
1633
|
+
episode = episodes[0][0]
|
|
1634
|
+
self.logger.info(
|
|
1635
|
+
"[SMART MODE] Using episode search for single episode: %s S%02dE%03d",
|
|
1636
|
+
episode.SeriesTitle,
|
|
1637
|
+
episode.SeasonNumber,
|
|
1638
|
+
episode.EpisodeNumber,
|
|
1639
|
+
)
|
|
1640
|
+
yield episodes[0][0], episodes[0][1], episodes[0][2], False, len(
|
|
1641
|
+
episodelist
|
|
1642
|
+
)
|
|
1643
|
+
elif self.type == "sonarr" and self.series_search == False:
|
|
1479
1644
|
episodelist = self.db_get_files_episodes()
|
|
1480
1645
|
for episodes in episodelist:
|
|
1481
1646
|
yield episodes[0], episodes[1], episodes[2], False, len(episodelist)
|
|
@@ -1483,6 +1648,10 @@ class Arr:
|
|
|
1483
1648
|
movielist = self.db_get_files_movies()
|
|
1484
1649
|
for movies in movielist:
|
|
1485
1650
|
yield movies[0], movies[1], movies[2], False, len(movielist)
|
|
1651
|
+
elif self.type == "lidarr":
|
|
1652
|
+
albumlist = self.db_get_files_movies() # This calls the lidarr section we added
|
|
1653
|
+
for albums in albumlist:
|
|
1654
|
+
yield albums[0], albums[1], albums[2], False, len(albumlist)
|
|
1486
1655
|
|
|
1487
1656
|
def db_maybe_reset_entry_searched_state(self):
|
|
1488
1657
|
if self.type == "sonarr":
|
|
@@ -1490,6 +1659,8 @@ class Arr:
|
|
|
1490
1659
|
self.db_reset__episode_searched_state()
|
|
1491
1660
|
elif self.type == "radarr":
|
|
1492
1661
|
self.db_reset__movie_searched_state()
|
|
1662
|
+
elif self.type == "lidarr":
|
|
1663
|
+
self.db_reset__album_searched_state()
|
|
1493
1664
|
self.loop_completed = False
|
|
1494
1665
|
|
|
1495
1666
|
def db_reset__series_searched_state(self):
|
|
@@ -1572,6 +1743,33 @@ class Arr:
|
|
|
1572
1743
|
self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
|
|
1573
1744
|
self.loop_completed = False
|
|
1574
1745
|
|
|
1746
|
+
def db_reset__album_searched_state(self):
|
|
1747
|
+
ids = []
|
|
1748
|
+
self.model_file: AlbumFilesModel
|
|
1749
|
+
if (
|
|
1750
|
+
self.loop_completed is True and self.reset_on_completion
|
|
1751
|
+
): # Only wipe if a loop completed was tagged
|
|
1752
|
+
self.model_file.update(Searched=False, Upgrade=False).where(
|
|
1753
|
+
self.model_file.Searched == True
|
|
1754
|
+
).execute()
|
|
1755
|
+
while True:
|
|
1756
|
+
try:
|
|
1757
|
+
artists = self.client.get_artist()
|
|
1758
|
+
for artist in artists:
|
|
1759
|
+
albums = self.client.get_album(artistId=artist["id"])
|
|
1760
|
+
for album in albums:
|
|
1761
|
+
ids.append(album["id"])
|
|
1762
|
+
break
|
|
1763
|
+
except (
|
|
1764
|
+
requests.exceptions.ChunkedEncodingError,
|
|
1765
|
+
requests.exceptions.ContentDecodingError,
|
|
1766
|
+
requests.exceptions.ConnectionError,
|
|
1767
|
+
JSONDecodeError,
|
|
1768
|
+
):
|
|
1769
|
+
continue
|
|
1770
|
+
self.model_file.delete().where(self.model_file.EntryId.not_in(ids)).execute()
|
|
1771
|
+
self.loop_completed = False
|
|
1772
|
+
|
|
1575
1773
|
def db_get_files_series(self) -> list[list[SeriesFilesModel, bool, bool]] | None:
|
|
1576
1774
|
entries = []
|
|
1577
1775
|
if not (self.search_missing or self.do_upgrade_search):
|
|
@@ -1739,6 +1937,36 @@ class Arr:
|
|
|
1739
1937
|
):
|
|
1740
1938
|
entries.append([entry, False, False])
|
|
1741
1939
|
return entries
|
|
1940
|
+
elif self.type == "lidarr":
|
|
1941
|
+
condition = True # Placeholder, will be refined
|
|
1942
|
+
if self.do_upgrade_search:
|
|
1943
|
+
condition &= self.model_file.Upgrade == False
|
|
1944
|
+
else:
|
|
1945
|
+
if self.quality_unmet_search and not self.custom_format_unmet_search:
|
|
1946
|
+
condition &= (self.model_file.Searched == False) | (
|
|
1947
|
+
self.model_file.QualityMet == False
|
|
1948
|
+
)
|
|
1949
|
+
elif not self.quality_unmet_search and self.custom_format_unmet_search:
|
|
1950
|
+
condition &= (self.model_file.Searched == False) | (
|
|
1951
|
+
self.model_file.CustomFormatMet == False
|
|
1952
|
+
)
|
|
1953
|
+
elif self.quality_unmet_search and self.custom_format_unmet_search:
|
|
1954
|
+
condition &= (
|
|
1955
|
+
(self.model_file.Searched == False)
|
|
1956
|
+
| (self.model_file.QualityMet == False)
|
|
1957
|
+
| (self.model_file.CustomFormatMet == False)
|
|
1958
|
+
)
|
|
1959
|
+
else:
|
|
1960
|
+
condition &= self.model_file.AlbumFileId == 0
|
|
1961
|
+
condition &= self.model_file.Searched == False
|
|
1962
|
+
for entry in (
|
|
1963
|
+
self.model_file.select()
|
|
1964
|
+
.where(condition)
|
|
1965
|
+
.order_by(self.model_file.AlbumFileId.asc())
|
|
1966
|
+
.execute()
|
|
1967
|
+
):
|
|
1968
|
+
entries.append([entry, False, False])
|
|
1969
|
+
return entries
|
|
1742
1970
|
|
|
1743
1971
|
def db_get_request_files(self) -> Iterable[tuple[MoviesFilesModel | EpisodeFilesModel, int]]:
|
|
1744
1972
|
entries = []
|
|
@@ -1936,104 +2164,47 @@ class Arr:
|
|
|
1936
2164
|
except Exception:
|
|
1937
2165
|
pass
|
|
1938
2166
|
self.db_update_todays_releases()
|
|
1939
|
-
if self.db_update_processed
|
|
2167
|
+
if self.db_update_processed:
|
|
1940
2168
|
return
|
|
1941
|
-
|
|
1942
|
-
self.logger.info("Started updating database for %s", self.search_current_year)
|
|
1943
|
-
else:
|
|
1944
|
-
self.logger.info("Started updating database")
|
|
2169
|
+
self.logger.info("Started updating database")
|
|
1945
2170
|
if self.type == "sonarr":
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
if self.search_by_year:
|
|
1959
|
-
for s in series:
|
|
1960
|
-
if isinstance(s, str):
|
|
1961
|
-
continue
|
|
1962
|
-
episodes = self.client.get_episode(s["id"], True)
|
|
1963
|
-
for e in episodes:
|
|
1964
|
-
if isinstance(e, str):
|
|
1965
|
-
continue
|
|
1966
|
-
if "airDateUtc" in e:
|
|
1967
|
-
if datetime.strptime(
|
|
1968
|
-
e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
|
|
1969
|
-
).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
|
|
1970
|
-
continue
|
|
1971
|
-
if (
|
|
1972
|
-
datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
|
|
1973
|
-
.replace(tzinfo=timezone.utc)
|
|
1974
|
-
.date()
|
|
1975
|
-
< datetime(
|
|
1976
|
-
month=1, day=1, year=int(self.search_current_year)
|
|
1977
|
-
).date()
|
|
1978
|
-
):
|
|
1979
|
-
continue
|
|
1980
|
-
if (
|
|
1981
|
-
datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ")
|
|
1982
|
-
.replace(tzinfo=timezone.utc)
|
|
1983
|
-
.date()
|
|
1984
|
-
> datetime(
|
|
1985
|
-
month=12, day=31, year=int(self.search_current_year)
|
|
1986
|
-
).date()
|
|
1987
|
-
):
|
|
1988
|
-
continue
|
|
1989
|
-
if not self.search_specials and e["seasonNumber"] == 0:
|
|
1990
|
-
continue
|
|
1991
|
-
self.db_update_single_series(db_entry=e)
|
|
2171
|
+
# Always fetch series list for both episode and series-level tracking
|
|
2172
|
+
while True:
|
|
2173
|
+
try:
|
|
2174
|
+
series = self.client.get_series()
|
|
2175
|
+
break
|
|
2176
|
+
except (
|
|
2177
|
+
requests.exceptions.ChunkedEncodingError,
|
|
2178
|
+
requests.exceptions.ContentDecodingError,
|
|
2179
|
+
requests.exceptions.ConnectionError,
|
|
2180
|
+
JSONDecodeError,
|
|
2181
|
+
):
|
|
2182
|
+
continue
|
|
1992
2183
|
|
|
1993
|
-
|
|
1994
|
-
|
|
1995
|
-
|
|
1996
|
-
|
|
1997
|
-
|
|
1998
|
-
|
|
1999
|
-
|
|
2000
|
-
continue
|
|
2001
|
-
if "airDateUtc" in e:
|
|
2002
|
-
if datetime.strptime(
|
|
2003
|
-
e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ"
|
|
2004
|
-
).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc):
|
|
2005
|
-
continue
|
|
2006
|
-
if not self.search_specials and e["seasonNumber"] == 0:
|
|
2007
|
-
continue
|
|
2008
|
-
self.db_update_single_series(db_entry=e)
|
|
2009
|
-
self.db_update_processed = True
|
|
2010
|
-
else:
|
|
2011
|
-
while True:
|
|
2012
|
-
try:
|
|
2013
|
-
series = self.client.get_series()
|
|
2014
|
-
break
|
|
2015
|
-
except (
|
|
2016
|
-
requests.exceptions.ChunkedEncodingError,
|
|
2017
|
-
requests.exceptions.ContentDecodingError,
|
|
2018
|
-
requests.exceptions.ConnectionError,
|
|
2019
|
-
JSONDecodeError,
|
|
2020
|
-
):
|
|
2184
|
+
# Process episodes for episode-level tracking (all episodes)
|
|
2185
|
+
for s in series:
|
|
2186
|
+
if isinstance(s, str):
|
|
2187
|
+
continue
|
|
2188
|
+
episodes = self.client.get_episode(s["id"], True)
|
|
2189
|
+
for e in episodes:
|
|
2190
|
+
if isinstance(e, str):
|
|
2021
2191
|
continue
|
|
2022
|
-
|
|
2023
|
-
|
|
2024
|
-
|
|
2025
|
-
|
|
2026
|
-
if s["year"] < self.search_current_year:
|
|
2027
|
-
continue
|
|
2028
|
-
if s["year"] > self.search_current_year:
|
|
2192
|
+
if "airDateUtc" in e:
|
|
2193
|
+
if datetime.strptime(e["airDateUtc"], "%Y-%m-%dT%H:%M:%SZ").replace(
|
|
2194
|
+
tzinfo=timezone.utc
|
|
2195
|
+
) > datetime.now(timezone.utc):
|
|
2029
2196
|
continue
|
|
2030
|
-
self.
|
|
2031
|
-
else:
|
|
2032
|
-
for s in series:
|
|
2033
|
-
if isinstance(s, str):
|
|
2197
|
+
if not self.search_specials and e["seasonNumber"] == 0:
|
|
2034
2198
|
continue
|
|
2035
|
-
self.db_update_single_series(db_entry=
|
|
2036
|
-
|
|
2199
|
+
self.db_update_single_series(db_entry=e, series=False)
|
|
2200
|
+
|
|
2201
|
+
# Process series for series-level tracking (all series)
|
|
2202
|
+
for s in series:
|
|
2203
|
+
if isinstance(s, str):
|
|
2204
|
+
continue
|
|
2205
|
+
self.db_update_single_series(db_entry=s, series=True)
|
|
2206
|
+
|
|
2207
|
+
self.db_update_processed = True
|
|
2037
2208
|
elif self.type == "radarr":
|
|
2038
2209
|
while True:
|
|
2039
2210
|
try:
|
|
@@ -2046,20 +2217,58 @@ class Arr:
|
|
|
2046
2217
|
JSONDecodeError,
|
|
2047
2218
|
):
|
|
2048
2219
|
continue
|
|
2049
|
-
|
|
2050
|
-
|
|
2051
|
-
|
|
2052
|
-
|
|
2053
|
-
|
|
2054
|
-
|
|
2055
|
-
|
|
2220
|
+
# Process all movies
|
|
2221
|
+
for m in movies:
|
|
2222
|
+
if isinstance(m, str):
|
|
2223
|
+
continue
|
|
2224
|
+
self.db_update_single_series(db_entry=m)
|
|
2225
|
+
self.db_update_processed = True
|
|
2226
|
+
elif self.type == "lidarr":
|
|
2227
|
+
while True:
|
|
2228
|
+
try:
|
|
2229
|
+
artists = self.client.get_artist()
|
|
2230
|
+
break
|
|
2231
|
+
except (
|
|
2232
|
+
requests.exceptions.ChunkedEncodingError,
|
|
2233
|
+
requests.exceptions.ContentDecodingError,
|
|
2234
|
+
requests.exceptions.ConnectionError,
|
|
2235
|
+
JSONDecodeError,
|
|
2236
|
+
):
|
|
2237
|
+
continue
|
|
2238
|
+
for artist in artists:
|
|
2239
|
+
if isinstance(artist, str):
|
|
2240
|
+
continue
|
|
2241
|
+
while True:
|
|
2242
|
+
try:
|
|
2243
|
+
# allArtistAlbums=True includes full album data with media/tracks
|
|
2244
|
+
albums = self.client.get_album(
|
|
2245
|
+
artistId=artist["id"], allArtistAlbums=True
|
|
2246
|
+
)
|
|
2247
|
+
break
|
|
2248
|
+
except (
|
|
2249
|
+
requests.exceptions.ChunkedEncodingError,
|
|
2250
|
+
requests.exceptions.ContentDecodingError,
|
|
2251
|
+
requests.exceptions.ConnectionError,
|
|
2252
|
+
JSONDecodeError,
|
|
2253
|
+
):
|
|
2056
2254
|
continue
|
|
2057
|
-
|
|
2058
|
-
|
|
2059
|
-
for m in movies:
|
|
2060
|
-
if isinstance(m, str):
|
|
2255
|
+
for album in albums:
|
|
2256
|
+
if isinstance(album, str):
|
|
2061
2257
|
continue
|
|
2062
|
-
|
|
2258
|
+
# For Lidarr, we don't have a specific releaseDate field
|
|
2259
|
+
# Check if album has been released
|
|
2260
|
+
if "releaseDate" in album:
|
|
2261
|
+
release_date = datetime.strptime(
|
|
2262
|
+
album["releaseDate"], "%Y-%m-%dT%H:%M:%SZ"
|
|
2263
|
+
)
|
|
2264
|
+
if release_date > datetime.now():
|
|
2265
|
+
continue
|
|
2266
|
+
self.db_update_single_series(db_entry=album)
|
|
2267
|
+
# Process artists for artist-level tracking
|
|
2268
|
+
for artist in artists:
|
|
2269
|
+
if isinstance(artist, str):
|
|
2270
|
+
continue
|
|
2271
|
+
self.db_update_single_series(db_entry=artist, artist=True)
|
|
2063
2272
|
self.db_update_processed = True
|
|
2064
2273
|
self.logger.trace("Finished updating database")
|
|
2065
2274
|
finally:
|
|
@@ -2306,7 +2515,11 @@ class Arr:
|
|
|
2306
2515
|
return False
|
|
2307
2516
|
|
|
2308
2517
|
def db_update_single_series(
|
|
2309
|
-
self,
|
|
2518
|
+
self,
|
|
2519
|
+
db_entry: JsonObject = None,
|
|
2520
|
+
request: bool = False,
|
|
2521
|
+
series: bool = False,
|
|
2522
|
+
artist: bool = False,
|
|
2310
2523
|
):
|
|
2311
2524
|
if not (
|
|
2312
2525
|
self.search_missing
|
|
@@ -2492,7 +2705,10 @@ class Arr:
|
|
|
2492
2705
|
QualityMet = not QualityUnmet if db_entry["hasFile"] else False
|
|
2493
2706
|
customFormatMet = customFormat >= minCustomFormat
|
|
2494
2707
|
|
|
2495
|
-
if
|
|
2708
|
+
if searched:
|
|
2709
|
+
# Episode is complete and not being searched
|
|
2710
|
+
reason = None
|
|
2711
|
+
elif not episode["hasFile"]:
|
|
2496
2712
|
reason = "Missing"
|
|
2497
2713
|
elif self.quality_unmet_search and QualityUnmet:
|
|
2498
2714
|
reason = "Quality"
|
|
@@ -2501,7 +2717,7 @@ class Arr:
|
|
|
2501
2717
|
elif self.do_upgrade_search:
|
|
2502
2718
|
reason = "Upgrade"
|
|
2503
2719
|
else:
|
|
2504
|
-
reason =
|
|
2720
|
+
reason = None
|
|
2505
2721
|
|
|
2506
2722
|
to_update = {
|
|
2507
2723
|
self.model_file.Monitored: Monitored,
|
|
@@ -2719,6 +2935,9 @@ class Arr:
|
|
|
2719
2935
|
conflict_target=[self.series_file_model.EntryId], update=to_update
|
|
2720
2936
|
)
|
|
2721
2937
|
db_commands.execute()
|
|
2938
|
+
|
|
2939
|
+
# Note: Episodes are now handled separately in db_update()
|
|
2940
|
+
# No need to recursively process episodes here to avoid duplication
|
|
2722
2941
|
else:
|
|
2723
2942
|
db_commands = self.series_file_model.delete().where(
|
|
2724
2943
|
self.series_file_model.EntryId == EntryId
|
|
@@ -2814,7 +3033,7 @@ class Arr:
|
|
|
2814
3033
|
self.logger.debug(
|
|
2815
3034
|
"Updating quality profile for %s to %s",
|
|
2816
3035
|
db_entry["title"],
|
|
2817
|
-
|
|
3036
|
+
db_entry["qualityProfileId"],
|
|
2818
3037
|
)
|
|
2819
3038
|
while True:
|
|
2820
3039
|
try:
|
|
@@ -2837,7 +3056,10 @@ class Arr:
|
|
|
2837
3056
|
qualityMet = not QualityUnmet if db_entry["hasFile"] else False
|
|
2838
3057
|
customFormatMet = customFormat >= minCustomFormat
|
|
2839
3058
|
|
|
2840
|
-
if
|
|
3059
|
+
if searched:
|
|
3060
|
+
# Movie is complete and not being searched
|
|
3061
|
+
reason = None
|
|
3062
|
+
elif not db_entry["hasFile"]:
|
|
2841
3063
|
reason = "Missing"
|
|
2842
3064
|
elif self.quality_unmet_search and QualityUnmet:
|
|
2843
3065
|
reason = "Quality"
|
|
@@ -2846,7 +3068,7 @@ class Arr:
|
|
|
2846
3068
|
elif self.do_upgrade_search:
|
|
2847
3069
|
reason = "Upgrade"
|
|
2848
3070
|
else:
|
|
2849
|
-
reason =
|
|
3071
|
+
reason = None
|
|
2850
3072
|
|
|
2851
3073
|
to_update = {
|
|
2852
3074
|
self.model_file.MovieFileId: movieFileId,
|
|
@@ -2894,46 +3116,507 @@ class Arr:
|
|
|
2894
3116
|
self.model_file.EntryId == db_entry["id"]
|
|
2895
3117
|
)
|
|
2896
3118
|
db_commands.execute()
|
|
2897
|
-
|
|
2898
|
-
|
|
2899
|
-
|
|
2900
|
-
|
|
2901
|
-
|
|
2902
|
-
|
|
2903
|
-
|
|
2904
|
-
exc_info=e,
|
|
2905
|
-
)
|
|
2906
|
-
raise DelayLoopException(length=300, type=self._name)
|
|
2907
|
-
except JSONDecodeError:
|
|
2908
|
-
if self.type == "sonarr":
|
|
2909
|
-
if self.series_search:
|
|
2910
|
-
self.logger.warning(
|
|
2911
|
-
"Error getting series info: [%s][%s]", db_entry["id"], db_entry["title"]
|
|
2912
|
-
)
|
|
2913
|
-
else:
|
|
2914
|
-
self.logger.warning(
|
|
2915
|
-
"Error getting episode info: [%s][%s]", db_entry["id"], db_entry["title"]
|
|
3119
|
+
elif self.type == "lidarr":
|
|
3120
|
+
if not artist:
|
|
3121
|
+
# Album handling
|
|
3122
|
+
self.model_file: AlbumFilesModel
|
|
3123
|
+
searched = False
|
|
3124
|
+
albumData = self.model_file.get_or_none(
|
|
3125
|
+
self.model_file.EntryId == db_entry["id"]
|
|
2916
3126
|
)
|
|
2917
|
-
|
|
2918
|
-
|
|
2919
|
-
|
|
2920
|
-
|
|
2921
|
-
|
|
2922
|
-
|
|
2923
|
-
|
|
2924
|
-
|
|
2925
|
-
|
|
2926
|
-
|
|
2927
|
-
|
|
2928
|
-
|
|
2929
|
-
|
|
2930
|
-
|
|
2931
|
-
|
|
2932
|
-
|
|
2933
|
-
|
|
2934
|
-
|
|
2935
|
-
|
|
2936
|
-
|
|
3127
|
+
if db_entry["monitored"] or self.search_unmonitored:
|
|
3128
|
+
while True:
|
|
3129
|
+
try:
|
|
3130
|
+
if albumData:
|
|
3131
|
+
if not albumData.MinCustomFormatScore:
|
|
3132
|
+
try:
|
|
3133
|
+
profile_id = db_entry["profileId"]
|
|
3134
|
+
# Check if this profile ID is known to be invalid
|
|
3135
|
+
if profile_id in self._invalid_quality_profiles:
|
|
3136
|
+
minCustomFormat = 0
|
|
3137
|
+
# Check cache first
|
|
3138
|
+
elif profile_id in self._quality_profile_cache:
|
|
3139
|
+
minCustomFormat = self._quality_profile_cache[
|
|
3140
|
+
profile_id
|
|
3141
|
+
].get("minFormatScore", 0)
|
|
3142
|
+
else:
|
|
3143
|
+
# Fetch from API and cache
|
|
3144
|
+
try:
|
|
3145
|
+
profile = self.client.get_quality_profile(
|
|
3146
|
+
profile_id
|
|
3147
|
+
)
|
|
3148
|
+
self._quality_profile_cache[profile_id] = (
|
|
3149
|
+
profile
|
|
3150
|
+
)
|
|
3151
|
+
minCustomFormat = profile.get(
|
|
3152
|
+
"minFormatScore", 0
|
|
3153
|
+
)
|
|
3154
|
+
except PyarrResourceNotFound:
|
|
3155
|
+
# Mark as invalid to avoid repeated warnings
|
|
3156
|
+
self._invalid_quality_profiles.add(profile_id)
|
|
3157
|
+
self.logger.warning(
|
|
3158
|
+
"Quality profile %s not found for album %s, defaulting to 0",
|
|
3159
|
+
db_entry.get("profileId"),
|
|
3160
|
+
db_entry.get("title", "Unknown"),
|
|
3161
|
+
)
|
|
3162
|
+
minCustomFormat = 0
|
|
3163
|
+
except Exception:
|
|
3164
|
+
minCustomFormat = 0
|
|
3165
|
+
else:
|
|
3166
|
+
minCustomFormat = albumData.MinCustomFormatScore
|
|
3167
|
+
if (
|
|
3168
|
+
db_entry.get("statistics", {}).get("percentOfTracks", 0)
|
|
3169
|
+
== 100
|
|
3170
|
+
):
|
|
3171
|
+
# Album has files
|
|
3172
|
+
albumFileId = db_entry.get("statistics", {}).get(
|
|
3173
|
+
"sizeOnDisk", 0
|
|
3174
|
+
)
|
|
3175
|
+
if albumFileId != albumData.AlbumFileId:
|
|
3176
|
+
# Get custom format score from album files
|
|
3177
|
+
customFormat = (
|
|
3178
|
+
0 # Lidarr may not have customFormatScore
|
|
3179
|
+
)
|
|
3180
|
+
else:
|
|
3181
|
+
customFormat = albumData.CustomFormatScore
|
|
3182
|
+
else:
|
|
3183
|
+
customFormat = 0
|
|
3184
|
+
else:
|
|
3185
|
+
try:
|
|
3186
|
+
profile_id = db_entry["profileId"]
|
|
3187
|
+
# Check if this profile ID is known to be invalid
|
|
3188
|
+
if profile_id in self._invalid_quality_profiles:
|
|
3189
|
+
minCustomFormat = 0
|
|
3190
|
+
# Check cache first
|
|
3191
|
+
elif profile_id in self._quality_profile_cache:
|
|
3192
|
+
minCustomFormat = self._quality_profile_cache[
|
|
3193
|
+
profile_id
|
|
3194
|
+
].get("minFormatScore", 0)
|
|
3195
|
+
else:
|
|
3196
|
+
# Fetch from API and cache
|
|
3197
|
+
try:
|
|
3198
|
+
profile = self.client.get_quality_profile(
|
|
3199
|
+
profile_id
|
|
3200
|
+
)
|
|
3201
|
+
self._quality_profile_cache[profile_id] = profile
|
|
3202
|
+
minCustomFormat = profile.get("minFormatScore", 0)
|
|
3203
|
+
except PyarrResourceNotFound:
|
|
3204
|
+
# Mark as invalid to avoid repeated warnings
|
|
3205
|
+
self._invalid_quality_profiles.add(profile_id)
|
|
3206
|
+
self.logger.warning(
|
|
3207
|
+
"Quality profile %s not found for album %s, defaulting to 0",
|
|
3208
|
+
db_entry.get("profileId"),
|
|
3209
|
+
db_entry.get("title", "Unknown"),
|
|
3210
|
+
)
|
|
3211
|
+
minCustomFormat = 0
|
|
3212
|
+
except Exception:
|
|
3213
|
+
minCustomFormat = 0
|
|
3214
|
+
if (
|
|
3215
|
+
db_entry.get("statistics", {}).get("percentOfTracks", 0)
|
|
3216
|
+
== 100
|
|
3217
|
+
):
|
|
3218
|
+
customFormat = 0 # Lidarr may not have customFormatScore
|
|
3219
|
+
else:
|
|
3220
|
+
customFormat = 0
|
|
3221
|
+
break
|
|
3222
|
+
except (
|
|
3223
|
+
requests.exceptions.ChunkedEncodingError,
|
|
3224
|
+
requests.exceptions.ContentDecodingError,
|
|
3225
|
+
requests.exceptions.ConnectionError,
|
|
3226
|
+
JSONDecodeError,
|
|
3227
|
+
):
|
|
3228
|
+
continue
|
|
3229
|
+
|
|
3230
|
+
# Determine if album has all tracks
|
|
3231
|
+
hasAllTracks = (
|
|
3232
|
+
db_entry.get("statistics", {}).get("percentOfTracks", 0) == 100
|
|
3233
|
+
)
|
|
3234
|
+
|
|
3235
|
+
# Check if quality cutoff is met for Lidarr
|
|
3236
|
+
# Unlike Sonarr/Radarr which have a qualityCutoffNotMet boolean field,
|
|
3237
|
+
# Lidarr requires us to check the track file quality against the profile cutoff
|
|
3238
|
+
QualityUnmet = False
|
|
3239
|
+
if hasAllTracks:
|
|
3240
|
+
try:
|
|
3241
|
+
# Get the artist's quality profile to find the cutoff
|
|
3242
|
+
artist_id = db_entry.get("artistId")
|
|
3243
|
+
artist_data = self.client.get_artist(artist_id)
|
|
3244
|
+
profile_id = artist_data.get("qualityProfileId")
|
|
3245
|
+
|
|
3246
|
+
if profile_id:
|
|
3247
|
+
# Get or use cached profile
|
|
3248
|
+
if profile_id in self._quality_profile_cache:
|
|
3249
|
+
profile = self._quality_profile_cache[profile_id]
|
|
3250
|
+
else:
|
|
3251
|
+
profile = self.client.get_quality_profile(profile_id)
|
|
3252
|
+
self._quality_profile_cache[profile_id] = profile
|
|
3253
|
+
|
|
3254
|
+
cutoff_quality_id = profile.get("cutoff")
|
|
3255
|
+
upgrade_allowed = profile.get("upgradeAllowed", False)
|
|
3256
|
+
|
|
3257
|
+
if cutoff_quality_id and upgrade_allowed:
|
|
3258
|
+
# Get track files for this album to check their quality
|
|
3259
|
+
album_id = db_entry.get("id")
|
|
3260
|
+
track_files = self.client.get_track_file(
|
|
3261
|
+
albumId=[album_id]
|
|
3262
|
+
)
|
|
3263
|
+
|
|
3264
|
+
if track_files:
|
|
3265
|
+
# Check if any track file's quality is below the cutoff
|
|
3266
|
+
for track_file in track_files:
|
|
3267
|
+
file_quality = track_file.get("quality", {}).get(
|
|
3268
|
+
"quality", {}
|
|
3269
|
+
)
|
|
3270
|
+
file_quality_id = file_quality.get("id", 0)
|
|
3271
|
+
|
|
3272
|
+
if file_quality_id < cutoff_quality_id:
|
|
3273
|
+
QualityUnmet = True
|
|
3274
|
+
self.logger.trace(
|
|
3275
|
+
"Album '%s' has quality below cutoff: %s (ID: %d) < cutoff (ID: %d)",
|
|
3276
|
+
db_entry.get("title", "Unknown"),
|
|
3277
|
+
file_quality.get("name", "Unknown"),
|
|
3278
|
+
file_quality_id,
|
|
3279
|
+
cutoff_quality_id,
|
|
3280
|
+
)
|
|
3281
|
+
break
|
|
3282
|
+
except Exception as e:
|
|
3283
|
+
self.logger.trace(
|
|
3284
|
+
"Could not determine quality cutoff status for album '%s': %s",
|
|
3285
|
+
db_entry.get("title", "Unknown"),
|
|
3286
|
+
str(e),
|
|
3287
|
+
)
|
|
3288
|
+
# Default to False if we can't determine
|
|
3289
|
+
QualityUnmet = False
|
|
3290
|
+
|
|
3291
|
+
if (
|
|
3292
|
+
hasAllTracks
|
|
3293
|
+
and not (self.quality_unmet_search and QualityUnmet)
|
|
3294
|
+
and not (
|
|
3295
|
+
self.custom_format_unmet_search and customFormat < minCustomFormat
|
|
3296
|
+
)
|
|
3297
|
+
):
|
|
3298
|
+
searched = True
|
|
3299
|
+
self.model_queue.update(Completed=True).where(
|
|
3300
|
+
self.model_queue.EntryId == db_entry["id"]
|
|
3301
|
+
).execute()
|
|
3302
|
+
|
|
3303
|
+
if self.use_temp_for_missing:
|
|
3304
|
+
quality_profile_id = db_entry.get("qualityProfileId")
|
|
3305
|
+
if (
|
|
3306
|
+
searched
|
|
3307
|
+
and quality_profile_id in self.temp_quality_profile_ids.values()
|
|
3308
|
+
and not self.keep_temp_profile
|
|
3309
|
+
):
|
|
3310
|
+
db_entry["qualityProfileId"] = list(
|
|
3311
|
+
self.temp_quality_profile_ids.keys()
|
|
3312
|
+
)[
|
|
3313
|
+
list(self.temp_quality_profile_ids.values()).index(
|
|
3314
|
+
quality_profile_id
|
|
3315
|
+
)
|
|
3316
|
+
]
|
|
3317
|
+
self.logger.debug(
|
|
3318
|
+
"Updating quality profile for %s to %s",
|
|
3319
|
+
db_entry["title"],
|
|
3320
|
+
db_entry["qualityProfileId"],
|
|
3321
|
+
)
|
|
3322
|
+
elif (
|
|
3323
|
+
not searched
|
|
3324
|
+
and quality_profile_id in self.temp_quality_profile_ids.keys()
|
|
3325
|
+
):
|
|
3326
|
+
db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
|
|
3327
|
+
quality_profile_id
|
|
3328
|
+
]
|
|
3329
|
+
self.logger.debug(
|
|
3330
|
+
"Updating quality profile for %s to %s",
|
|
3331
|
+
db_entry["title"],
|
|
3332
|
+
db_entry["qualityProfileId"],
|
|
3333
|
+
)
|
|
3334
|
+
while True:
|
|
3335
|
+
try:
|
|
3336
|
+
self.client.upd_album(db_entry)
|
|
3337
|
+
break
|
|
3338
|
+
except (
|
|
3339
|
+
requests.exceptions.ChunkedEncodingError,
|
|
3340
|
+
requests.exceptions.ContentDecodingError,
|
|
3341
|
+
requests.exceptions.ConnectionError,
|
|
3342
|
+
JSONDecodeError,
|
|
3343
|
+
):
|
|
3344
|
+
continue
|
|
3345
|
+
|
|
3346
|
+
title = db_entry.get("title", "Unknown Album")
|
|
3347
|
+
monitored = db_entry.get("monitored", False)
|
|
3348
|
+
# Handle artist field which can be an object or might not exist
|
|
3349
|
+
artist_obj = db_entry.get("artist", {})
|
|
3350
|
+
if isinstance(artist_obj, dict):
|
|
3351
|
+
# Try multiple possible field names for artist name
|
|
3352
|
+
artistName = (
|
|
3353
|
+
artist_obj.get("artistName")
|
|
3354
|
+
or artist_obj.get("name")
|
|
3355
|
+
or artist_obj.get("title")
|
|
3356
|
+
or "Unknown Artist"
|
|
3357
|
+
)
|
|
3358
|
+
else:
|
|
3359
|
+
artistName = "Unknown Artist"
|
|
3360
|
+
artistId = db_entry.get("artistId", 0)
|
|
3361
|
+
foreignAlbumId = db_entry.get("foreignAlbumId", "")
|
|
3362
|
+
releaseDate = db_entry.get("releaseDate")
|
|
3363
|
+
entryId = db_entry.get("id", 0)
|
|
3364
|
+
albumFileId = 1 if hasAllTracks else 0 # Use 1/0 to indicate presence
|
|
3365
|
+
qualityMet = not QualityUnmet if hasAllTracks else False
|
|
3366
|
+
customFormatMet = customFormat >= minCustomFormat
|
|
3367
|
+
|
|
3368
|
+
if searched:
|
|
3369
|
+
# Album is complete and not being searched
|
|
3370
|
+
reason = None
|
|
3371
|
+
elif not hasAllTracks:
|
|
3372
|
+
reason = "Missing"
|
|
3373
|
+
elif self.quality_unmet_search and QualityUnmet:
|
|
3374
|
+
reason = "Quality"
|
|
3375
|
+
elif self.custom_format_unmet_search and not customFormatMet:
|
|
3376
|
+
reason = "CustomFormat"
|
|
3377
|
+
elif self.do_upgrade_search:
|
|
3378
|
+
reason = "Upgrade"
|
|
3379
|
+
else:
|
|
3380
|
+
reason = None
|
|
3381
|
+
|
|
3382
|
+
to_update = {
|
|
3383
|
+
self.model_file.AlbumFileId: albumFileId,
|
|
3384
|
+
self.model_file.Monitored: monitored,
|
|
3385
|
+
self.model_file.QualityMet: qualityMet,
|
|
3386
|
+
self.model_file.Searched: searched,
|
|
3387
|
+
self.model_file.Upgrade: False,
|
|
3388
|
+
self.model_file.MinCustomFormatScore: minCustomFormat,
|
|
3389
|
+
self.model_file.CustomFormatScore: customFormat,
|
|
3390
|
+
self.model_file.CustomFormatMet: customFormatMet,
|
|
3391
|
+
self.model_file.Reason: reason,
|
|
3392
|
+
self.model_file.ArtistTitle: artistName,
|
|
3393
|
+
self.model_file.ArtistId: artistId,
|
|
3394
|
+
self.model_file.ForeignAlbumId: foreignAlbumId,
|
|
3395
|
+
self.model_file.ReleaseDate: releaseDate,
|
|
3396
|
+
}
|
|
3397
|
+
|
|
3398
|
+
if request:
|
|
3399
|
+
to_update[self.model_file.IsRequest] = request
|
|
3400
|
+
|
|
3401
|
+
self.logger.debug(
|
|
3402
|
+
"Updating database entry | %s - %s [Searched:%s][Upgrade:%s][QualityMet:%s][CustomFormatMet:%s]",
|
|
3403
|
+
artistName.ljust(30, "."),
|
|
3404
|
+
title.ljust(30, "."),
|
|
3405
|
+
str(searched).ljust(5),
|
|
3406
|
+
str(False).ljust(5),
|
|
3407
|
+
str(qualityMet).ljust(5),
|
|
3408
|
+
str(customFormatMet).ljust(5),
|
|
3409
|
+
)
|
|
3410
|
+
|
|
3411
|
+
db_commands = self.model_file.insert(
|
|
3412
|
+
Title=title,
|
|
3413
|
+
Monitored=monitored,
|
|
3414
|
+
ArtistTitle=artistName,
|
|
3415
|
+
ArtistId=artistId,
|
|
3416
|
+
ForeignAlbumId=foreignAlbumId,
|
|
3417
|
+
ReleaseDate=releaseDate,
|
|
3418
|
+
EntryId=entryId,
|
|
3419
|
+
Searched=searched,
|
|
3420
|
+
AlbumFileId=albumFileId,
|
|
3421
|
+
IsRequest=request,
|
|
3422
|
+
QualityMet=qualityMet,
|
|
3423
|
+
Upgrade=False,
|
|
3424
|
+
MinCustomFormatScore=minCustomFormat,
|
|
3425
|
+
CustomFormatScore=customFormat,
|
|
3426
|
+
CustomFormatMet=customFormatMet,
|
|
3427
|
+
Reason=reason,
|
|
3428
|
+
).on_conflict(conflict_target=[self.model_file.EntryId], update=to_update)
|
|
3429
|
+
db_commands.execute()
|
|
3430
|
+
|
|
3431
|
+
# Store tracks for this album (Lidarr only)
|
|
3432
|
+
if self.track_file_model:
|
|
3433
|
+
try:
|
|
3434
|
+
# Fetch tracks for this album via the track API
|
|
3435
|
+
# Tracks are NOT in the media field, they're a separate endpoint
|
|
3436
|
+
tracks = self.client.get_tracks(albumId=entryId)
|
|
3437
|
+
self.logger.debug(
|
|
3438
|
+
f"Fetched {len(tracks) if isinstance(tracks, list) else 0} tracks for album {entryId}"
|
|
3439
|
+
)
|
|
3440
|
+
|
|
3441
|
+
if tracks and isinstance(tracks, list):
|
|
3442
|
+
# First, delete existing tracks for this album
|
|
3443
|
+
self.track_file_model.delete().where(
|
|
3444
|
+
self.track_file_model.AlbumId == entryId
|
|
3445
|
+
).execute()
|
|
3446
|
+
|
|
3447
|
+
# Insert new tracks
|
|
3448
|
+
track_insert_count = 0
|
|
3449
|
+
for track in tracks:
|
|
3450
|
+
# Get monitored status from track or default to album's monitored status
|
|
3451
|
+
track_monitored = track.get(
|
|
3452
|
+
"monitored", db_entry.get("monitored", False)
|
|
3453
|
+
)
|
|
3454
|
+
|
|
3455
|
+
self.track_file_model.insert(
|
|
3456
|
+
EntryId=track.get("id"),
|
|
3457
|
+
AlbumId=entryId,
|
|
3458
|
+
TrackNumber=track.get("trackNumber", ""),
|
|
3459
|
+
Title=track.get("title", ""),
|
|
3460
|
+
Duration=track.get("duration", 0),
|
|
3461
|
+
HasFile=track.get("hasFile", False),
|
|
3462
|
+
TrackFileId=track.get("trackFileId", 0),
|
|
3463
|
+
Monitored=track_monitored,
|
|
3464
|
+
).execute()
|
|
3465
|
+
track_insert_count += 1
|
|
3466
|
+
|
|
3467
|
+
if track_insert_count > 0:
|
|
3468
|
+
self.logger.info(
|
|
3469
|
+
f"Stored {track_insert_count} tracks for album {entryId} ({title})"
|
|
3470
|
+
)
|
|
3471
|
+
else:
|
|
3472
|
+
self.logger.debug(
|
|
3473
|
+
f"No tracks found for album {entryId} ({title})"
|
|
3474
|
+
)
|
|
3475
|
+
except Exception as e:
|
|
3476
|
+
self.logger.warning(
|
|
3477
|
+
f"Could not fetch tracks for album {entryId} ({title}): {e}"
|
|
3478
|
+
)
|
|
3479
|
+
else:
|
|
3480
|
+
db_commands = self.model_file.delete().where(
|
|
3481
|
+
self.model_file.EntryId == db_entry["id"]
|
|
3482
|
+
)
|
|
3483
|
+
db_commands.execute()
|
|
3484
|
+
# Also delete tracks for this album (Lidarr only)
|
|
3485
|
+
if self.track_file_model:
|
|
3486
|
+
self.track_file_model.delete().where(
|
|
3487
|
+
self.track_file_model.AlbumId == db_entry["id"]
|
|
3488
|
+
).execute()
|
|
3489
|
+
else:
|
|
3490
|
+
# Artist handling
|
|
3491
|
+
self.artists_file_model: ArtistFilesModel
|
|
3492
|
+
EntryId = db_entry["id"]
|
|
3493
|
+
artistData = self.artists_file_model.get_or_none(
|
|
3494
|
+
self.artists_file_model.EntryId == EntryId
|
|
3495
|
+
)
|
|
3496
|
+
if db_entry["monitored"] or self.search_unmonitored:
|
|
3497
|
+
while True:
|
|
3498
|
+
try:
|
|
3499
|
+
artistMetadata = self.client.get_artist(id_=EntryId) or {}
|
|
3500
|
+
quality_profile_id = None
|
|
3501
|
+
if isinstance(artistMetadata, dict):
|
|
3502
|
+
quality_profile_id = artistMetadata.get("qualityProfileId")
|
|
3503
|
+
else:
|
|
3504
|
+
quality_profile_id = getattr(
|
|
3505
|
+
artistMetadata, "qualityProfileId", None
|
|
3506
|
+
)
|
|
3507
|
+
if not artistData:
|
|
3508
|
+
if quality_profile_id:
|
|
3509
|
+
profile = (
|
|
3510
|
+
self.client.get_quality_profile(quality_profile_id)
|
|
3511
|
+
or {}
|
|
3512
|
+
)
|
|
3513
|
+
minCustomFormat = profile.get("minFormatScore") or 0
|
|
3514
|
+
else:
|
|
3515
|
+
self.logger.warning(
|
|
3516
|
+
"Artist %s (%s) missing qualityProfileId; "
|
|
3517
|
+
"defaulting custom format score to 0",
|
|
3518
|
+
db_entry.get("artistName"),
|
|
3519
|
+
EntryId,
|
|
3520
|
+
)
|
|
3521
|
+
minCustomFormat = 0
|
|
3522
|
+
else:
|
|
3523
|
+
minCustomFormat = getattr(
|
|
3524
|
+
artistData, "MinCustomFormatScore", 0
|
|
3525
|
+
)
|
|
3526
|
+
break
|
|
3527
|
+
except (
|
|
3528
|
+
requests.exceptions.ChunkedEncodingError,
|
|
3529
|
+
requests.exceptions.ContentDecodingError,
|
|
3530
|
+
requests.exceptions.ConnectionError,
|
|
3531
|
+
JSONDecodeError,
|
|
3532
|
+
):
|
|
3533
|
+
continue
|
|
3534
|
+
# Calculate if artist is fully searched based on album statistics
|
|
3535
|
+
statistics = artistMetadata.get("statistics", {})
|
|
3536
|
+
albumCount = statistics.get("albumCount", 0)
|
|
3537
|
+
statistics.get("totalAlbumCount", 0)
|
|
3538
|
+
# Check if there's any album with files (sizeOnDisk > 0)
|
|
3539
|
+
sizeOnDisk = statistics.get("sizeOnDisk", 0)
|
|
3540
|
+
# Artist is considered searched if it has albums and at least some have files
|
|
3541
|
+
searched = albumCount > 0 and sizeOnDisk > 0
|
|
3542
|
+
|
|
3543
|
+
Title = artistMetadata.get("artistName")
|
|
3544
|
+
Monitored = db_entry["monitored"]
|
|
3545
|
+
|
|
3546
|
+
to_update = {
|
|
3547
|
+
self.artists_file_model.Monitored: Monitored,
|
|
3548
|
+
self.artists_file_model.Title: Title,
|
|
3549
|
+
self.artists_file_model.Searched: searched,
|
|
3550
|
+
self.artists_file_model.Upgrade: False,
|
|
3551
|
+
self.artists_file_model.MinCustomFormatScore: minCustomFormat,
|
|
3552
|
+
}
|
|
3553
|
+
|
|
3554
|
+
self.logger.debug(
|
|
3555
|
+
"Updating database entry | %s [Searched:%s][Upgrade:%s]",
|
|
3556
|
+
Title.ljust(60, "."),
|
|
3557
|
+
str(searched).ljust(5),
|
|
3558
|
+
str(False).ljust(5),
|
|
3559
|
+
)
|
|
3560
|
+
|
|
3561
|
+
db_commands = self.artists_file_model.insert(
|
|
3562
|
+
EntryId=EntryId,
|
|
3563
|
+
Title=Title,
|
|
3564
|
+
Searched=searched,
|
|
3565
|
+
Monitored=Monitored,
|
|
3566
|
+
Upgrade=False,
|
|
3567
|
+
MinCustomFormatScore=minCustomFormat,
|
|
3568
|
+
).on_conflict(
|
|
3569
|
+
conflict_target=[self.artists_file_model.EntryId], update=to_update
|
|
3570
|
+
)
|
|
3571
|
+
db_commands.execute()
|
|
3572
|
+
|
|
3573
|
+
# Note: Albums are now handled separately in db_update()
|
|
3574
|
+
# No need to recursively process albums here to avoid duplication
|
|
3575
|
+
else:
|
|
3576
|
+
db_commands = self.artists_file_model.delete().where(
|
|
3577
|
+
self.artists_file_model.EntryId == EntryId
|
|
3578
|
+
)
|
|
3579
|
+
db_commands.execute()
|
|
3580
|
+
|
|
3581
|
+
except requests.exceptions.ConnectionError as e:
|
|
3582
|
+
self.logger.debug(
|
|
3583
|
+
"Max retries exceeded for %s [%s][%s]",
|
|
3584
|
+
self._name,
|
|
3585
|
+
db_entry["id"],
|
|
3586
|
+
db_entry["title"],
|
|
3587
|
+
exc_info=e,
|
|
3588
|
+
)
|
|
3589
|
+
raise DelayLoopException(length=300, type=self._name)
|
|
3590
|
+
except JSONDecodeError:
|
|
3591
|
+
if self.type == "sonarr":
|
|
3592
|
+
if self.series_search:
|
|
3593
|
+
self.logger.warning(
|
|
3594
|
+
"Error getting series info: [%s][%s]", db_entry["id"], db_entry["title"]
|
|
3595
|
+
)
|
|
3596
|
+
else:
|
|
3597
|
+
self.logger.warning(
|
|
3598
|
+
"Error getting episode info: [%s][%s]", db_entry["id"], db_entry["title"]
|
|
3599
|
+
)
|
|
3600
|
+
elif self.type == "radarr":
|
|
3601
|
+
self.logger.warning(
|
|
3602
|
+
"Error getting movie info: [%s][%s]", db_entry["id"], db_entry["path"]
|
|
3603
|
+
)
|
|
3604
|
+
except Exception as e:
|
|
3605
|
+
self.logger.error(e, exc_info=sys.exc_info())
|
|
3606
|
+
|
|
3607
|
+
def delete_from_queue(self, id_, remove_from_client=True, blacklist=True):
|
|
3608
|
+
try:
|
|
3609
|
+
while True:
|
|
3610
|
+
try:
|
|
3611
|
+
res = self.client.del_queue(id_, remove_from_client, blacklist)
|
|
3612
|
+
# res = self.client._delete(
|
|
3613
|
+
# f"queue/{id_}?removeFromClient={remove_from_client}&blocklist={blacklist}",
|
|
3614
|
+
# self.client.ver_uri,
|
|
3615
|
+
# )
|
|
3616
|
+
break
|
|
3617
|
+
except (
|
|
3618
|
+
requests.exceptions.ChunkedEncodingError,
|
|
3619
|
+
requests.exceptions.ContentDecodingError,
|
|
2937
3620
|
requests.exceptions.ConnectionError,
|
|
2938
3621
|
JSONDecodeError,
|
|
2939
3622
|
):
|
|
@@ -3180,7 +3863,7 @@ class Arr:
|
|
|
3180
3863
|
self.model_file.update(Searched=True, Upgrade=True).where(
|
|
3181
3864
|
file_model.EntryId == file_model.EntryId
|
|
3182
3865
|
).execute()
|
|
3183
|
-
reason_text = getattr(file_model, "Reason", None) or
|
|
3866
|
+
reason_text = getattr(file_model, "Reason", None) or None
|
|
3184
3867
|
if reason_text:
|
|
3185
3868
|
self.logger.hnotice(
|
|
3186
3869
|
"%sSearching for: %s | S%02dE%03d | %s | [id=%s|AirDateUTC=%s][%s]",
|
|
@@ -3353,6 +4036,86 @@ class Arr:
|
|
|
3353
4036
|
detail=str(reason_text) if reason_text else None,
|
|
3354
4037
|
)
|
|
3355
4038
|
return True
|
|
4039
|
+
elif self.type == "lidarr":
|
|
4040
|
+
file_model: AlbumFilesModel
|
|
4041
|
+
if not (request or todays):
|
|
4042
|
+
(
|
|
4043
|
+
self.model_queue.select(self.model_queue.Completed)
|
|
4044
|
+
.where(self.model_queue.EntryId == file_model.EntryId)
|
|
4045
|
+
.execute()
|
|
4046
|
+
)
|
|
4047
|
+
else:
|
|
4048
|
+
pass
|
|
4049
|
+
if file_model.EntryId in self.queue_file_ids:
|
|
4050
|
+
self.logger.debug(
|
|
4051
|
+
"%sSkipping: Already Searched: %s - %s (%s)",
|
|
4052
|
+
request_tag,
|
|
4053
|
+
file_model.ArtistTitle,
|
|
4054
|
+
file_model.Title,
|
|
4055
|
+
file_model.EntryId,
|
|
4056
|
+
)
|
|
4057
|
+
self.model_file.update(Searched=True, Upgrade=True).where(
|
|
4058
|
+
file_model.EntryId == file_model.EntryId
|
|
4059
|
+
).execute()
|
|
4060
|
+
return True
|
|
4061
|
+
active_commands = self.arr_db_query_commands_count()
|
|
4062
|
+
self.logger.info("%s active search commands, %s remaining", active_commands, commands)
|
|
4063
|
+
if not bypass_limit and active_commands >= self.search_command_limit:
|
|
4064
|
+
self.logger.trace(
|
|
4065
|
+
"Idle: Too many commands in queue: %s - %s | [id=%s]",
|
|
4066
|
+
file_model.ArtistTitle,
|
|
4067
|
+
file_model.Title,
|
|
4068
|
+
file_model.EntryId,
|
|
4069
|
+
)
|
|
4070
|
+
return False
|
|
4071
|
+
self.persistent_queue.insert(EntryId=file_model.EntryId).on_conflict_ignore().execute()
|
|
4072
|
+
|
|
4073
|
+
self.model_queue.insert(
|
|
4074
|
+
Completed=False, EntryId=file_model.EntryId
|
|
4075
|
+
).on_conflict_replace().execute()
|
|
4076
|
+
if file_model.EntryId:
|
|
4077
|
+
while True:
|
|
4078
|
+
try:
|
|
4079
|
+
self.client.post_command("AlbumSearch", albumIds=[file_model.EntryId])
|
|
4080
|
+
break
|
|
4081
|
+
except (
|
|
4082
|
+
requests.exceptions.ChunkedEncodingError,
|
|
4083
|
+
requests.exceptions.ContentDecodingError,
|
|
4084
|
+
requests.exceptions.ConnectionError,
|
|
4085
|
+
JSONDecodeError,
|
|
4086
|
+
):
|
|
4087
|
+
continue
|
|
4088
|
+
self.model_file.update(Searched=True, Upgrade=True).where(
|
|
4089
|
+
file_model.EntryId == file_model.EntryId
|
|
4090
|
+
).execute()
|
|
4091
|
+
reason_text = getattr(file_model, "Reason", None)
|
|
4092
|
+
if reason_text:
|
|
4093
|
+
self.logger.hnotice(
|
|
4094
|
+
"%sSearching for: %s - %s [foreignAlbumId=%s|id=%s][%s]",
|
|
4095
|
+
request_tag,
|
|
4096
|
+
file_model.ArtistTitle,
|
|
4097
|
+
file_model.Title,
|
|
4098
|
+
file_model.ForeignAlbumId,
|
|
4099
|
+
file_model.EntryId,
|
|
4100
|
+
reason_text,
|
|
4101
|
+
)
|
|
4102
|
+
else:
|
|
4103
|
+
self.logger.hnotice(
|
|
4104
|
+
"%sSearching for: %s - %s [foreignAlbumId=%s|id=%s]",
|
|
4105
|
+
request_tag,
|
|
4106
|
+
file_model.ArtistTitle,
|
|
4107
|
+
file_model.Title,
|
|
4108
|
+
file_model.ForeignAlbumId,
|
|
4109
|
+
file_model.EntryId,
|
|
4110
|
+
)
|
|
4111
|
+
context_label = self._humanize_request_tag(request_tag)
|
|
4112
|
+
description = f"{file_model.ArtistTitle} - {file_model.Title}"
|
|
4113
|
+
self._record_search_activity(
|
|
4114
|
+
description,
|
|
4115
|
+
context=context_label,
|
|
4116
|
+
detail=str(reason_text) if reason_text else None,
|
|
4117
|
+
)
|
|
4118
|
+
return True
|
|
3356
4119
|
|
|
3357
4120
|
def process(self):
|
|
3358
4121
|
self._process_resume()
|
|
@@ -4590,6 +5353,9 @@ class Arr:
|
|
|
4590
5353
|
elif self.type == "radarr":
|
|
4591
5354
|
entry_id_field = "movieId"
|
|
4592
5355
|
file_id_field = "MovieFileId"
|
|
5356
|
+
elif self.type == "lidarr":
|
|
5357
|
+
entry_id_field = "albumId"
|
|
5358
|
+
file_id_field = "AlbumFileId"
|
|
4593
5359
|
else:
|
|
4594
5360
|
return False # Unknown type
|
|
4595
5361
|
|
|
@@ -4692,6 +5458,17 @@ class Arr:
|
|
|
4692
5458
|
self.model_queue.delete().where(
|
|
4693
5459
|
self.model_queue.EntryId.not_in(list(self.queue_file_ids))
|
|
4694
5460
|
).execute()
|
|
5461
|
+
elif self.type == "lidarr":
|
|
5462
|
+
self.requeue_cache = {
|
|
5463
|
+
entry["id"]: entry["albumId"] for entry in self.queue if entry.get("albumId")
|
|
5464
|
+
}
|
|
5465
|
+
self.queue_file_ids = {
|
|
5466
|
+
entry["albumId"] for entry in self.queue if entry.get("albumId")
|
|
5467
|
+
}
|
|
5468
|
+
if self.model_queue:
|
|
5469
|
+
self.model_queue.delete().where(
|
|
5470
|
+
self.model_queue.EntryId.not_in(list(self.queue_file_ids))
|
|
5471
|
+
).execute()
|
|
4695
5472
|
|
|
4696
5473
|
self._update_bad_queue_items()
|
|
4697
5474
|
|
|
@@ -4795,46 +5572,166 @@ class Arr:
         if self.search_setup_completed:
             return
 
-
-
-
-
-
-
-
-
-
+        db1, db2, db3, db4, db5 = self._get_models()
+
+        if not (
+            self.search_missing
+            or self.do_upgrade_search
+            or self.quality_unmet_search
+            or self.custom_format_unmet_search
+            or self.ombi_search_requests
+            or self.overseerr_requests
+        ):
+            if db5 and getattr(self, "torrents", None) is None:
+                self.torrent_db = SqliteDatabase(None)
+                self.torrent_db.init(
+                    str(self._app_data_folder.joinpath("Torrents.db")),
+                    pragmas={
+                        "journal_mode": "wal",
+                        "cache_size": -64_000,
+                        "foreign_keys": 1,
+                        "ignore_check_constraints": 0,
+                        "synchronous": 0,
+                    },
+                    timeout=15,
+                )
+
+                class Torrents(db5):
+                    class Meta:
+                        database = self.torrent_db
+
+                self.torrent_db.connect()
+                self.torrent_db.create_tables([Torrents])
+                self.torrents = Torrents
+            self.search_setup_completed = True
+            return
+
+        self.search_db_file.parent.mkdir(parents=True, exist_ok=True)
+        self.db = SqliteDatabase(None)
+        self.db.init(
+            str(self.search_db_file),
+            pragmas={
+                "journal_mode": "wal",
+                "cache_size": -64_000,
+                "foreign_keys": 1,
+                "ignore_check_constraints": 0,
+                "synchronous": 0,
+            },
+            timeout=15,
         )
-        include_series = self.type == "sonarr" and self.series_search
-        include_torrents = TAGLESS
 
-
+        class Files(db1):
+            class Meta:
+                database = self.db
 
-
-
-            self.
-
-
-
-
-
-
-
-
-
+        class Queue(db2):
+            class Meta:
+                database = self.db
+
+        class PersistingQueue(FilesQueued):
+            class Meta:
+                database = self.db
+
+        self.db.connect()
+
+        if db4:
+
+            class Tracks(db4):
+                class Meta:
+                    database = self.db
+
+            self.track_file_model = Tracks
         else:
-            self.
-
-
+            self.track_file_model = None
+
+        if db3 and self.type == "sonarr":
+
+            class Series(db3):
+                class Meta:
+                    database = self.db
+
+            self.db.create_tables([Files, Queue, PersistingQueue, Series])
+            self.series_file_model = Series
+            self.artists_file_model = None
+        elif db3 and self.type == "lidarr":
+
+            class Artists(db3):
+                class Meta:
+                    database = self.db
+
+            self.db.create_tables([Files, Queue, PersistingQueue, Artists, Tracks])
+            self.artists_file_model = Artists
+            self.series_file_model = None  # Lidarr uses artists, not series
+        else:
+            # Radarr or any type without db3/db4 (series/artists/tracks models)
+            self.db.create_tables([Files, Queue, PersistingQueue])
+            self.artists_file_model = None
             self.series_file_model = None
-        if include_torrents:
-            ensure_table_schema(TorrentLibrary)
-            self.torrents = TorrentLibrary
-        else:
-            self.torrents = None
 
+        if db5:
+            self.torrent_db = SqliteDatabase(None)
+            self.torrent_db.init(
+                str(self._app_data_folder.joinpath("Torrents.db")),
+                pragmas={
+                    "journal_mode": "wal",
+                    "cache_size": -64_000,
+                    "foreign_keys": 1,
+                    "ignore_check_constraints": 0,
+                    "synchronous": 0,
+                },
+                timeout=15,
+            )
+
+            class Torrents(db5):
+                class Meta:
+                    database = self.torrent_db
+
+            self.torrent_db.connect()
+            self.torrent_db.create_tables([Torrents])
+            self.torrents = Torrents
+        else:
+            self.torrents = None
+
+        self.model_file = Files
+        self.model_queue = Queue
+        self.persistent_queue = PersistingQueue
         self.search_setup_completed = True
 
+    def _get_models(
+        self,
+    ) -> tuple[
+        type[EpisodeFilesModel] | type[MoviesFilesModel] | type[AlbumFilesModel],
+        type[EpisodeQueueModel] | type[MovieQueueModel] | type[AlbumQueueModel],
+        type[SeriesFilesModel] | type[ArtistFilesModel] | None,
+        type[TrackFilesModel] | None,
+        type[TorrentLibrary] | None,
+    ]:
+        if self.type == "sonarr":
+            return (
+                EpisodeFilesModel,
+                EpisodeQueueModel,
+                SeriesFilesModel,
+                None,
+                TorrentLibrary if TAGLESS else None,
+            )
+        if self.type == "radarr":
+            return (
+                MoviesFilesModel,
+                MovieQueueModel,
+                None,
+                None,
+                TorrentLibrary if TAGLESS else None,
+            )
+        if self.type == "lidarr":
+            return (
+                AlbumFilesModel,
+                AlbumQueueModel,
+                ArtistFilesModel,
+                TrackFilesModel,
+                TorrentLibrary if TAGLESS else None,
+            )
+        raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
+
     def run_request_search(self):
         if (
             (
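The rewritten `register_search_mode` above now asks `_get_models()` for the base model classes that match the Arr type and binds each one to a per-instance `SqliteDatabase` by subclassing it with a `Meta.database` attribute. A rough standalone sketch of that peewee pattern (assuming peewee is installed; `ExampleFilesBase` and the database path are invented stand-ins, not names from the package):

```python
from peewee import CharField, IntegerField, Model, SqliteDatabase


class ExampleFilesBase(Model):
    # Invented stand-in for one of the shared base models (e.g. EpisodeFilesModel).
    EntryId = IntegerField(primary_key=True)
    Title = CharField(null=True)


# Deferred database, initialised later with the same pragmas used in the diff.
db = SqliteDatabase(None)
db.init(
    "example-search.db",  # hypothetical path; qBitrr derives its own per instance
    pragmas={"journal_mode": "wal", "cache_size": -64_000, "synchronous": 0},
    timeout=15,
)


class Files(ExampleFilesBase):
    class Meta:
        database = db  # bind this subclass to the instance's database


db.connect()
db.create_tables([Files])
Files.insert(EntryId=1, Title="Example").on_conflict_replace().execute()
print(Files.get_by_id(1).Title)  # -> "Example"
db.close()
```

Keeping the shared model definitions in qBitrr/tables.py unbound and subclassing them here appears to be what lets each Arr instance write to its own `.db` file while reusing one schema definition.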
@@ -5471,6 +6368,7 @@ class FreeSpaceManager(Arr):
         )
         self.timed_ignore_cache = ExpiringSet(max_age_seconds=self.ignore_torrents_younger_than)
         self.needs_cleanup = False
+        self._app_data_folder = APPDATA_FOLDER
         # Track search setup state to cooperate with Arr.register_search_mode
         self.search_setup_completed = False
         if FREE_SPACE_FOLDER == "CHANGE_ME":
@@ -5487,7 +6385,11 @@ class FreeSpaceManager(Arr):
         self.current_free_space = (
             shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
         )
-        self.logger.trace(
+        self.logger.trace(
+            "Free space monitor initialized | Available: %s | Threshold: %s",
+            format_bytes(self.current_free_space + self._min_free_space_bytes),
+            format_bytes(self._min_free_space_bytes),
+        )
         self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-free_space_paused"])
         self.search_missing = False
         self.do_upgrade_search = False
@@ -5498,6 +6400,7 @@ class FreeSpaceManager(Arr):
         self.session = None
         # Ensure torrent tag-emulation tables exist when needed.
         self.torrents = None
+        self.torrent_db: SqliteDatabase | None = None
         self.last_search_description: str | None = None
         self.last_search_timestamp: str | None = None
         self.queue_active_count: int = 0
@@ -5505,22 +6408,38 @@ class FreeSpaceManager(Arr):
         self.free_space_tagged_count: int = 0
         self.register_search_mode()
         self.logger.hnotice("Starting %s monitor", self._name)
+        atexit.register(
+            lambda: (
+                hasattr(self, "torrent_db")
+                and self.torrent_db
+                and not self.torrent_db.is_closed()
+                and self.torrent_db.close()
+            )
+        )
+
+    def _get_models(
+        self,
+    ) -> tuple[
+        None,
+        None,
+        None,
+        None,
+        type[TorrentLibrary] | None,
+    ]:
+        return None, None, None, None, (TorrentLibrary if TAGLESS else None)
 
     def _process_single_torrent_pause_disk_space(self, torrent: qbittorrentapi.TorrentDictionary):
         self.logger.info(
-            "Pausing torrent
-            "
-            "
-
-            "| [%s] | %s (%s)",
+            "Pausing torrent due to insufficient disk space | "
+            "Name: %s | Progress: %s%% | Size remaining: %s | "
+            "Availability: %s%% | ETA: %s | State: %s | Hash: %s",
+            torrent.name,
             round(torrent.progress * 100, 2),
-
+            format_bytes(torrent.amount_left),
             round(torrent.availability * 100, 2),
             timedelta(seconds=torrent.eta),
-            datetime.fromtimestamp(torrent.last_activity),
             torrent.state_enum,
-            torrent.
-            torrent.hash,
+            torrent.hash[:8],  # Shortened hash for readability
         )
         self.pause.add(torrent.hash)
 
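The `atexit.register(...)` hook added above chains `and` expressions so the close only runs when the `torrent_db` attribute exists, is set, and is still open. A small illustration of the same idiom with `sqlite3` standing in for the peewee handle (class and attribute names here are invented):

```python
import atexit
import sqlite3


class Example:
    def __init__(self) -> None:
        self.torrent_db = sqlite3.connect(":memory:")
        # Each `and` guards the next step, so close() is only reached
        # when the attribute exists and still holds a connection.
        atexit.register(
            lambda: hasattr(self, "torrent_db")
            and self.torrent_db is not None
            and self.torrent_db.close()
        )


example = Example()  # the connection is closed automatically at interpreter exit
```

One side effect worth noting: the lambda closes over `self`, so the registered hook keeps the instance referenced until exit, which is harmless for a manager object that lives for the whole process.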
@@ -5529,45 +6448,48 @@ class FreeSpaceManager(Arr):
         free_space_test = self.current_free_space
         free_space_test -= torrent["amount_left"]
         self.logger.trace(
-            "
+            "Evaluating torrent: %s | Current space: %s | Space after download: %s | Remaining: %s",
             torrent.name,
-            self.current_free_space,
-            free_space_test,
+            format_bytes(self.current_free_space + self._min_free_space_bytes),
+            format_bytes(free_space_test + self._min_free_space_bytes),
+            format_bytes(torrent.amount_left),
         )
         if torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
             self.logger.info(
-                "
+                "Pausing download (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
                 torrent.name,
-                self.current_free_space,
-
+                format_bytes(self.current_free_space + self._min_free_space_bytes),
+                format_bytes(torrent.amount_left),
+                format_bytes(-free_space_test),
             )
             self.add_tags(torrent, ["qBitrr-free_space_paused"])
             self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
             self._process_single_torrent_pause_disk_space(torrent)
         elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test < 0:
             self.logger.info(
-                "
+                "Keeping paused (insufficient space) | Torrent: %s | Available: %s | Needed: %s | Deficit: %s",
                 torrent.name,
-                self.current_free_space,
-
+                format_bytes(self.current_free_space + self._min_free_space_bytes),
+                format_bytes(torrent.amount_left),
+                format_bytes(-free_space_test),
            )
            self.add_tags(torrent, ["qBitrr-free_space_paused"])
            self.remove_tags(torrent, ["qBitrr-allowed_seeding"])
        elif torrent.state_enum != TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
            self.logger.info(
-                "
+                "Continuing download (sufficient space) | Torrent: %s | Available: %s | Space after: %s",
                torrent.name,
-                self.current_free_space,
-                free_space_test,
+                format_bytes(self.current_free_space + self._min_free_space_bytes),
+                format_bytes(free_space_test + self._min_free_space_bytes),
            )
            self.current_free_space = free_space_test
            self.remove_tags(torrent, ["qBitrr-free_space_paused"])
        elif torrent.state_enum == TorrentStates.PAUSED_DOWNLOAD and free_space_test > 0:
            self.logger.info(
-                "
+                "Resuming download (space available) | Torrent: %s | Available: %s | Space after: %s",
                torrent.name,
-                self.current_free_space,
-                free_space_test,
+                format_bytes(self.current_free_space + self._min_free_space_bytes),
+                format_bytes(free_space_test + self._min_free_space_bytes),
            )
            self.current_free_space = free_space_test
            self.remove_tags(torrent, ["qBitrr-free_space_paused"])
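All of the reworked log lines above print human-readable sizes and add `_min_free_space_bytes` back when reporting "Available", because `current_free_space` is tracked as headroom above the configured threshold rather than raw free disk space. A quick worked example with invented numbers and a simplified local stand-in for the `format_bytes` helper from qBitrr/utils.py:

```python
def format_bytes(num: float) -> str:
    # Simplified stand-in for qBitrr.utils.format_bytes, for illustration only.
    for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
        if abs(num) < 1024:
            return f"{num:.2f} {unit}"
        num /= 1024
    return f"{num:.2f} PiB"


min_free_space_bytes = 50 * 1024**3   # hypothetical 50 GiB threshold
disk_free = 60 * 1024**3              # what shutil.disk_usage(...).free might report

current_free_space = disk_free - min_free_space_bytes  # 10 GiB of usable headroom
amount_left = 25 * 1024**3                              # torrent still needs 25 GiB
free_space_test = current_free_space - amount_left      # negative -> pause the torrent

print(format_bytes(current_free_space + min_free_space_bytes))  # "60.00 GiB" shown as Available
print(format_bytes(-free_space_test))                           # "15.00 GiB" logged as the deficit
```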
@@ -5575,10 +6497,9 @@ class FreeSpaceManager(Arr):
             torrent, "qBitrr-free_space_paused"
         ):
             self.logger.info(
-                "
-                "qBitrr-free_space_paused",
+                "Torrent completed, removing free space tag | Torrent: %s | Available: %s",
                 torrent.name,
-                self.current_free_space,
+                format_bytes(self.current_free_space + self._min_free_space_bytes),
             )
             self.remove_tags(torrent, ["qBitrr-free_space_paused"])
 
@@ -5622,7 +6543,14 @@ class FreeSpaceManager(Arr):
         self.current_free_space = (
             shutil.disk_usage(self.completed_folder).free - self._min_free_space_bytes
         )
-        self.logger.trace(
+        self.logger.trace(
+            "Processing torrents | Available: %s | Threshold: %s | Usable: %s | Torrents: %d | Paused for space: %d",
+            format_bytes(self.current_free_space + self._min_free_space_bytes),
+            format_bytes(self._min_free_space_bytes),
+            format_bytes(self.current_free_space),
+            self.category_torrent_count,
+            self.free_space_tagged_count,
+        )
         sorted_torrents = sorted(torrents, key=lambda t: t["priority"])
         for torrent in sorted_torrents:
             with contextlib.suppress(qbittorrentapi.NotFound404Error):
@@ -5678,7 +6606,7 @@ class ArrManager:
 
     def build_arr_instances(self):
         for key in CONFIG.sections():
-            if search := re.match("(rad|son|anim)arr.*", key, re.IGNORECASE):
+            if search := re.match("(rad|son|anim|lid)arr.*", key, re.IGNORECASE):
                 name = search.group(0)
                 match = search.group(1)
                 if match.lower() == "son":
@@ -5687,6 +6615,8 @@ class ArrManager:
                     call_cls = SonarrAPI
                 elif match.lower() == "rad":
                     call_cls = RadarrAPI
+                elif match.lower() == "lid":
+                    call_cls = LidarrAPI
                 else:
                     call_cls = None
                 try:
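With the pattern widened to `(rad|son|anim|lid)arr.*`, any config section whose name starts with one of those prefixes (matched case-insensitively) is now turned into an Arr instance, and the captured prefix selects the pyarr client class in the `if`/`elif` chain above. A short sketch with invented section names:

```python
import re

# Invented section names; the real ones come from the user's config file.
for key in ("Sonarr-TV", "Radarr-4K", "Lidarr-Music", "Animarr-Anime", "Torrents"):
    if search := re.match("(rad|son|anim|lid)arr.*", key, re.IGNORECASE):
        print(f"{key!r} -> prefix {search.group(1).lower()!r}")
    else:
        print(f"{key!r} -> not an Arr section")
# 'Sonarr-TV' -> 'son', 'Radarr-4K' -> 'rad', 'Lidarr-Music' -> 'lid',
# 'Animarr-Anime' -> 'anim', 'Torrents' -> skipped
```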