qBitrr2 5.4.4-py3-none-any.whl → 5.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- qBitrr/arss.py +457 -127
- qBitrr/bundled_data.py +2 -2
- qBitrr/config_version.py +144 -0
- qBitrr/db_lock.py +189 -0
- qBitrr/db_recovery.py +202 -0
- qBitrr/gen_config.py +285 -3
- qBitrr/main.py +171 -5
- qBitrr/search_activity_store.py +6 -2
- qBitrr/static/assets/ArrView.js +1 -1
- qBitrr/static/assets/ArrView.js.map +1 -1
- qBitrr/static/assets/ConfigView.js +4 -3
- qBitrr/static/assets/ConfigView.js.map +1 -1
- qBitrr/static/assets/LogsView.js +17 -39
- qBitrr/static/assets/LogsView.js.map +1 -1
- qBitrr/static/assets/ProcessesView.js +1 -1
- qBitrr/static/assets/ProcessesView.js.map +1 -1
- qBitrr/static/assets/app.css +1 -1
- qBitrr/static/assets/app.js +1 -9
- qBitrr/static/assets/app.js.map +1 -1
- qBitrr/static/assets/react-select.esm.js +1 -8
- qBitrr/static/assets/react-select.esm.js.map +1 -1
- qBitrr/static/assets/table.js +2 -20
- qBitrr/static/assets/table.js.map +1 -1
- qBitrr/static/assets/vendor.js +1 -25
- qBitrr/static/assets/vendor.js.map +1 -1
- qBitrr/static/sw.js +5 -0
- qBitrr/tables.py +27 -0
- qBitrr/webui.py +523 -23
- {qbitrr2-5.4.4.dist-info → qbitrr2-5.5.0.dist-info}/METADATA +88 -13
- qbitrr2-5.5.0.dist-info/RECORD +63 -0
- qbitrr2-5.4.4.dist-info/RECORD +0 -61
- {qbitrr2-5.4.4.dist-info → qbitrr2-5.5.0.dist-info}/WHEEL +0 -0
- {qbitrr2-5.4.4.dist-info → qbitrr2-5.5.0.dist-info}/entry_points.txt +0 -0
- {qbitrr2-5.4.4.dist-info → qbitrr2-5.5.0.dist-info}/licenses/LICENSE +0 -0
- {qbitrr2-5.4.4.dist-info → qbitrr2-5.5.0.dist-info}/top_level.txt +0 -0
qBitrr/arss.py
CHANGED
@@ -44,6 +44,7 @@ from qBitrr.config import (
     SEARCH_ONLY,
     TAGLESS,
 )
+from qBitrr.db_lock import with_database_retry
 from qBitrr.errors import (
     DelayLoopException,
     NoConnectionrException,
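The retry helper imported here lives in the new qBitrr/db_lock.py (+189 lines in this release, not shown in this diff). A minimal sketch of what such a wrapper might look like — the signature matches how arss.py calls it below, but the retry policy and internals are assumptions, not the shipped code:

    # Hypothetical stand-in for qBitrr.db_lock.with_database_retry (real module not shown here).
    import logging
    import time
    from typing import Callable, Optional, TypeVar

    import peewee

    T = TypeVar("T")


    def with_database_retry(
        func: Callable[[], T],
        logger: Optional[logging.Logger] = None,
        attempts: int = 5,          # assumed defaults
        base_delay: float = 0.5,
    ) -> T:
        """Call ``func``, retrying transient SQLite lock/I-O errors with linear backoff."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except (peewee.OperationalError, peewee.DatabaseError) as exc:
                if attempt == attempts:
                    raise
                if logger:
                    logger.warning(
                        "Database call failed (%s); retry %d/%d", exc, attempt, attempts
                    )
                time.sleep(base_delay * attempt)

arss.py passes zero-argument lambdas (for example `lambda: self.db.connect()`), so a plain callable-in, value-out wrapper like this is all the call sites require.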
@@ -147,25 +148,35 @@ class Arr:
         run_logs(self.logger, self._name)

         if not QBIT_DISABLED:
-            categories = self.manager.qbit_manager.client.torrent_categories.categories
             try:
-
-
-
-
-
-
-
+                categories = self.manager.qbit_manager.client.torrent_categories.categories
+                try:
+                    categ = categories[self.category]
+                    path = categ["savePath"]
+                    if path:
+                        self.logger.trace("Category exists with save path [%s]", path)
+                        self.completed_folder = pathlib.Path(path)
+                    else:
+                        self.logger.trace("Category exists without save path")
+                        self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
+                            self.category
+                        )
+                except KeyError:
                     self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
                         self.category
                     )
-
+                    self.manager.qbit_manager.client.torrent_categories.create_category(
+                        self.category, save_path=self.completed_folder
+                    )
+            except Exception as e:
+                self.logger.warning(
+                    "Could not connect to qBittorrent during initialization for %s: %s. Will retry when process starts.",
+                    self._name,
+                    str(e).split("\n")[0] if "\n" in str(e) else str(e),  # First line only
+                )
                 self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(
                     self.category
                 )
-            self.manager.qbit_manager.client.torrent_categories.create_category(
-                self.category, save_path=self.completed_folder
-            )
         else:
             self.completed_folder = pathlib.Path(COMPLETED_DOWNLOAD_FOLDER).joinpath(self.category)

@@ -419,26 +430,45 @@ class Arr:
         except Exception:
             self.logger.debug("Failed to get version")

-
-
-
-        if not isinstance(self.main_quality_profiles, list):
-            self.main_quality_profiles = [self.main_quality_profiles]
-        self.temp_quality_profiles = CONFIG.get(
-            f"{self._name}.EntrySearch.TempQualityProfile", fallback=None
+        # Try new QualityProfileMappings format first (dict), then fall back to old format (lists)
+        self.quality_profile_mappings = CONFIG.get(
+            f"{self._name}.EntrySearch.QualityProfileMappings", fallback={}
         )
-
-
+
+        if not self.quality_profile_mappings:
+            # Old format: separate lists - convert to dict
+            main_profiles = CONFIG.get(
+                f"{self._name}.EntrySearch.MainQualityProfile", fallback=None
+            )
+            if not isinstance(main_profiles, list):
+                main_profiles = [main_profiles] if main_profiles else []
+            temp_profiles = CONFIG.get(
+                f"{self._name}.EntrySearch.TempQualityProfile", fallback=None
+            )
+            if not isinstance(temp_profiles, list):
+                temp_profiles = [temp_profiles] if temp_profiles else []
+
+            # Convert lists to dictionary
+            if main_profiles and temp_profiles and len(main_profiles) == len(temp_profiles):
+                self.quality_profile_mappings = dict(zip(main_profiles, temp_profiles))

         self.use_temp_for_missing = (
             CONFIG.get(f"{name}.EntrySearch.UseTempForMissing", fallback=False)
-            and self.
-            and self.temp_quality_profiles
+            and self.quality_profile_mappings
         )
         self.keep_temp_profile = CONFIG.get(f"{name}.EntrySearch.KeepTempProfile", fallback=False)

         if self.use_temp_for_missing:
+            self.logger.info(
+                "Temp quality profile mode enabled: Mappings=%s, Keep temp=%s",
+                self.quality_profile_mappings,
+                self.keep_temp_profile,
+            )
             self.temp_quality_profile_ids = self.parse_quality_profiles()
+            self.logger.info(
+                "Parsed quality profile mappings: %s",
+                {f"{k}→{v}": f"(main→temp)" for k, v in self.temp_quality_profile_ids.items()},
+            )

         # Cache for valid quality profile IDs to avoid repeated API calls and warnings
         self._quality_profile_cache: dict[int, dict] = {}
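The fallback path above folds the legacy MainQualityProfile/TempQualityProfile lists into the new QualityProfileMappings dictionary with dict(zip(...)). A small standalone illustration of that conversion (the profile names are made-up examples, not values from qBitrr):

    # Legacy config shape: two parallel lists of quality profile names.
    main_profiles = ["HD-1080p", "Ultra-HD"]          # hypothetical names
    temp_profiles = ["Any", "HD-720p/1080p"]

    # New config shape: one mapping of main profile -> temp profile.
    quality_profile_mappings = dict(zip(main_profiles, temp_profiles))
    print(quality_profile_mappings)
    # {'HD-1080p': 'Any', 'Ultra-HD': 'HD-720p/1080p'}

Note that, as in the code above, mismatched list lengths leave the mapping empty, which in turn disables UseTempForMissing.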
@@ -698,6 +728,9 @@ class Arr:
             return True
         except requests.RequestException:
             self.logger.warning("Could not connect to %s", self.uri)
+            # Clear the cache to ensure we retry on next check
+            if 1 in self.expiring_bool.container:
+                self.expiring_bool.remove(1)
             return False

     @staticmethod
@@ -1573,10 +1606,25 @@ class Arr:

     def _search_todays(self, condition):
         if self.prioritize_todays_release:
+            # Order searches by priority: Missing > CustomFormat > Quality > Upgrade
+            from peewee import Case
+
+            reason_priority = Case(
+                None,
+                (
+                    (self.model_file.Reason == "Missing", 1),
+                    (self.model_file.Reason == "CustomFormat", 2),
+                    (self.model_file.Reason == "Quality", 3),
+                    (self.model_file.Reason == "Upgrade", 4),
+                ),
+                5,  # Default priority for other reasons
+            )
+
             for entry in (
                 self.model_file.select()
                 .where(condition)
                 .order_by(
+                    reason_priority.asc(),  # Primary: order by reason priority
                     self.model_file.SeriesTitle,
                     self.model_file.SeasonNumber.desc(),
                     self.model_file.AirDateUtc.desc(),
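Every search-ordering change in this release builds the same peewee Case expression so SQLite sorts rows by reason priority before the existing tie-breakers. A self-contained sketch against an in-memory database (the model is simplified; qBitrr's real models live in tables.py):

    from peewee import Case, CharField, Model, SqliteDatabase

    db = SqliteDatabase(":memory:")


    class EpisodeFile(Model):
        Title = CharField()
        Reason = CharField(null=True)

        class Meta:
            database = db


    db.connect()
    db.create_tables([EpisodeFile])
    EpisodeFile.insert_many(
        [
            {"Title": "B", "Reason": "Upgrade"},
            {"Title": "A", "Reason": "Missing"},
            {"Title": "C", "Reason": "Quality"},
        ]
    ).execute()

    # Missing > CustomFormat > Quality > Upgrade; anything else sorts last.
    reason_priority = Case(
        None,
        (
            (EpisodeFile.Reason == "Missing", 1),
            (EpisodeFile.Reason == "CustomFormat", 2),
            (EpisodeFile.Reason == "Quality", 3),
            (EpisodeFile.Reason == "Upgrade", 4),
        ),
        5,
    )

    for row in EpisodeFile.select().order_by(reason_priority.asc(), EpisodeFile.Title):
        print(row.Title, row.Reason)  # A Missing, C Quality, B Upgrade

The generated SQL is an ORDER BY CASE WHEN ... THEN ... ELSE 5 END, so the prioritisation happens inside SQLite rather than in Python.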
@@ -1826,12 +1874,36 @@ class Arr:
             condition = self.series_file_model.Searched == False
         else:
             condition = self.series_file_model.Upgrade == False
-
-
-
-
-
-
+
+        # Collect series entries with their priority based on episode reasons
+        # Missing > CustomFormat > Quality > Upgrade
+        series_entries = []
+        for entry_ in self.series_file_model.select().where(condition).execute():
+            # Get the highest priority reason from this series' episodes
+            reason_priority_map = {
+                "Missing": 1,
+                "CustomFormat": 2,
+                "Quality": 3,
+                "Upgrade": 4,
+            }
+            # Find the minimum priority (highest importance) reason for this series
+            min_priority = 5  # Default
+            episode_reasons = (
+                self.model_file.select(self.model_file.Reason)
+                .where(self.model_file.SeriesId == entry_.EntryId)
+                .execute()
+            )
+            for ep in episode_reasons:
+                if ep.Reason:
+                    priority = reason_priority_map.get(ep.Reason, 5)
+                    min_priority = min(min_priority, priority)
+
+            series_entries.append((entry_, min_priority))
+
+        # Sort by priority, then by EntryId
+        series_entries.sort(key=lambda x: (x[1], x[0].EntryId))
+
+        for entry_, _ in series_entries:
             self.logger.trace("Adding %s to search list", entry_.Title)
             entries.append([entry_, False, False])
         return entries
@@ -1883,10 +1955,26 @@ class Arr:
                 self.model_file.AirDateUtc
                 <= datetime(month=12, day=31, year=int(self.search_current_year)).date()
             )
+            # Order searches by priority: Missing > CustomFormat > Quality > Upgrade
+            # Use CASE to assign priority values to each reason
+            from peewee import Case
+
+            reason_priority = Case(
+                None,
+                (
+                    (self.model_file.Reason == "Missing", 1),
+                    (self.model_file.Reason == "CustomFormat", 2),
+                    (self.model_file.Reason == "Quality", 3),
+                    (self.model_file.Reason == "Upgrade", 4),
+                ),
+                5,  # Default priority for other reasons
+            )
+
             for entry in (
                 self.model_file.select()
                 .where(condition)
                 .order_by(
+                    reason_priority.asc(),  # Primary: order by reason priority
                     self.model_file.SeriesTitle,
                     self.model_file.SeasonNumber.desc(),
                     self.model_file.AirDateUtc.desc(),
@@ -1929,10 +2017,29 @@ class Arr:
             condition &= self.model_file.Searched == False
         if self.search_by_year:
             condition &= self.model_file.Year == self.search_current_year
+
+        # Order searches by priority: Missing > CustomFormat > Quality > Upgrade
+        # Use CASE to assign priority values to each reason
+        from peewee import Case
+
+        reason_priority = Case(
+            None,
+            (
+                (self.model_file.Reason == "Missing", 1),
+                (self.model_file.Reason == "CustomFormat", 2),
+                (self.model_file.Reason == "Quality", 3),
+                (self.model_file.Reason == "Upgrade", 4),
+            ),
+            5,  # Default priority for other reasons
+        )
+
         for entry in (
             self.model_file.select()
             .where(condition)
-            .order_by(
+            .order_by(
+                reason_priority.asc(),  # Primary: order by reason priority
+                self.model_file.MovieFileId.asc(),
+            )
             .execute()
         ):
             entries.append([entry, False, False])
@@ -1959,10 +2066,29 @@ class Arr:
         else:
             condition &= self.model_file.AlbumFileId == 0
         condition &= self.model_file.Searched == False
+
+        # Order searches by priority: Missing > CustomFormat > Quality > Upgrade
+        # Use CASE to assign priority values to each reason
+        from peewee import Case
+
+        reason_priority = Case(
+            None,
+            (
+                (self.model_file.Reason == "Missing", 1),
+                (self.model_file.Reason == "CustomFormat", 2),
+                (self.model_file.Reason == "Quality", 3),
+                (self.model_file.Reason == "Upgrade", 4),
+            ),
+            5,  # Default priority for other reasons
+        )
+
         for entry in (
             self.model_file.select()
             .where(condition)
-            .order_by(
+            .order_by(
+                reason_priority.asc(),  # Primary: order by reason priority
+                self.model_file.AlbumFileId.asc(),
+            )
             .execute()
         ):
             entries.append([entry, False, False])
@@ -2616,7 +2742,7 @@ class Arr:
                 else False
             )
             if (
-                episode
+                episode.get("hasFile", False)
                 and not (self.quality_unmet_search and QualityUnmet)
                 and not (
                     self.custom_format_unmet_search and customFormat < minCustomFormat
@@ -2630,45 +2756,56 @@ class Arr:
            if self.use_temp_for_missing:
                data = None
                quality_profile_id = db_entry.get("qualityProfileId")
+                # Only apply temp profiles for truly missing content (no file)
+                # Do NOT apply for quality/custom format unmet or upgrade searches
+                has_file = episode.get("hasFile", False)
                self.logger.trace(
-                    "Temp quality profile
+                    "Temp quality profile check for '%s': searched=%s, has_file=%s, current_profile_id=%s, keep_temp=%s",
+                    db_entry.get("title", "Unknown"),
                    searched,
+                    has_file,
                    quality_profile_id,
+                    self.keep_temp_profile,
                )
                if (
                    searched
                    and quality_profile_id in self.temp_quality_profile_ids.values()
                    and not self.keep_temp_profile
                ):
-
-
-
-
-
-
-
-
-                    "
-
-
-
-                        db_entry["qualityProfileId"]
-                    )
-                    ],
+                    new_profile_id = list(self.temp_quality_profile_ids.keys())[
+                        list(self.temp_quality_profile_ids.values()).index(
+                            quality_profile_id
+                        )
+                    ]
+                    data: JsonObject = {"qualityProfileId": new_profile_id}
+                    self.logger.info(
+                        "Upgrading quality profile for '%s': %s (ID:%s) → main profile (ID:%s) [Episode searched, reverting to main]",
+                        db_entry.get("title", "Unknown"),
+                        quality_profile_id,
+                        quality_profile_id,
+                        new_profile_id,
                    )
                elif (
                    not searched
+                    and not has_file
                    and quality_profile_id in self.temp_quality_profile_ids.keys()
                ):
-
-
-
-                    ]
-
-
-
-
-
+                    new_profile_id = self.temp_quality_profile_ids[quality_profile_id]
+                    data: JsonObject = {"qualityProfileId": new_profile_id}
+                    self.logger.info(
+                        "Downgrading quality profile for '%s': main profile (ID:%s) → temp profile (ID:%s) [Episode not searched yet]",
+                        db_entry.get("title", "Unknown"),
+                        quality_profile_id,
+                        new_profile_id,
+                    )
+                else:
+                    self.logger.trace(
+                        "No quality profile change for '%s': searched=%s, profile_id=%s (in_temps=%s, in_mains=%s)",
+                        db_entry.get("title", "Unknown"),
+                        searched,
+                        quality_profile_id,
+                        quality_profile_id in self.temp_quality_profile_ids.values(),
+                        quality_profile_id in self.temp_quality_profile_ids.keys(),
                    )
                if data:
                    while True:
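One note on the revert branch above: temp_quality_profile_ids maps main profile ID → temp profile ID, so finding the main profile for a known temp profile is done with list(keys())[list(values()).index(...)]. A tiny sketch of that lookup and of an equivalent precomputed inverse (illustrative IDs; assumes temp IDs are unique across mappings):

    # Mirrors the shape used in arss.py: {main_profile_id: temp_profile_id}
    temp_quality_profile_ids = {10: 2, 11: 3}  # example IDs only
    quality_profile_id = 3                     # current (temp) profile of the entry

    # What the code above does:
    main_id = list(temp_quality_profile_ids.keys())[
        list(temp_quality_profile_ids.values()).index(quality_profile_id)
    ]

    # Equivalent one-time inverse mapping:
    main_for_temp = {temp: main for main, temp in temp_quality_profile_ids.items()}
    assert main_for_temp[quality_profile_id] == main_id == 11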
@@ -2705,7 +2842,7 @@ class Arr:
            QualityMet = not QualityUnmet if db_entry["hasFile"] else False
            customFormatMet = customFormat >= minCustomFormat

-            if not episode
+            if not episode.get("hasFile", False):
                # Episode is missing a file - always mark as Missing
                reason = "Missing"
            elif self.quality_unmet_search and QualityUnmet:
@@ -2910,12 +3047,27 @@ class Arr:
            Title = seriesMetadata.get("title")
            Monitored = db_entry["monitored"]

+            # Get quality profile info
+            qualityProfileName = None
+            if quality_profile_id:
+                try:
+                    if quality_profile_id not in self._quality_profile_cache:
+                        profile = self.client.get_quality_profile(quality_profile_id)
+                        self._quality_profile_cache[quality_profile_id] = profile
+                    qualityProfileName = self._quality_profile_cache[
+                        quality_profile_id
+                    ].get("name")
+                except Exception:
+                    pass
+
            to_update = {
                self.series_file_model.Monitored: Monitored,
                self.series_file_model.Title: Title,
                self.series_file_model.Searched: searched,
                self.series_file_model.Upgrade: False,
                self.series_file_model.MinCustomFormatScore: minCustomFormat,
+                self.series_file_model.QualityProfileId: quality_profile_id,
+                self.series_file_model.QualityProfileName: qualityProfileName,
            }

            self.logger.debug(
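The fetch-then-cache pattern around self._quality_profile_cache repeats below for movies and for Lidarr albums. A sketch of the same logic pulled into a standalone helper — a hypothetical refactor, not something the release actually does; client and cache stand in for self.client and self._quality_profile_cache:

    def quality_profile_name(client, cache, profile_id):
        """Return the profile name for ``profile_id``, caching the API lookup in ``cache``."""
        if not profile_id:
            return None
        try:
            if profile_id not in cache:
                cache[profile_id] = client.get_quality_profile(profile_id)
            return cache[profile_id].get("name")
        except Exception:
            # Mirror the inline code: any API failure simply leaves the name unset.
            return None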
@@ -2932,6 +3084,8 @@ class Arr:
                Monitored=Monitored,
                Upgrade=False,
                MinCustomFormatScore=minCustomFormat,
+                QualityProfileId=quality_profile_id,
+                QualityProfileName=qualityProfileName,
            ).on_conflict(
                conflict_target=[self.series_file_model.EntryId], update=to_update
            )
@@ -3007,6 +3161,9 @@ class Arr:

            if self.use_temp_for_missing:
                quality_profile_id = db_entry.get("qualityProfileId")
+                # Only apply temp profiles for truly missing content (no file)
+                # Do NOT apply for quality/custom format unmet or upgrade searches
+                has_file = db_entry.get("hasFile", False)
                if (
                    searched
                    and quality_profile_id in self.temp_quality_profile_ids.values()

@@ -3026,6 +3183,7 @@ class Arr:
                    )
                elif (
                    not searched
+                    and not has_file
                    and quality_profile_id in self.temp_quality_profile_ids.keys()
                ):
                    db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
@@ -3057,6 +3215,20 @@ class Arr:
            qualityMet = not QualityUnmet if db_entry["hasFile"] else False
            customFormatMet = customFormat >= minCustomFormat

+            # Get quality profile info
+            qualityProfileId = db_entry.get("qualityProfileId")
+            qualityProfileName = None
+            if qualityProfileId:
+                try:
+                    if qualityProfileId not in self._quality_profile_cache:
+                        profile = self.client.get_quality_profile(qualityProfileId)
+                        self._quality_profile_cache[qualityProfileId] = profile
+                    qualityProfileName = self._quality_profile_cache[qualityProfileId].get(
+                        "name"
+                    )
+                except Exception:
+                    pass
+
            if not db_entry["hasFile"]:
                # Movie is missing a file - always mark as Missing
                reason = "Missing"
@@ -3082,6 +3254,8 @@ class Arr:
                self.model_file.CustomFormatScore: customFormat,
                self.model_file.CustomFormatMet: customFormatMet,
                self.model_file.Reason: reason,
+                self.model_file.QualityProfileId: qualityProfileId,
+                self.model_file.QualityProfileName: qualityProfileName,
            }

            if request:

@@ -3111,6 +3285,8 @@ class Arr:
                CustomFormatScore=customFormat,
                CustomFormatMet=customFormatMet,
                Reason=reason,
+                QualityProfileId=qualityProfileId,
+                QualityProfileName=qualityProfileName,
            ).on_conflict(conflict_target=[self.model_file.EntryId], update=to_update)
            db_commands.execute()
        else:
@@ -3302,48 +3478,8 @@ class Arr:
                self.model_queue.EntryId == db_entry["id"]
            ).execute()

-
-
-            if (
-                searched
-                and quality_profile_id in self.temp_quality_profile_ids.values()
-                and not self.keep_temp_profile
-            ):
-                db_entry["qualityProfileId"] = list(
-                    self.temp_quality_profile_ids.keys()
-                )[
-                    list(self.temp_quality_profile_ids.values()).index(
-                        quality_profile_id
-                    )
-                ]
-                self.logger.debug(
-                    "Updating quality profile for %s to %s",
-                    db_entry["title"],
-                    db_entry["qualityProfileId"],
-                )
-            elif (
-                not searched
-                and quality_profile_id in self.temp_quality_profile_ids.keys()
-            ):
-                db_entry["qualityProfileId"] = self.temp_quality_profile_ids[
-                    quality_profile_id
-                ]
-                self.logger.debug(
-                    "Updating quality profile for %s to %s",
-                    db_entry["title"],
-                    db_entry["qualityProfileId"],
-                )
-            while True:
-                try:
-                    self.client.upd_album(db_entry)
-                    break
-                except (
-                    requests.exceptions.ChunkedEncodingError,
-                    requests.exceptions.ContentDecodingError,
-                    requests.exceptions.ConnectionError,
-                    JSONDecodeError,
-                ):
-                    continue
+            # Note: Lidarr quality profiles are set at artist level, not album level.
+            # Temp profile logic for Lidarr is handled in artist processing below.

            title = db_entry.get("title", "Unknown Album")
            monitored = db_entry.get("monitored", False)
@@ -3367,6 +3503,26 @@ class Arr:
            qualityMet = not QualityUnmet if hasAllTracks else False
            customFormatMet = customFormat >= minCustomFormat

+            # Get quality profile info from artist (Lidarr albums inherit from artist)
+            qualityProfileId = None
+            qualityProfileName = None
+            try:
+                artist_id = db_entry.get("artistId")
+                if artist_id:
+                    # Try to get from already-fetched artist data if available
+                    artist_data = self.client.get_artist(artist_id)
+                    qualityProfileId = artist_data.get("qualityProfileId")
+                    if qualityProfileId:
+                        # Fetch quality profile from cache or API
+                        if qualityProfileId not in self._quality_profile_cache:
+                            profile = self.client.get_quality_profile(qualityProfileId)
+                            self._quality_profile_cache[qualityProfileId] = profile
+                        qualityProfileName = self._quality_profile_cache[
+                            qualityProfileId
+                        ].get("name")
+            except Exception:
+                pass
+
            if not hasAllTracks:
                # Album is missing tracks - always mark as Missing
                reason = "Missing"
@@ -3396,6 +3552,8 @@ class Arr:
                self.model_file.ArtistId: artistId,
                self.model_file.ForeignAlbumId: foreignAlbumId,
                self.model_file.ReleaseDate: releaseDate,
+                self.model_file.QualityProfileId: qualityProfileId,
+                self.model_file.QualityProfileName: qualityProfileName,
            }

            if request:

@@ -3428,6 +3586,8 @@ class Arr:
                CustomFormatScore=customFormat,
                CustomFormatMet=customFormatMet,
                Reason=reason,
+                QualityProfileId=qualityProfileId,
+                QualityProfileName=qualityProfileName,
            ).on_conflict(conflict_target=[self.model_file.EntryId], update=to_update)
            db_commands.execute()

@@ -3543,6 +3703,46 @@ class Arr:
            # Artist is considered searched if it has albums and at least some have files
            searched = albumCount > 0 and sizeOnDisk > 0

+            # Temp profile management for Lidarr artists
+            # Quality profiles in Lidarr are set at artist level, not album level
+            if self.use_temp_for_missing and quality_profile_id:
+                if (
+                    searched
+                    and quality_profile_id in self.temp_quality_profile_ids.values()
+                    and not self.keep_temp_profile
+                ):
+                    # Artist has files, switch from temp back to main profile
+                    main_profile_id = list(self.temp_quality_profile_ids.keys())[
+                        list(self.temp_quality_profile_ids.values()).index(
+                            quality_profile_id
+                        )
+                    ]
+                    artistMetadata["qualityProfileId"] = main_profile_id
+                    self.client.upd_artist(artistMetadata)
+                    quality_profile_id = main_profile_id
+                    self.logger.debug(
+                        "Upgrading artist '%s' from temp profile (ID:%s) to main profile (ID:%s) [Has files]",
+                        artistMetadata.get("artistName", "Unknown"),
+                        quality_profile_id,
+                        main_profile_id,
+                    )
+                elif (
+                    not searched
+                    and sizeOnDisk == 0
+                    and quality_profile_id in self.temp_quality_profile_ids.keys()
+                ):
+                    # Artist has no files yet, apply temp profile
+                    temp_profile_id = self.temp_quality_profile_ids[quality_profile_id]
+                    artistMetadata["qualityProfileId"] = temp_profile_id
+                    self.client.upd_artist(artistMetadata)
+                    quality_profile_id = temp_profile_id
+                    self.logger.debug(
+                        "Downgrading artist '%s' from main profile (ID:%s) to temp profile (ID:%s) [No files yet]",
+                        artistMetadata.get("artistName", "Unknown"),
+                        quality_profile_id,
+                        temp_profile_id,
+                    )
+
            Title = artistMetadata.get("artistName")
            Monitored = db_entry["monitored"]

@@ -3625,7 +3825,7 @@ class Arr:
                ):
                    continue
            except PyarrResourceNotFound as e:
-                self.logger.error("Connection Error: "
+                self.logger.error("Connection Error: %s", str(e))
                raise DelayLoopException(length=300, type=self._name)
        return res

@@ -4164,6 +4364,32 @@ class Arr:
                    raise DelayLoopException(length=NO_INTERNET_SLEEP_TIMER, type="internet")
                if self.manager.qbit_manager.should_delay_torrent_scan:
                    raise DelayLoopException(length=NO_INTERNET_SLEEP_TIMER, type="delay")
+
+                # Periodic database health check (every 10th iteration)
+                if not hasattr(self, "_health_check_counter"):
+                    self._health_check_counter = 0
+
+                self._health_check_counter += 1
+                if self._health_check_counter >= 10:
+                    from qBitrr.db_lock import check_database_health
+                    from qBitrr.home_path import APPDATA_FOLDER
+
+                    db_path = APPDATA_FOLDER / "qbitrr.db"
+                    healthy, msg = check_database_health(db_path, self.logger)
+
+                    if not healthy:
+                        self.logger.error("Database health check failed: %s", msg)
+                        self.logger.warning("Attempting database recovery...")
+                        try:
+                            self._recover_database()
+                        except Exception as recovery_error:
+                            self.logger.error(
+                                "Database recovery failed: %s. Continuing with caution...",
+                                recovery_error,
+                            )
+
+                    self._health_check_counter = 0
+
                self.api_calls()
                self.refresh_download_queue()
                for torrent in torrents:
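check_database_health is another export of the new db_lock.py and is not shown in this diff. A minimal sketch of a (path, logger) → (healthy, message) check built on SQLite's quick_check pragma — an assumption about the real implementation, offered only to make the call above concrete:

    import pathlib
    import sqlite3


    def check_database_health(db_path: pathlib.Path, logger) -> tuple[bool, str]:
        """Return (healthy, message) for the SQLite file at ``db_path``."""
        if not db_path.exists():
            return True, "database not created yet"
        try:
            conn = sqlite3.connect(str(db_path), timeout=15)
            try:
                result = conn.execute("PRAGMA quick_check").fetchone()[0]
            finally:
                conn.close()
        except sqlite3.Error as exc:
            logger.debug("Health check query failed: %s", exc)
            return False, str(exc)
        return result == "ok", result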
@@ -4193,6 +4419,43 @@ class Arr:
            except DelayLoopException:
                raise

+    def _recover_database(self):
+        """
+        Attempt automatic database recovery when health check fails.
+
+        This method implements a progressive recovery strategy:
+        1. Try WAL checkpoint (least invasive)
+        2. Try full database repair if checkpoint fails
+        3. Log critical error if all recovery methods fail
+        """
+        from qBitrr.db_recovery import DatabaseRecoveryError, checkpoint_wal, repair_database
+        from qBitrr.home_path import APPDATA_FOLDER
+
+        db_path = APPDATA_FOLDER / "qbitrr.db"
+
+        # Step 1: Try WAL checkpoint (least invasive)
+        self.logger.info("Attempting WAL checkpoint...")
+        if checkpoint_wal(db_path, self.logger):
+            self.logger.info("WAL checkpoint successful - database recovered")
+            return
+
+        # Step 2: Try full repair (more invasive)
+        self.logger.warning("WAL checkpoint failed - attempting full database repair...")
+        try:
+            if repair_database(db_path, backup=True, logger_override=self.logger):
+                self.logger.info("Database repair successful")
+                return
+        except DatabaseRecoveryError as e:
+            self.logger.error("Database repair failed: %s", e)
+        except Exception as e:
+            self.logger.error("Unexpected error during database repair: %s", e)
+
+        # Step 3: All recovery methods failed
+        self.logger.critical(
+            "Database recovery failed - database may be corrupted. "
+            "Manual intervention may be required. Continuing with caution..."
+        )
+
    def _process_single_torrent_failed_cat(self, torrent: qbittorrentapi.TorrentDictionary):
        self.logger.notice(
            "Deleting manually failed torrent: "
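checkpoint_wal and repair_database come from the new db_recovery.py (+202 lines), also absent from this diff. A sketch of the least invasive first step — forcing a WAL checkpoint with the standard sqlite3 module; the shipped function may differ:

    import pathlib
    import sqlite3


    def checkpoint_wal(db_path: pathlib.Path, logger) -> bool:
        """Flush pending WAL frames back into the main database file."""
        try:
            conn = sqlite3.connect(str(db_path), timeout=15)
            try:
                # Row is (busy, wal_frames, checkpointed); busy == 0 means it ran to completion.
                busy, _wal_frames, _checkpointed = conn.execute(
                    "PRAGMA wal_checkpoint(TRUNCATE)"
                ).fetchone()
            finally:
                conn.close()
        except sqlite3.Error as exc:
            logger.warning("WAL checkpoint failed: %s", exc)
            return False
        return busy == 0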
@@ -5436,9 +5699,12 @@ class Arr:
                    entry["episodeId"] for entry in self.queue if entry.get("episodeId")
                }
                if self.model_queue:
-
-                    self.model_queue.
-
+                    with_database_retry(
+                        lambda: self.model_queue.delete()
+                        .where(self.model_queue.EntryId.not_in(list(self.queue_file_ids)))
+                        .execute(),
+                        logger=self.logger,
+                    )
            else:
                for entry in self.queue:
                    if r := entry.get("seriesId"):

@@ -5447,9 +5713,12 @@ class Arr:
                    entry["seriesId"] for entry in self.queue if entry.get("seriesId")
                }
                if self.model_queue:
-
-                    self.model_queue.
-
+                    with_database_retry(
+                        lambda: self.model_queue.delete()
+                        .where(self.model_queue.EntryId.not_in(list(self.queue_file_ids)))
+                        .execute(),
+                        logger=self.logger,
+                    )
        elif self.type == "radarr":
            self.requeue_cache = {
                entry["id"]: entry["movieId"] for entry in self.queue if entry.get("movieId")

@@ -5458,9 +5727,12 @@ class Arr:
                entry["movieId"] for entry in self.queue if entry.get("movieId")
            }
            if self.model_queue:
-
-                self.model_queue.
-
+                with_database_retry(
+                    lambda: self.model_queue.delete()
+                    .where(self.model_queue.EntryId.not_in(list(self.queue_file_ids)))
+                    .execute(),
+                    logger=self.logger,
+                )
        elif self.type == "lidarr":
            self.requeue_cache = {
                entry["id"]: entry["albumId"] for entry in self.queue if entry.get("albumId")

@@ -5469,9 +5741,12 @@ class Arr:
                entry["albumId"] for entry in self.queue if entry.get("albumId")
            }
            if self.model_queue:
-
-                self.model_queue.
-
+                with_database_retry(
+                    lambda: self.model_queue.delete()
+                    .where(self.model_queue.EntryId.not_in(list(self.queue_file_ids)))
+                    .execute(),
+                    logger=self.logger,
+                )

        self._update_bad_queue_items()

@@ -5529,17 +5804,26 @@ class Arr:
    def parse_quality_profiles(self) -> dict[int, int]:
        temp_quality_profile_ids: dict[int, int] = {}

+        self.logger.debug(
+            "Parsing quality profile mappings: %s",
+            self.quality_profile_mappings,
+        )
+
        while True:
            try:
                profiles = self.client.get_quality_profile()
+                self.logger.debug("Fetched %d quality profiles from API", len(profiles))
                break
            except (
                requests.exceptions.ChunkedEncodingError,
                requests.exceptions.ContentDecodingError,
                requests.exceptions.ConnectionError,
                JSONDecodeError,
-            ):
+            ) as e:
                # transient network/encoding issues; retry
+                self.logger.warning(
+                    "Transient error fetching quality profiles, retrying: %s", type(e).__name__
+                )
                continue
            except PyarrServerError as e:
                # Server-side error (e.g., Radarr DB disk I/O). Log and wait 5 minutes before retrying.
@@ -5557,17 +5841,48 @@ class Arr:
                profiles = []
                break

-        for
-
+        # Build a lookup dict for profile name -> ID
+        profile_name_to_id = {p["name"]: p["id"] for p in profiles}
+        self.logger.trace("Available profiles: %s", profile_name_to_id)
+
+        # Convert name mappings to ID mappings
+        for main_name, temp_name in self.quality_profile_mappings.items():
+            main_id = profile_name_to_id.get(main_name)
+            temp_id = profile_name_to_id.get(temp_name)
+
+            if main_id is None:
+                self.logger.error(
+                    "Main quality profile '%s' not found in available profiles. Available: %s",
+                    main_name,
+                    list(profile_name_to_id.keys()),
+                )
+            if temp_id is None:
+                self.logger.error(
+                    "Temp quality profile '%s' not found in available profiles. Available: %s",
+                    temp_name,
+                    list(profile_name_to_id.keys()),
+                )
+
+            if main_id is not None and temp_id is not None:
+                temp_quality_profile_ids[main_id] = temp_id
+                self.logger.info(
+                    "Quality profile mapping: '%s' (ID:%d) → '%s' (ID:%d)",
+                    main_name,
+                    main_id,
+                    temp_name,
+                    temp_id,
+                )
+            else:
+                self.logger.warning(
+                    "Skipping quality profile mapping for '%s' → '%s' due to missing profile(s)",
+                    main_name,
+                    temp_name,
+                )

-
-
-
-
-            if p["name"] == pair[1]:
-                pair[1] = p["id"]
-                self.logger.trace("Quality profile %s:%s", p["name"], p["id"])
-            temp_quality_profile_ids[pair[0]] = pair[1]
+        if not temp_quality_profile_ids:
+            self.logger.error(
+                "No valid quality profile mappings created! Check your configuration."
+            )

        return temp_quality_profile_ids

@@ -5595,6 +5910,7 @@ class Arr:
                "foreign_keys": 1,
                "ignore_check_constraints": 0,
                "synchronous": 0,
+                "read_uncommitted": 1,
            },
            timeout=15,
        )

@@ -5603,7 +5919,11 @@ class Arr:
            class Meta:
                database = self.torrent_db

-
+        # Connect with retry logic for transient I/O errors
+        with_database_retry(
+            lambda: self.torrent_db.connect(),
+            logger=self.logger,
+        )
        self.torrent_db.create_tables([Torrents])
        self.torrents = Torrents
        self.search_setup_completed = True

@@ -5619,6 +5939,7 @@ class Arr:
                "foreign_keys": 1,
                "ignore_check_constraints": 0,
                "synchronous": 0,
+                "read_uncommitted": 1,
            },
            timeout=15,
        )

@@ -5635,7 +5956,11 @@ class Arr:
            class Meta:
                database = self.db

-
+        # Connect with retry logic for transient I/O errors
+        with_database_retry(
+            lambda: self.db.connect(),
+            logger=self.logger,
+        )

        if db4:

@@ -5681,6 +6006,7 @@ class Arr:
                "foreign_keys": 1,
                "ignore_check_constraints": 0,
                "synchronous": 0,
+                "read_uncommitted": 1,
            },
            timeout=15,
        )

@@ -5689,7 +6015,11 @@ class Arr:
            class Meta:
                database = self.torrent_db

-
+        # Connect with retry logic for transient I/O errors
+        with_database_retry(
+            lambda: self.torrent_db.connect(),
+            logger=self.logger,
+        )
            self.torrent_db.create_tables([Torrents])
            self.torrents = Torrents
        else:
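For reference, these pragmas are handed straight to peewee's SqliteDatabase. The new read_uncommitted=1 only changes behaviour for shared-cache connections, where it relaxes reader isolation so reads don't block behind an open write transaction — presumably chosen alongside the retry helper to reduce lock contention. A compact illustration of the call shape (path, extra pragmas, and the model field are placeholders):

    from peewee import CharField, Model, SqliteDatabase

    db = SqliteDatabase(
        "example-torrents.db",            # qBitrr uses its own per-Arr database paths
        pragmas={
            "foreign_keys": 1,
            "ignore_check_constraints": 0,
            "synchronous": 0,
            "read_uncommitted": 1,        # new in 5.5.0
        },
        timeout=15,                       # seconds to wait on a locked database
    )


    class Torrents(Model):
        Hash = CharField(null=True)       # placeholder field

        class Meta:
            database = db


    db.connect()
    db.create_tables([Torrents])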