qBitrr2 5.1.0.tar.gz → 5.1.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {qbitrr2-5.1.0/qBitrr2.egg-info → qbitrr2-5.1.1}/PKG-INFO +1 -1
  2. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/pyproject.toml +1 -1
  3. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/arss.py +175 -34
  4. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/bundled_data.py +2 -2
  5. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/main.py +0 -12
  6. qbitrr2-5.1.1/qBitrr/search_activity_store.py +88 -0
  7. qbitrr2-5.1.1/qBitrr/tables.py +72 -0
  8. {qbitrr2-5.1.0 → qbitrr2-5.1.1/qBitrr2.egg-info}/PKG-INFO +1 -1
  9. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/setup.cfg +1 -1
  10. qbitrr2-5.1.0/qBitrr/search_activity_store.py +0 -61
  11. qbitrr2-5.1.0/qBitrr/tables.py +0 -389
  12. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/LICENSE +0 -0
  13. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/MANIFEST.in +0 -0
  14. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/README.md +0 -0
  15. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/config.example.toml +0 -0
  16. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/__init__.py +0 -0
  17. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/auto_update.py +0 -0
  18. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/config.py +0 -0
  19. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/db_lock.py +0 -0
  20. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/env_config.py +0 -0
  21. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/errors.py +0 -0
  22. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/ffprobe.py +0 -0
  23. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/gen_config.py +0 -0
  24. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/home_path.py +0 -0
  25. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/logger.py +0 -0
  26. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/utils.py +0 -0
  27. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/versioning.py +0 -0
  28. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr/webui.py +0 -0
  29. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr2.egg-info/SOURCES.txt +0 -0
  30. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr2.egg-info/dependency_links.txt +0 -0
  31. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr2.egg-info/entry_points.txt +0 -0
  32. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr2.egg-info/requires.txt +0 -0
  33. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/qBitrr2.egg-info/top_level.txt +0 -0
  34. {qbitrr2-5.1.0 → qbitrr2-5.1.1}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: qBitrr2
3
- Version: 5.1.0
3
+ Version: 5.1.1
4
4
  Summary: "A simple Python script to talk to qBittorrent and Arr's"
5
5
  Home-page: https://github.com/Feramance/qBitrr
6
6
  Author: Feramance
@@ -28,7 +28,7 @@ target-version = ['py312']
28
28
 
29
29
  [tool.poetry]
30
30
  name = "pypi-public"
31
- version = "5.1.0"
31
+ version = "5.1.1"
32
32
  description = "A simple script to monitor qBit and communicate with Radarr and Sonarr"
33
33
  authors = ["Drapersniper", "Feramance"]
34
34
  readme = "README.md"
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import atexit
3
4
  import contextlib
4
5
  import itertools
5
6
  import logging
@@ -19,7 +20,7 @@ import qbittorrentapi
19
20
  import qbittorrentapi.exceptions
20
21
  import requests
21
22
  from packaging import version as version_parser
22
- from peewee import Model
23
+ from peewee import Model, SqliteDatabase
23
24
  from pyarr import RadarrAPI, SonarrAPI
24
25
  from pyarr.exceptions import PyarrResourceNotFound, PyarrServerError
25
26
  from pyarr.types import JsonObject
@@ -27,6 +28,7 @@ from qbittorrentapi import TorrentDictionary, TorrentStates
27
28
  from ujson import JSONDecodeError
28
29
 
29
30
  from qBitrr.config import (
31
+ APPDATA_FOLDER,
30
32
  AUTO_PAUSE_RESUME,
31
33
  COMPLETED_DOWNLOAD_FOLDER,
32
34
  CONFIG,
@@ -55,7 +57,15 @@ from qBitrr.search_activity_store import (
55
57
  fetch_search_activities,
56
58
  record_search_activity,
57
59
  )
58
- from qBitrr.tables import TorrentLibrary, create_arr_tables, ensure_table_schema, get_database
60
+ from qBitrr.tables import (
61
+ EpisodeFilesModel,
62
+ EpisodeQueueModel,
63
+ FilesQueued,
64
+ MovieQueueModel,
65
+ MoviesFilesModel,
66
+ SeriesFilesModel,
67
+ TorrentLibrary,
68
+ )
59
69
  from qBitrr.utils import (
60
70
  ExpiringSet,
61
71
  absolute_file_paths,
@@ -293,6 +303,9 @@ class Arr:
293
303
  else:
294
304
  self._delta = -1
295
305
 
306
+ self._app_data_folder = APPDATA_FOLDER
307
+ self.search_db_file = self._app_data_folder.joinpath(f"{self._name}.db")
308
+
296
309
  self.ombi_search_requests = CONFIG.get(
297
310
  f"{name}.EntrySearch.Ombi.SearchOmbiRequests", fallback=False
298
311
  )
@@ -559,9 +572,24 @@ class Arr:
559
572
  self.model_queue: Model | None = None
560
573
  self.persistent_queue: Model | None = None
561
574
  self.torrents: TorrentLibrary | None = None
575
+ self.torrent_db: SqliteDatabase | None = None
576
+ self.db: SqliteDatabase | None = None
562
577
  # Initialize search mode (and torrent tag-emulation DB in TAGLESS)
563
578
  # early and fail fast if it cannot be set up.
564
579
  self.register_search_mode()
580
+ atexit.register(
581
+ lambda: (
582
+ hasattr(self, "db") and self.db and not self.db.is_closed() and self.db.close()
583
+ )
584
+ )
585
+ atexit.register(
586
+ lambda: (
587
+ hasattr(self, "torrent_db")
588
+ and self.torrent_db
589
+ and not self.torrent_db.is_closed()
590
+ and self.torrent_db.close()
591
+ )
592
+ )
565
593
  self.logger.hnotice("Starting %s monitor", self._name)
566
594
 
567
595
  @staticmethod
@@ -4795,46 +4823,139 @@ class Arr:
4795
4823
  if self.search_setup_completed:
4796
4824
  return
4797
4825
 
4798
- include_search_tables = any(
4799
- (
4800
- self.search_missing,
4801
- self.do_upgrade_search,
4802
- self.quality_unmet_search,
4803
- self.custom_format_unmet_search,
4804
- self.ombi_search_requests,
4805
- self.overseerr_requests,
4806
- )
4826
+ db1, db2, db3, db4 = self._get_models()
4827
+
4828
+ if not (
4829
+ self.search_missing
4830
+ or self.do_upgrade_search
4831
+ or self.quality_unmet_search
4832
+ or self.custom_format_unmet_search
4833
+ or self.ombi_search_requests
4834
+ or self.overseerr_requests
4835
+ ):
4836
+ if db4 and getattr(self, "torrents", None) is None:
4837
+ self.torrent_db = SqliteDatabase(None)
4838
+ self.torrent_db.init(
4839
+ str(self._app_data_folder.joinpath("Torrents.db")),
4840
+ pragmas={
4841
+ "journal_mode": "wal",
4842
+ "cache_size": -64_000,
4843
+ "foreign_keys": 1,
4844
+ "ignore_check_constraints": 0,
4845
+ "synchronous": 0,
4846
+ },
4847
+ timeout=15,
4848
+ )
4849
+
4850
+ class Torrents(db4):
4851
+ class Meta:
4852
+ database = self.torrent_db
4853
+
4854
+ self.torrent_db.connect()
4855
+ self.torrent_db.create_tables([Torrents])
4856
+ self.torrents = Torrents
4857
+ self.search_setup_completed = True
4858
+ return
4859
+
4860
+ self.search_db_file.parent.mkdir(parents=True, exist_ok=True)
4861
+ self.db = SqliteDatabase(None)
4862
+ self.db.init(
4863
+ str(self.search_db_file),
4864
+ pragmas={
4865
+ "journal_mode": "wal",
4866
+ "cache_size": -64_000,
4867
+ "foreign_keys": 1,
4868
+ "ignore_check_constraints": 0,
4869
+ "synchronous": 0,
4870
+ },
4871
+ timeout=15,
4807
4872
  )
4808
- include_series = self.type == "sonarr" and self.series_search
4809
- include_torrents = TAGLESS
4810
4873
 
4811
- self.db = get_database()
4874
+ class Files(db1):
4875
+ class Meta:
4876
+ database = self.db
4812
4877
 
4813
- if include_search_tables:
4814
- tables = create_arr_tables(
4815
- self._name,
4816
- self.type,
4817
- include_series=include_series,
4818
- include_torrents=include_torrents,
4819
- )
4820
- self.model_file = tables.files
4821
- self.model_queue = tables.queue
4822
- self.persistent_queue = tables.persisting_queue
4823
- self.series_file_model = tables.series
4824
- self.torrents = tables.torrents
4878
+ class Queue(db2):
4879
+ class Meta:
4880
+ database = self.db
4881
+
4882
+ class PersistingQueue(FilesQueued):
4883
+ class Meta:
4884
+ database = self.db
4885
+
4886
+ self.db.connect()
4887
+ if db3:
4888
+
4889
+ class Series(db3):
4890
+ class Meta:
4891
+ database = self.db
4892
+
4893
+ self.db.create_tables([Files, Queue, PersistingQueue, Series])
4894
+ self.series_file_model = Series
4825
4895
  else:
4826
- self.model_file = None
4827
- self.model_queue = None
4828
- self.persistent_queue = None
4896
+ self.db.create_tables([Files, Queue, PersistingQueue])
4829
4897
  self.series_file_model = None
4830
- if include_torrents:
4831
- ensure_table_schema(TorrentLibrary)
4832
- self.torrents = TorrentLibrary
4833
- else:
4834
- self.torrents = None
4835
4898
 
4899
+ if db4:
4900
+ self.torrent_db = SqliteDatabase(None)
4901
+ self.torrent_db.init(
4902
+ str(self._app_data_folder.joinpath("Torrents.db")),
4903
+ pragmas={
4904
+ "journal_mode": "wal",
4905
+ "cache_size": -64_000,
4906
+ "foreign_keys": 1,
4907
+ "ignore_check_constraints": 0,
4908
+ "synchronous": 0,
4909
+ },
4910
+ timeout=15,
4911
+ )
4912
+
4913
+ class Torrents(db4):
4914
+ class Meta:
4915
+ database = self.torrent_db
4916
+
4917
+ self.torrent_db.connect()
4918
+ self.torrent_db.create_tables([Torrents])
4919
+ self.torrents = Torrents
4920
+ else:
4921
+ self.torrents = None
4922
+
4923
+ self.model_file = Files
4924
+ self.model_queue = Queue
4925
+ self.persistent_queue = PersistingQueue
4836
4926
  self.search_setup_completed = True
4837
4927
 
4928
+ def _get_models(
4929
+ self,
4930
+ ) -> tuple[
4931
+ type[EpisodeFilesModel] | type[MoviesFilesModel],
4932
+ type[EpisodeQueueModel] | type[MovieQueueModel],
4933
+ type[SeriesFilesModel] | None,
4934
+ type[TorrentLibrary] | None,
4935
+ ]:
4936
+ if self.type == "sonarr":
4937
+ if self.series_search:
4938
+ return (
4939
+ EpisodeFilesModel,
4940
+ EpisodeQueueModel,
4941
+ SeriesFilesModel,
4942
+ TorrentLibrary if TAGLESS else None,
4943
+ )
4944
+ return (
4945
+ EpisodeFilesModel,
4946
+ EpisodeQueueModel,
4947
+ None,
4948
+ TorrentLibrary if TAGLESS else None,
4949
+ )
4950
+ if self.type == "radarr":
4951
+ return (
4952
+ MoviesFilesModel,
4953
+ MovieQueueModel,
4954
+ None,
4955
+ TorrentLibrary if TAGLESS else None,
4956
+ )
4957
+ raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
4958
+
4838
4959
  def run_request_search(self):
4839
4960
  if (
4840
4961
  (
@@ -5471,6 +5592,7 @@ class FreeSpaceManager(Arr):
5471
5592
  )
5472
5593
  self.timed_ignore_cache = ExpiringSet(max_age_seconds=self.ignore_torrents_younger_than)
5473
5594
  self.needs_cleanup = False
5595
+ self._app_data_folder = APPDATA_FOLDER
5474
5596
  # Track search setup state to cooperate with Arr.register_search_mode
5475
5597
  self.search_setup_completed = False
5476
5598
  if FREE_SPACE_FOLDER == "CHANGE_ME":
@@ -5498,6 +5620,7 @@ class FreeSpaceManager(Arr):
5498
5620
  self.session = None
5499
5621
  # Ensure torrent tag-emulation tables exist when needed.
5500
5622
  self.torrents = None
5623
+ self.torrent_db: SqliteDatabase | None = None
5501
5624
  self.last_search_description: str | None = None
5502
5625
  self.last_search_timestamp: str | None = None
5503
5626
  self.queue_active_count: int = 0
@@ -5505,6 +5628,24 @@ class FreeSpaceManager(Arr):
5505
5628
  self.free_space_tagged_count: int = 0
5506
5629
  self.register_search_mode()
5507
5630
  self.logger.hnotice("Starting %s monitor", self._name)
5631
+ atexit.register(
5632
+ lambda: (
5633
+ hasattr(self, "torrent_db")
5634
+ and self.torrent_db
5635
+ and not self.torrent_db.is_closed()
5636
+ and self.torrent_db.close()
5637
+ )
5638
+ )
5639
+
5640
+ def _get_models(
5641
+ self,
5642
+ ) -> tuple[
5643
+ None,
5644
+ None,
5645
+ None,
5646
+ type[TorrentLibrary] | None,
5647
+ ]:
5648
+ return None, None, None, (TorrentLibrary if TAGLESS else None)
5508
5649
 
5509
5650
  def _process_single_torrent_pause_disk_space(self, torrent: qbittorrentapi.TorrentDictionary):
5510
5651
  self.logger.info(
@@ -1,5 +1,5 @@
1
- version = "5.1.0"
2
- git_hash = "048cc3f"
1
+ version = "5.1.1"
2
+ git_hash = "203e3ef"
3
3
  license_text = (
4
4
  "Licence can be found on:\n\nhttps://github.com/Feramance/qBitrr/blob/master/LICENSE"
5
5
  )
@@ -32,7 +32,6 @@ from qBitrr.config import (
32
32
  from qBitrr.env_config import ENVIRO_CONFIG
33
33
  from qBitrr.ffprobe import FFprobeDownloader
34
34
  from qBitrr.logger import run_logs
35
- from qBitrr.tables import ensure_core_tables, get_database, purge_database_files
36
35
  from qBitrr.utils import ExpiringSet
37
36
  from qBitrr.versioning import fetch_latest_release
38
37
  from qBitrr.webui import WebUI
@@ -490,17 +489,6 @@ def run():
490
489
  child.kill()
491
490
 
492
491
 
493
- def initialize_database() -> None:
494
- try:
495
- purge_database_files()
496
- get_database()
497
- ensure_core_tables()
498
- except Exception:
499
- logger.exception("Failed to initialize database schema")
500
- raise
501
-
502
-
503
492
  if __name__ == "__main__":
504
493
  freeze_support()
505
- initialize_database()
506
494
  run()
@@ -0,0 +1,88 @@
1
+ from __future__ import annotations
2
+
3
+ from threading import RLock
4
+ from typing import Any
5
+
6
+ from peewee import Model, SqliteDatabase, TextField
7
+
8
+ from qBitrr.home_path import APPDATA_FOLDER
9
+
10
+ _DB_LOCK = RLock()
11
+ _DB_INSTANCE: SqliteDatabase | None = None
12
+
13
+
14
+ def _get_database() -> SqliteDatabase:
15
+ global _DB_INSTANCE
16
+ if _DB_INSTANCE is None:
17
+ path = APPDATA_FOLDER.joinpath("webui_activity.db")
18
+ path.parent.mkdir(parents=True, exist_ok=True)
19
+ _DB_INSTANCE = SqliteDatabase(
20
+ str(path),
21
+ pragmas={
22
+ "journal_mode": "wal",
23
+ "cache_size": -64_000,
24
+ "foreign_keys": 1,
25
+ "ignore_check_constraints": 0,
26
+ "synchronous": 0,
27
+ },
28
+ timeout=15,
29
+ check_same_thread=False,
30
+ )
31
+ return _DB_INSTANCE
32
+
33
+
34
+ class BaseModel(Model):
35
+ class Meta:
36
+ database = _get_database()
37
+
38
+
39
+ class SearchActivity(BaseModel):
40
+ category = TextField(primary_key=True)
41
+ summary = TextField(null=True)
42
+ timestamp = TextField(null=True)
43
+
44
+
45
+ def _ensure_tables() -> None:
46
+ db = _get_database()
47
+ with _DB_LOCK:
48
+ db.connect(reuse_if_open=True)
49
+ db.create_tables([SearchActivity], safe=True)
50
+
51
+
52
+ def record_search_activity(category: str, summary: str | None, timestamp: str | None) -> None:
53
+ if not category:
54
+ return
55
+ _ensure_tables()
56
+ if timestamp is not None and not isinstance(timestamp, str):
57
+ timestamp = str(timestamp)
58
+ data: dict[str, Any] = {"summary": summary, "timestamp": timestamp}
59
+ with _get_database().atomic():
60
+ SearchActivity.insert(category=category, **data).on_conflict(
61
+ conflict_target=[SearchActivity.category],
62
+ update=data,
63
+ ).execute()
64
+
65
+
66
+ def fetch_search_activities() -> dict[str, dict[str, str | None]]:
67
+ _ensure_tables()
68
+ activities: dict[str, dict[str, str | None]] = {}
69
+ db = _get_database()
70
+ db.connect(reuse_if_open=True)
71
+ try:
72
+ query = SearchActivity.select()
73
+ except Exception:
74
+ return activities
75
+ for row in query:
76
+ activities[str(row.category)] = {
77
+ "summary": row.summary,
78
+ "timestamp": row.timestamp,
79
+ }
80
+ return activities
81
+
82
+
83
+ def clear_search_activity(category: str) -> None:
84
+ if not category:
85
+ return
86
+ _ensure_tables()
87
+ with _get_database().atomic():
88
+ SearchActivity.delete().where(SearchActivity.category == category).execute()
@@ -0,0 +1,72 @@
1
+ from peewee import BooleanField, CharField, DateTimeField, IntegerField, Model, TextField
2
+
3
+
4
+ class FilesQueued(Model):
5
+ EntryId = IntegerField(primary_key=True, null=False, unique=True)
6
+
7
+
8
+ class MoviesFilesModel(Model):
9
+ Title = CharField()
10
+ Monitored = BooleanField()
11
+ TmdbId = IntegerField()
12
+ Year = IntegerField()
13
+ EntryId = IntegerField(unique=True)
14
+ Searched = BooleanField(default=False)
15
+ MovieFileId = IntegerField()
16
+ IsRequest = BooleanField(default=False)
17
+ QualityMet = BooleanField(default=False)
18
+ Upgrade = BooleanField(default=False)
19
+ CustomFormatScore = IntegerField(null=True)
20
+ MinCustomFormatScore = IntegerField(null=True)
21
+ CustomFormatMet = BooleanField(default=False)
22
+ Reason = TextField(null=True)
23
+
24
+
25
+ class EpisodeFilesModel(Model):
26
+ EntryId = IntegerField(primary_key=True)
27
+ SeriesTitle = TextField(null=True)
28
+ Title = TextField(null=True)
29
+ SeriesId = IntegerField(null=False)
30
+ EpisodeFileId = IntegerField(null=True)
31
+ EpisodeNumber = IntegerField(null=False)
32
+ SeasonNumber = IntegerField(null=False)
33
+ AbsoluteEpisodeNumber = IntegerField(null=True)
34
+ SceneAbsoluteEpisodeNumber = IntegerField(null=True)
35
+ AirDateUtc = DateTimeField(formats=["%Y-%m-%d %H:%M:%S.%f"], null=True)
36
+ Monitored = BooleanField(null=True)
37
+ Searched = BooleanField(default=False)
38
+ IsRequest = BooleanField(default=False)
39
+ QualityMet = BooleanField(default=False)
40
+ Upgrade = BooleanField(default=False)
41
+ CustomFormatScore = IntegerField(null=True)
42
+ MinCustomFormatScore = IntegerField(null=True)
43
+ CustomFormatMet = BooleanField(default=False)
44
+ Reason = TextField(null=True)
45
+
46
+
47
+ class SeriesFilesModel(Model):
48
+ EntryId = IntegerField(primary_key=True)
49
+ Title = TextField(null=True)
50
+ Monitored = BooleanField(null=True)
51
+ Searched = BooleanField(default=False)
52
+ Upgrade = BooleanField(default=False)
53
+ MinCustomFormatScore = IntegerField(null=True)
54
+
55
+
56
+ class MovieQueueModel(Model):
57
+ EntryId = IntegerField(unique=True)
58
+ Completed = BooleanField(default=False)
59
+
60
+
61
+ class EpisodeQueueModel(Model):
62
+ EntryId = IntegerField(unique=True)
63
+ Completed = BooleanField(default=False)
64
+
65
+
66
+ class TorrentLibrary(Model):
67
+ Hash = TextField(null=False)
68
+ Category = TextField(null=False)
69
+ AllowedSeeding = BooleanField(default=False)
70
+ Imported = BooleanField(default=False)
71
+ AllowedStalled = BooleanField(default=False)
72
+ FreeSpacePaused = BooleanField(default=False)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: qBitrr2
3
- Version: 5.1.0
3
+ Version: 5.1.1
4
4
  Summary: "A simple Python script to talk to qBittorrent and Arr's"
5
5
  Home-page: https://github.com/Feramance/qBitrr
6
6
  Author: Feramance
@@ -1,6 +1,6 @@
1
1
  [metadata]
2
2
  name = qBitrr2
3
- version = 5.1.0
3
+ version = 5.1.1
4
4
  description = "A simple Python script to talk to qBittorrent and Arr's"
5
5
  long_description = file: README.md
6
6
  long_description_content_type = text/markdown
@@ -1,61 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from threading import RLock
4
- from typing import Any
5
-
6
- from peewee import SqliteDatabase
7
-
8
- from qBitrr.tables import SearchActivity, ensure_table_schema, get_database
9
-
10
- _DB_LOCK = RLock()
11
- _TABLE_READY = False
12
-
13
-
14
- def _ensure_ready() -> SqliteDatabase:
15
- global _TABLE_READY
16
- db = get_database()
17
- if _TABLE_READY:
18
- return db
19
- with _DB_LOCK:
20
- if not _TABLE_READY:
21
- ensure_table_schema(SearchActivity)
22
- _TABLE_READY = True
23
- return db
24
-
25
-
26
- def record_search_activity(category: str, summary: str | None, timestamp: str | None) -> None:
27
- if not category:
28
- return
29
- db = _ensure_ready()
30
- if timestamp is not None and not isinstance(timestamp, str):
31
- timestamp = str(timestamp)
32
- data: dict[str, Any] = {"summary": summary, "timestamp": timestamp}
33
- with db.atomic():
34
- SearchActivity.insert(category=category, **data).on_conflict(
35
- conflict_target=[SearchActivity.category],
36
- update=data,
37
- ).execute()
38
-
39
-
40
- def fetch_search_activities() -> dict[str, dict[str, str | None]]:
41
- db = _ensure_ready()
42
- activities: dict[str, dict[str, str | None]] = {}
43
- db.connect(reuse_if_open=True)
44
- try:
45
- query = SearchActivity.select()
46
- except Exception:
47
- return activities
48
- for row in query:
49
- activities[str(row.category)] = {
50
- "summary": row.summary,
51
- "timestamp": row.timestamp,
52
- }
53
- return activities
54
-
55
-
56
- def clear_search_activity(category: str) -> None:
57
- if not category:
58
- return
59
- db = _ensure_ready()
60
- with db.atomic():
61
- SearchActivity.delete().where(SearchActivity.category == category).execute()
@@ -1,389 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import logging
4
- import re
5
- from functools import lru_cache
6
- from pathlib import Path
7
- from typing import NamedTuple
8
-
9
- from peewee import (
10
- BooleanField,
11
- CharField,
12
- DatabaseError,
13
- DatabaseProxy,
14
- DateTimeField,
15
- IntegerField,
16
- Model,
17
- OperationalError,
18
- SqliteDatabase,
19
- TextField,
20
- )
21
-
22
- from qBitrr.db_lock import database_lock
23
- from qBitrr.home_path import APPDATA_FOLDER
24
-
25
- logger = logging.getLogger("qBitrr.Database")
26
-
27
- DATABASE_FILE = APPDATA_FOLDER.joinpath("qbitrr.db")
28
- _database_proxy: DatabaseProxy = DatabaseProxy()
29
- _DATABASE: SqliteDatabase | None = None
30
- _DB_ARTIFACT_SUFFIXES: tuple[str, ...] = ("", "-wal", "-shm")
31
-
32
-
33
- class LockedSqliteDatabase(SqliteDatabase):
34
- def connect(self, **kwargs):
35
- with database_lock():
36
- return super().connect(**kwargs)
37
-
38
- def close(self):
39
- with database_lock():
40
- return super().close()
41
-
42
- def execute_sql(self, *args, **kwargs):
43
- with database_lock():
44
- return super().execute_sql(*args, **kwargs)
45
-
46
-
47
- def _database_artifact_paths() -> tuple[Path, ...]:
48
- return tuple(
49
- DATABASE_FILE if suffix == "" else DATABASE_FILE.with_name(f"{DATABASE_FILE.name}{suffix}")
50
- for suffix in _DB_ARTIFACT_SUFFIXES
51
- )
52
-
53
-
54
- def purge_database_files() -> list[Path]:
55
- removed: list[Path] = []
56
- with database_lock():
57
- for candidate in _database_artifact_paths():
58
- try:
59
- candidate.unlink()
60
- removed.append(candidate)
61
- except FileNotFoundError:
62
- continue
63
- except OSError as exc:
64
- logger.warning("Unable to remove database artifact '%s': %s", candidate, exc)
65
- if removed:
66
- logger.info(
67
- "Removed database artifacts: %s",
68
- ", ".join(str(path) for path in removed),
69
- )
70
- return removed
71
-
72
-
73
- def _reset_database(exc: BaseException) -> None:
74
- global _DATABASE
75
- logger.warning("Database reset triggered after failure: %s", exc)
76
- with database_lock():
77
- try:
78
- if _DATABASE is not None and not _DATABASE.is_closed():
79
- _DATABASE.close()
80
- except Exception as close_error: # pragma: no cover - best effort cleanup
81
- logger.debug("Error closing database while resetting: %s", close_error)
82
- _DATABASE = None
83
- purge_database_files()
84
-
85
-
86
- class BaseModel(Model):
87
- class Meta:
88
- database = _database_proxy
89
-
90
-
91
- def get_database(*, _retry: bool = True) -> SqliteDatabase:
92
- global _DATABASE
93
- if _DATABASE is None:
94
- DATABASE_FILE.parent.mkdir(parents=True, exist_ok=True)
95
- _DATABASE = LockedSqliteDatabase(
96
- str(DATABASE_FILE),
97
- pragmas={
98
- "journal_mode": "wal",
99
- "cache_size": -64_000,
100
- "foreign_keys": 1,
101
- "ignore_check_constraints": 0,
102
- "synchronous": "NORMAL",
103
- "busy_timeout": 60_000,
104
- },
105
- timeout=15,
106
- check_same_thread=False,
107
- autocommit=True,
108
- )
109
- _database_proxy.initialize(_DATABASE)
110
- try:
111
- _DATABASE.connect(reuse_if_open=True)
112
- except DatabaseError as exc:
113
- if not _retry:
114
- raise
115
- _reset_database(exc)
116
- return get_database(_retry=False)
117
- return _DATABASE
118
-
119
-
120
- def ensure_table_schema(model: type[BaseModel]) -> None:
121
- database = get_database()
122
- table_name = model._meta.table_name
123
- with database:
124
- database.create_tables([model], safe=True)
125
- existing_columns = {column.name for column in database.get_columns(table_name)}
126
- try:
127
- primary_keys = {column.lower() for column in database.get_primary_keys(table_name)}
128
- except OperationalError:
129
- primary_keys = set()
130
- try:
131
- index_metadata = database.get_indexes(table_name)
132
- except OperationalError:
133
- index_metadata = []
134
-
135
- def _refresh_indexes() -> None:
136
- nonlocal index_metadata
137
- try:
138
- index_metadata = database.get_indexes(table_name)
139
- except OperationalError:
140
- index_metadata = []
141
-
142
- def _has_unique(column: str) -> bool:
143
- lower_column = column.lower()
144
- for index in index_metadata:
145
- if not index.unique:
146
- continue
147
- normalized = tuple(col.lower() for col in index.columns or ())
148
- if normalized == (lower_column,):
149
- return True
150
- return False
151
-
152
- def _deduplicate(column: str) -> None:
153
- try:
154
- duplicates = database.execute_sql(
155
- f"""
156
- SELECT {column}, MIN(rowid) AS keep_rowid
157
- FROM {table_name}
158
- WHERE {column} IS NOT NULL
159
- GROUP BY {column}
160
- HAVING COUNT(*) > 1
161
- """
162
- ).fetchall()
163
- except OperationalError:
164
- return
165
- if not duplicates:
166
- return
167
- for value, keep_rowid in duplicates:
168
- try:
169
- database.execute_sql(
170
- f"""
171
- DELETE FROM {table_name}
172
- WHERE {column} = ?
173
- AND rowid != ?
174
- """,
175
- (value, keep_rowid),
176
- )
177
- except OperationalError:
178
- logger.warning(
179
- "Failed to deduplicate rows on %s.%s for value %s",
180
- table_name,
181
- column,
182
- value,
183
- )
184
- if duplicates:
185
- logger.info(
186
- "Deduplicated %s entries on %s.%s to restore unique constraint",
187
- len(duplicates),
188
- table_name,
189
- column,
190
- )
191
-
192
- def _ensure_unique(column: str) -> None:
193
- if _has_unique(column):
194
- return
195
- _deduplicate(column)
196
- try:
197
- index_name = f"{table_name}_{column}_uniq".replace(".", "_")
198
- database.execute_sql(
199
- f'CREATE UNIQUE INDEX IF NOT EXISTS "{index_name}" '
200
- f'ON "{table_name}" ("{column}")'
201
- )
202
- _refresh_indexes()
203
- except OperationalError:
204
- logger.warning(
205
- "Unable to create unique index on %s.%s; uniqueness guarantees may be missing",
206
- table_name,
207
- column,
208
- )
209
- return
210
- _refresh_indexes()
211
-
212
- for field in model._meta.sorted_fields:
213
- column_name = field.column_name
214
- if column_name not in existing_columns:
215
- database.add_column(table_name, column_name, field)
216
- if field.primary_key and column_name.lower() not in primary_keys:
217
- _ensure_unique(column_name)
218
- elif field.unique:
219
- _ensure_unique(column_name)
220
-
221
-
222
- class FilesQueued(BaseModel):
223
- EntryId = IntegerField(primary_key=True, null=False, unique=True)
224
-
225
-
226
- class MoviesFilesModel(BaseModel):
227
- Title = CharField()
228
- Monitored = BooleanField()
229
- TmdbId = IntegerField()
230
- Year = IntegerField()
231
- EntryId = IntegerField(unique=True)
232
- Searched = BooleanField(default=False)
233
- MovieFileId = IntegerField()
234
- IsRequest = BooleanField(default=False)
235
- QualityMet = BooleanField(default=False)
236
- Upgrade = BooleanField(default=False)
237
- CustomFormatScore = IntegerField(null=True)
238
- MinCustomFormatScore = IntegerField(null=True)
239
- CustomFormatMet = BooleanField(default=False)
240
- Reason = TextField(null=True)
241
-
242
-
243
- class EpisodeFilesModel(BaseModel):
244
- EntryId = IntegerField(primary_key=True)
245
- SeriesTitle = TextField(null=True)
246
- Title = TextField(null=True)
247
- SeriesId = IntegerField(null=False)
248
- EpisodeFileId = IntegerField(null=True)
249
- EpisodeNumber = IntegerField(null=False)
250
- SeasonNumber = IntegerField(null=False)
251
- AbsoluteEpisodeNumber = IntegerField(null=True)
252
- SceneAbsoluteEpisodeNumber = IntegerField(null=True)
253
- AirDateUtc = DateTimeField(formats=["%Y-%m-%d %H:%M:%S.%f"], null=True)
254
- Monitored = BooleanField(null=True)
255
- Searched = BooleanField(default=False)
256
- IsRequest = BooleanField(default=False)
257
- QualityMet = BooleanField(default=False)
258
- Upgrade = BooleanField(default=False)
259
- CustomFormatScore = IntegerField(null=True)
260
- MinCustomFormatScore = IntegerField(null=True)
261
- CustomFormatMet = BooleanField(default=False)
262
- Reason = TextField(null=True)
263
-
264
-
265
- class SeriesFilesModel(BaseModel):
266
- EntryId = IntegerField(primary_key=True)
267
- Title = TextField(null=True)
268
- Monitored = BooleanField(null=True)
269
- Searched = BooleanField(default=False)
270
- Upgrade = BooleanField(default=False)
271
- MinCustomFormatScore = IntegerField(null=True)
272
-
273
-
274
- class MovieQueueModel(BaseModel):
275
- EntryId = IntegerField(unique=True)
276
- Completed = BooleanField(default=False)
277
-
278
-
279
- class EpisodeQueueModel(BaseModel):
280
- EntryId = IntegerField(unique=True)
281
- Completed = BooleanField(default=False)
282
-
283
-
284
- class TorrentLibrary(BaseModel):
285
- Hash = TextField(null=False)
286
- Category = TextField(null=False)
287
- AllowedSeeding = BooleanField(default=False)
288
- Imported = BooleanField(default=False)
289
- AllowedStalled = BooleanField(default=False)
290
- FreeSpacePaused = BooleanField(default=False)
291
-
292
- class Meta:
293
- table_name = "torrent_library"
294
-
295
-
296
- class SearchActivity(BaseModel):
297
- category = TextField(primary_key=True)
298
- summary = TextField(null=True)
299
- timestamp = TextField(null=True)
300
-
301
- class Meta:
302
- table_name = "search_activity"
303
-
304
-
305
- class ArrTables(NamedTuple):
306
- files: type[BaseModel]
307
- queue: type[BaseModel]
308
- series: type[BaseModel] | None
309
- persisting_queue: type[BaseModel]
310
- torrents: type[BaseModel] | None
311
-
312
-
313
- _SAFE_IDENTIFIER = re.compile(r"[^0-9A-Za-z_]+")
314
-
315
-
316
- def _sanitize_identifier(name: str) -> str:
317
- token = name.strip().replace(" ", "_")
318
- token = _SAFE_IDENTIFIER.sub("_", token)
319
- token = token.strip("_")
320
- if not token:
321
- token = "Arr"
322
- if token[0].isdigit():
323
- token = f"Arr_{token}"
324
- return token
325
-
326
-
327
- @lru_cache(maxsize=None)
328
- def create_arr_tables(
329
- arr_name: str,
330
- arr_type: str,
331
- *,
332
- include_series: bool,
333
- include_torrents: bool,
334
- ) -> ArrTables:
335
- table_prefix = _sanitize_identifier(arr_name)
336
- files_base: type[BaseModel]
337
- queue_base: type[BaseModel]
338
- if arr_type.lower() == "sonarr":
339
- files_base = EpisodeFilesModel
340
- queue_base = EpisodeQueueModel
341
- elif arr_type.lower() == "radarr":
342
- files_base = MoviesFilesModel
343
- queue_base = MovieQueueModel
344
- else:
345
- raise ValueError(f"Unknown arr_type '{arr_type}'")
346
-
347
- class Files(files_base):
348
- class Meta:
349
- table_name = f"{table_prefix}_files"
350
-
351
- class Queue(queue_base):
352
- class Meta:
353
- table_name = f"{table_prefix}_queue"
354
-
355
- class PersistingQueue(FilesQueued):
356
- class Meta:
357
- table_name = f"{table_prefix}_persisting_queue"
358
-
359
- series_model: type[BaseModel] | None = None
360
- if include_series:
361
-
362
- class Series(SeriesFilesModel):
363
- class Meta:
364
- table_name = f"{table_prefix}_series"
365
-
366
- series_model = Series
367
-
368
- torrents_model: type[BaseModel] | None = TorrentLibrary if include_torrents else None
369
-
370
- ensure_table_schema(Files)
371
- ensure_table_schema(Queue)
372
- ensure_table_schema(PersistingQueue)
373
- if series_model is not None:
374
- ensure_table_schema(series_model)
375
- if torrents_model is not None:
376
- ensure_table_schema(torrents_model)
377
-
378
- return ArrTables(
379
- files=Files,
380
- queue=Queue,
381
- series=series_model,
382
- persisting_queue=PersistingQueue,
383
- torrents=torrents_model,
384
- )
385
-
386
-
387
- def ensure_core_tables() -> None:
388
- ensure_table_schema(TorrentLibrary)
389
- ensure_table_schema(SearchActivity)
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes