qBitrr2-5.0.1-py3-none-any.whl → qBitrr2-5.1.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
qBitrr/arss.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import atexit
 import contextlib
 import itertools
 import logging
@@ -20,7 +19,7 @@ import qbittorrentapi
 import qbittorrentapi.exceptions
 import requests
 from packaging import version as version_parser
-from peewee import SqliteDatabase
+from peewee import Model
 from pyarr import RadarrAPI, SonarrAPI
 from pyarr.exceptions import PyarrResourceNotFound, PyarrServerError
 from pyarr.types import JsonObject
@@ -28,7 +27,6 @@ from qbittorrentapi import TorrentDictionary, TorrentStates
 from ujson import JSONDecodeError
 
 from qBitrr.config import (
-    APPDATA_FOLDER,
     AUTO_PAUSE_RESUME,
     COMPLETED_DOWNLOAD_FOLDER,
     CONFIG,
@@ -57,15 +55,7 @@ from qBitrr.search_activity_store import (
     fetch_search_activities,
     record_search_activity,
 )
-from qBitrr.tables import (
-    EpisodeFilesModel,
-    EpisodeQueueModel,
-    FilesQueued,
-    MovieQueueModel,
-    MoviesFilesModel,
-    SeriesFilesModel,
-    TorrentLibrary,
-)
+from qBitrr.tables import TorrentLibrary, create_arr_tables, ensure_table_schema, get_database
 from qBitrr.utils import (
     ExpiringSet,
     absolute_file_paths,
@@ -302,8 +292,6 @@ class Arr:
             self._delta = 1
         else:
             self._delta = -1
-        self._app_data_folder = APPDATA_FOLDER
-        self.search_db_file = self._app_data_folder.joinpath(f"{self._name}.db")
 
         self.ombi_search_requests = CONFIG.get(
             f"{name}.EntrySearch.Ombi.SearchOmbiRequests", fallback=False
@@ -566,28 +554,14 @@ class Arr:
         elif not QBIT_DISABLED and TAGLESS:
             self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-ignored"])
         self.search_setup_completed = False
-        self.model_file: EpisodeFilesModel | MoviesFilesModel = None
-        self.series_file_model: SeriesFilesModel = None
-        self.model_queue: EpisodeQueueModel | MovieQueueModel = None
-        self.persistent_queue: FilesQueued = None
+        self.model_file: Model | None = None
+        self.series_file_model: Model | None = None
+        self.model_queue: Model | None = None
+        self.persistent_queue: Model | None = None
         self.torrents: TorrentLibrary | None = None
         # Initialize search mode (and torrent tag-emulation DB in TAGLESS)
        # early and fail fast if it cannot be set up.
         self.register_search_mode()
-        # Ensure DBs are closed on process exit
-        atexit.register(
-            lambda: (
-                hasattr(self, "db") and self.db and not self.db.is_closed() and self.db.close()
-            )
-        )
-        atexit.register(
-            lambda: (
-                hasattr(self, "torrent_db")
-                and self.torrent_db
-                and not self.torrent_db.is_closed()
-                and self.torrent_db.close()
-            )
-        )
         self.logger.hnotice("Starting %s monitor", self._name)
 
     @staticmethod
@@ -848,28 +822,6 @@ class Arr:
             ),
         )
 
-    def _get_models(
-        self,
-    ) -> tuple[
-        type[EpisodeFilesModel] | type[MoviesFilesModel],
-        type[EpisodeQueueModel] | type[MovieQueueModel],
-        type[SeriesFilesModel] | None,
-        type[TorrentLibrary] | None,
-    ]:
-        if self.type == "sonarr":
-            if self.series_search:
-                return (
-                    EpisodeFilesModel,
-                    EpisodeQueueModel,
-                    SeriesFilesModel,
-                    TorrentLibrary if TAGLESS else None,
-                )
-            return EpisodeFilesModel, EpisodeQueueModel, None, TorrentLibrary if TAGLESS else None
-        elif self.type == "radarr":
-            return MoviesFilesModel, MovieQueueModel, None, TorrentLibrary if TAGLESS else None
-        else:
-            raise UnhandledError(f"Well you shouldn't have reached here, Arr.type={self.type}")
-
     def _get_oversee_requests_all(self) -> dict[str, set]:
         try:
             data = defaultdict(set)
@@ -4843,107 +4795,44 @@ class Arr:
         if self.search_setup_completed:
             return
 
-        # Determine which models we need in this mode (including TorrentLibrary when TAGLESS)
-        db1, db2, db3, db4 = self._get_models()
-
-        # If searches are disabled, we still want the torrent tag-emulation DB in TAGLESS mode,
-        # but can skip the per-entry search database setup.
-        if not (
-            self.search_missing
-            or self.do_upgrade_search
-            or self.quality_unmet_search
-            or self.custom_format_unmet_search
-            or self.ombi_search_requests
-            or self.overseerr_requests
-        ):
-            if db4 and getattr(self, "torrents", None) is None:
-                self.torrent_db = SqliteDatabase(None)
-                self.torrent_db.init(
-                    str(self._app_data_folder.joinpath("Torrents.db")),
-                    pragmas={
-                        "journal_mode": "wal",
-                        "cache_size": -1 * 64000,  # 64MB
-                        "foreign_keys": 1,
-                        "ignore_check_constraints": 0,
-                        "synchronous": 0,
-                    },
-                    timeout=15,
-                )
-
-                class Torrents(db4):
-                    class Meta:
-                        database = self.torrent_db
-
-                self.torrent_db.connect()
-                self.torrent_db.create_tables([Torrents])
-                self.torrents = Torrents
-            self.search_setup_completed = True
-            return
-
-        self.db = SqliteDatabase(None)
-        self.db.init(
-            str(self.search_db_file),
-            pragmas={
-                "journal_mode": "wal",
-                "cache_size": -1 * 64000,  # 64MB
-                "foreign_keys": 1,
-                "ignore_check_constraints": 0,
-                "synchronous": 0,
-            },
-            timeout=15,
+        include_search_tables = any(
+            (
+                self.search_missing,
+                self.do_upgrade_search,
+                self.quality_unmet_search,
+                self.custom_format_unmet_search,
+                self.ombi_search_requests,
+                self.overseerr_requests,
+            )
         )
+        include_series = self.type == "sonarr" and self.series_search
+        include_torrents = TAGLESS
 
-        class Files(db1):
-            class Meta:
-                database = self.db
-
-        class Queue(db2):
-            class Meta:
-                database = self.db
-
-        class PersistingQueue(FilesQueued):
-            class Meta:
-                database = self.db
-
-        self.db.connect()
-        if db3:
-
-            class Series(db3):
-                class Meta:
-                    database = self.db
+        self.db = get_database()
 
-            self.db.create_tables([Files, Queue, PersistingQueue, Series])
-            self.series_file_model = Series
-        else:
-            self.db.create_tables([Files, Queue, PersistingQueue])
-
-        if db4:
-            self.torrent_db = SqliteDatabase(None)
-            self.torrent_db.init(
-                str(self._app_data_folder.joinpath("Torrents.db")),
-                pragmas={
-                    "journal_mode": "wal",
-                    "cache_size": -1 * 64000,  # 64MB
-                    "foreign_keys": 1,
-                    "ignore_check_constraints": 0,
-                    "synchronous": 0,
-                },
-                timeout=15,
+        if include_search_tables:
+            tables = create_arr_tables(
+                self._name,
+                self.type,
+                include_series=include_series,
+                include_torrents=include_torrents,
            )
-
-            class Torrents(db4):
-                class Meta:
-                    database = self.torrent_db
-
-            self.torrent_db.connect()
-            self.torrent_db.create_tables([Torrents])
-            self.torrents = Torrents
+            self.model_file = tables.files
+            self.model_queue = tables.queue
+            self.persistent_queue = tables.persisting_queue
+            self.series_file_model = tables.series
+            self.torrents = tables.torrents
         else:
-            self.torrents: TorrentLibrary = None
+            self.model_file = None
+            self.model_queue = None
+            self.persistent_queue = None
+            self.series_file_model = None
+            if include_torrents:
+                ensure_table_schema(TorrentLibrary)
+                self.torrents = TorrentLibrary
+            else:
+                self.torrents = None
 
-        self.model_file = Files
-        self.model_queue = Queue
-        self.persistent_queue = PersistingQueue
         self.search_setup_completed = True
 
     def run_request_search(self):
@@ -5567,6 +5456,7 @@ class PlaceHolderArr(Arr):
 class FreeSpaceManager(Arr):
     def __init__(self, categories: set[str], manager: ArrManager):
         self._name = "FreeSpaceManager"
+        self.type = "FreeSpaceManager"
         self.manager = manager
         self.logger = logging.getLogger(f"qBitrr.{self._name}")
         self._LOG_LEVEL = self.manager.qbit_manager.logger.level
@@ -5581,8 +5471,6 @@ class FreeSpaceManager(Arr):
         )
         self.timed_ignore_cache = ExpiringSet(max_age_seconds=self.ignore_torrents_younger_than)
         self.needs_cleanup = False
-        # Needed by register_search_mode for torrent DB pathing
-        self._app_data_folder = APPDATA_FOLDER
         # Track search setup state to cooperate with Arr.register_search_mode
         self.search_setup_completed = False
         if FREE_SPACE_FOLDER == "CHANGE_ME":
@@ -5602,9 +5490,13 @@ class FreeSpaceManager(Arr):
         self.logger.trace("Current free space: %s", self.current_free_space)
         self.manager.qbit_manager.client.torrents_create_tags(["qBitrr-free_space_paused"])
         self.search_missing = False
+        self.do_upgrade_search = False
+        self.quality_unmet_search = False
+        self.custom_format_unmet_search = False
+        self.ombi_search_requests = False
+        self.overseerr_requests = False
         self.session = None
-        # Reuse Arr's search-mode initializer to set up the torrent tag-emulation DB
-        # without needing Arr type, by overriding _get_models below.
+        # Ensure torrent tag-emulation tables exist when needed.
         self.torrents = None
         self.last_search_description: str | None = None
         self.last_search_timestamp: str | None = None
@@ -5613,28 +5505,6 @@ class FreeSpaceManager(Arr):
         self.free_space_tagged_count: int = 0
         self.register_search_mode()
         self.logger.hnotice("Starting %s monitor", self._name)
-        # Ensure DB is closed when process exits (guard attribute existence)
-        atexit.register(
-            lambda: (
-                hasattr(self, "torrent_db")
-                and self.torrent_db
-                and not self.torrent_db.is_closed()
-                and self.torrent_db.close()
-            )
-        )
-
-    def _get_models(
-        self,
-    ) -> tuple[
-        None,
-        None,
-        None,
-        type[TorrentLibrary] | None,
-    ]:
-        # FreeSpaceManager should never create the per-entry search database.
-        # Return None for file and queue models so only the torrent DB (TAGLESS)
-        # can be initialized by register_search_mode.
-        return None, None, None, (TorrentLibrary if TAGLESS else None)
 
     def _process_single_torrent_pause_disk_space(self, torrent: qbittorrentapi.TorrentDictionary):
         self.logger.info(
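
The net effect of the arss.py changes: each Arr instance no longer opens its own `{name}.db` SqliteDatabase; it asks qBitrr.tables for namespaced models bound to the shared database. A minimal sketch of the new resolution flow, assuming qBitrr 5.1.0 is importable ("Sonarr-TV" is a hypothetical instance name, and the call creates/migrates the shared qbitrr.db under APPDATA_FOLDER as a side effect):

```python
# Sketch only: "Sonarr-TV" is a hypothetical Arr instance name.
from qBitrr.tables import create_arr_tables

tables = create_arr_tables(
    "Sonarr-TV",             # sanitized into the per-instance table prefix
    "sonarr",                # selects the episode-based file/queue models
    include_series=True,     # also build the <prefix>_series table
    include_torrents=False,  # TorrentLibrary is only wired up under TAGLESS
)
print(tables.files._meta.table_name)  # "Sonarr_TV_files"
```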
qBitrr/auto_update.py CHANGED
@@ -82,8 +82,11 @@ class AutoUpdater:
         self._logger.info("Auto update completed")
 
 
-def perform_self_update(logger: logging.Logger) -> None:
-    """Attempt to update qBitrr in-place using git or pip."""
+def perform_self_update(logger: logging.Logger) -> bool:
+    """Attempt to update qBitrr in-place using git or pip.
+
+    Returns True when the update command completed successfully, False otherwise.
+    """
 
     repo_root = Path(__file__).resolve().parent.parent
     git_dir = repo_root / ".git"
@@ -100,10 +103,11 @@ def perform_self_update(logger: logging.Logger) -> None:
         stdout = (result.stdout or "").strip()
         if stdout:
             logger.info("git pull output:\n%s", stdout)
+        return True
     except subprocess.CalledProcessError as exc:
         stderr = (exc.stderr or "").strip()
         logger.error("Failed to update repository via git: %s", stderr or exc)
-        return
+        return False
 
     package = "qBitrr2"
     logger.debug("Fallback to pip upgrade for package %s", package)
@@ -117,6 +121,8 @@ def perform_self_update(logger: logging.Logger) -> None:
         stdout = (result.stdout or "").strip()
         if stdout:
             logger.info("pip upgrade output:\n%s", stdout)
+        return True
     except subprocess.CalledProcessError as exc:
         stderr = (exc.stderr or "").strip()
         logger.error("Failed to upgrade package via pip: %s", stderr or exc)
+        return False
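
Callers can now branch on the result instead of assuming the update landed. A small sketch of the new contract (hypothetical caller, assuming qBitrr is importable):

```python
# Sketch: perform_self_update() now returns True only when git pull or the
# pip fallback completed successfully.
import logging

from qBitrr.auto_update import perform_self_update

log = logging.getLogger("qBitrr.UpdateCheck")
if perform_self_update(log):
    log.info("Update applied; a restart will load the new version.")
else:
    log.error("Update failed; continuing on the current version.")
```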
qBitrr/bundled_data.py CHANGED
@@ -1,5 +1,5 @@
-version = "5.0.1"
-git_hash = "9c461f7"
+version = "5.1.0"
+git_hash = "048cc3f"
 license_text = (
     "Licence can be found on:\n\nhttps://github.com/Feramance/qBitrr/blob/master/LICENSE"
 )
qBitrr/db_lock.py ADDED
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+import os
+import threading
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Iterator
+
+from qBitrr.home_path import APPDATA_FOLDER
+
+if os.name == "nt":  # pragma: no cover - platform specific
+    import msvcrt
+else:  # pragma: no cover
+    import fcntl
+
+_LOCK_FILE = APPDATA_FOLDER.joinpath("qbitrr.db.lock")
+
+
+class _InterProcessFileLock:
+    """Cross-process, re-entrant file lock to guard SQLite access."""
+
+    def __init__(self, path: Path):
+        self._path = path
+        self._thread_gate = threading.RLock()
+        self._local = threading.local()
+
+    def acquire(self) -> None:
+        depth = getattr(self._local, "depth", 0)
+        if depth == 0:
+            self._thread_gate.acquire()
+            self._path.parent.mkdir(parents=True, exist_ok=True)
+            handle = open(self._path, "a+b")
+            try:
+                if os.name == "nt":  # pragma: no cover - Windows specific branch
+                    msvcrt.locking(handle.fileno(), msvcrt.LK_LOCK, 1)
+                else:  # pragma: no cover - POSIX branch
+                    fcntl.flock(handle, fcntl.LOCK_EX)
+            except Exception:
+                handle.close()
+                self._thread_gate.release()
+                raise
+            self._local.handle = handle
+        self._local.depth = depth + 1
+
+    def release(self) -> None:
+        depth = getattr(self._local, "depth", 0)
+        if depth <= 0:
+            raise RuntimeError("Attempted to release an unacquired database lock")
+        depth -= 1
+        if depth == 0:
+            handle = getattr(self._local, "handle")
+            try:
+                if os.name == "nt":  # pragma: no cover
+                    msvcrt.locking(handle.fileno(), msvcrt.LK_UNLCK, 1)
+                else:  # pragma: no cover
+                    fcntl.flock(handle, fcntl.LOCK_UN)
+            finally:
+                handle.close()
+                del self._local.handle
+                self._thread_gate.release()
+        self._local.depth = depth
+
+    @contextmanager
+    def context(self) -> Iterator[None]:
+        self.acquire()
+        try:
+            yield
+        finally:
+            self.release()
+
+
+_DB_LOCK = _InterProcessFileLock(_LOCK_FILE)
+
+
+@contextmanager
+def database_lock() -> Iterator[None]:
+    """Provide a shared lock used to serialize SQLite access across processes."""
+    with _DB_LOCK.context():
+        yield
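
`database_lock()` is the primitive the rest of the release builds on: one lock file guards the shared SQLite database across processes, while a thread-local depth counter makes the lock re-entrant within a thread. A small usage sketch (assuming qBitrr is importable; `guarded_write` is a hypothetical helper):

```python
# Sketch: database_lock() is re-entrant per thread, so a helper that takes
# the lock can safely be called by code that already holds it.
from qBitrr.db_lock import database_lock

def guarded_write() -> None:  # hypothetical helper
    with database_lock():     # inner acquire: depth goes 1 -> 2, no deadlock
        ...                   # perform a serialized SQLite write here

with database_lock():         # outer acquire: depth 0 -> 1, takes the OS lock
    guarded_write()           # the OS lock is only dropped back at depth 0
```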
qBitrr/gen_config.py CHANGED
@@ -159,9 +159,9 @@ def _add_settings_section(config: TOMLDocument):
     )
     _gen_default_line(
         settings,
-        "WebUI listen host (default 127.0.0.1)",
+        "WebUI listen host (default 0.0.0.0)",
         "WebUIHost",
-        "127.0.0.1",
+        "0.0.0.0",
     )
     _gen_default_line(
         settings,
qBitrr/logger.py CHANGED
@@ -119,6 +119,7 @@ def run_logs(logger: Logger, _name: str = None) -> None:
        },
        reconfigure=True,
    )
+    logger.propagate = False
    if ENABLE_LOGS and _name:
        logs_folder = HOME_PATH.joinpath("logs")
        logs_folder.mkdir(parents=True, exist_ok=True)
qBitrr/main.py CHANGED
@@ -2,9 +2,10 @@ from __future__ import annotations
 
 import atexit
 import contextlib
-import itertools
 import logging
+import os
 import sys
+import time
 from multiprocessing import Event, freeze_support
 from queue import SimpleQueue
 from threading import Event as ThreadEvent
@@ -21,7 +22,6 @@ from qbittorrentapi import APINames
 from qBitrr.auto_update import AutoUpdater, perform_self_update
 from qBitrr.bundled_data import patched_version
 from qBitrr.config import (
-    APPDATA_FOLDER,
     CONFIG,
     CONFIG_EXISTS,
     QBIT_DISABLED,
@@ -32,7 +32,9 @@ from qBitrr.config import (
 from qBitrr.env_config import ENVIRO_CONFIG
 from qBitrr.ffprobe import FFprobeDownloader
 from qBitrr.logger import run_logs
-from qBitrr.utils import ExpiringSet, absolute_file_paths
+from qBitrr.tables import ensure_core_tables, get_database, purge_database_files
+from qBitrr.utils import ExpiringSet
+from qBitrr.versioning import fetch_latest_release
 from qBitrr.webui import WebUI
 
 if CONFIG_EXISTS:
@@ -101,6 +103,8 @@ class qBitManager:
         self.arr_manager = None
         self._bootstrap_ready = ThreadEvent()
         self._startup_thread: Thread | None = None
+        self._restart_requested = False
+        self._restart_thread: Thread | None = None
         self.ffprobe_downloader = FFprobeDownloader()
         try:
             if not (QBIT_DISABLED or SEARCH_ONLY):
@@ -115,6 +119,11 @@ class qBitManager:
         except Exception:
             web_port = 6969
         web_host = CONFIG.get("Settings.WebUIHost", fallback="127.0.0.1") or "127.0.0.1"
+        if os.environ.get("QBITRR_DOCKER_RUNNING") == "69420" and web_host in {
+            "127.0.0.1",
+            "localhost",
+        }:
+            web_host = "0.0.0.0"
         if web_host in {"0.0.0.0", "::"}:
             self.logger.warning(
                 "WebUI host configured for %s; ensure exposure is intentional and protected.",
@@ -144,11 +153,55 @@ class qBitManager:
             self.logger.error("Auto update could not be scheduled; leaving it disabled")
 
     def _perform_auto_update(self) -> None:
-        self.logger.notice("Performing auto update...")
-        perform_self_update(self.logger)
-        self.logger.notice(
-            "Auto update cycle complete. A restart may be required if files were updated."
-        )
+        self.logger.notice("Checking for updates...")
+        release_info = fetch_latest_release()
+        if release_info.get("error"):
+            self.logger.error("Auto update skipped: %s", release_info["error"])
+            return
+        target_version = release_info.get("raw_tag") or release_info.get("normalized")
+        if not release_info.get("update_available"):
+            if target_version:
+                self.logger.info(
+                    "Auto update skipped: already running the latest release (%s).",
+                    target_version,
+                )
+            else:
+                self.logger.info("Auto update skipped: no new release detected.")
+            return
+
+        self.logger.notice("Updating from %s to %s", patched_version, target_version or "latest")
+        updated = perform_self_update(self.logger)
+        if not updated:
+            self.logger.error("Auto update failed; manual intervention may be required.")
+            return
+        self.logger.notice("Update applied successfully; restarting to load the new version.")
+        self.request_restart()
+
+    def request_restart(self, delay: float = 3.0) -> None:
+        if self._restart_requested:
+            return
+        self._restart_requested = True
+
+        def _restart():
+            if delay > 0:
+                time.sleep(delay)
+            self.logger.notice("Exiting to complete restart.")
+            try:
+                self.shutdown_event.set()
+            except Exception:
+                pass
+            for proc in list(self.child_processes):
+                with contextlib.suppress(Exception):
+                    proc.join(timeout=5)
+            for proc in list(self.child_processes):
+                with contextlib.suppress(Exception):
+                    proc.kill()
+                with contextlib.suppress(Exception):
+                    proc.terminate()
+            os._exit(0)
+
+        self._restart_thread = Thread(target=_restart, name="qBitrr-Restart", daemon=True)
+        self._restart_thread.start()
 
     def _prepare_arr_processes(self, arr, timeout_seconds: int = 30) -> None:
         timeout = max(
@@ -437,15 +490,17 @@ def run():
                child.kill()
 
 
-def file_cleanup():
-    extensions = [".db", ".db-shm", ".db-wal"]
-    all_files_in_folder = list(absolute_file_paths(APPDATA_FOLDER))
-    for file, ext in itertools.product(all_files_in_folder, extensions):
-        if file.name.endswith(ext):
-            file.unlink(missing_ok=True)
+def initialize_database() -> None:
+    try:
+        purge_database_files()
+        get_database()
+        ensure_core_tables()
+    except Exception:
+        logger.exception("Failed to initialize database schema")
+        raise
 
 
 if __name__ == "__main__":
     freeze_support()
-    file_cleanup()
+    initialize_database()
     run()
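
The `QBITRR_DOCKER_RUNNING` check above widens a loopback-only WebUI bind to `0.0.0.0` inside the official container, where `127.0.0.1` would make the published port unreachable. The rule, restated as a standalone sketch (`effective_webui_host` is a hypothetical helper, not part of qBitrr):

```python
# Sketch of the Docker host-override rule from qBitManager.__init__ above.
import os

def effective_webui_host(configured: str) -> str:  # hypothetical helper
    in_docker = os.environ.get("QBITRR_DOCKER_RUNNING") == "69420"
    if in_docker and configured in {"127.0.0.1", "localhost"}:
        return "0.0.0.0"  # make the published container port reachable
    return configured
```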
qBitrr/search_activity_store.py CHANGED
@@ -3,60 +3,34 @@ from __future__ import annotations
 from threading import RLock
 from typing import Any
 
-from peewee import Model, SqliteDatabase, TextField
+from peewee import SqliteDatabase
 
-from qBitrr.home_path import APPDATA_FOLDER
+from qBitrr.tables import SearchActivity, ensure_table_schema, get_database
 
 _DB_LOCK = RLock()
-_DB_INSTANCE: SqliteDatabase | None = None
+_TABLE_READY = False
 
 
-def _get_database() -> SqliteDatabase:
-    global _DB_INSTANCE
-    if _DB_INSTANCE is None:
-        path = APPDATA_FOLDER.joinpath("webui_activity.db")
-        path.parent.mkdir(parents=True, exist_ok=True)
-        _DB_INSTANCE = SqliteDatabase(
-            str(path),
-            pragmas={
-                "journal_mode": "wal",
-                "cache_size": -64_000,
-                "foreign_keys": 1,
-                "ignore_check_constraints": 0,
-                "synchronous": 0,
-            },
-            timeout=15,
-            check_same_thread=False,
-        )
-    return _DB_INSTANCE
-
-
-class BaseModel(Model):
-    class Meta:
-        database = _get_database()
-
-
-class SearchActivity(BaseModel):
-    category = TextField(primary_key=True)
-    summary = TextField(null=True)
-    timestamp = TextField(null=True)
-
-
-def _ensure_tables() -> None:
-    db = _get_database()
+def _ensure_ready() -> SqliteDatabase:
+    global _TABLE_READY
+    db = get_database()
+    if _TABLE_READY:
+        return db
    with _DB_LOCK:
-        db.connect(reuse_if_open=True)
-        db.create_tables([SearchActivity], safe=True)
+        if not _TABLE_READY:
+            ensure_table_schema(SearchActivity)
+            _TABLE_READY = True
+    return db
 
 
 def record_search_activity(category: str, summary: str | None, timestamp: str | None) -> None:
     if not category:
         return
-    _ensure_tables()
+    db = _ensure_ready()
     if timestamp is not None and not isinstance(timestamp, str):
         timestamp = str(timestamp)
     data: dict[str, Any] = {"summary": summary, "timestamp": timestamp}
-    with _get_database().atomic():
+    with db.atomic():
        SearchActivity.insert(category=category, **data).on_conflict(
            conflict_target=[SearchActivity.category],
            update=data,
@@ -64,9 +38,8 @@ def record_search_activity(category: str, summary: str | None, timestamp: str |
 
 
 def fetch_search_activities() -> dict[str, dict[str, str | None]]:
-    _ensure_tables()
+    db = _ensure_ready()
     activities: dict[str, dict[str, str | None]] = {}
-    db = _get_database()
     db.connect(reuse_if_open=True)
     try:
         query = SearchActivity.select()
@@ -83,6 +56,6 @@ def fetch_search_activities() -> dict[str, dict[str, str | None]]:
 def clear_search_activity(category: str) -> None:
     if not category:
         return
-    _ensure_tables()
-    with _get_database().atomic():
+    db = _ensure_ready()
+    with db.atomic():
        SearchActivity.delete().where(SearchActivity.category == category).execute()
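
With the store rehomed onto the shared database, the public helpers behave as a simple keyed upsert. A usage sketch (assuming qBitrr is importable; the category string and summaries are hypothetical):

```python
# Sketch: record_search_activity() upserts on the category primary key, so
# the second call overwrites the first. "Sonarr-TV" is a hypothetical key.
from qBitrr.search_activity_store import (
    fetch_search_activities,
    record_search_activity,
)

record_search_activity("Sonarr-TV", "Searched 3 episodes", "2025-01-01T00:00:00Z")
record_search_activity("Sonarr-TV", "Searched 5 episodes", "2025-01-01T01:00:00Z")
print(fetch_search_activities()["Sonarr-TV"]["summary"])  # "Searched 5 episodes"
```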
qBitrr/tables.py CHANGED
@@ -1,11 +1,229 @@
-from peewee import BooleanField, CharField, DateTimeField, IntegerField, Model, TextField
+from __future__ import annotations
 
+import logging
+import re
+from functools import lru_cache
+from pathlib import Path
+from typing import NamedTuple
 
-class FilesQueued(Model):
+from peewee import (
+    BooleanField,
+    CharField,
+    DatabaseError,
+    DatabaseProxy,
+    DateTimeField,
+    IntegerField,
+    Model,
+    OperationalError,
+    SqliteDatabase,
+    TextField,
+)
+
+from qBitrr.db_lock import database_lock
+from qBitrr.home_path import APPDATA_FOLDER
+
+logger = logging.getLogger("qBitrr.Database")
+
+DATABASE_FILE = APPDATA_FOLDER.joinpath("qbitrr.db")
+_database_proxy: DatabaseProxy = DatabaseProxy()
+_DATABASE: SqliteDatabase | None = None
+_DB_ARTIFACT_SUFFIXES: tuple[str, ...] = ("", "-wal", "-shm")
+
+
+class LockedSqliteDatabase(SqliteDatabase):
+    def connect(self, **kwargs):
+        with database_lock():
+            return super().connect(**kwargs)
+
+    def close(self):
+        with database_lock():
+            return super().close()
+
+    def execute_sql(self, *args, **kwargs):
+        with database_lock():
+            return super().execute_sql(*args, **kwargs)
+
+
+def _database_artifact_paths() -> tuple[Path, ...]:
+    return tuple(
+        DATABASE_FILE if suffix == "" else DATABASE_FILE.with_name(f"{DATABASE_FILE.name}{suffix}")
+        for suffix in _DB_ARTIFACT_SUFFIXES
+    )
+
+
+def purge_database_files() -> list[Path]:
+    removed: list[Path] = []
+    with database_lock():
+        for candidate in _database_artifact_paths():
+            try:
+                candidate.unlink()
+                removed.append(candidate)
+            except FileNotFoundError:
+                continue
+            except OSError as exc:
+                logger.warning("Unable to remove database artifact '%s': %s", candidate, exc)
+    if removed:
+        logger.info(
+            "Removed database artifacts: %s",
+            ", ".join(str(path) for path in removed),
+        )
+    return removed
+
+
+def _reset_database(exc: BaseException) -> None:
+    global _DATABASE
+    logger.warning("Database reset triggered after failure: %s", exc)
+    with database_lock():
+        try:
+            if _DATABASE is not None and not _DATABASE.is_closed():
+                _DATABASE.close()
+        except Exception as close_error:  # pragma: no cover - best effort cleanup
+            logger.debug("Error closing database while resetting: %s", close_error)
+        _DATABASE = None
+        purge_database_files()
+
+
+class BaseModel(Model):
+    class Meta:
+        database = _database_proxy
+
+
+def get_database(*, _retry: bool = True) -> SqliteDatabase:
+    global _DATABASE
+    if _DATABASE is None:
+        DATABASE_FILE.parent.mkdir(parents=True, exist_ok=True)
+        _DATABASE = LockedSqliteDatabase(
+            str(DATABASE_FILE),
+            pragmas={
+                "journal_mode": "wal",
+                "cache_size": -64_000,
+                "foreign_keys": 1,
+                "ignore_check_constraints": 0,
+                "synchronous": "NORMAL",
+                "busy_timeout": 60_000,
+            },
+            timeout=15,
+            check_same_thread=False,
+            autocommit=True,
+        )
+        _database_proxy.initialize(_DATABASE)
+    try:
+        _DATABASE.connect(reuse_if_open=True)
+    except DatabaseError as exc:
+        if not _retry:
+            raise
+        _reset_database(exc)
+        return get_database(_retry=False)
+    return _DATABASE
+
+
+def ensure_table_schema(model: type[BaseModel]) -> None:
+    database = get_database()
+    table_name = model._meta.table_name
+    with database:
+        database.create_tables([model], safe=True)
+        existing_columns = {column.name for column in database.get_columns(table_name)}
+        try:
+            primary_keys = {column.lower() for column in database.get_primary_keys(table_name)}
+        except OperationalError:
+            primary_keys = set()
+        try:
+            index_metadata = database.get_indexes(table_name)
+        except OperationalError:
+            index_metadata = []
+
+        def _refresh_indexes() -> None:
+            nonlocal index_metadata
+            try:
+                index_metadata = database.get_indexes(table_name)
+            except OperationalError:
+                index_metadata = []
+
+        def _has_unique(column: str) -> bool:
+            lower_column = column.lower()
+            for index in index_metadata:
+                if not index.unique:
+                    continue
+                normalized = tuple(col.lower() for col in index.columns or ())
+                if normalized == (lower_column,):
+                    return True
+            return False
+
+        def _deduplicate(column: str) -> None:
+            try:
+                duplicates = database.execute_sql(
+                    f"""
+                    SELECT {column}, MIN(rowid) AS keep_rowid
+                    FROM {table_name}
+                    WHERE {column} IS NOT NULL
+                    GROUP BY {column}
+                    HAVING COUNT(*) > 1
+                    """
+                ).fetchall()
+            except OperationalError:
+                return
+            if not duplicates:
+                return
+            for value, keep_rowid in duplicates:
+                try:
+                    database.execute_sql(
+                        f"""
+                        DELETE FROM {table_name}
+                        WHERE {column} = ?
+                          AND rowid != ?
+                        """,
+                        (value, keep_rowid),
+                    )
+                except OperationalError:
+                    logger.warning(
+                        "Failed to deduplicate rows on %s.%s for value %s",
+                        table_name,
+                        column,
+                        value,
+                    )
+            if duplicates:
+                logger.info(
+                    "Deduplicated %s entries on %s.%s to restore unique constraint",
+                    len(duplicates),
+                    table_name,
+                    column,
+                )
+
+        def _ensure_unique(column: str) -> None:
+            if _has_unique(column):
+                return
+            _deduplicate(column)
+            try:
+                index_name = f"{table_name}_{column}_uniq".replace(".", "_")
+                database.execute_sql(
+                    f'CREATE UNIQUE INDEX IF NOT EXISTS "{index_name}" '
+                    f'ON "{table_name}" ("{column}")'
+                )
+                _refresh_indexes()
+            except OperationalError:
+                logger.warning(
+                    "Unable to create unique index on %s.%s; uniqueness guarantees may be missing",
+                    table_name,
+                    column,
+                )
+                return
+            _refresh_indexes()
+
+        for field in model._meta.sorted_fields:
+            column_name = field.column_name
+            if column_name not in existing_columns:
+                database.add_column(table_name, column_name, field)
+            if field.primary_key and column_name.lower() not in primary_keys:
+                _ensure_unique(column_name)
+            elif field.unique:
+                _ensure_unique(column_name)
+
+
+class FilesQueued(BaseModel):
     EntryId = IntegerField(primary_key=True, null=False, unique=True)
 
 
-class MoviesFilesModel(Model):
+class MoviesFilesModel(BaseModel):
     Title = CharField()
     Monitored = BooleanField()
     TmdbId = IntegerField()
@@ -22,7 +240,7 @@ class MoviesFilesModel(Model):
     Reason = TextField(null=True)
 
 
-class EpisodeFilesModel(Model):
+class EpisodeFilesModel(BaseModel):
     EntryId = IntegerField(primary_key=True)
     SeriesTitle = TextField(null=True)
     Title = TextField(null=True)
@@ -44,7 +262,7 @@ class EpisodeFilesModel(Model):
     Reason = TextField(null=True)
 
 
-class SeriesFilesModel(Model):
+class SeriesFilesModel(BaseModel):
     EntryId = IntegerField(primary_key=True)
     Title = TextField(null=True)
     Monitored = BooleanField(null=True)
@@ -53,20 +271,119 @@ class SeriesFilesModel(Model):
     MinCustomFormatScore = IntegerField(null=True)
 
 
-class MovieQueueModel(Model):
+class MovieQueueModel(BaseModel):
     EntryId = IntegerField(unique=True)
     Completed = BooleanField(default=False)
 
 
-class EpisodeQueueModel(Model):
+class EpisodeQueueModel(BaseModel):
     EntryId = IntegerField(unique=True)
     Completed = BooleanField(default=False)
 
 
-class TorrentLibrary(Model):
+class TorrentLibrary(BaseModel):
     Hash = TextField(null=False)
     Category = TextField(null=False)
     AllowedSeeding = BooleanField(default=False)
     Imported = BooleanField(default=False)
     AllowedStalled = BooleanField(default=False)
     FreeSpacePaused = BooleanField(default=False)
+
+    class Meta:
+        table_name = "torrent_library"
+
+
+class SearchActivity(BaseModel):
+    category = TextField(primary_key=True)
+    summary = TextField(null=True)
+    timestamp = TextField(null=True)
+
+    class Meta:
+        table_name = "search_activity"
+
+
+class ArrTables(NamedTuple):
+    files: type[BaseModel]
+    queue: type[BaseModel]
+    series: type[BaseModel] | None
+    persisting_queue: type[BaseModel]
+    torrents: type[BaseModel] | None
+
+
+_SAFE_IDENTIFIER = re.compile(r"[^0-9A-Za-z_]+")
+
+
+def _sanitize_identifier(name: str) -> str:
+    token = name.strip().replace(" ", "_")
+    token = _SAFE_IDENTIFIER.sub("_", token)
+    token = token.strip("_")
+    if not token:
+        token = "Arr"
+    if token[0].isdigit():
+        token = f"Arr_{token}"
+    return token
+
+
+@lru_cache(maxsize=None)
+def create_arr_tables(
+    arr_name: str,
+    arr_type: str,
+    *,
+    include_series: bool,
+    include_torrents: bool,
+) -> ArrTables:
+    table_prefix = _sanitize_identifier(arr_name)
+    files_base: type[BaseModel]
+    queue_base: type[BaseModel]
+    if arr_type.lower() == "sonarr":
+        files_base = EpisodeFilesModel
+        queue_base = EpisodeQueueModel
+    elif arr_type.lower() == "radarr":
+        files_base = MoviesFilesModel
+        queue_base = MovieQueueModel
+    else:
+        raise ValueError(f"Unknown arr_type '{arr_type}'")
+
+    class Files(files_base):
+        class Meta:
+            table_name = f"{table_prefix}_files"
+
+    class Queue(queue_base):
+        class Meta:
+            table_name = f"{table_prefix}_queue"
+
+    class PersistingQueue(FilesQueued):
+        class Meta:
+            table_name = f"{table_prefix}_persisting_queue"
+
+    series_model: type[BaseModel] | None = None
+    if include_series:
+
+        class Series(SeriesFilesModel):
+            class Meta:
+                table_name = f"{table_prefix}_series"
+
+        series_model = Series
+
+    torrents_model: type[BaseModel] | None = TorrentLibrary if include_torrents else None
+
+    ensure_table_schema(Files)
+    ensure_table_schema(Queue)
+    ensure_table_schema(PersistingQueue)
+    if series_model is not None:
+        ensure_table_schema(series_model)
+    if torrents_model is not None:
+        ensure_table_schema(torrents_model)
+
+    return ArrTables(
+        files=Files,
+        queue=Queue,
+        series=series_model,
+        persisting_queue=PersistingQueue,
+        torrents=torrents_model,
+    )
+
+
+def ensure_core_tables() -> None:
+    ensure_table_schema(TorrentLibrary)
+    ensure_table_schema(SearchActivity)
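
`ensure_table_schema()` is the additive migration path for the single-file database: create the table if missing, `add_column` for new fields, and backfill unique indexes (deduplicating rows first). A sketch with a hypothetical model (assuming qBitrr 5.1.0 is importable):

```python
# Sketch: StatsModel is a hypothetical model used only to illustrate
# ensure_table_schema(); it is not part of qBitrr.
from peewee import IntegerField, TextField

from qBitrr.tables import BaseModel, ensure_table_schema, get_database

class StatsModel(BaseModel):
    Name = TextField(unique=True)    # unique index is backfilled if missing
    Count = IntegerField(default=0)  # added via add_column on older schemas

get_database()                   # opens (or creates) the shared qbitrr.db
ensure_table_schema(StatsModel)  # safe to call repeatedly; purely additive
```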
qBitrr/versioning.py ADDED
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from typing import Any
+
+import requests
+from packaging import version as version_parser
+
+from qBitrr.bundled_data import patched_version
+
+DEFAULT_REPOSITORY = "Feramance/qBitrr"
+
+
+def normalize_version(raw: str | None) -> str | None:
+    if not raw:
+        return None
+    cleaned = raw.strip()
+    if cleaned.startswith(("v", "V")):
+        cleaned = cleaned[1:]
+    if "-" in cleaned:
+        cleaned = cleaned.split("-", 1)[0]
+    return cleaned or None
+
+
+def is_newer_version(candidate: str | None, current: str | None = None) -> bool:
+    if not candidate:
+        return False
+    normalized_current = normalize_version(current or patched_version)
+    if not normalized_current:
+        return True
+    try:
+        latest_version = version_parser.parse(candidate)
+        current_version = version_parser.parse(normalized_current)
+        return latest_version > current_version
+    except Exception:
+        return candidate != normalized_current
+
+
+def fetch_latest_release(repo: str = DEFAULT_REPOSITORY, *, timeout: int = 10) -> dict[str, Any]:
+    url = f"https://api.github.com/repos/{repo}/releases/latest"
+    headers = {"Accept": "application/vnd.github+json"}
+    try:
+        response = requests.get(url, headers=headers, timeout=timeout)
+        response.raise_for_status()
+        payload = response.json()
+    except Exception as exc:
+        message = str(exc)
+        if len(message) > 200:
+            message = f"{message[:197]}..."
+        return {
+            "raw_tag": None,
+            "normalized": None,
+            "changelog": "",
+            "changelog_url": f"https://github.com/{repo}/releases",
+            "update_available": False,
+            "error": message,
+        }
+
+    raw_tag = (payload.get("tag_name") or payload.get("name") or "").strip()
+    normalized = normalize_version(raw_tag)
+    changelog = payload.get("body") or ""
+    changelog_url = payload.get("html_url") or f"https://github.com/{repo}/releases"
+    update_available = is_newer_version(normalized)
+    return {
+        "raw_tag": raw_tag or None,
+        "normalized": normalized,
+        "changelog": changelog,
+        "changelog_url": changelog_url,
+        "update_available": update_available,
+        "error": None,
+    }
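
The normalization rules are intentionally loose: a leading `v`/`V` is stripped and anything after the first `-` is discarded before the PEP 440 comparison. A sketch of the observable behavior (assuming qBitrr is importable):

```python
# Sketch of the version helpers' behavior per the definitions above.
from qBitrr.versioning import is_newer_version, normalize_version

print(normalize_version("v5.1.0-hotfix"))          # "5.1.0"
print(is_newer_version("5.2.0", current="5.1.0"))  # True
print(is_newer_version("5.1.0", current="5.1.0"))  # False
```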
qBitrr/webui.py CHANGED
@@ -11,9 +11,7 @@ from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any
 
-import requests
 from flask import Flask, jsonify, redirect, request, send_file
-from packaging import version as version_parser
 from peewee import fn
 
 from qBitrr.arss import FreeSpaceManager, PlaceHolderArr
@@ -24,6 +22,7 @@ from qBitrr.search_activity_store import (
     clear_search_activity,
     fetch_search_activities,
 )
+from qBitrr.versioning import fetch_latest_release
 
 
 def _toml_set(doc, dotted_key: str, value: Any):
@@ -69,7 +68,7 @@ def _toml_to_jsonable(obj: Any) -> Any:
 
 
 class WebUI:
-    def __init__(self, manager, host: str = "127.0.0.1", port: int = 6969):
+    def __init__(self, manager, host: str = "0.0.0.0", port: int = 6969):
         self.manager = manager
         self.host = host
         self.port = port
@@ -131,58 +130,17 @@ class WebUI:
         self._thread: threading.Thread | None = None
         self._use_dev_server: bool | None = None
 
-    @staticmethod
-    def _normalize_version(value: str | None) -> str | None:
-        if not value:
-            return None
-        cleaned = value.strip()
-        if not cleaned:
-            return None
-        if cleaned[0] in {"v", "V"}:
-            cleaned = cleaned[1:]
-        if "-" in cleaned:
-            cleaned = cleaned.split("-", 1)[0]
-        return cleaned or None
-
-    def _is_newer_version(self, candidate: str | None) -> bool:
-        if not candidate:
-            return False
-        current_norm = self._normalize_version(patched_version)
-        if not current_norm:
-            return True
-        try:
-            latest_version = version_parser.parse(candidate)
-            current_version = version_parser.parse(current_norm)
-            return latest_version > current_version
-        except Exception:
-            return candidate != current_norm
-
     def _fetch_version_info(self) -> dict[str, Any]:
-        repo = self._github_repo
-        url = f"https://api.github.com/repos/{repo}/releases/latest"
-        headers = {"Accept": "application/vnd.github+json"}
-        try:
-            response = requests.get(url, headers=headers, timeout=10)
-            response.raise_for_status()
-            payload = response.json()
-        except Exception as exc:
-            message = str(exc)
-            if len(message) > 200:
-                message = f"{message[:197]}..."
-            self.logger.debug("Failed to fetch latest release information: %s", exc)
-            return {"error": message}
-
-        raw_tag = (payload.get("tag_name") or payload.get("name") or "").strip()
-        normalized_latest = self._normalize_version(raw_tag)
-        latest_display = raw_tag or normalized_latest
-        changelog = payload.get("body") or ""
-        changelog_url = payload.get("html_url") or f"https://github.com/{repo}/releases"
-        update_available = self._is_newer_version(normalized_latest)
+        info = fetch_latest_release(self._github_repo)
+        if info.get("error"):
+            self.logger.debug("Failed to fetch latest release information: %s", info["error"])
+            return {"error": info["error"]}
+        latest_display = info.get("raw_tag") or info.get("normalized")
         return {
             "latest_version": latest_display,
-            "update_available": update_available,
-            "changelog": changelog,
-            "changelog_url": changelog_url,
+            "update_available": bool(info.get("update_available")),
+            "changelog": info.get("changelog") or "",
+            "changelog_url": info.get("changelog_url"),
             "error": None,
         }
 
@@ -251,7 +209,14 @@ class WebUI:
            except AttributeError:
                from qBitrr.auto_update import perform_self_update
 
-                perform_self_update(self.manager.logger)
+                if not perform_self_update(self.manager.logger):
+                    raise RuntimeError("pip upgrade did not complete successfully")
+            try:
+                self.manager.request_restart()
+            except Exception:
+                self.logger.warning(
+                    "Update applied but restart request failed; exiting manually."
+                )
        except Exception as exc:
            result = "error"
            error_message = str(exc)
qbitrr2-5.0.1.dist-info/METADATA → qbitrr2-5.1.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qBitrr2
-Version: 5.0.1
+Version: 5.1.0
 Summary: "A simple Python script to talk to qBittorrent and Arr's"
 Home-page: https://github.com/Feramance/qBitrr
 Author: Feramance
@@ -138,13 +138,16 @@ Minimal setup:
 docker run -d \
   --name qbitrr \
   -e TZ=Europe/London \
+  -p 6969:6969 \
   -v /etc/localtime:/etc/localtime:ro \
   -v /path/to/appdata/qbitrr:/config \
   -v /path/to/completed/downloads:/completed_downloads:rw \
   --restart unless-stopped \
-  feramance/qbitrr:latest
+  feramance/qbitrr:latest
 ```
 
+The container automatically binds its WebUI to `0.0.0.0`; exposing `6969` makes the dashboard reachable at `http://<host>:6969/ui`.
+
 Compose example with a little more structure:
 ```yaml
 services:
@@ -154,6 +157,8 @@ services:
     restart: unless-stopped
     environment:
      TZ: Europe/London
+    ports:
+      - "6969:6969"
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /path/to/appdata/qbitrr:/config
qbitrr2-5.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,24 @@
+qBitrr/__init__.py,sha256=smiPIV7d2lMJ_KTtFdAVlxLEBobFTheILdgry1iqpjQ,405
+qBitrr/arss.py,sha256=sxxO19PyD5bR1-K8k87pgQWtgXghzSggVJXkbuExU3c,258843
+qBitrr/auto_update.py,sha256=hVAvAlKEdOHm6AJLlKvtkklbQhjotVcFOCH-MTigHQM,4419
+qBitrr/bundled_data.py,sha256=oP177OBkkhh_wXSpvQnW-oH46iGPEACBHC2myahMkGE,190
+qBitrr/config.py,sha256=brGy1PQJY6D0HG1V6gpuTi1gPbMH3zIvfozASkvPZR8,6177
+qBitrr/db_lock.py,sha256=SRCDIoqg-AFLU-VDChAmGdfx8nhgLGETn6XKF3RdJT4,2449
+qBitrr/env_config.py,sha256=299u_uEoyxlM_ceTD0Z_i41JdYjSHmqO6FKe7qGFgTM,2866
+qBitrr/errors.py,sha256=5_n1x0XX4UvMlieC_J1Hc5pq5JD17orfjJy9KfxDXA4,1107
+qBitrr/ffprobe.py,sha256=2IM0iuPPTEb0xHmN1OetQoBd80-Nmv5Oq7P6o-mjBd0,4019
+qBitrr/gen_config.py,sha256=lDRbCzjWoJuUyOZNnOmNjChuZoR5K6fuwKCJ2qxzu78,29862
+qBitrr/home_path.py,sha256=zvBheAR2xvr8LBZRk1FyqfnALE-eFzsY9CyqyZDjxiE,626
+qBitrr/logger.py,sha256=os7cHbJ3sbkxDh6Nno9o_41aCwsLp-Y963nZe-rglKA,5505
+qBitrr/main.py,sha256=FdW3sNLUR2WAeOCMB_1XCg1E9az_qg_5SqXORNilXgg,19340
+qBitrr/search_activity_store.py,sha256=NZFLIhnHYst9-vIWVN9on2yRRd0eYyTDd5Uye6slbsw,1704
+qBitrr/tables.py,sha256=0i7oTSUaPk3qbmBAGzawxbQo4bL04YpLDGQ0G3-fBeo,12133
+qBitrr/utils.py,sha256=DEnkQrbXFPWunhzId0OE6_oWuUTd5V4aDCZ2yHdrvo0,7306
+qBitrr/versioning.py,sha256=k3n8cOh1E5mevN8OkYWOA3110PuOajMOpGyCKy3rFEc,2279
+qBitrr/webui.py,sha256=HaM3w-rzuvVyGtphRCROY2GDXZtRmny3blkC5WoTOSk,68298
+qbitrr2-5.1.0.dist-info/licenses/LICENSE,sha256=P978aVGi7dPbKz8lfvdiryOS5IjTAU7AA47XhBhVBlI,1066
+qbitrr2-5.1.0.dist-info/METADATA,sha256=HZW0rN-GBfN2nVhoBTELsSJAHG6szM9M9QWlByr3-SI,10122
+qbitrr2-5.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+qbitrr2-5.1.0.dist-info/entry_points.txt,sha256=MIR-l5s31VBs9qlv3HiAaMdpOOyy0MNGfM7Ib1-fKeQ,43
+qbitrr2-5.1.0.dist-info/top_level.txt,sha256=jIINodarzsPcQeTf-vvK8-_g7cQ8CvxEg41ms14K97g,7
+qbitrr2-5.1.0.dist-info/RECORD,,
qbitrr2-5.0.1.dist-info/RECORD DELETED
@@ -1,22 +0,0 @@
-qBitrr/__init__.py,sha256=smiPIV7d2lMJ_KTtFdAVlxLEBobFTheILdgry1iqpjQ,405
-qBitrr/arss.py,sha256=83YtsaUoOp5ICyRopvjT4wUHUPWWPMbNg6t2CC5ZxFc,263238
-qBitrr/auto_update.py,sha256=faHYeJ9GsSmz5XfOt6Uq3fZOeEmqZ20MMUotKtq9M7s,4256
-qBitrr/bundled_data.py,sha256=jQbC0u8PvjasHFnKzjvDz_oMRBrzaI6YCb4SJIUJcFU,190
-qBitrr/config.py,sha256=brGy1PQJY6D0HG1V6gpuTi1gPbMH3zIvfozASkvPZR8,6177
-qBitrr/env_config.py,sha256=299u_uEoyxlM_ceTD0Z_i41JdYjSHmqO6FKe7qGFgTM,2866
-qBitrr/errors.py,sha256=5_n1x0XX4UvMlieC_J1Hc5pq5JD17orfjJy9KfxDXA4,1107
-qBitrr/ffprobe.py,sha256=2IM0iuPPTEb0xHmN1OetQoBd80-Nmv5Oq7P6o-mjBd0,4019
-qBitrr/gen_config.py,sha256=ub5sZAPj7uh4yxR-RIV-LhqrWS66uH9kitybXLAk1e4,29866
-qBitrr/home_path.py,sha256=zvBheAR2xvr8LBZRk1FyqfnALE-eFzsY9CyqyZDjxiE,626
-qBitrr/logger.py,sha256=lp9aPXtdcSVrSv7SQX_Nokq1nzQdHHkmZJ23RofWnos,5476
-qBitrr/main.py,sha256=mz5c-m7ZlnTkSrIS_eTJdWUh2ANioH4LJ4AkAHoB_FA,17208
-qBitrr/search_activity_store.py,sha256=_7MD7fFna4uTSo_pRT7DqoytSVz7tPoU9D2AV2mn-oc,2474
-qBitrr/tables.py,sha256=si_EpQXj6OOF78rgJGDMeTEnT2zpvfnR3NGPaVZHUXc,2479
-qBitrr/utils.py,sha256=DEnkQrbXFPWunhzId0OE6_oWuUTd5V4aDCZ2yHdrvo0,7306
-qBitrr/webui.py,sha256=YhEhEm0os1UE-kVHxtEZN4ZV06hSeE5D0HPMIV4pPjs,69493
-qbitrr2-5.0.1.dist-info/licenses/LICENSE,sha256=P978aVGi7dPbKz8lfvdiryOS5IjTAU7AA47XhBhVBlI,1066
-qbitrr2-5.0.1.dist-info/METADATA,sha256=si636Z-gpFrpBtMcwYx5zB3D2XqIGX3sfEWtDEHHmWQ,9943
-qbitrr2-5.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-qbitrr2-5.0.1.dist-info/entry_points.txt,sha256=MIR-l5s31VBs9qlv3HiAaMdpOOyy0MNGfM7Ib1-fKeQ,43
-qbitrr2-5.0.1.dist-info/top_level.txt,sha256=jIINodarzsPcQeTf-vvK8-_g7cQ8CvxEg41ms14K97g,7
-qbitrr2-5.0.1.dist-info/RECORD,,