quasarr 2.6.0__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (57)
  1. quasarr/__init__.py +71 -61
  2. quasarr/api/__init__.py +3 -4
  3. quasarr/api/arr/__init__.py +159 -56
  4. quasarr/api/captcha/__init__.py +203 -154
  5. quasarr/api/config/__init__.py +1 -1
  6. quasarr/api/jdownloader/__init__.py +19 -12
  7. quasarr/downloads/__init__.py +12 -8
  8. quasarr/downloads/linkcrypters/al.py +3 -3
  9. quasarr/downloads/linkcrypters/filecrypt.py +1 -2
  10. quasarr/downloads/packages/__init__.py +62 -88
  11. quasarr/downloads/sources/al.py +3 -3
  12. quasarr/downloads/sources/by.py +3 -3
  13. quasarr/downloads/sources/he.py +8 -9
  14. quasarr/downloads/sources/nk.py +3 -3
  15. quasarr/downloads/sources/sl.py +6 -1
  16. quasarr/downloads/sources/wd.py +132 -62
  17. quasarr/downloads/sources/wx.py +11 -17
  18. quasarr/providers/auth.py +9 -13
  19. quasarr/providers/cloudflare.py +50 -4
  20. quasarr/providers/imdb_metadata.py +0 -2
  21. quasarr/providers/jd_cache.py +64 -90
  22. quasarr/providers/log.py +226 -8
  23. quasarr/providers/myjd_api.py +116 -94
  24. quasarr/providers/sessions/al.py +20 -22
  25. quasarr/providers/sessions/dd.py +1 -1
  26. quasarr/providers/sessions/dl.py +8 -10
  27. quasarr/providers/sessions/nx.py +1 -1
  28. quasarr/providers/shared_state.py +26 -15
  29. quasarr/providers/utils.py +15 -6
  30. quasarr/providers/version.py +1 -1
  31. quasarr/search/__init__.py +91 -78
  32. quasarr/search/sources/al.py +19 -23
  33. quasarr/search/sources/by.py +6 -6
  34. quasarr/search/sources/dd.py +8 -10
  35. quasarr/search/sources/dj.py +15 -18
  36. quasarr/search/sources/dl.py +25 -37
  37. quasarr/search/sources/dt.py +13 -15
  38. quasarr/search/sources/dw.py +24 -16
  39. quasarr/search/sources/fx.py +25 -11
  40. quasarr/search/sources/he.py +16 -14
  41. quasarr/search/sources/hs.py +7 -7
  42. quasarr/search/sources/mb.py +7 -7
  43. quasarr/search/sources/nk.py +24 -25
  44. quasarr/search/sources/nx.py +22 -15
  45. quasarr/search/sources/sf.py +18 -9
  46. quasarr/search/sources/sj.py +7 -7
  47. quasarr/search/sources/sl.py +26 -14
  48. quasarr/search/sources/wd.py +63 -9
  49. quasarr/search/sources/wx.py +33 -47
  50. quasarr/storage/config.py +1 -3
  51. quasarr/storage/setup.py +13 -4
  52. {quasarr-2.6.0.dist-info → quasarr-2.7.0.dist-info}/METADATA +4 -1
  53. quasarr-2.7.0.dist-info/RECORD +84 -0
  54. quasarr-2.6.0.dist-info/RECORD +0 -84
  55. {quasarr-2.6.0.dist-info → quasarr-2.7.0.dist-info}/WHEEL +0 -0
  56. {quasarr-2.6.0.dist-info → quasarr-2.7.0.dist-info}/entry_points.txt +0 -0
  57. {quasarr-2.6.0.dist-info → quasarr-2.7.0.dist-info}/licenses/LICENSE +0 -0
quasarr/providers/jd_cache.py CHANGED
@@ -2,7 +2,7 @@
  # Quasarr
  # Project by https://github.com/rix1337

- from quasarr.providers.log import debug
+ from quasarr.providers.log import trace
  from quasarr.providers.myjd_api import (
      MYJDException,
      RequestTimeoutException,
@@ -62,7 +62,7 @@ class JDPackageCache:
      """

      def __init__(self, device):
-         debug("JDPackageCache: Initializing new cache instance")
+         trace("Initializing new cache instance")
          self._device = device
          self._linkgrabber_packages = None
          self._linkgrabber_links = None
@@ -87,97 +87,91 @@ class JDPackageCache:
      @property
      def linkgrabber_packages(self):
          if self._linkgrabber_packages is None:
-             debug("JDPackageCache: Fetching linkgrabber_packages from API")
+             trace("Fetching linkgrabber_packages from API")
              self._api_calls += 1
              try:
                  self._linkgrabber_packages = self._device.linkgrabber.query_packages()
-                 debug(
-                     f"JDPackageCache: Retrieved {len(self._linkgrabber_packages)} linkgrabber packages"
+                 trace(
+                     f"Retrieved {len(self._linkgrabber_packages)} linkgrabber packages"
                  )
              except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
-                 debug(f"JDPackageCache: Failed to fetch linkgrabber_packages: {e}")
+                 trace(f"Failed to fetch linkgrabber_packages: {e}")
                  self._linkgrabber_packages = []
          else:
              self._cache_hits += 1
-             debug(
-                 f"JDPackageCache: Using cached linkgrabber_packages ({len(self._linkgrabber_packages)} packages)"
+             trace(
+                 f"Using cached linkgrabber_packages ({len(self._linkgrabber_packages)} packages)"
              )
          return self._linkgrabber_packages

      @property
      def linkgrabber_links(self):
          if self._linkgrabber_links is None:
-             debug("JDPackageCache: Fetching linkgrabber_links from API")
+             trace("Fetching linkgrabber_links from API")
              self._api_calls += 1
              try:
                  self._linkgrabber_links = self._device.linkgrabber.query_links()
-                 debug(
-                     f"JDPackageCache: Retrieved {len(self._linkgrabber_links)} linkgrabber links"
-                 )
+                 trace(f"Retrieved {len(self._linkgrabber_links)} linkgrabber links")
              except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
-                 debug(f"JDPackageCache: Failed to fetch linkgrabber_links: {e}")
+                 trace(f"Failed to fetch linkgrabber_links: {e}")
                  self._linkgrabber_links = []
          else:
              self._cache_hits += 1
-             debug(
-                 f"JDPackageCache: Using cached linkgrabber_links ({len(self._linkgrabber_links)} links)"
+             trace(
+                 f"Using cached linkgrabber_links ({len(self._linkgrabber_links)} links)"
              )
          return self._linkgrabber_links

      @property
      def downloader_packages(self):
          if self._downloader_packages is None:
-             debug("JDPackageCache: Fetching downloader_packages from API")
+             trace("Fetching downloader_packages from API")
              self._api_calls += 1
              try:
                  self._downloader_packages = self._device.downloads.query_packages()
-                 debug(
-                     f"JDPackageCache: Retrieved {len(self._downloader_packages)} downloader packages"
-                 )
+                 trace(f"Retrieved {len(self._downloader_packages)} downloader packages")
              except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
-                 debug(f"JDPackageCache: Failed to fetch downloader_packages: {e}")
+                 trace(f"Failed to fetch downloader_packages: {e}")
                  self._downloader_packages = []
          else:
              self._cache_hits += 1
-             debug(
-                 f"JDPackageCache: Using cached downloader_packages ({len(self._downloader_packages)} packages)"
+             trace(
+                 f"Using cached downloader_packages ({len(self._downloader_packages)} packages)"
              )
          return self._downloader_packages

      @property
      def downloader_links(self):
          if self._downloader_links is None:
-             debug("JDPackageCache: Fetching downloader_links from API")
+             trace("Fetching downloader_links from API")
              self._api_calls += 1
              try:
                  self._downloader_links = self._device.downloads.query_links()
-                 debug(
-                     f"JDPackageCache: Retrieved {len(self._downloader_links)} downloader links"
-                 )
+                 trace(f"Retrieved {len(self._downloader_links)} downloader links")
              except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
-                 debug(f"JDPackageCache: Failed to fetch downloader_links: {e}")
+                 trace(f"Failed to fetch downloader_links: {e}")
                  self._downloader_links = []
          else:
              self._cache_hits += 1
-             debug(
-                 f"JDPackageCache: Using cached downloader_links ({len(self._downloader_links)} links)"
+             trace(
+                 f"Using cached downloader_links ({len(self._downloader_links)} links)"
              )
          return self._downloader_links

      @property
      def is_collecting(self):
          if self._is_collecting is None:
-             debug("JDPackageCache: Checking is_collecting from API")
+             trace("Checking is_collecting from API")
              self._api_calls += 1
              try:
                  self._is_collecting = self._device.linkgrabber.is_collecting()
-                 debug(f"JDPackageCache: is_collecting = {self._is_collecting}")
+                 trace(f"is_collecting = {self._is_collecting}")
              except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
-                 debug(f"JDPackageCache: Failed to check is_collecting: {e}")
+                 trace(f"Failed to check is_collecting: {e}")
                  self._is_collecting = False
          else:
              self._cache_hits += 1
-             debug(f"JDPackageCache: Using cached is_collecting = {self._is_collecting}")
+             trace(f"Using cached is_collecting = {self._is_collecting}")
          return self._is_collecting

      def _has_archive_extension(self, package_uuid, links):
@@ -189,8 +183,8 @@ class JDPackageCache:
              name_lower = name.lower()
              for ext in ARCHIVE_EXTENSIONS:
                  if name_lower.endswith(ext):
-                     debug(
-                         f"JDPackageCache: Found archive extension '{ext}' in file '{name}' for package {package_uuid}"
+                     trace(
+                         f"Found archive extension '{ext}' in file '{name}' for package {package_uuid}"
                      )
                      return True
          return False
@@ -207,51 +201,43 @@ class JDPackageCache:
          confirmed_archives = set()

          if not package_uuids:
-             debug(
-                 "JDPackageCache: _bulk_detect_archives called with empty package_uuids"
-             )
+             trace("_bulk_detect_archives called with empty package_uuids")
              return confirmed_archives, True

          package_list = list(package_uuids)
-         debug(
-             f"JDPackageCache: Bulk archive detection for {len(package_list)} packages"
-         )
+         trace(f"Bulk archive detection for {len(package_list)} packages")

          try:
              self._api_calls += 1
              archive_infos = self._device.extraction.get_archive_info([], package_list)
-             debug(
-                 f"JDPackageCache: get_archive_info returned {len(archive_infos) if archive_infos else 0} results"
+             trace(
+                 f"get_archive_info returned {len(archive_infos) if archive_infos else 0} results"
              )

              if archive_infos:
                  for i, archive_info in enumerate(archive_infos):
                      if archive_info:
-                         debug(f"JDPackageCache: archive_info[{i}] = {archive_info}")
+                         trace(f"archive_info[{i}] = {archive_info}")
                          # Try to get packageUUID from response
                          pkg_uuid = archive_info.get("packageUUID")
                          if pkg_uuid:
-                             debug(
-                                 f"JDPackageCache: Confirmed archive via packageUUID: {pkg_uuid}"
-                             )
+                             trace(f"Confirmed archive via packageUUID: {pkg_uuid}")
                              confirmed_archives.add(pkg_uuid)
                          else:
                              # Log what fields ARE available for debugging
-                             debug(
-                                 f"JDPackageCache: archive_info has no packageUUID, available keys: {list(archive_info.keys())}"
+                             trace(
+                                 f"archive_info has no packageUUID, available keys: {list(archive_info.keys())}"
                              )
                      else:
-                         debug(f"JDPackageCache: archive_info[{i}] is empty/None")
+                         trace(f"archive_info[{i}] is empty/None")

-             debug(
-                 f"JDPackageCache: Bulk detection confirmed {len(confirmed_archives)} archives: {confirmed_archives}"
+             trace(
+                 f"Bulk detection confirmed {len(confirmed_archives)} archives: {confirmed_archives}"
              )
              return confirmed_archives, True

          except Exception as e:
-             debug(
-                 f"JDPackageCache: Bulk archive detection API FAILED: {type(e).__name__}: {e}"
-             )
+             trace(f"Bulk archive detection API FAILED: {type(e).__name__}: {e}")
              return confirmed_archives, False

      def detect_all_archives(self, packages, links):
@@ -269,50 +255,46 @@ class JDPackageCache:
              Set of package UUIDs that should be treated as archives
          """
          if not packages:
-             debug("JDPackageCache: detect_all_archives called with no packages")
+             trace("detect_all_archives called with no packages")
              return set()

          all_package_uuids = {p.get("uuid") for p in packages if p.get("uuid")}
-         debug(
-             f"JDPackageCache: detect_all_archives for {len(all_package_uuids)} packages"
-         )
+         trace(f"detect_all_archives for {len(all_package_uuids)} packages")

          # ONE bulk API call for all packages
          confirmed_archives, api_succeeded = self._bulk_detect_archives(
              all_package_uuids
          )
-         debug(
-             f"JDPackageCache: Bulk API succeeded={api_succeeded}, confirmed={len(confirmed_archives)} archives"
+         trace(
+             f"Bulk API succeeded={api_succeeded}, confirmed={len(confirmed_archives)} archives"
          )

          # For packages NOT confirmed as archives, apply safety fallbacks
          unconfirmed = all_package_uuids - confirmed_archives
-         debug(f"JDPackageCache: {len(unconfirmed)} packages need fallback checking")
+         trace(f"{len(unconfirmed)} packages need fallback checking")

          for pkg_uuid in unconfirmed:
              # Fallback 1: Check file extensions
              if self._has_archive_extension(pkg_uuid, links):
-                 debug(
-                     f"JDPackageCache: Package {pkg_uuid} confirmed as archive via extension fallback"
-                 )
+                 trace(f"Package {pkg_uuid} confirmed as archive via extension fallback")
                  confirmed_archives.add(pkg_uuid)
              # Fallback 2: If bulk API failed completely, assume archive (safe)
              elif not api_succeeded:
-                 debug(
-                     f"JDPackageCache: SAFETY - Bulk API failed, assuming package {pkg_uuid} is archive"
+                 trace(
+                     f"SAFETY - Bulk API failed, assuming package {pkg_uuid} is archive"
                  )
                  confirmed_archives.add(pkg_uuid)
              else:
-                 debug(
-                     f"JDPackageCache: Package {pkg_uuid} confirmed as NON-archive (API worked, no extension match)"
+                 trace(
+                     f"Package {pkg_uuid} confirmed as NON-archive (API worked, no extension match)"
                  )

          # Cache results for is_package_archive() lookups
          for pkg_uuid in all_package_uuids:
              self._archive_cache[pkg_uuid] = pkg_uuid in confirmed_archives

-         debug(
-             f"JDPackageCache: Final archive detection: {len(confirmed_archives)}/{len(all_package_uuids)} packages are archives"
+         trace(
+             f"Final archive detection: {len(confirmed_archives)}/{len(all_package_uuids)} packages are archives"
          )
          return confirmed_archives

@@ -327,20 +309,16 @@ class JDPackageCache:
          premature "finished" status.
          """
          if package_uuid is None:
-             debug("JDPackageCache: is_package_archive called with None UUID")
+             trace("is_package_archive called with None UUID")
              return False

          if package_uuid in self._archive_cache:
              self._cache_hits += 1
              cached = self._archive_cache[package_uuid]
-             debug(
-                 f"JDPackageCache: is_package_archive({package_uuid}) = {cached} (cached)"
-             )
+             trace(f"is_package_archive({package_uuid}) = {cached} (cached)")
              return cached

-         debug(
-             f"JDPackageCache: is_package_archive({package_uuid}) - cache miss, querying API"
-         )
+         trace(f"is_package_archive({package_uuid}) - cache miss, querying API")

          # Single package lookup (fallback if detect_all_archives wasn't called)
          is_archive = None
@@ -349,33 +327,29 @@ class JDPackageCache:
          try:
              self._api_calls += 1
              archive_info = self._device.extraction.get_archive_info([], [package_uuid])
-             debug(f"JDPackageCache: Single get_archive_info returned: {archive_info}")
+             trace(f"Single get_archive_info returned: {archive_info}")
              # Original logic: is_archive = True if archive_info and archive_info[0] else False
              is_archive = True if archive_info and archive_info[0] else False
-             debug(f"JDPackageCache: API says is_archive = {is_archive}")
+             trace(f"API says is_archive = {is_archive}")
          except Exception as e:
              api_failed = True
-             debug(
-                 f"JDPackageCache: Single archive detection API FAILED for {package_uuid}: {type(e).__name__}: {e}"
+             trace(
+                 f"Single archive detection API FAILED for {package_uuid}: {type(e).__name__}: {e}"
              )

          # Fallback: check file extensions if API failed or returned False
          if (api_failed or not is_archive) and links:
              if self._has_archive_extension(package_uuid, links):
-                 debug(
-                     f"JDPackageCache: Package {package_uuid} confirmed as archive via extension fallback"
+                 trace(
+                     f"Package {package_uuid} confirmed as archive via extension fallback"
                  )
                  is_archive = True

          # SAFETY: If API failed and no extension detected, assume archive (conservative)
          if is_archive is None:
-             debug(
-                 f"JDPackageCache: SAFETY - Detection uncertain for {package_uuid}, assuming archive"
-             )
+             trace(f"SAFETY - Detection uncertain for {package_uuid}, assuming archive")
              is_archive = True

          self._archive_cache[package_uuid] = is_archive
-         debug(
-             f"JDPackageCache: is_package_archive({package_uuid}) = {is_archive} (final)"
-         )
+         trace(f"is_package_archive({package_uuid}) = {is_archive} (final)")
          return is_archive
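
The change throughout this file is mechanical: every debug("JDPackageCache: ...") call becomes trace("...") with the hand-written class prefix dropped. A minimal before/after sketch, assuming (per the quasarr/providers/log.py diff below) that the new trace() helper derives its context from the calling module, so the prefix is no longer needed and the messages move from the DEBUG-env-gated print to the TRACE level (5):

    # Sketch only, not part of the package diff.
    # 2.6.0: debug() printed only when the DEBUG env var was set; context spelled out by hand.
    #     debug("JDPackageCache: Fetching linkgrabber_packages from API")
    # 2.7.0: trace() logs at level 5; the logger adds the module context itself (see log.py below).
    from quasarr.providers.log import trace

    trace("Fetching linkgrabber_packages from API")
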
quasarr/providers/log.py CHANGED
@@ -1,19 +1,237 @@
  # -*- coding: utf-8 -*-
  # Quasarr
  # Project by https://github.com/rix1337
+ from __future__ import annotations

- import datetime
+ import inspect
  import os
+ import sys
+ from typing import TYPE_CHECKING, Any

+ from dotenv import load_dotenv
+ from loguru import logger
+ from wcwidth import wcswidth, wrap

- def timestamp():
-     return datetime.datetime.now().strftime("[%Y-%m-%d %H:%M:%S]")
+ if TYPE_CHECKING:
+     from loguru import Message

+ load_dotenv()

- def info(string):
-     print(f"{timestamp()} {string}")

+ def get_log_max_width() -> int:
+     try:
+         return int(os.getenv("LOG_MAX_WIDTH"))
+     except Exception:
+         pass
+     try:
+         return os.get_terminal_size().columns
+     except Exception:
+         return 160

- def debug(string):
-     if os.getenv("DEBUG"):
-         info(string)
+
+ _subsequent_indent = " " * 33
+
+
+ def wrapping_sink(message: Message) -> None:
+     wrapped = wrap(
+         text=message,
+         width=get_log_max_width(),
+         subsequent_indent=_subsequent_indent,
+     )
+     for w in wrapped:
+         sys.stdout.write(w + "\n")
+
+
+ logger.remove(0)
+ logger.add(
+     wrapping_sink,
+     format="<d>{time:YYYY-MM-DDTHH:mm:ss}</d> <lvl>{level:<5}</lvl> {extra[context]}<b><M>{extra[source]}</M></b>{extra[padding]} {message}",
+     colorize=os.getenv("LOG_COLOR", "1").lower() in ["1", "true", "yes"],
+     level=5,
+ )
+ logger.level(name="WARN", no=30, color="<yellow>")
+ logger.level(name="CRIT", no=50, color="<red>")
+
+ log_level_names = {
+     50: "CRIT",
+     40: "ERROR",
+     30: "WARN",
+     20: "INFO",
+     10: "DEBUG",
+     5: "TRACE",
+ }
+
+ # reverse map log_level_names
+ log_names_to_level = {v: k for k, v in log_level_names.items()}
+
+
+ def _read_env_log(key, default):
+     try:
+         try:
+             level = log_names_to_level[os.getenv(key, default).upper()]
+         except Exception:
+             level = max(0, min(int(level), 50))
+     except Exception:
+         level = default
+
+     return level
+
+
+ _log_level = _read_env_log("LOG", 20)
+
+
+ _context_replace = {
+     "quasarr": "",  # /quasarr/*
+     "arr": "🏴‍☠️",  # /quasarr/arr/*
+     "api": "🌐",  # /quasarr/api/*
+     "captcha": "🧩",  # /quasarr/api/captcha/*
+     "config": "⚙️",  # /quasarr/api/config/*
+     "sponsors_helper": "💖",  # /quasarr/api/sponsors_helper/*
+     "downloads": "📥",  # /quasarr/downloads/*
+     "linkcrypters": "🔐",  # /quasarr/linkcrypters/*
+     "filecrypt": "🛡️",  # /quasarr/linkcrypters/filecrypt.py
+     "hide": "👻",  # /quasarr/linkcrypters/hide.py
+     "packages": "📦",  # /quasarr/api/packages/*
+     "providers": "🔌",  # /quasarr/providers/*
+     "imdb_metadata": "🎬",  # /quasarr/providers/imdb_metadata.py
+     "jd_cache": "📇",  # /quasarr/providers/jd_cache.py
+     "log": "📝",  # /quasarr/providers/log.py
+     "myjd_api": "🔑",  # /quasarr/providers/myjd_api.py
+     "notifications": "🔔",  # /quasarr/providers/notifications.py
+     "shared_state": "🧠",  # /quasarr/providers/shared_state.py
+     "sessions": "🍪",  # /quasarr/providers/sessions/*
+     "search": "🔍",  # /quasarr/search/*
+     "storage": "💽",  # /quasarr/storage/*
+     "setup": "🛠️",  # /quasarr/storage/setup.py
+     "sqlite_database": "🗃️",  # /quasarr/storage/sqlite_database.py
+     "sources": "🧲",  # /quasarr/*/sources/*
+     "utils": "🧰",  # /quasarr/providers/utils.py
+ }
+
+
+ def _contexts_to_str(contexts: list[str]) -> str:
+     source = ""
+     if len(contexts) == 0:
+         return "", source
+
+     if contexts:
+         if contexts[-1].__len__() == 2:
+             source = contexts.pop()
+         elif contexts[0] == "quasarr" and contexts.__len__() == 1:
+             return "🌌", source
+
+     return "".join(_context_replace.get(c, c) for c in contexts), source.upper()
+
+
+ def get_log_level_name(level: int = _log_level) -> str:
+     return log_level_names[level]
+
+
+ def get_log_level(contexts: list[str] | None = None) -> int:
+     if contexts is None:
+         contexts = []
+     level = _log_level
+
+     for context in contexts:
+         context_level = _read_env_log(
+             "LOG" + f"_{context.upper()}" if context else "", _log_level
+         )
+         if context_level < level:
+             level = context_level
+
+     return level
+
+
+ class _Logger:
+     def __init__(self, contexts: list[str] | None = None):
+         if contexts is None:
+             contexts = []
+         self.level = get_log_level(contexts)
+         context, source = _contexts_to_str(contexts)
+         width = wcswidth(context + source)
+         padding = 6 - width
+
+         self.logger_alt = logger.bind(
+             context=context,
+             source=source,
+             padding=" " * padding,
+         )
+         self.logger = self.logger_alt.opt(colors=True)
+
+     def _log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
+         if self.level > level:
+             return
+         try:
+             try:
+                 self.logger.log(log_level_names[level], msg, *args, **kwargs)
+             except ValueError as e:
+                 # Fallback: try logging without color parsing if tags are mismatched
+                 self.logger_alt.log(log_level_names[level], msg, *args, **kwargs)
+
+                 if self.level <= 10:
+                     self.logger_alt.debug(
+                         f"Log formatting error: {e} | Original message: {msg}"
+                     )
+         except Exception:
+             # Fallback: just print to stderr if logging fails completely
+             print(f"LOGGING FAILURE: {msg}", file=sys.stderr)
+
+     def crit(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(50, msg, *args, **kwargs)
+
+     def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(40, msg, *args, **kwargs)
+
+     def warn(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(30, msg, *args, **kwargs)
+
+     def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(20, msg, *args, **kwargs)
+
+     def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(10, msg, *args, **kwargs)
+
+     def trace(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         self._log(5, msg, *args, **kwargs)
+
+
+ _loggers = {}
+
+
+ def get_logger(context: str) -> _Logger:
+     if context not in _loggers:
+         _loggers[context] = _Logger(context.split(".") if context else [])
+     return _loggers[context]
+
+
+ def _get_logger_for_module() -> _Logger:
+     # get the calling module filename
+     frame = inspect.currentframe()
+     caller_frame = frame.f_back.f_back
+     module_name = caller_frame.f_globals["__name__"]
+
+     return get_logger(module_name)
+
+
+ def crit(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().crit(msg, *args, **kwargs)
+
+
+ def error(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().error(msg, *args, **kwargs)
+
+
+ def warn(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().warn(msg, *args, **kwargs)
+
+
+ def info(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().info(msg, *args, **kwargs)
+
+
+ def debug(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().debug(msg, *args, **kwargs)
+
+
+ def trace(msg: str, *args: Any, **kwargs: Any) -> None:
+     _get_logger_for_module().trace(msg, *args, **kwargs)
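
Taken together, log.py moves from plain print() helpers to a loguru-based logger with per-module contexts: six levels (CRIT 50 down to TRACE 5), a global LOG environment variable with per-context LOG_<CONTEXT> overrides, LOG_MAX_WIDTH for line wrapping and LOG_COLOR for colorized output. A hypothetical usage sketch based only on the functions added above; the context string and environment values are illustrative, not taken from the diff:

    # Hypothetical usage of the 2.7.0 logging API; not part of the package diff.
    import os

    # Environment knobs read by quasarr/providers/log.py (example values):
    os.environ["LOG"] = "DEBUG"           # global level name, mapped via log_names_to_level
    os.environ["LOG_JD_CACHE"] = "TRACE"  # per-context override consulted by get_log_level()
    os.environ["LOG_MAX_WIDTH"] = "120"   # wrap width used by wrapping_sink()
    os.environ["LOG_COLOR"] = "1"         # keep colorized output enabled

    from quasarr.providers.log import get_logger, info, trace

    # Explicit logger for a dotted context; emoji tags come from _context_replace.
    log = get_logger("quasarr.providers.jd_cache")
    log.info("Cache initialized")
    log.trace("Emitted only when the effective level for this context is TRACE (5)")

    # Module-level helpers look up the caller's module via inspect and reuse a cached _Logger.
    info("Quasarr started")
    trace("Very verbose detail")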