fastapi-cachex 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -1,7 +1,27 @@
 """FastAPI-CacheX: A powerful and flexible caching extension for FastAPI."""
 
+import logging
+
 from .cache import cache as cache
+from .cache import default_key_builder as default_key_builder
 from .dependencies import CacheBackend as CacheBackend
 from .dependencies import get_cache_backend as get_cache_backend
 from .proxy import BackendProxy as BackendProxy
 from .routes import add_routes as add_routes
+from .types import CacheKeyBuilder as CacheKeyBuilder
+
+_package_logger = logging.getLogger("fastapi_cachex")
+_package_logger.addHandler(
+    logging.NullHandler()
+)  # Attach a NullHandler to avoid "No handler found" warnings in user applications.
+
+# Session management (optional feature)
+__all__ = [
+    "BackendProxy",
+    "CacheBackend",
+    "CacheKeyBuilder",
+    "add_routes",
+    "cache",
+    "default_key_builder",
+    "get_cache_backend",
+]
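The package __init__ above now routes everything through a "fastapi_cachex" logger with a NullHandler attached, so the debug logging added throughout this release stays silent unless the consuming application opts in. A minimal sketch of enabling it from an application (the handler and level choices here are illustrative, not part of the package):

    import logging

    # Enable fastapi-cachex debug output; the library itself only attaches a NullHandler.
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("fastapi_cachex").setLevel(logging.DEBUG)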
@@ -1,9 +1,9 @@
 """Cache backend implementations for FastAPI-CacheX."""
 
-from fastapi_cachex.backends.base import BaseCacheBackend
-from fastapi_cachex.backends.memcached import MemcachedBackend
-from fastapi_cachex.backends.memory import MemoryBackend
-from fastapi_cachex.backends.redis import AsyncRedisCacheBackend
+from .base import BaseCacheBackend
+from .memcached import MemcachedBackend
+from .memory import MemoryBackend
+from .redis import AsyncRedisCacheBackend
 
 __all__ = [
     "AsyncRedisCacheBackend",
@@ -1,17 +1,21 @@
 """Memcached cache backend implementation."""
 
+import logging
 import warnings
 
-from fastapi_cachex.backends.base import BaseCacheBackend
 from fastapi_cachex.exceptions import CacheXError
 from fastapi_cachex.types import ETagContent
 
+from .base import BaseCacheBackend
+
 try:
     import orjson as json
 
 except ImportError:  # pragma: no cover
     import json  # type: ignore[no-redef] # pragma: no cover
 
+logger = logging.getLogger(__name__)
+
 # Default Memcached key prefix for fastapi-cachex
 DEFAULT_MEMCACHE_PREFIX = "fastapi_cachex:"
 
@@ -70,11 +74,13 @@ class MemcachedBackend(BaseCacheBackend):
         prefixed_key = self._make_key(key)
         value = self.client.get(prefixed_key)
         if value is None:
+            logger.debug("Memcached MISS; key=%s", key)
             return None
 
         # Memcached stores data as bytes; deserialize from JSON
         try:
             data = json.loads(value.decode("utf-8"))
+            logger.debug("Memcached HIT; key=%s", key)
             return ETagContent(
                 etag=data["etag"],
                 content=data["content"].encode()
@@ -82,6 +88,7 @@ class MemcachedBackend(BaseCacheBackend):
                 else data["content"],
             )
         except (json.JSONDecodeError, KeyError, ValueError):
+            logger.debug("Memcached DESERIALIZE ERROR; key=%s", key)
             return None
 
     async def set(self, key: str, value: ETagContent, ttl: int | None = None) -> None:
@@ -119,6 +126,7 @@ class MemcachedBackend(BaseCacheBackend):
             serialized_bytes,
             expire=ttl if ttl is not None else 0,
         )
+        logger.debug("Memcached SET; key=%s ttl=%s", key, ttl)
 
     async def delete(self, key: str) -> None:
         """Delete value from cache.
@@ -127,6 +135,7 @@ class MemcachedBackend(BaseCacheBackend):
             key: Cache key to delete
         """
         self.client.delete(self._make_key(key))
+        logger.debug("Memcached DELETE; key=%s", key)
 
     async def clear(self) -> None:
         """Clear all values from cache.
@@ -142,6 +151,7 @@ class MemcachedBackend(BaseCacheBackend):
             stacklevel=2,
         )
        self.client.flush_all()
+        logger.debug("Memcached CLEAR; flush_all issued")
 
     async def clear_path(self, path: str, include_params: bool = False) -> int:
         """Clear cached responses for a specific path.
@@ -175,9 +185,15 @@ class MemcachedBackend(BaseCacheBackend):
         except Exception:  # noqa: BLE001
             return 0
         else:
+            logger.debug(
+                "Memcached CLEAR_PATH; path=%s include_params=%s removed=%s",
+                path,
+                include_params,
+                1 if result else 0,
+            )
             return 1 if result else 0
 
-    async def clear_pattern(self, pattern: str) -> int:  # noqa: ARG002
+    async def clear_pattern(self, pattern: str) -> int:
         """Clear cached responses matching a pattern.
 
         Memcached does not support pattern matching or key scanning.
@@ -197,6 +213,7 @@ class MemcachedBackend(BaseCacheBackend):
             RuntimeWarning,
             stacklevel=2,
         )
+        logger.debug("Memcached CLEAR_PATTERN unsupported; pattern=%s", pattern)
         return 0
 
     async def get_all_keys(self) -> list[str]:
@@ -218,6 +235,7 @@ class MemcachedBackend(BaseCacheBackend):
             RuntimeWarning,
             stacklevel=2,
         )
+        logger.debug("Memcached GET_ALL_KEYS unsupported; returning empty list")
         return []
 
     async def get_cache_data(self) -> dict[str, tuple[ETagContent, float | None]]:
@@ -236,4 +254,5 @@ class MemcachedBackend(BaseCacheBackend):
             RuntimeWarning,
             stacklevel=2,
         )
+        logger.debug("Memcached GET_CACHE_DATA unsupported; returning empty dict")
         return {}
@@ -3,14 +3,18 @@
 import asyncio
 import contextlib
 import fnmatch
+import logging
 import time
 
+from fastapi_cachex.types import CACHE_KEY_SEPARATOR
 from fastapi_cachex.types import CacheItem
 from fastapi_cachex.types import ETagContent
 
 from .base import BaseCacheBackend
 
-# Cache keys are formatted as: method:host:path:query_params
+logger = logging.getLogger(__name__)
+
+# Cache keys are formatted as: method|||host|||path|||query_params
 # Minimum parts required to extract path component
 _MIN_KEY_PARTS = 3
 # Maximum parts to split (method, host, path, query_params)
@@ -43,6 +47,10 @@ class MemoryBackend(BaseCacheBackend):
         with contextlib.suppress(RuntimeError):
             # No event loop yet; will be created on first async operation
             self._cleanup_task = asyncio.create_task(self._cleanup_task_impl())
+            logger.debug(
+                "Started memory backend cleanup task (interval=%s)",
+                self.cleanup_interval,
+            )
 
     def start_cleanup(self) -> None:
         """Start the cleanup task if it's not already running.
@@ -56,6 +64,7 @@ class MemoryBackend(BaseCacheBackend):
         if self._cleanup_task is not None:
             self._cleanup_task.cancel()
             self._cleanup_task = None
+            logger.debug("Stopped memory backend cleanup task")
 
     async def get(self, key: str) -> ETagContent | None:
         """Retrieve a cached response.
@@ -69,10 +78,13 @@ class MemoryBackend(BaseCacheBackend):
         cached_item = self.cache.get(key)
         if cached_item:
             if cached_item.expiry is None or cached_item.expiry > time.time():
+                logger.debug("Memory cache HIT; key=%s", key)
                 return cached_item.value
             # Entry has expired; clean it up
             del self.cache[key]
+            logger.debug("Memory cache EXPIRED; key=%s removed", key)
             return None
+        logger.debug("Memory cache MISS; key=%s", key)
         return None
 
     async def set(self, key: str, value: ETagContent, ttl: int | None = None) -> None:
@@ -86,16 +98,19 @@ class MemoryBackend(BaseCacheBackend):
         async with self.lock:
             expiry = time.time() + ttl if ttl is not None else None
             self.cache[key] = CacheItem(value=value, expiry=expiry)
+            logger.debug("Memory cache SET; key=%s ttl=%s", key, ttl)
 
     async def delete(self, key: str) -> None:
         """Remove a response from the cache."""
         async with self.lock:
             self.cache.pop(key, None)
+            logger.debug("Memory cache DELETE; key=%s", key)
 
     async def clear(self) -> None:
         """Clear all cached responses."""
         async with self.lock:
             self.cache.clear()
+            logger.debug("Memory cache CLEAR; all entries removed")
 
     async def clear_path(self, path: str, include_params: bool = False) -> int:
         """Clear cached responses for a specific path.
@@ -115,8 +130,8 @@ class MemoryBackend(BaseCacheBackend):
         async with self.lock:
             keys_to_delete = []
             for key in self.cache:
-                # Keys are formatted as: method:host:path:query_params
-                parts = key.split(":", _MAX_KEY_PARTS)
+                # Keys are formatted as: method|||host|||path|||query_params
+                parts = key.split(CACHE_KEY_SEPARATOR, _MAX_KEY_PARTS)
                 if len(parts) >= _MIN_KEY_PARTS:
                     cache_path = parts[2]
                     has_params = len(parts) > _MIN_KEY_PARTS
@@ -127,6 +142,12 @@ class MemoryBackend(BaseCacheBackend):
             for key in keys_to_delete:
                 del self.cache[key]
 
+            logger.debug(
+                "Memory cache CLEAR_PATH; path=%s include_params=%s removed=%s",
+                path,
+                include_params,
+                cleared_count,
+            )
             return cleared_count
 
     async def clear_pattern(self, pattern: str) -> int:
@@ -145,8 +166,8 @@ class MemoryBackend(BaseCacheBackend):
         async with self.lock:
             keys_to_delete = []
             for key in self.cache:
-                # Extract path component (method:host:path:query_params)
-                parts = key.split(":", _MAX_KEY_PARTS)
+                # Extract path component (method|||host|||path|||query_params)
+                parts = key.split(CACHE_KEY_SEPARATOR, _MAX_KEY_PARTS)
                 if len(parts) >= _MIN_KEY_PARTS:
                     cache_path = parts[2]
                     if fnmatch.fnmatch(cache_path, pattern):
@@ -156,6 +177,9 @@ class MemoryBackend(BaseCacheBackend):
             for key in keys_to_delete:
                 del self.cache[key]
 
+            logger.debug(
+                "Memory cache CLEAR_PATTERN; pattern=%s removed=%s", pattern, cleared_count
+            )
             return cleared_count
 
     async def get_all_keys(self) -> list[str]:
@@ -196,3 +220,7 @@ class MemoryBackend(BaseCacheBackend):
             ]
             for key in expired_keys:
                 self.cache.pop(key, None)
+            if expired_keys:
+                logger.debug(
+                    "Memory cache CLEANUP; expired removed=%s", len(expired_keys)
+                )
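The memory backend (and, below, the Redis backend and routes module) now splits cache keys on a shared CACHE_KEY_SEPARATOR constant instead of a literal ':'; the updated comments show the separator as '|||', presumably so that a host with a port or a colon inside a component no longer corrupts the split. A standalone sketch of the difference, assuming that separator value (the constant itself lives in fastapi_cachex/types.py, which is not shown in this diff):

    SEP = "|||"  # assumed value of CACHE_KEY_SEPARATOR, per the updated comments

    old_key = "GET:localhost:8000:/items:page=1"                     # old ':' format
    new_key = f"GET{SEP}localhost:8000{SEP}/items{SEP}page=1"        # new format

    print(old_key.split(":", 4)[2])   # "8000" -- the host's port lands in the path slot
    print(new_key.split(SEP, 4)[2])   # "/items"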
@@ -1,13 +1,16 @@
 """Redis cache backend implementation."""
 
+import logging
 from typing import TYPE_CHECKING
 from typing import Any
 from typing import Literal
 
-from fastapi_cachex.backends.base import BaseCacheBackend
 from fastapi_cachex.exceptions import CacheXError
+from fastapi_cachex.types import CACHE_KEY_SEPARATOR
 from fastapi_cachex.types import ETagContent
 
+from .base import BaseCacheBackend
+
 if TYPE_CHECKING:
     from redis.asyncio import Redis as AsyncRedis
 
@@ -17,6 +20,8 @@ try:
 except ImportError:  # pragma: no cover
     import json  # type: ignore[no-redef] # pragma: no cover
 
+logger = logging.getLogger(__name__)
+
 # Default Redis key prefix for fastapi-cachex
 DEFAULT_REDIS_PREFIX = "fastapi_cachex:"
 
@@ -116,7 +121,9 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
     async def get(self, key: str) -> ETagContent | None:
         """Retrieve a cached response."""
         result = await self.client.get(self._make_key(key))
-        return self._deserialize(result)
+        value = self._deserialize(result)
+        logger.debug("Redis %s; key=%s", "HIT" if value else "MISS", key)
+        return value
 
     async def set(self, key: str, value: ETagContent, ttl: int | None = None) -> None:
         """Store a response in the cache."""
@@ -126,10 +133,12 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
             await self.client.setex(prefixed_key, ttl, serialized)
         else:
             await self.client.set(prefixed_key, serialized)
+        logger.debug("Redis SET; key=%s ttl=%s", key, ttl)
 
     async def delete(self, key: str) -> None:
         """Remove a response from the cache."""
         await self.client.delete(self._make_key(key))
+        logger.debug("Redis DELETE; key=%s", key)
 
     async def clear(self) -> None:
         """Clear all cached responses for this namespace.
@@ -160,6 +169,7 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
             batch = keys_to_delete[i : i + batch_size]
             if batch:
                 await self.client.delete(*batch)
+        logger.debug("Redis CLEAR; removed=%s", len(keys_to_delete))
 
     async def clear_path(self, path: str, include_params: bool = False) -> int:
         """Clear cached responses for a specific path.
@@ -175,11 +185,13 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
         """
         # Pattern includes the HTTP method, host, and path components
         if include_params:
-            # Clear all variations: *:path:*
-            pattern = f"{self.key_prefix}*:{path}:*"
+            # Clear all variations: *|||path|||*
+            pattern = (
+                f"{self.key_prefix}*{CACHE_KEY_SEPARATOR}{path}{CACHE_KEY_SEPARATOR}*"
+            )
         else:
-            # Clear only exact path (no query params): *:path
-            pattern = f"{self.key_prefix}*:{path}"
+            # Clear only exact path (no query params): *|||path
+            pattern = f"{self.key_prefix}*{CACHE_KEY_SEPARATOR}{path}"
 
         cursor = 0
         batch_size = 100
@@ -206,6 +218,12 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
                 deleted = await self.client.delete(*batch)
                 cleared_count += deleted
 
+        logger.debug(
+            "Redis CLEAR_PATH; path=%s include_params=%s removed=%s",
+            path,
+            include_params,
+            cleared_count,
+        )
         return cleared_count
 
     async def clear_pattern(self, pattern: str) -> int:
@@ -250,6 +268,9 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
                 deleted = await self.client.delete(*batch)
                 cleared_count += deleted
 
+        logger.debug(
+            "Redis CLEAR_PATTERN; pattern=%s removed=%s", full_pattern, cleared_count
+        )
         return cleared_count
 
     async def get_all_keys(self) -> list[str]:
@@ -275,6 +296,7 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
             if cursor == 0:
                 break
 
+        logger.debug("Redis GET_ALL_KEYS; count=%s", len(all_keys))
         return all_keys
 
     async def get_cache_data(self) -> dict[str, tuple[ETagContent, float | None]]:
@@ -297,4 +319,5 @@ class AsyncRedisCacheBackend(BaseCacheBackend):
             if value is not None:
                 cache_data[original_key] = (value, None)
 
+        logger.debug("Redis GET_CACHE_DATA; keys=%s", len(cache_data))
         return cache_data
fastapi_cachex/cache.py CHANGED
@@ -2,6 +2,7 @@
 
 import hashlib
 import inspect
+import logging
 from collections.abc import Awaitable
 from collections.abc import Callable
 from functools import update_wrapper
@@ -11,29 +12,53 @@ from inspect import Signature
 from typing import TYPE_CHECKING
 from typing import Any
 from typing import Literal
-from typing import TypeVar
-from typing import Union
 
 from fastapi import Request
 from fastapi import Response
 from fastapi.datastructures import DefaultPlaceholder
 from starlette.status import HTTP_304_NOT_MODIFIED
 
-from fastapi_cachex.backends import MemoryBackend
-from fastapi_cachex.directives import DirectiveType
-from fastapi_cachex.exceptions import BackendNotFoundError
-from fastapi_cachex.exceptions import CacheXError
-from fastapi_cachex.exceptions import RequestNotFoundError
-from fastapi_cachex.proxy import BackendProxy
-from fastapi_cachex.types import ETagContent
+from .backends import MemoryBackend
+from .directives import DirectiveType
+from .exceptions import BackendNotFoundError
+from .exceptions import CacheXError
+from .exceptions import RequestNotFoundError
+from .proxy import BackendProxy
+from .types import CACHE_KEY_SEPARATOR
+from .types import CacheKeyBuilder
+from .types import ETagContent
 
 if TYPE_CHECKING:
     from fastapi.routing import APIRoute
 
-T = TypeVar("T", bound=Response)
-AsyncCallable = Callable[..., Awaitable[T]]
-SyncCallable = Callable[..., T]
-AnyCallable = Union[AsyncCallable[T], SyncCallable[T]]  # noqa: UP007
+# Handler callable accepted by @cache: can return any type (sync or async).
+HandlerCallable = Callable[..., Awaitable[object]] | Callable[..., object]
+
+# Wrapper callable produced by @cache: always async and returns Response.
+AsyncResponseCallable = Callable[..., Awaitable[Response]]
+
+logger = logging.getLogger(__name__)
+
+
+def default_key_builder(request: Request) -> str:
+    """Default cache key builder function.
+
+    Generates cache key in format: method|||host|||path|||query_params
+
+    Args:
+        request: The FastAPI Request object
+
+    Returns:
+        Generated cache key string
+    """
+    key = (
+        f"{request.method}{CACHE_KEY_SEPARATOR}"
+        f"{request.headers.get('host', 'unknown')}{CACHE_KEY_SEPARATOR}"
+        f"{request.url.path}{CACHE_KEY_SEPARATOR}"
+        f"{request.query_params}"
+    )
+    logger.debug("Built cache key: %s", key)
+    return key
 
 
 class CacheControl:
@@ -61,7 +86,7 @@ class CacheControl:
 
 
 async def get_response(
-    __func: AnyCallable[Response],
+    __func: HandlerCallable,
     __request: Request,
     /,
     *args: Any,
@@ -96,6 +121,7 @@ async def get_response(
 def cache(  # noqa: C901
     ttl: int | None = None,
     stale_ttl: int | None = None,
+    *,
     stale: Literal["error", "revalidate"] | None = None,
     no_cache: bool = False,
     no_store: bool = False,
@@ -103,7 +129,8 @@ def cache(  # noqa: C901
     private: bool = False,
     immutable: bool = False,
     must_revalidate: bool = False,
-) -> Callable[[AnyCallable[Response]], AsyncCallable[Response]]:
+    key_builder: CacheKeyBuilder | None = None,
+) -> Callable[[HandlerCallable], AsyncResponseCallable]:
     """Cache decorator for FastAPI route handlers.
 
     Args:
@@ -116,18 +143,20 @@ def cache(  # noqa: C901
         private: Whether responses are for single user only
         immutable: Whether cached responses never change
         must_revalidate: Whether to force revalidation when stale
+        key_builder: Custom function to build cache keys. If None, uses default_key_builder
 
     Returns:
         Decorator function that wraps route handlers with caching logic
     """
 
-    def decorator(func: AnyCallable[Response]) -> AsyncCallable[Response]:  # noqa: C901
+    def decorator(func: HandlerCallable) -> AsyncResponseCallable:  # noqa: C901
        try:
             cache_backend = BackendProxy.get_backend()
         except BackendNotFoundError:
             # Fallback to memory backend if no backend is set
             cache_backend = MemoryBackend()
             BackendProxy.set_backend(cache_backend)
+            logger.debug("No backend configured; using MemoryBackend fallback")
 
         # Analyze the original function's signature
         sig: Signature = inspect.signature(func)
@@ -205,11 +234,14 @@ def cache(  # noqa: C901
 
             # Only cache GET requests
             if req.method != "GET":
+                logger.debug(
+                    "Non-GET request; bypassing cache for method=%s", req.method
+                )
                 return await get_response(func, req, *args, **kwargs)
 
-            # Generate cache key: method:host:path:query_params[:vary]
-            # Include host to avoid cross-host cache pollution
-            cache_key = f"{req.method}:{req.headers.get('host', 'unknown')}:{req.url.path}:{req.query_params}"
+            # Generate cache key using custom builder or default
+            builder = key_builder or default_key_builder
+            cache_key = builder(req)
             client_etag = req.headers.get("if-none-match")
             cache_control = await get_cache_control(CacheControl())
 
@@ -219,6 +251,7 @@ def cache(  # noqa: C901
                 cc = CacheControl()
                 cc.add(DirectiveType.NO_STORE)
                 response.headers["Cache-Control"] = str(cc)
+                logger.debug("no-store active; bypassed cache for key=%s", cache_key)
                 return response
 
             # Check cache and handle ETag validation
@@ -237,6 +270,7 @@ def cache(  # noqa: C901
 
                 if client_etag == current_etag:
                     # For no-cache, compare fresh data with client's ETag
+                    logger.debug("304 Not Modified via no-cache; key=%s", cache_key)
                     return Response(
                         status_code=HTTP_304_NOT_MODIFIED,
                         headers={
@@ -250,6 +284,9 @@ def cache(  # noqa: C901
                 cached_data and client_etag == cached_data.etag
             ):  # pragma: no branch
                 # Cache hit with matching ETag: return 304 Not Modified
+                logger.debug(
+                    "304 Not Modified (cached ETag match); key=%s", cache_key
+                )
                 return Response(
                     status_code=HTTP_304_NOT_MODIFIED,
                     headers={
@@ -263,6 +300,7 @@ def cache(  # noqa: C901
             if cached_data and not no_cache and ttl is not None:
                 # We have a cached entry and TTL-based caching is enabled
                 # Return the cached content directly with 200 OK without revalidation
+                logger.debug("Cache HIT (TTL valid); key=%s", cache_key)
                 return Response(
                     content=cached_data.content,
                     status_code=200,
@@ -276,6 +314,7 @@ def cache(  # noqa: C901
             # Retrieve the current response if not already done
             current_response = await get_response(func, req, *args, **kwargs)
             current_etag = f'W/"{hashlib.md5(current_response.body).hexdigest()}"'  # noqa: S324
+            logger.debug("Cache MISS; computed fresh ETag for key=%s", cache_key)
 
             # Set ETag header
             current_response.headers["ETag"] = current_etag
@@ -288,6 +327,7 @@ def cache(  # noqa: C901
                 ETagContent(current_etag, current_response.body),
                 ttl=ttl,
             )
+            logger.debug("Updated cache entry; key=%s ttl=%s", cache_key, ttl)
 
             current_response.headers["Cache-Control"] = cache_control
             return current_response
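Two usage-visible changes land in cache.py above: the directive flags after stale_ttl (stale, no_cache, no_store, public, private, immutable, must_revalidate) plus the new key_builder are now keyword-only, and key_builder lets callers replace default_key_builder, which otherwise reproduces the previous method/host/path/query key. A sketch of supplying a custom builder, assuming the decorator is applied directly beneath the route decorator as in typical usage; the user-scoping logic, header name, and separator are illustrative only:

    from fastapi import FastAPI, Request
    from fastapi_cachex import cache

    app = FastAPI()

    def user_scoped_key(request: Request) -> str:
        # Hypothetical builder: include a caller identity header so cached
        # responses are not shared between users of the same URL.
        user = request.headers.get("x-user-id", "anonymous")
        return f"{request.method}|{request.url.path}|{request.query_params}|{user}"

    @app.get("/items")
    @cache(ttl=60, key_builder=user_scoped_key)
    async def list_items() -> dict:
        return {"items": []}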
@@ -4,8 +4,8 @@ from typing import Annotated
 
 from fastapi import Depends
 
-from fastapi_cachex.backends.base import BaseCacheBackend
-from fastapi_cachex.proxy import BackendProxy
+from .backends.base import BaseCacheBackend
+from .proxy import BackendProxy
 
 
 def get_cache_backend() -> BaseCacheBackend:
fastapi_cachex/proxy.py CHANGED
@@ -1,9 +1,12 @@
 """Backend proxy for managing cache backend instances."""
 
-from fastapi_cachex.backends import BaseCacheBackend
-from fastapi_cachex.exceptions import BackendNotFoundError
+from logging import getLogger
+
+from .backends import BaseCacheBackend
+from .exceptions import BackendNotFoundError
 
 _default_backend: BaseCacheBackend | None = None
+logger = getLogger(__name__)
 
 
 class BackendProxy:
@@ -33,4 +36,8 @@ class BackendProxy:
            backend: The backend to use for caching, or None to clear the current backend
        """
        global _default_backend
+        logger.info(
+            "Setting backend to: <%s>",
+            backend.__class__.__name__ if backend else "None",
+        )
        _default_backend = backend
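BackendProxy.set_backend now records the chosen backend class at INFO level, so explicit startup configuration is visible alongside the decorator's MemoryBackend fallback message. A short sketch of configuring the backend explicitly at application startup (the backend choice here is illustrative):

    from fastapi_cachex import BackendProxy
    from fastapi_cachex.backends import MemoryBackend

    # Emits "Setting backend to: <MemoryBackend>" via the "fastapi_cachex.proxy" logger.
    BackendProxy.set_backend(MemoryBackend())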
fastapi_cachex/routes.py CHANGED
@@ -4,9 +4,10 @@ import time
 from dataclasses import dataclass
 from typing import TYPE_CHECKING
 
-from fastapi_cachex.backends import BaseCacheBackend
-from fastapi_cachex.exceptions import BackendNotFoundError
-from fastapi_cachex.proxy import BackendProxy
+from .backends import BaseCacheBackend
+from .exceptions import BackendNotFoundError
+from .proxy import BackendProxy
+from .types import CACHE_KEY_SEPARATOR
 
 if TYPE_CHECKING:
     from fastapi import FastAPI
@@ -93,12 +94,12 @@ def _parse_cache_key(cache_key: str) -> tuple[str, str, str, str]:
     """Parse cache key into components.
 
     Args:
-        cache_key: Cache key in format method:host:path:query_params
+        cache_key: Cache key in format method|||host|||path|||query_params
 
     Returns:
         Tuple of (method, host, path, query_params)
     """
-    key_parts = cache_key.split(":", CACHE_KEY_MAX_PARTS)
+    key_parts = cache_key.split(CACHE_KEY_SEPARATOR, CACHE_KEY_MAX_PARTS)
     if len(key_parts) >= CACHE_KEY_MIN_PARTS:
         method, host, path = key_parts[0], key_parts[1], key_parts[2]
         query_params = key_parts[3] if len(key_parts) > CACHE_KEY_MIN_PARTS else ""
@@ -0,0 +1,21 @@
+"""Session management extension for FastAPI-CacheX."""
+
+from .config import SessionConfig
+from .dependencies import get_optional_session
+from .dependencies import get_session
+from .dependencies import require_session
+from .manager import SessionManager
+from .middleware import SessionMiddleware
+from .models import Session
+from .models import SessionUser
+
+__all__ = [
+    "Session",
+    "SessionConfig",
+    "SessionManager",
+    "SessionMiddleware",
+    "SessionUser",
+    "get_optional_session",
+    "get_session",
+    "require_session",
+]