fastapi-cachekit 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fast_cache/__init__.py +13 -0
- fast_cache/backends/__init__.py +0 -0
- fast_cache/backends/backend.py +126 -0
- fast_cache/backends/memcached.py +126 -0
- fast_cache/backends/memory.py +291 -0
- fast_cache/backends/postgres.py +230 -0
- fast_cache/backends/redis.py +257 -0
- fast_cache/integration.py +199 -0
- fastapi_cachekit-0.1.0.dist-info/METADATA +289 -0
- fastapi_cachekit-0.1.0.dist-info/RECORD +12 -0
- fastapi_cachekit-0.1.0.dist-info/WHEEL +5 -0
- fastapi_cachekit-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import pickle
|
|
2
|
+
from datetime import datetime, timezone, timedelta
|
|
3
|
+
from typing import Any, Optional, Union
|
|
4
|
+
from .backend import CacheBackend
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class PostgresBackend(CacheBackend):
    """
    PostgreSQL cache backend.

    Stores pickled values in an UNLOGGED TABLE (no WAL, faster writes) and
    uses lazy expiration: expired rows are deleted when read, not by a
    background job.
    """

    def __init__(
        self,
        dsn: str,
        namespace: str = "fastapi",
        min_size: int = 1,
        max_size: int = 10,
    ) -> None:
        """
        Create the connection pools and ensure the cache table exists.

        Args:
            dsn: PostgreSQL connection string.
            namespace: Prefix for cache keys and the backing table name.
            min_size: Minimum number of pooled connections.
            max_size: Maximum number of pooled connections.

        Raises:
            ImportError: If the 'psycopg[pool]' package is missing.
            ValueError: If *namespace* is not a safe SQL identifier.
        """
        try:
            from psycopg_pool import AsyncConnectionPool, ConnectionPool
        except ImportError:
            raise ImportError(
                "PostgresBackend requires the 'psycopg[pool]' package. "
                "Install it with: pip install fast-cache[postgres]"
            )

        # The namespace is interpolated into SQL as part of the table name,
        # and identifiers cannot be bound parameters.  Validating it here
        # prevents SQL injection and turns what would otherwise surface as a
        # confusing SQL syntax error into a clear ValueError.
        if not namespace.isidentifier():
            raise ValueError(
                "namespace must contain only letters, digits and underscores, "
                "and must not start with a digit"
            )

        self._namespace = namespace
        self._table_name = f"{namespace}_cache_store"

        # The sync pool opens immediately; the async pool must be opened
        # from inside a running event loop, so it is opened lazily by
        # _ensure_async_pool_open().
        self._sync_pool = ConnectionPool(
            conninfo=dsn, min_size=min_size, max_size=max_size, open=True
        )
        self._async_pool = AsyncConnectionPool(
            conninfo=dsn, min_size=min_size, max_size=max_size, open=False
        )
        self._create_unlogged_table_if_not_exists()

    def _create_unlogged_table_if_not_exists(self) -> None:
        """Create the cache table and its expire_at index if missing."""
        # The expire_at index supports external periodic cleanup jobs; the
        # lazy-delete read path does not use it.
        create_sql = f"""
        CREATE UNLOGGED TABLE IF NOT EXISTS {self._table_name} (
            key TEXT PRIMARY KEY,
            value BYTEA NOT NULL,
            expire_at TIMESTAMPTZ
        );
        CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expire_at
        ON {self._table_name} (expire_at);
        """
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(create_sql)
            conn.commit()

    def _make_key(self, key: str) -> str:
        """Return *key* prefixed with the configured namespace."""
        return f"{self._namespace}:{key}"

    def _is_expired(self, expire_at: Optional[datetime]) -> bool:
        """True when *expire_at* is set and lies in the past (UTC)."""
        return expire_at is not None and expire_at < datetime.now(timezone.utc)

    @staticmethod
    def _compute_expire_at(
        expire: Optional[Union[int, timedelta]],
    ) -> Optional[datetime]:
        """
        Convert a relative TTL into an absolute UTC timestamp.

        Returns None (never expires) for None or zero, matching the
        original truthiness check.
        """
        if not expire:
            return None
        delta = timedelta(seconds=expire) if isinstance(expire, int) else expire
        return datetime.now(timezone.utc) + delta

    async def _ensure_async_pool_open(self) -> None:
        """Open the async pool on first use; no-op afterwards."""
        # NOTE(review): relies on psycopg_pool's private `_opened` flag, as
        # the original code did -- confirm against the pinned version.
        if not self._async_pool._opened:
            await self._async_pool.open()

    def set(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """Store *value* under *key*, optionally expiring after *expire*."""
        expire_at = self._compute_expire_at(expire)
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    INSERT INTO {self._table_name} (key, value, expire_at)
                    VALUES (%s, %s, %s)
                    ON CONFLICT (key)
                    DO UPDATE SET value = EXCLUDED.value,
                                  expire_at = EXCLUDED.expire_at;
                    """,
                    (self._make_key(key), pickle.dumps(value), expire_at),
                )
            conn.commit()

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None if absent/expired."""
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    f"SELECT value, expire_at FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
                row = cur.fetchone()
                if not row:
                    return None
                value, expire_at = row
                if self._is_expired(expire_at):
                    self.delete(key)  # Lazy delete
                    return None
                return pickle.loads(value)

    def delete(self, key: str) -> None:
        """Remove *key* from the cache (no-op if absent)."""
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    f"DELETE FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
            conn.commit()

    def has(self, key: str) -> bool:
        """Return True if *key* exists and is not expired."""
        # Unlike get(), an expired row is not deleted here -- read-only check.
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    f"SELECT expire_at FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
                row = cur.fetchone()
                if not row:
                    return False
                return not self._is_expired(row[0])

    def clear(self) -> None:
        """Clear all keys in the current namespace from the cache."""
        with self._sync_pool.connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    f"DELETE FROM {self._table_name} WHERE key LIKE %s;",
                    (self._make_key("%"),),
                )
            conn.commit()

    async def aset(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """Asynchronously store *value* under *key* with optional TTL."""
        await self._ensure_async_pool_open()
        expire_at = self._compute_expire_at(expire)
        async with self._async_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"""
                    INSERT INTO {self._table_name} (key, value, expire_at)
                    VALUES (%s, %s, %s)
                    ON CONFLICT (key)
                    DO UPDATE SET value = EXCLUDED.value,
                                  expire_at = EXCLUDED.expire_at;
                    """,
                    (self._make_key(key), pickle.dumps(value), expire_at),
                )
            await conn.commit()

    async def aget(self, key: str) -> Optional[Any]:
        """Asynchronously return the value for *key*, or None."""
        await self._ensure_async_pool_open()
        async with self._async_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"SELECT value, expire_at FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
                row = await cur.fetchone()
                if not row:
                    return None
                value, expire_at = row
                if self._is_expired(expire_at):
                    await self.adelete(key)  # Lazy delete
                    return None
                return pickle.loads(value)

    async def adelete(self, key: str) -> None:
        """Asynchronously remove *key* from the cache."""
        await self._ensure_async_pool_open()
        async with self._async_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"DELETE FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
            await conn.commit()

    async def ahas(self, key: str) -> bool:
        """Asynchronously check whether *key* exists and is not expired."""
        await self._ensure_async_pool_open()
        async with self._async_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"SELECT expire_at FROM {self._table_name} WHERE key = %s;",
                    (self._make_key(key),),
                )
                row = await cur.fetchone()
                if not row:
                    return False
                return not self._is_expired(row[0])

    async def aclear(self) -> None:
        """Asynchronously clear all keys in the current namespace."""
        await self._ensure_async_pool_open()
        async with self._async_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"DELETE FROM {self._table_name} WHERE key LIKE %s;",
                    (self._make_key("%"),),
                )
            await conn.commit()

    async def close(self) -> None:
        """Close both pools, releasing all database connections."""
        self._sync_pool.close()
        await self._async_pool.close()
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import inspect
|
|
3
|
+
from typing import Any, Optional, Union
|
|
4
|
+
from datetime import timedelta
|
|
5
|
+
import pickle
|
|
6
|
+
|
|
7
|
+
from .backend import CacheBackend
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class RedisBackend(CacheBackend):
    """
    Redis cache backend implementation with namespace support.

    All errors from Redis are deliberately swallowed so a cache outage
    never breaks the request path: reads fall back to None/False, writes
    become no-ops.

    Attributes:
        _namespace (str): Namespace prefix for all keys.
        _sync_pool (redis.ConnectionPool): Synchronous Redis connection pool.
        _async_pool (aioredis.ConnectionPool): Asynchronous Redis connection pool.
        _sync_client (redis.Redis): Synchronous Redis client.
        _async_client (aioredis.Redis): Asynchronous Redis client.
    """

    def __init__(
        self,
        redis_url: str,
        namespace: str = "fastapi-cache",
        pool_size: int = 10,
        max_connections: int = 20,
    ) -> None:
        """
        Initialize Redis backend with connection URL and pool settings.

        Args:
            redis_url (str): Redis connection URL (e.g., "redis://localhost:6379/0").
            namespace (str): Namespace prefix for all keys (default: "fastapi-cache").
            pool_size (int): Accepted for backward compatibility; currently
                unused -- only max_connections is applied to the pools.
            max_connections (int): Maximum number of connections in the pool.

        Raises:
            ImportError: If the 'redis' package is not installed.
        """
        try:
            import redis.asyncio as aioredis
            import redis
        except ImportError:
            raise ImportError(
                "RedisBackend requires the 'redis' package. "
                "Install it with: pip install fast-cache[redis]"
            )

        self._namespace = namespace
        # decode_responses=False: values are pickled bytes and must not be
        # decoded to str by the client.
        self._sync_pool = redis.ConnectionPool.from_url(
            redis_url, max_connections=max_connections, decode_responses=False
        )

        self._async_pool = aioredis.ConnectionPool.from_url(
            redis_url,
            max_connections=max_connections,
            decode_responses=False,
            encoding="utf-8",
        )

        self._sync_client = redis.Redis(connection_pool=self._sync_pool)
        self._async_client = aioredis.Redis(connection_pool=self._async_pool)

    def _make_key(self, key: str) -> str:
        """
        Create a namespaced key.

        Args:
            key (str): The original key.

        Returns:
            str: The namespaced key.
        """
        return f"{self._namespace}:{key}"

    @staticmethod
    def _to_seconds(expire: Optional[Union[int, timedelta]]) -> Optional[int]:
        """
        Normalize an expire argument for redis-py's ``ex`` parameter.

        redis-py accepts int or timedelta for ``ex`` but raises DataError
        for float; the original code passed ``timedelta.total_seconds()``
        (a float), which made every timedelta-based set fail silently.
        """
        if isinstance(expire, timedelta):
            return int(expire.total_seconds())
        return expire

    async def _scan_keys(self, pattern: str = "*") -> list[str]:
        """
        Scan all keys in the namespace asynchronously.

        Args:
            pattern (str): Pattern to match keys (default: "*").

        Returns:
            List[str]: List of matching keys.
        """
        keys = []
        cursor = 0
        namespace_pattern = self._make_key(pattern)

        # SCAN iterates in batches; cursor 0 marks the end of iteration.
        while True:
            cursor, batch = await self._async_client.scan(
                cursor=cursor, match=namespace_pattern, count=100
            )
            keys.extend(batch)
            if cursor == 0:
                break
        return keys

    async def aget(self, key: str) -> Optional[Any]:
        """
        Asynchronously retrieve a value from the cache.

        Args:
            key (str): The key to retrieve.

        Returns:
            Optional[Any]: The cached value, or None if not found.
        """
        try:
            result = await self._async_client.get(self._make_key(key))
            return pickle.loads(result) if result else None
        except Exception:
            # Best-effort: treat any backend/unpickling error as a miss.
            return None

    def get(self, key: str) -> Optional[Any]:
        """
        Synchronously retrieve a value from the cache.

        Args:
            key (str): The key to retrieve.

        Returns:
            Optional[Any]: The cached value, or None if not found.
        """
        try:
            result = self._sync_client.get(self._make_key(key))
            return pickle.loads(result) if result else None
        except Exception:
            # Best-effort: treat any backend/unpickling error as a miss.
            return None

    async def aset(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Asynchronously set a value in the cache.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
        """
        try:
            await self._async_client.set(
                self._make_key(key), pickle.dumps(value), ex=self._to_seconds(expire)
            )
        except Exception:
            # Best-effort: a failed write must not break the caller.
            pass

    def set(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Synchronously set a value in the cache.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
        """
        try:
            self._sync_client.set(
                self._make_key(key), pickle.dumps(value), ex=self._to_seconds(expire)
            )
        except Exception:
            # Best-effort: a failed write must not break the caller.
            pass

    async def adelete(self, key: str) -> None:
        """
        Asynchronously delete a value from the cache.

        Args:
            key (str): The key to delete.
        """
        try:
            await self._async_client.delete(self._make_key(key))
        except Exception:
            pass

    def delete(self, key: str) -> None:
        """
        Synchronously delete a value from the cache.

        Args:
            key (str): The key to delete.
        """
        try:
            self._sync_client.delete(self._make_key(key))
        except Exception:
            pass

    async def aclear(self) -> None:
        """
        Asynchronously clear all values from the namespace.
        """
        try:
            keys = await self._scan_keys()
            if keys:
                await self._async_client.delete(*keys)
        except Exception:
            pass

    def clear(self) -> None:
        """
        Synchronously clear all values from the namespace.
        """
        try:
            cursor = 0
            namespace_pattern = self._make_key("*")

            # Delete each SCAN batch as it arrives to bound memory use.
            while True:
                cursor, keys = self._sync_client.scan(
                    cursor=cursor, match=namespace_pattern, count=100
                )
                if keys:
                    self._sync_client.delete(*keys)
                if cursor == 0:
                    break
        except Exception:
            pass

    async def ahas(self, key: str) -> bool:
        """
        Asynchronously check if a key exists in the cache.

        Args:
            key (str): The key to check.

        Returns:
            bool: True if the key exists, False otherwise.
        """
        try:
            return await self._async_client.exists(self._make_key(key)) > 0
        except Exception:
            return False

    def has(self, key: str) -> bool:
        """
        Synchronously check if a key exists in the cache.

        Args:
            key (str): The key to check.

        Returns:
            bool: True if the key exists, False otherwise.
        """
        try:
            return self._sync_client.exists(self._make_key(key)) > 0
        except Exception:
            return False

    async def close(self) -> None:
        """
        Close Redis connections and clean up pools.
        """
        # Clients first, then their pools.
        await self._async_client.close()
        await self._async_pool.disconnect()
        self._sync_client.close()
        self._sync_pool.disconnect()
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
from contextlib import asynccontextmanager
|
|
2
|
+
from fastapi import FastAPI
|
|
3
|
+
from typing import Optional, Callable, Union, AsyncIterator, Any
|
|
4
|
+
from datetime import timedelta
|
|
5
|
+
import inspect
|
|
6
|
+
from functools import wraps
|
|
7
|
+
from .backends.backend import CacheBackend
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class FastAPICache:
    """
    FastAPI Cache Extension.

    This class provides caching utilities for FastAPI applications, including
    decorator-based caching and dependency-injection-based backend access.
    """

    def __init__(self) -> None:
        """
        Initialize the FastAPICache instance.
        """
        self._backend: Optional[CacheBackend] = None
        self._app: Optional[FastAPI] = None
        self._default_expire: Optional[Union[int, timedelta]] = None

    def get_cache(self) -> CacheBackend:
        """
        Get the configured cache backend for dependency injection.

        Returns:
            CacheBackend: The configured cache backend instance.

        Raises:
            RuntimeError: If the cache is not initialized.
        """
        if self._backend is None:
            raise RuntimeError("Cache not initialized. Call init_app first.")
        return self._backend

    def cached(
        self,
        expire: Optional[Union[int, timedelta]] = None,
        key_builder: Optional[Callable[..., str]] = None,
        namespace: Optional[str] = None,
    ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
        """
        Decorator for caching function results.

        Callers may pass ``skip_cache=True`` to a decorated function to
        bypass the cache for that call; the kwarg is consumed and not
        forwarded.  Note that a ``None`` result is indistinguishable from
        a cache miss and is therefore never served from cache.

        Args:
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as a timedelta.
            key_builder (Optional[Callable[..., str]]): Custom function to build the cache key.
            namespace (Optional[str]): Optional namespace for the cache key.

        Returns:
            Callable: A decorator that caches the function result.
        """

        def decorator(func: Callable) -> Callable[..., Any]:
            """
            The actual decorator that wraps the function.

            Args:
                func (Callable): The function to be cached.

            Returns:
                Callable: The wrapped function with caching.
            """
            is_async = inspect.iscoroutinefunction(func)

            def build_cache_key(*args, **kwargs) -> str:
                """
                Build the cache key for the function call.

                Args:
                    *args: Positional arguments for the function.
                    **kwargs: Keyword arguments for the function.

                Returns:
                    str: The generated cache key.
                """
                if key_builder is not None:
                    key = key_builder(*args, **kwargs)
                else:
                    # Default key: fully qualified function name plus the
                    # repr of its arguments.
                    key = f"{func.__module__}:{func.__name__}:{str(args)}:{str(kwargs)}"

                if namespace:
                    key = f"{namespace}:{key}"

                return key

            @wraps(func)
            async def async_wrapper(*args, **kwargs) -> Any:
                """
                Async wrapper for caching.

                Args:
                    *args: Positional arguments.
                    **kwargs: Keyword arguments.

                Returns:
                    Any: The cached or computed result.
                """
                # No backend configured: run the function uncached.
                if not self._backend:
                    return await func(*args, **kwargs)

                # Skip cache if explicitly requested
                if kwargs.pop("skip_cache", False):
                    return await func(*args, **kwargs)

                cache_key = build_cache_key(*args, **kwargs)

                # Try to get from cache (None means miss -- see docstring).
                cached_value = await self._backend.aget(cache_key)
                if cached_value is not None:
                    return cached_value

                # Execute function and cache result
                result = await func(*args, **kwargs)
                await self._backend.aset(
                    cache_key, result, expire=expire or self._default_expire
                )
                return result

            @wraps(func)
            def sync_wrapper(*args, **kwargs):
                """
                Sync wrapper for caching.

                Args:
                    *args: Positional arguments.
                    **kwargs: Keyword arguments.

                Returns:
                    Any: The cached or computed result.
                """
                # No backend configured: run the function uncached.
                if not self._backend:
                    return func(*args, **kwargs)

                # Skip cache if explicitly requested
                if kwargs.pop("skip_cache", False):
                    return func(*args, **kwargs)

                cache_key = build_cache_key(*args, **kwargs)

                # Try to get from cache (None means miss -- see docstring).
                cached_value = self._backend.get(cache_key)
                if cached_value is not None:
                    return cached_value

                # Execute function and cache result
                result = func(*args, **kwargs)
                self._backend.set(
                    cache_key, result, expire=expire or self._default_expire
                )
                return result

            return async_wrapper if is_async else sync_wrapper

        return decorator

    @asynccontextmanager
    async def lifespan_handler(self, app: FastAPI) -> AsyncIterator[None]:
        """
        Lifespan context manager for FastAPI.

        Pass this as the ``lifespan`` argument to FastAPI to expose the
        cache on ``app.state.cache`` and release backend resources on
        shutdown.

        Args:
            app (FastAPI): The FastAPI application instance.

        Yields:
            None
        """
        if not hasattr(app, "state"):
            # Plain test doubles may lack Starlette's State object.
            from types import SimpleNamespace

            app.state = SimpleNamespace()
        # Starlette's State supports attribute access, not item access; the
        # original `app.state["cache"] = self` raised TypeError on a real
        # FastAPI app.
        app.state.cache = self
        try:
            yield
        finally:
            # Close the backend on shutdown instead of leaking its
            # connections; all bundled backends expose an async close().
            if self._backend is not None:
                close = getattr(self._backend, "close", None)
                if close is not None:
                    await close()
            self._backend = None
            self._app = None

    def init_app(
        self,
        app: FastAPI,
        backend: CacheBackend,
        default_expire: Optional[Union[int, timedelta]] = None,
    ) -> None:
        """
        Initialize the cache extension.

        Args:
            app (FastAPI): FastAPI application instance.
            backend (CacheBackend): Cache backend instance.
            default_expire (Optional[Union[int, timedelta]]): Default expiration time for cached items.
        """
        self._backend = backend
        self._app = app
        self._default_expire = default_expire