sqlspec 0.16.2__cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic.
- 51ff5a9eadfdefd49f98__mypyc.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +92 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +77 -0
- sqlspec/_sql.py +1782 -0
- sqlspec/_typing.py +680 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_types.py +12 -0
- sqlspec/adapters/adbc/config.py +361 -0
- sqlspec/adapters/adbc/driver.py +512 -0
- sqlspec/adapters/aiosqlite/__init__.py +19 -0
- sqlspec/adapters/aiosqlite/_types.py +13 -0
- sqlspec/adapters/aiosqlite/config.py +253 -0
- sqlspec/adapters/aiosqlite/driver.py +248 -0
- sqlspec/adapters/asyncmy/__init__.py +19 -0
- sqlspec/adapters/asyncmy/_types.py +12 -0
- sqlspec/adapters/asyncmy/config.py +180 -0
- sqlspec/adapters/asyncmy/driver.py +274 -0
- sqlspec/adapters/asyncpg/__init__.py +21 -0
- sqlspec/adapters/asyncpg/_types.py +17 -0
- sqlspec/adapters/asyncpg/config.py +229 -0
- sqlspec/adapters/asyncpg/driver.py +344 -0
- sqlspec/adapters/bigquery/__init__.py +18 -0
- sqlspec/adapters/bigquery/_types.py +12 -0
- sqlspec/adapters/bigquery/config.py +298 -0
- sqlspec/adapters/bigquery/driver.py +558 -0
- sqlspec/adapters/duckdb/__init__.py +22 -0
- sqlspec/adapters/duckdb/_types.py +12 -0
- sqlspec/adapters/duckdb/config.py +504 -0
- sqlspec/adapters/duckdb/driver.py +368 -0
- sqlspec/adapters/oracledb/__init__.py +32 -0
- sqlspec/adapters/oracledb/_types.py +14 -0
- sqlspec/adapters/oracledb/config.py +317 -0
- sqlspec/adapters/oracledb/driver.py +538 -0
- sqlspec/adapters/psqlpy/__init__.py +16 -0
- sqlspec/adapters/psqlpy/_types.py +11 -0
- sqlspec/adapters/psqlpy/config.py +214 -0
- sqlspec/adapters/psqlpy/driver.py +530 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_types.py +17 -0
- sqlspec/adapters/psycopg/config.py +426 -0
- sqlspec/adapters/psycopg/driver.py +796 -0
- sqlspec/adapters/sqlite/__init__.py +15 -0
- sqlspec/adapters/sqlite/_types.py +11 -0
- sqlspec/adapters/sqlite/config.py +240 -0
- sqlspec/adapters/sqlite/driver.py +294 -0
- sqlspec/base.py +571 -0
- sqlspec/builder/__init__.py +62 -0
- sqlspec/builder/_base.py +473 -0
- sqlspec/builder/_column.py +320 -0
- sqlspec/builder/_ddl.py +1346 -0
- sqlspec/builder/_ddl_utils.py +103 -0
- sqlspec/builder/_delete.py +76 -0
- sqlspec/builder/_insert.py +421 -0
- sqlspec/builder/_merge.py +71 -0
- sqlspec/builder/_parsing_utils.py +164 -0
- sqlspec/builder/_select.py +170 -0
- sqlspec/builder/_update.py +188 -0
- sqlspec/builder/mixins/__init__.py +55 -0
- sqlspec/builder/mixins/_cte_and_set_ops.py +222 -0
- sqlspec/builder/mixins/_delete_operations.py +41 -0
- sqlspec/builder/mixins/_insert_operations.py +244 -0
- sqlspec/builder/mixins/_join_operations.py +149 -0
- sqlspec/builder/mixins/_merge_operations.py +562 -0
- sqlspec/builder/mixins/_order_limit_operations.py +135 -0
- sqlspec/builder/mixins/_pivot_operations.py +153 -0
- sqlspec/builder/mixins/_select_operations.py +604 -0
- sqlspec/builder/mixins/_update_operations.py +202 -0
- sqlspec/builder/mixins/_where_clause.py +644 -0
- sqlspec/cli.py +247 -0
- sqlspec/config.py +395 -0
- sqlspec/core/__init__.py +63 -0
- sqlspec/core/cache.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +871 -0
- sqlspec/core/compiler.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +417 -0
- sqlspec/core/filters.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +830 -0
- sqlspec/core/hashing.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +310 -0
- sqlspec/core/parameters.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters.py +1237 -0
- sqlspec/core/result.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result.py +677 -0
- sqlspec/core/splitter.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +819 -0
- sqlspec/core/statement.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +676 -0
- sqlspec/driver/__init__.py +19 -0
- sqlspec/driver/_async.py +502 -0
- sqlspec/driver/_common.py +631 -0
- sqlspec/driver/_sync.py +503 -0
- sqlspec/driver/mixins/__init__.py +6 -0
- sqlspec/driver/mixins/_result_tools.py +193 -0
- sqlspec/driver/mixins/_sql_translator.py +86 -0
- sqlspec/exceptions.py +193 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/aiosql/__init__.py +10 -0
- sqlspec/extensions/aiosql/adapter.py +461 -0
- sqlspec/extensions/litestar/__init__.py +6 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/cli.py +48 -0
- sqlspec/extensions/litestar/config.py +92 -0
- sqlspec/extensions/litestar/handlers.py +260 -0
- sqlspec/extensions/litestar/plugin.py +145 -0
- sqlspec/extensions/litestar/providers.py +454 -0
- sqlspec/loader.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +760 -0
- sqlspec/migrations/__init__.py +35 -0
- sqlspec/migrations/base.py +414 -0
- sqlspec/migrations/commands.py +443 -0
- sqlspec/migrations/loaders.py +402 -0
- sqlspec/migrations/runner.py +213 -0
- sqlspec/migrations/tracker.py +140 -0
- sqlspec/migrations/utils.py +129 -0
- sqlspec/protocols.py +407 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +23 -0
- sqlspec/storage/backends/__init__.py +0 -0
- sqlspec/storage/backends/base.py +163 -0
- sqlspec/storage/backends/fsspec.py +386 -0
- sqlspec/storage/backends/obstore.py +459 -0
- sqlspec/storage/capabilities.py +102 -0
- sqlspec/storage/registry.py +239 -0
- sqlspec/typing.py +299 -0
- sqlspec/utils/__init__.py +3 -0
- sqlspec/utils/correlation.py +150 -0
- sqlspec/utils/deprecation.py +106 -0
- sqlspec/utils/fixtures.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +58 -0
- sqlspec/utils/logging.py +127 -0
- sqlspec/utils/module_loader.py +89 -0
- sqlspec/utils/serializers.py +4 -0
- sqlspec/utils/singleton.py +32 -0
- sqlspec/utils/sync_tools.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +237 -0
- sqlspec/utils/text.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +96 -0
- sqlspec/utils/type_guards.cpython-39-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1139 -0
- sqlspec-0.16.2.dist-info/METADATA +365 -0
- sqlspec-0.16.2.dist-info/RECORD +148 -0
- sqlspec-0.16.2.dist-info/WHEEL +7 -0
- sqlspec-0.16.2.dist-info/entry_points.txt +2 -0
- sqlspec-0.16.2.dist-info/licenses/LICENSE +21 -0
- sqlspec-0.16.2.dist-info/licenses/NOTICE +29 -0
sqlspec/core/cache.py
ADDED
@@ -0,0 +1,871 @@
"""Caching system with unified cache management.

This module provides a caching system with LRU eviction and TTL support for
SQL statement processing, parameter processing, and expression caching.

Components:
- CacheKey: Immutable cache key with optimized hashing
- UnifiedCache: Main cache implementation with LRU eviction and TTL
- StatementCache: Specialized cache for compiled SQL statements
- ExpressionCache: Specialized cache for parsed SQLGlot expressions
- ParameterCache: Specialized cache for processed parameters

Features:
- LRU caching with configurable size and TTL
- Thread-safe cache operations for concurrent access
- Cached hash values to avoid recomputation
- O(1) cache lookup and insertion operations
"""

import threading
import time
from typing import TYPE_CHECKING, Any, Generic, Optional

from mypy_extensions import mypyc_attr
from typing_extensions import TypeVar

from sqlspec.utils.logging import get_logger

if TYPE_CHECKING:
    import sqlglot.expressions as exp

    from sqlspec.core.statement import SQL

__all__ = (
    "CacheKey",
    "CacheStats",
    "ExpressionCache",
    "ParameterCache",
    "StatementCache",
    "UnifiedCache",
    "get_cache_config",
    "get_default_cache",
    "get_expression_cache",
    "get_parameter_cache",
    "get_statement_cache",
    "sql_cache",
)

T = TypeVar("T")
CacheValueT = TypeVar("CacheValueT")

# Cache configuration constants
DEFAULT_MAX_SIZE = 10000  # LRU cache size limit
DEFAULT_TTL_SECONDS = 3600  # 1 hour default TTL
CACHE_STATS_UPDATE_INTERVAL = 100  # Update stats every N operations

# Cache slots - optimized structure
CACHE_KEY_SLOTS = ("_hash", "_key_data")
CACHE_NODE_SLOTS = ("key", "value", "prev", "next", "timestamp", "access_count")
UNIFIED_CACHE_SLOTS = ("_cache", "_lock", "_max_size", "_ttl", "_head", "_tail", "_stats")
CACHE_STATS_SLOTS = ("hits", "misses", "evictions", "total_operations", "memory_usage")


@mypyc_attr(allow_interpreted_subclasses=True)
class CacheKey:
    """Immutable cache key with optimized hashing.

    This class provides an immutable cache key for consistent cache operations
    across all cache types.

    Features:
    - Cached hash value to avoid recomputation
    - Immutable design for safe sharing across threads
    - Fast equality comparison with short-circuit evaluation

    Args:
        key_data: Tuple of hashable values that uniquely identify the cached item
    """

    __slots__ = ("_hash", "_key_data")

    def __init__(self, key_data: tuple[Any, ...]) -> None:
        """Initialize cache key.

        Args:
            key_data: Tuple of hashable values for the cache key
        """
        self._key_data = key_data
        self._hash = hash(key_data)

    @property
    def key_data(self) -> tuple[Any, ...]:
        """Get the key data tuple."""
        return self._key_data

    def __hash__(self) -> int:
        """Return cached hash value."""
        return self._hash

    def __eq__(self, other: object) -> bool:
        """Equality comparison with short-circuit evaluation."""
        if type(other) is not CacheKey:
            return False
        other_key = other  # type: CacheKey
        if self._hash != other_key._hash:
            return False
        return self._key_data == other_key._key_data

    def __repr__(self) -> str:
        """String representation of the cache key."""
        return f"CacheKey({self._key_data!r})"


@mypyc_attr(allow_interpreted_subclasses=True)
class CacheStats:
    """Cache statistics tracking.

    Tracks cache performance metrics including hit rates, evictions,
    and memory usage.
    """

    __slots__ = CACHE_STATS_SLOTS

    def __init__(self) -> None:
        """Initialize cache statistics."""
        self.hits = 0
        self.misses = 0
        self.evictions = 0
        self.total_operations = 0
        self.memory_usage = 0

    @property
    def hit_rate(self) -> float:
        """Calculate cache hit rate as percentage."""
        total = self.hits + self.misses
        return (self.hits / total * 100) if total > 0 else 0.0

    @property
    def miss_rate(self) -> float:
        """Calculate cache miss rate as percentage."""
        return 100.0 - self.hit_rate

    def record_hit(self) -> None:
        """Record a cache hit."""
        self.hits += 1
        self.total_operations += 1

    def record_miss(self) -> None:
        """Record a cache miss."""
        self.misses += 1
        self.total_operations += 1

    def record_eviction(self) -> None:
        """Record a cache eviction."""
        self.evictions += 1

    def reset(self) -> None:
        """Reset all statistics."""
        self.hits = 0
        self.misses = 0
        self.evictions = 0
        self.total_operations = 0
        self.memory_usage = 0

    def __repr__(self) -> str:
        """String representation of cache statistics."""
        return (
            f"CacheStats(hit_rate={self.hit_rate:.1f}%, "
            f"hits={self.hits}, misses={self.misses}, "
            f"evictions={self.evictions}, ops={self.total_operations})"
        )


@mypyc_attr(allow_interpreted_subclasses=True)
class CacheNode:
    """Internal cache node for LRU linked list implementation.

    This class represents a node in the doubly-linked list used for
    LRU cache implementation with O(1) operations.
    """

    __slots__ = CACHE_NODE_SLOTS

    def __init__(self, key: CacheKey, value: Any) -> None:
        """Initialize cache node.

        Args:
            key: Cache key for this node
            value: Cached value
        """
        self.key = key
        self.value = value
        self.prev: Optional[CacheNode] = None
        self.next: Optional[CacheNode] = None
        self.timestamp = time.time()
        self.access_count = 1


@mypyc_attr(allow_interpreted_subclasses=True)
class UnifiedCache(Generic[CacheValueT]):
    """Cache with LRU eviction and TTL support.

    This class provides a thread-safe cache implementation with LRU eviction
    and time-based expiration.

    Features:
    - O(1) cache lookup, insertion, and deletion operations
    - LRU eviction policy with configurable size limits
    - TTL-based expiration for cache entries
    - Thread-safe operations
    - Statistics tracking

    Args:
        max_size: Maximum number of items to cache (LRU eviction when exceeded)
        ttl_seconds: Time-to-live in seconds (None for no expiration)
    """

    __slots__ = UNIFIED_CACHE_SLOTS

    def __init__(self, max_size: int = DEFAULT_MAX_SIZE, ttl_seconds: Optional[int] = DEFAULT_TTL_SECONDS) -> None:
        """Initialize unified cache.

        Args:
            max_size: Maximum number of cache entries
            ttl_seconds: Time-to-live in seconds (None for no expiration)
        """
        self._cache: dict[CacheKey, CacheNode] = {}
        self._lock = threading.RLock()
        self._max_size = max_size
        self._ttl = ttl_seconds
        self._stats = CacheStats()

        self._head = CacheNode(CacheKey(()), None)
        self._tail = CacheNode(CacheKey(()), None)
        self._head.next = self._tail
        self._tail.prev = self._head

    def get(self, key: CacheKey) -> Optional[CacheValueT]:
        """Get value from cache with LRU update.

        Args:
            key: Cache key to lookup

        Returns:
            Cached value or None if not found or expired
        """
        with self._lock:
            node = self._cache.get(key)
            if node is None:
                self._stats.record_miss()
                return None

            # Optimize TTL check with early variable assignment
            ttl = self._ttl
            if ttl is not None:
                current_time = time.time()
                if (current_time - node.timestamp) > ttl:
                    self._remove_node(node)
                    del self._cache[key]
                    self._stats.record_miss()
                    self._stats.record_eviction()
                    return None

            self._move_to_head(node)
            node.access_count += 1
            self._stats.record_hit()
            return node.value  # type: ignore[no-any-return]

    def put(self, key: CacheKey, value: CacheValueT) -> None:
        """Put value in cache with LRU management.

        Args:
            key: Cache key
            value: Value to cache
        """
        with self._lock:
            existing_node = self._cache.get(key)
            if existing_node is not None:
                existing_node.value = value
                existing_node.timestamp = time.time()
                existing_node.access_count += 1
                self._move_to_head(existing_node)
                return

            new_node = CacheNode(key, value)
            self._cache[key] = new_node
            self._add_to_head(new_node)

            # Optimize size check with cached length
            if len(self._cache) > self._max_size:
                tail_node = self._tail.prev
                if tail_node is not None and tail_node is not self._head:
                    self._remove_node(tail_node)
                    del self._cache[tail_node.key]
                    self._stats.record_eviction()

    def delete(self, key: CacheKey) -> bool:
        """Delete entry from cache.

        Args:
            key: Cache key to delete

        Returns:
            True if key was found and deleted, False otherwise
        """
        with self._lock:
            node: Optional[CacheNode] = self._cache.get(key)
            if node is None:
                return False

            self._remove_node(node)
            del self._cache[key]
            return True

    def clear(self) -> None:
        """Clear all cache entries."""
        with self._lock:
            self._cache.clear()
            self._head.next = self._tail
            self._tail.prev = self._head
            self._stats.reset()

    def size(self) -> int:
        """Get current cache size."""
        return len(self._cache)

    def is_empty(self) -> bool:
        """Check if cache is empty."""
        return len(self._cache) == 0

    def get_stats(self) -> CacheStats:
        """Get cache statistics."""
        return self._stats

    def _add_to_head(self, node: CacheNode) -> None:
        """Add node after head."""
        node.prev = self._head
        head_next: Optional[CacheNode] = self._head.next
        node.next = head_next
        if head_next is not None:
            head_next.prev = node
        self._head.next = node

    def _remove_node(self, node: CacheNode) -> None:
        """Remove node from linked list."""
        node_prev: Optional[CacheNode] = node.prev
        node_next: Optional[CacheNode] = node.next
        if node_prev is not None:
            node_prev.next = node_next
        if node_next is not None:
            node_next.prev = node_prev

    def _move_to_head(self, node: CacheNode) -> None:
        """Move existing node to head."""
        self._remove_node(node)
        self._add_to_head(node)

    def __len__(self) -> int:
        """Get current cache size."""
        return len(self._cache)

    def __contains__(self, key: CacheKey) -> bool:
        """Check if key exists in cache."""
        with self._lock:
            node = self._cache.get(key)
            if node is None:
                return False

            # Optimize TTL check
            ttl = self._ttl
            return not (ttl is not None and time.time() - node.timestamp > ttl)


@mypyc_attr(allow_interpreted_subclasses=False)
class StatementCache:
    """Specialized cache for compiled SQL statements.

    Caches compiled SQL statements and their execution parameters.
    """

    def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None:
        """Initialize statement cache.

        Args:
            max_size: Maximum number of statements to cache
        """
        self._cache: UnifiedCache[tuple[str, Any]] = UnifiedCache(max_size)

    def get_compiled(self, statement: "SQL") -> Optional[tuple[str, Any]]:
        """Get compiled SQL and parameters from cache.

        Args:
            statement: SQL statement to lookup

        Returns:
            Tuple of (compiled_sql, parameters) or None if not cached
        """
        cache_key = self._create_statement_key(statement)
        return self._cache.get(cache_key)

    def put_compiled(self, statement: "SQL", compiled_sql: str, parameters: Any) -> None:
        """Cache compiled SQL and parameters.

        Args:
            statement: Original SQL statement
            compiled_sql: Compiled SQL string
            parameters: Processed parameters
        """
        cache_key = self._create_statement_key(statement)
        self._cache.put(cache_key, (compiled_sql, parameters))

    def _create_statement_key(self, statement: "SQL") -> CacheKey:
        """Create cache key for SQL statement.

        Args:
            statement: SQL statement

        Returns:
            Cache key for the statement
        """
        # Create key from SQL text, parameters, and configuration
        key_data = (
            "statement",
            statement._raw_sql,
            hash(statement),  # Includes parameters and flags
            str(statement.dialect) if statement.dialect else None,
            statement.is_many,
            statement.is_script,
        )
        return CacheKey(key_data)

    def clear(self) -> None:
        """Clear statement cache."""
        self._cache.clear()

    def get_stats(self) -> CacheStats:
        """Get cache statistics."""
        return self._cache.get_stats()


@mypyc_attr(allow_interpreted_subclasses=False)
class ExpressionCache:
    """Specialized cache for parsed SQLGlot expressions.

    Caches parsed SQLGlot expressions to avoid redundant parsing operations.
    """

    def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None:
        """Initialize expression cache.

        Args:
            max_size: Maximum number of expressions to cache
        """
        self._cache: UnifiedCache[exp.Expression] = UnifiedCache(max_size)

    def get_expression(self, sql: str, dialect: Optional[str] = None) -> "Optional[exp.Expression]":
        """Get parsed expression from cache.

        Args:
            sql: SQL string
            dialect: SQL dialect

        Returns:
            Parsed expression or None if not cached
        """
        cache_key = self._create_expression_key(sql, dialect)
        return self._cache.get(cache_key)

    def put_expression(self, sql: str, expression: "exp.Expression", dialect: Optional[str] = None) -> None:
        """Cache parsed expression.

        Args:
            sql: SQL string
            expression: Parsed SQLGlot expression
            dialect: SQL dialect
        """
        cache_key = self._create_expression_key(sql, dialect)
        self._cache.put(cache_key, expression)

    def _create_expression_key(self, sql: str, dialect: Optional[str]) -> CacheKey:
        """Create cache key for expression.

        Args:
            sql: SQL string
            dialect: SQL dialect

        Returns:
            Cache key for the expression
        """
        key_data = ("expression", sql, dialect)
        return CacheKey(key_data)

    def clear(self) -> None:
        """Clear expression cache."""
        self._cache.clear()

    def get_stats(self) -> CacheStats:
        """Get cache statistics."""
        return self._cache.get_stats()


@mypyc_attr(allow_interpreted_subclasses=False)
class ParameterCache:
    """Specialized cache for processed parameters.

    Caches processed parameter transformations.
    """

    def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None:
        """Initialize parameter cache.

        Args:
            max_size: Maximum number of parameter sets to cache
        """
        self._cache: UnifiedCache[Any] = UnifiedCache(max_size)

    def get_parameters(self, original_params: Any, config_hash: int) -> Optional[Any]:
        """Get processed parameters from cache.

        Args:
            original_params: Original parameters
            config_hash: Hash of parameter processing configuration

        Returns:
            Processed parameters or None if not cached
        """
        cache_key = self._create_parameter_key(original_params, config_hash)
        return self._cache.get(cache_key)

    def put_parameters(self, original_params: Any, processed_params: Any, config_hash: int) -> None:
        """Cache processed parameters.

        Args:
            original_params: Original parameters
            processed_params: Processed parameters
            config_hash: Hash of parameter processing configuration
        """
        cache_key = self._create_parameter_key(original_params, config_hash)
        self._cache.put(cache_key, processed_params)

    def _create_parameter_key(self, params: Any, config_hash: int) -> CacheKey:
        """Create cache key for parameters.

        Args:
            params: Parameters to cache
            config_hash: Configuration hash

        Returns:
            Cache key for the parameters
        """
        # Create stable key from parameters and configuration
        try:
            # Optimize type checking order
            param_key: tuple[Any, ...]
            if isinstance(params, dict):
                param_key = tuple(sorted(params.items()))
            elif isinstance(params, (list, tuple)):
                param_key = tuple(params)
            else:
                param_key = (params,)

            return CacheKey(("parameters", param_key, config_hash))
        except (TypeError, ValueError):
            # Fallback for unhashable types
            param_key_fallback = (str(params), type(params).__name__)
            return CacheKey(("parameters", param_key_fallback, config_hash))

    def clear(self) -> None:
        """Clear parameter cache."""
        self._cache.clear()

    def get_stats(self) -> CacheStats:
        """Get cache statistics."""
        return self._cache.get_stats()


_default_cache: Optional[UnifiedCache[Any]] = None
_statement_cache: Optional[StatementCache] = None
_expression_cache: Optional[ExpressionCache] = None
_parameter_cache: Optional[ParameterCache] = None
_cache_lock = threading.Lock()


def get_default_cache() -> UnifiedCache[Any]:
    """Get the default unified cache instance.

    Returns:
        Singleton default cache instance
    """
    global _default_cache
    if _default_cache is None:
        with _cache_lock:
            if _default_cache is None:
                _default_cache = UnifiedCache[Any]()
    return _default_cache


def get_statement_cache() -> StatementCache:
    """Get the statement cache instance.

    Returns:
        Singleton statement cache instance
    """
    global _statement_cache
    if _statement_cache is None:
        with _cache_lock:
            if _statement_cache is None:
                _statement_cache = StatementCache()
    return _statement_cache


def get_expression_cache() -> ExpressionCache:
    """Get the expression cache instance.

    Returns:
        Singleton expression cache instance
    """
    global _expression_cache
    if _expression_cache is None:
        with _cache_lock:
            if _expression_cache is None:
                _expression_cache = ExpressionCache()
    return _expression_cache


def get_parameter_cache() -> ParameterCache:
    """Get the parameter cache instance.

    Returns:
        Singleton parameter cache instance
    """
    global _parameter_cache
    if _parameter_cache is None:
        with _cache_lock:
            if _parameter_cache is None:
                _parameter_cache = ParameterCache()
    return _parameter_cache


def clear_all_caches() -> None:
    """Clear all cache instances."""
    if _default_cache is not None:
        _default_cache.clear()
    if _statement_cache is not None:
        _statement_cache.clear()
    if _expression_cache is not None:
        _expression_cache.clear()
    if _parameter_cache is not None:
        _parameter_cache.clear()


def get_cache_statistics() -> dict[str, CacheStats]:
    """Get statistics from all cache instances.

    Returns:
        Dictionary mapping cache type to statistics
    """
    stats = {}
    if _default_cache is not None:
        stats["default"] = _default_cache.get_stats()
    if _statement_cache is not None:
        stats["statement"] = _statement_cache.get_stats()
    if _expression_cache is not None:
        stats["expression"] = _expression_cache.get_stats()
    if _parameter_cache is not None:
        stats["parameter"] = _parameter_cache.get_stats()
    return stats


_global_cache_config: "Optional[CacheConfig]" = None


@mypyc_attr(allow_interpreted_subclasses=True)
class CacheConfig:
    """Global cache configuration for SQLSpec.

    Controls caching behavior across the SQLSpec system.
    """

    def __init__(
        self,
        *,
        compiled_cache_enabled: bool = True,
        sql_cache_enabled: bool = True,
        fragment_cache_enabled: bool = True,
        optimized_cache_enabled: bool = True,
        sql_cache_size: int = 1000,
        fragment_cache_size: int = 5000,
        optimized_cache_size: int = 2000,
    ) -> None:
        """Initialize cache configuration.

        Args:
            compiled_cache_enabled: Enable compiled SQL caching
            sql_cache_enabled: Enable SQL statement caching
            fragment_cache_enabled: Enable AST fragment caching
            optimized_cache_enabled: Enable optimized expression caching
            sql_cache_size: Maximum SQL cache entries
            fragment_cache_size: Maximum fragment cache entries
            optimized_cache_size: Maximum optimized cache entries
        """
        self.compiled_cache_enabled = compiled_cache_enabled
        self.sql_cache_enabled = sql_cache_enabled
        self.fragment_cache_enabled = fragment_cache_enabled
        self.optimized_cache_enabled = optimized_cache_enabled
        self.sql_cache_size = sql_cache_size
        self.fragment_cache_size = fragment_cache_size
        self.optimized_cache_size = optimized_cache_size


def get_cache_config() -> CacheConfig:
    """Get the global cache configuration.

    Returns:
        Current global cache configuration instance
    """
    global _global_cache_config
    if _global_cache_config is None:
        _global_cache_config = CacheConfig()
    return _global_cache_config


def update_cache_config(config: CacheConfig) -> None:
    """Update the global cache configuration.

    This clears all existing caches when configuration changes to ensure
    consistency with the new settings.

    Args:
        config: New cache configuration to apply globally
    """
    logger = get_logger("sqlspec.cache")
    logger.info("Cache configuration updated: %s", config)

    global _global_cache_config
    _global_cache_config = config

    unified_cache = get_default_cache()
    unified_cache.clear()
    statement_cache = get_statement_cache()
    statement_cache.clear()

    logger = get_logger("sqlspec.cache")
    logger.info(
        "Cache configuration updated - all caches cleared",
        extra={
            "compiled_cache_enabled": config.compiled_cache_enabled,
            "sql_cache_enabled": config.sql_cache_enabled,
            "fragment_cache_enabled": config.fragment_cache_enabled,
            "optimized_cache_enabled": config.optimized_cache_enabled,
        },
    )


@mypyc_attr(allow_interpreted_subclasses=True)
class CacheStatsAggregate:
    """Aggregated cache statistics from all cache instances."""

    __slots__ = (
        "fragment_capacity",
        "fragment_hit_rate",
        "fragment_hits",
        "fragment_misses",
        "fragment_size",
        "optimized_capacity",
        "optimized_hit_rate",
        "optimized_hits",
        "optimized_misses",
        "optimized_size",
        "sql_capacity",
        "sql_hit_rate",
        "sql_hits",
        "sql_misses",
        "sql_size",
    )

    def __init__(self) -> None:
        """Initialize aggregated cache statistics."""
        self.sql_hit_rate = 0.0
        self.fragment_hit_rate = 0.0
        self.optimized_hit_rate = 0.0
        self.sql_size = 0
        self.fragment_size = 0
        self.optimized_size = 0
        self.sql_capacity = 0
        self.fragment_capacity = 0
        self.optimized_capacity = 0
        self.sql_hits = 0
        self.sql_misses = 0
        self.fragment_hits = 0
        self.fragment_misses = 0
        self.optimized_hits = 0
        self.optimized_misses = 0


def get_cache_stats() -> CacheStatsAggregate:
    """Get current cache statistics from all caches.

    Returns:
        Combined cache statistics object
    """
    stats_dict = get_cache_statistics()
    stats = CacheStatsAggregate()

    for cache_name, cache_stats in stats_dict.items():
        hits = cache_stats.hits
        misses = cache_stats.misses
        size = 0

        if "sql" in cache_name.lower():
            stats.sql_hits += hits
            stats.sql_misses += misses
            stats.sql_size += size
        elif "fragment" in cache_name.lower():
            stats.fragment_hits += hits
            stats.fragment_misses += misses
            stats.fragment_size += size
        elif "optimized" in cache_name.lower():
            stats.optimized_hits += hits
            stats.optimized_misses += misses
            stats.optimized_size += size

    sql_total = stats.sql_hits + stats.sql_misses
    if sql_total > 0:
        stats.sql_hit_rate = stats.sql_hits / sql_total

    fragment_total = stats.fragment_hits + stats.fragment_misses
    if fragment_total > 0:
        stats.fragment_hit_rate = stats.fragment_hits / fragment_total

    optimized_total = stats.optimized_hits + stats.optimized_misses
    if optimized_total > 0:
        stats.optimized_hit_rate = stats.optimized_hits / optimized_total

    return stats


def reset_cache_stats() -> None:
    """Reset all cache statistics."""
    clear_all_caches()


def log_cache_stats() -> None:
    """Log current cache statistics using the configured logger."""
    logger = get_logger("sqlspec.cache")
    stats = get_cache_stats()
    logger.info("Cache Statistics: %s", stats)


@mypyc_attr(allow_interpreted_subclasses=False)
class SQLCompilationCache:
    """Wrapper around StatementCache for compatibility."""

    __slots__ = ("_statement_cache", "_unified_cache")

    def __init__(self) -> None:
        self._statement_cache = get_statement_cache()
        self._unified_cache = get_default_cache()

    def get(self, cache_key: str) -> Optional[tuple[str, Any]]:
        """Get cached compiled SQL and parameters."""
        key = CacheKey((cache_key,))
        return self._unified_cache.get(key)

    def set(self, cache_key: str, value: tuple[str, Any]) -> None:
        """Set cached compiled SQL and parameters."""
        key = CacheKey((cache_key,))
        self._unified_cache.put(key, value)


sql_cache = SQLCompilationCache()