nanasqlite 1.3.3.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nanasqlite/__init__.py +52 -0
- nanasqlite/async_core.py +1456 -0
- nanasqlite/cache.py +335 -0
- nanasqlite/core.py +2336 -0
- nanasqlite/exceptions.py +117 -0
- nanasqlite/py.typed +0 -0
- nanasqlite/sql_utils.py +174 -0
- nanasqlite/utils.py +202 -0
- nanasqlite-1.3.3.dev4.dist-info/METADATA +413 -0
- nanasqlite-1.3.3.dev4.dist-info/RECORD +13 -0
- nanasqlite-1.3.3.dev4.dist-info/WHEEL +5 -0
- nanasqlite-1.3.3.dev4.dist-info/licenses/LICENSE +21 -0
- nanasqlite-1.3.3.dev4.dist-info/top_level.txt +1 -0
nanasqlite/cache.py
ADDED
@@ -0,0 +1,335 @@
from __future__ import annotations

import logging
from abc import abstractmethod
from collections import OrderedDict
from collections.abc import MutableMapping
from enum import Enum
from typing import Any, Callable, Protocol

logger = logging.getLogger(__name__)

# Try to import C-optimized LRU (optional dependency)
_FAST_LRU: Any = None
try:
    from lru import LRU

    _FAST_LRU = LRU
    HAS_FAST_LRU = True
except ImportError:
    HAS_FAST_LRU = False

class CacheType(str, Enum):
    """Available cache strategies"""

    UNBOUNDED = "unbounded"
    LRU = "lru"
    TTL = "ttl"

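Because CacheType subclasses str, its members compare equal to their plain string values, which is what lets the create_cache factory at the end of this file accept either a CacheType member or a bare string. A minimal sketch (illustration only, not part of cache.py; assumes the module is importable as nanasqlite.cache):

    from nanasqlite.cache import CacheType

    # str-backed Enum: members compare equal to their raw string values
    assert CacheType.LRU == "lru"
    assert CacheType("ttl") is CacheType.TTL
    assert CacheType.UNBOUNDED.value == "unbounded"
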
class CacheStrategy(Protocol):
    """Protocol defining the interface for all cache implementations"""

    @abstractmethod
    def get(self, key: str) -> Any | None:
        """Get item from cache. Returns None if not found."""
        pass

    @abstractmethod
    def set(self, key: str, value: Any) -> None:
        """Set item in cache."""
        pass

    @abstractmethod
    def delete(self, key: str) -> None:
        """Remove item from cache."""
        pass

    @abstractmethod
    def invalidate(self, key: str) -> None:
        """Completely remove key from cache knowledge (forget whether it exists or not)."""
        pass

    @abstractmethod
    def contains(self, key: str) -> bool:
        """Check if key exists in cache."""
        pass

    @abstractmethod
    def mark_cached(self, key: str) -> None:
        """Mark a key as 'known' (cached) even if value is not loaded yet."""
        pass

    @abstractmethod
    def is_cached(self, key: str) -> bool:
        """Check if we have knowledge about this key (either value or existence)."""
        pass

    @abstractmethod
    def clear(self) -> None:
        """Clear entire cache."""
        pass

    @abstractmethod
    def get_data(self) -> MutableMapping[str, Any]:
        """Return reference to internal data storage (for legacy compatibility/inspection)."""
        pass

    @property
    @abstractmethod
    def size(self) -> int:
        """Current number of items in cache."""
        pass

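The Protocol above is the interface the rest of the library is typed against: any of the concrete classes below, or a user-supplied object with the same methods, can stand in wherever a CacheStrategy is expected. A minimal sketch of consuming that interface (illustration only, not part of cache.py; get_or_load is a hypothetical helper):

    from typing import Any, Callable

    from nanasqlite.cache import CacheStrategy, UnboundedCache


    def get_or_load(cache: CacheStrategy, key: str, loader: Callable[[str], Any]) -> Any:
        # Hypothetical helper: consult the cache's "knowledge" first,
        # fall back to the loader, then remember the result.
        if cache.is_cached(key):
            return cache.get(key)
        value = loader(key)
        cache.set(key, value)
        return value


    cache = UnboundedCache()
    print(get_or_load(cache, "user:1", lambda k: {"id": 1}))  # loaded, then cached
    print(get_or_load(cache, "user:1", lambda k: {"id": 1}))  # served from the cache
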
class UnboundedCache(CacheStrategy):
    """
    Default behavior: Infinite growth, maximum speed.
    Equivalent to the v1.2.x logic, using a plain dict + set.
    If max_size is set, uses FIFO eviction.
    """

    def __init__(self, max_size: int | None = None):
        self._max_size = max_size
        self._data: dict[str, Any] = {}
        self._cached_keys: set[str] = set()

    def get(self, key: str) -> Any | None:
        return self._data.get(key)

    def set(self, key: str, value: Any) -> None:
        if self._max_size and self._max_size > 0:
            if key not in self._data and len(self._data) >= self._max_size:
                # FIFO: evict the element that was inserted first (dicts preserve insertion order, Python 3.7+)
                oldest_key = next(iter(self._data))
                del self._data[oldest_key]
                self._cached_keys.discard(oldest_key)

        self._data[key] = value
        self._cached_keys.add(key)

    def delete(self, key: str) -> None:
        if key in self._data:
            del self._data[key]
            self._cached_keys.add(key)

    def invalidate(self, key: str) -> None:
        if key in self._data:
            del self._data[key]
        self._cached_keys.discard(key)

    def contains(self, key: str) -> bool:
        return key in self._data

    def mark_cached(self, key: str) -> None:
        self._cached_keys.add(key)

    def is_cached(self, key: str) -> bool:
        return key in self._cached_keys

    def clear(self) -> None:
        self._data.clear()
        self._cached_keys.clear()

    def get_data(self) -> MutableMapping[str, Any]:
        return self._data

    @property
    def size(self) -> int:
        return len(self._data)

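A short sketch of the FIFO behavior described in the docstring above (illustration only, not part of cache.py): once max_size is reached, the key inserted first is evicted, while mark_cached/is_cached track keys whose values have not been loaded.

    from nanasqlite.cache import UnboundedCache

    cache = UnboundedCache(max_size=2)
    cache.set("a", 1)
    cache.set("b", 2)
    cache.set("c", 3)            # "a" was inserted first, so it is evicted (FIFO)
    print(cache.contains("a"))   # False
    print(cache.get("c"))        # 3

    cache.mark_cached("d")       # key is "known" even though no value is stored
    print(cache.is_cached("d"))  # True
    print(cache.contains("d"))   # False
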
class StdLRUCache(CacheStrategy):
    """
    Standard Library LRU implementation using OrderedDict.
    Safe fallback if C extensions are not available.
    """

    def __init__(self, max_size: int):
        self._max_size = max_size
        self._data: OrderedDict[str, Any] = OrderedDict()
        # For LRU, is_cached logic is unified with containment.
        # If it's evicted, it's no longer "cached", so we must fetch again.

    def get(self, key: str) -> Any | None:
        if key not in self._data:
            return None
        self._data.move_to_end(key)  # Mark used
        return self._data[key]

    def set(self, key: str, value: Any) -> None:
        self._data[key] = value
        self._data.move_to_end(key)
        if len(self._data) > self._max_size:
            self._data.popitem(last=False)  # Evict oldest

    def delete(self, key: str) -> None:
        if key in self._data:
            del self._data[key]

    def invalidate(self, key: str) -> None:
        self.delete(key)

    def contains(self, key: str) -> bool:
        return key in self._data

    def mark_cached(self, key: str) -> None:
        # In strict LRU we cannot simply "mark" a key without storing a value.
        # Supporting "key is known to exist" would require storing a placeholder,
        # so for now only actual values are stored.
        pass

    def is_cached(self, key: str) -> bool:
        # In LRU mode, if it's in the map, it's cached.
        # If it's not, we consider it "not cached" (needs DB fetch)
        return key in self._data

    def clear(self) -> None:
        self._data.clear()

    def get_data(self) -> MutableMapping[str, Any]:
        return self._data

    @property
    def size(self) -> int:
        return len(self._data)

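A sketch of the recency behavior (illustration only, not part of cache.py): a get refreshes a key's position, so the least recently used key is the one evicted when the size limit is exceeded.

    from nanasqlite.cache import StdLRUCache

    cache = StdLRUCache(max_size=2)
    cache.set("a", 1)
    cache.set("b", 2)
    cache.get("a")               # touching "a" makes "b" the least recently used
    cache.set("c", 3)            # capacity exceeded -> "b" is evicted
    print(cache.contains("a"))   # True
    print(cache.contains("b"))   # False
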
class FastLRUCache(CacheStrategy):
    """
    High-performance LRU implementation using lru-dict (C extension).
    """

    def __init__(self, max_size: int):
        if not HAS_FAST_LRU or _FAST_LRU is None:
            raise ImportError("lru-dict is not installed. Use 'fast_lru' extra or fallback to StdLRUCache.")
        self._data = _FAST_LRU(max_size)

    def get(self, key: str) -> Any | None:
        return self._data.get(key)

    def set(self, key: str, value: Any) -> None:
        self._data[key] = value

    def delete(self, key: str) -> None:
        if key in self._data:
            del self._data[key]

    def invalidate(self, key: str) -> None:
        self.delete(key)

    def contains(self, key: str) -> bool:
        return key in self._data

    def mark_cached(self, key: str) -> None:
        pass

    def is_cached(self, key: str) -> bool:
        return key in self._data

    def clear(self) -> None:
        self._data.clear()

    def get_data(self) -> MutableMapping[str, Any]:
        # lru-dict behaves like a dict
        return self._data  # type: ignore

    @property
    def size(self) -> int:
        return len(self._data)

class TTLCache(CacheStrategy):
    """
    Expiring (TTL) cache.
    """

    def __init__(
        self,
        ttl: float,
        max_size: int | None = None,
        on_expire: Callable[[str, Any], None] | None = None,
    ):
        """
        Args:
            ttl: Expiration time in seconds.
            max_size: Maximum number of entries (FIFO eviction; unlimited if None).
            on_expire: Callback invoked when an entry expires.
        """
        from .utils import ExpiringDict

        self._data = ExpiringDict(expiration_time=ttl, on_expire=on_expire)
        self._max_size = max_size
        self._cached_keys: set[str] = set()

    def get(self, key: str) -> Any | None:
        return self._data.get(key)

    def set(self, key: str, value: Any) -> None:
        if self._max_size and self._max_size > 0:
            if key not in self._data and len(self._data) >= self._max_size:
                oldest_key = next(iter(self._data))
                del self._data[oldest_key]
                self._cached_keys.discard(oldest_key)

        self._data[key] = value
        self._cached_keys.add(key)

    def delete(self, key: str) -> None:
        if key in self._data:
            del self._data[key]
        self._cached_keys.discard(key)

    def clear(self) -> None:
        self._data.clear()
        self._cached_keys.clear()

    def mark_cached(self, key: str) -> None:
        self._cached_keys.add(key)

    def is_cached(self, key: str) -> bool:
        return key in self._data or key in self._cached_keys

    def invalidate(self, key: str) -> None:
        self.delete(key)

    def contains(self, key: str) -> bool:
        return key in self._data

    @property
    def size(self) -> int:
        return len(self._data)

    def get_data(self) -> MutableMapping[str, Any]:
        return self._data

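A sketch of the expiry behavior (illustration only, not part of cache.py). It assumes that ExpiringDict from nanasqlite/utils.py, which is not shown in this hunk, drops entries once they are older than ttl seconds and invokes on_expire when it does; the exact timing semantics live in utils.py.

    import time

    from nanasqlite.cache import TTLCache

    expired: list[str] = []
    cache = TTLCache(ttl=0.2, on_expire=lambda key, value: expired.append(key))

    cache.set("session", {"user": "alice"})
    print(cache.get("session"))  # {'user': 'alice'} while still fresh

    time.sleep(0.3)              # wait past the 0.2 s TTL
    print(cache.get("session"))  # None once the entry has expired (assumed behavior)
    print(expired)               # ["session"], assuming on_expire fires on expiry
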
def create_cache(
    strategy: str | CacheType = CacheType.UNBOUNDED,
    size: int | None = None,
    ttl: float | None = None,
    on_expire: Callable[[str, Any], None] | None = None,
) -> CacheStrategy:
    """Factory to create appropriate cache instance"""
    # Normalize strategy
    if isinstance(strategy, CacheType):
        strategy = strategy.value

    if strategy == CacheType.LRU:
        if size is None or size <= 0:
            raise ValueError("cache_size must be a positive integer when using LRU strategy")

        if HAS_FAST_LRU:
            logger.info(f"Using FastLRUCache (lru-dict) with size {size}")
            return FastLRUCache(size)
        else:
            logger.warning(f"lru-dict not found. Falling back to standard LRUCache (OrderedDict) with size {size}")
            return StdLRUCache(size)

    if strategy == CacheType.TTL:
        if ttl is None or ttl <= 0:
            raise ValueError("cache_ttl must be a positive value when using TTL strategy")
        return TTLCache(ttl, max_size=size, on_expire=on_expire)

    return UnboundedCache(max_size=size)
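Putting it together, a sketch of how the factory selects an implementation (illustration only, not part of cache.py): strings and CacheType members are interchangeable, LRU requires a positive size, TTL requires a positive ttl, and anything else falls back to UnboundedCache.

    from nanasqlite.cache import CacheType, create_cache

    default = create_cache()                        # UnboundedCache, no size limit
    bounded = create_cache("unbounded", size=1000)  # UnboundedCache with a FIFO cap
    lru = create_cache(CacheType.LRU, size=256)     # FastLRUCache if lru-dict is installed,
                                                    # otherwise StdLRUCache
    ttl = create_cache("ttl", ttl=30.0, size=500)   # TTLCache, 30 s expiry, at most 500 entries

    try:
        create_cache("lru")                         # no size given
    except ValueError as exc:
        print(exc)                                  # cache_size must be a positive integer ...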