beaver_db-2.0rc2-py3-none-any.whl
- beaver/__init__.py +16 -0
- beaver/blobs.py +223 -0
- beaver/bridge.py +167 -0
- beaver/cache.py +274 -0
- beaver/channels.py +249 -0
- beaver/cli/__init__.py +133 -0
- beaver/cli/blobs.py +225 -0
- beaver/cli/channels.py +166 -0
- beaver/cli/collections.py +500 -0
- beaver/cli/dicts.py +171 -0
- beaver/cli/lists.py +244 -0
- beaver/cli/locks.py +202 -0
- beaver/cli/logs.py +248 -0
- beaver/cli/queues.py +215 -0
- beaver/client.py +392 -0
- beaver/core.py +646 -0
- beaver/dicts.py +314 -0
- beaver/docs.py +459 -0
- beaver/events.py +155 -0
- beaver/graphs.py +212 -0
- beaver/lists.py +337 -0
- beaver/locks.py +186 -0
- beaver/logs.py +187 -0
- beaver/manager.py +203 -0
- beaver/queries.py +66 -0
- beaver/queues.py +215 -0
- beaver/security.py +144 -0
- beaver/server.py +452 -0
- beaver/sketches.py +307 -0
- beaver/types.py +32 -0
- beaver/vectors.py +198 -0
- beaver_db-2.0rc2.dist-info/METADATA +149 -0
- beaver_db-2.0rc2.dist-info/RECORD +36 -0
- beaver_db-2.0rc2.dist-info/WHEEL +4 -0
- beaver_db-2.0rc2.dist-info/entry_points.txt +2 -0
- beaver_db-2.0rc2.dist-info/licenses/LICENSE +21 -0
beaver/__init__.py
ADDED
@@ -0,0 +1,16 @@
+from .core import BeaverDB, AsyncBeaverDB
+from .docs import Document
+from .events import Event
+from .queries import q
+from .security import Secret
+
+__version__ = "2.0rc2"
+
+__all__ = [
+    "AsyncBeaverDB",
+    "BeaverDB",
+    "Document",
+    "Secret",
+    "Event",
+    "q",
+]
beaver/blobs.py
ADDED
@@ -0,0 +1,223 @@
+from base64 import b64encode
+import json
+from typing import (
+    IO,
+    Any,
+    Iterator,
+    Tuple,
+    Protocol,
+    runtime_checkable,
+    TYPE_CHECKING,
+    overload,
+)
+
+from pydantic import BaseModel
+
+from .manager import AsyncBeaverBase, atomic, emits
+
+if TYPE_CHECKING:
+    from .core import AsyncBeaverDB
+
+
+class BlobItem(BaseModel):
+    """Represents a retrieved blob with its metadata."""
+
+    key: str
+    data: bytes
+    metadata: dict | None = None
+
+
+@runtime_checkable
+class IBeaverBlob[T: BaseModel](Protocol):
+    """
+    The synchronous protocol exposed to the user via BeaverBridge.
+    """
+
+    def __getitem__(self, key: str) -> bytes: ...
+    def __setitem__(self, key: str, data: bytes) -> None: ...
+    def __delitem__(self, key: str) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, key: str) -> bool: ...
+    def __iter__(self) -> Iterator[str]: ...
+
+    def get(self, key: str) -> bytes: ...
+    def set(self, key: str, data: bytes) -> None: ...
+    def delete(self, key: str) -> None: ...
+
+    def put(self, key: str, data: bytes, metadata: dict | None = None) -> None: ...
+    def fetch(self, key: str) -> BlobItem: ...
+    def count(self) -> int: ...
+    def keys(self) -> Iterator[str]: ...
+    def items(self) -> Iterator[Tuple[str, bytes]]: ...
+    def clear(self) -> None: ...
+    def dump(self, fp: IO[str] | None = None) -> dict | None: ...
+
+
+class AsyncBeaverBlob[T: BaseModel](AsyncBeaverBase[T]):
+    """
+    A wrapper providing a dictionary-like interface for storing binary blobs
+    with optional metadata.
+    Refactored for the async-first architecture (v2.0).
+    """
+
+    @emits("put", payload=lambda key, *args, **kwargs: dict(key=key))
+    @atomic
+    async def put(self, key: str, data: bytes, metadata: dict | None = None):
+        """
+        Stores binary data under a key, optionally with JSON metadata.
+        """
+        if not isinstance(data, bytes):
+            raise TypeError("Blob data must be bytes.")
+
+        meta_json = json.dumps(metadata) if metadata is not None else None
+
+        await self.connection.execute(
+            """
+            INSERT OR REPLACE INTO __beaver_blobs__
+            (store_name, key, data, metadata)
+            VALUES (?, ?, ?, ?)
+            """,
+            (self._name, key, data, meta_json),
+        )
+
+    @atomic
+    async def fetch(self, key: str) -> BlobItem:
+        """
+        Retrieves the full BlobItem (data + metadata).
+        Raises KeyError if missing.
+        """
+        cursor = await self.connection.execute(
+            "SELECT data, metadata FROM __beaver_blobs__ WHERE store_name = ? AND key = ?",
+            (self._name, key),
+        )
+        row = await cursor.fetchone()
+
+        if row is None:
+            raise KeyError(f"Key '{key}' not found in blob store '{self._name}'")
+
+        meta = json.loads(row["metadata"]) if row["metadata"] else None
+        return BlobItem(key=key, data=row["data"], metadata=meta)
+
+    @atomic
+    async def get(self, key: str) -> bytes:
+        """
+        Retrieves just the binary data for a key.
+        Mapped from __getitem__ via the Bridge.
+        """
+        cursor = await self.connection.execute(
+            "SELECT data FROM __beaver_blobs__ WHERE store_name = ? AND key = ?",
+            (self._name, key),
+        )
+        row = await cursor.fetchone()
+
+        if row is None:
+            raise KeyError(f"Key '{key}' not found in blob store '{self._name}'")
+
+        return row["data"]
+
+    @emits("set", payload=lambda key, *args, **kwargs: dict(key=key))
+    @atomic
+    async def set(self, key: str, data: bytes):
+        """
+        Alias for put(key, data) without metadata.
+        Mapped from __setitem__ via the Bridge.
+        """
+        await self.put(key, data)
+
+    @emits("del", payload=lambda key, *args, **kwargs: dict(key=key))
+    @atomic
+    async def delete(self, key: str):
+        """
+        Deletes a blob.
+        Mapped from __delitem__ via the Bridge.
+        """
+        cursor = await self.connection.execute(
+            "DELETE FROM __beaver_blobs__ WHERE store_name = ? AND key = ?",
+            (self._name, key),
+        )
+        if cursor.rowcount == 0:
+            raise KeyError(f"Key '{key}' not found in blob store '{self._name}'")
+
+    async def count(self) -> int:
+        cursor = await self.connection.execute(
+            "SELECT COUNT(*) FROM __beaver_blobs__ WHERE store_name = ?", (self._name,)
+        )
+        row = await cursor.fetchone()
+        return row[0] if row else 0
+
+    async def contains(self, key: str) -> bool:
+        cursor = await self.connection.execute(
+            "SELECT 1 FROM __beaver_blobs__ WHERE store_name = ? AND key = ? LIMIT 1",
+            (self._name, key),
+        )
+        return await cursor.fetchone() is not None
+
+    @emits("clear", payload=lambda *args, **kwargs: dict())
+    @atomic
+    async def clear(self):
+        await self.connection.execute(
+            "DELETE FROM __beaver_blobs__ WHERE store_name = ?",
+            (self._name,),
+        )
+
+    # --- Iterators ---
+
+    async def __aiter__(self):
+        async for key in self.keys():
+            yield key
+
+    async def keys(self):
+        cursor = await self.connection.execute(
+            "SELECT key FROM __beaver_blobs__ WHERE store_name = ?", (self._name,)
+        )
+        async for row in cursor:
+            yield row["key"]
+
+    async def items(self):
+        cursor = await self.connection.execute(
+            "SELECT key, data FROM __beaver_blobs__ WHERE store_name = ?", (self._name,)
+        )
+        async for row in cursor:
+            yield (row["key"], row["data"])
+
+    async def dump(
+        self, fp: IO[str] | None = None, *, payload: bool = False
+    ) -> dict | None:
+        """
+        Dumps blobs to a JSON-compatible object.
+        Note: binary data is serialized to base-64 strings *only* when payload=True.
+        Otherwise, only metadata is dumped.
+        """
+        items = []
+        async for key in self.keys():
+            # For blobs, dumping full content to JSON is dangerous (memory).
+            # We dump metadata primarily.
+            try:
+                item = await self.fetch(key)
+                items.append(
+                    {
+                        "key": key,
+                        "metadata": item.metadata,
+                        "size": len(item.data),
+                        "payload": (
+                            b64encode(item.data).decode("utf-8") if payload else None
+                        ),
+                    }
+                )
+            except KeyError:
+                continue
+
+        dump_obj = {
+            "metadata": {
+                "type": "BlobStore",
+                "name": self._name,
+                "count": len(items),
+            },
+            "items": items,
+        }
+
+        if fp:
+            json.dump(dump_obj, fp, indent=2)
+            return None
+
+        return dump_obj
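
A rough usage sketch of the blob store above. Only put/fetch/dump, BlobItem, and the AsyncBeaverDB export come from this diff; the "app.db" constructor argument and the db.blobs("images") accessor are assumptions, since the wiring between AsyncBeaverDB and AsyncBeaverBlob is not among the files shown here.

import asyncio

from beaver import AsyncBeaverDB


async def main():
    # NOTE: the constructor argument and the blobs() accessor are assumed.
    async with AsyncBeaverDB("app.db") as db:
        images = db.blobs("images")
        await images.put("logo", b"\x89PNG...", metadata={"mime": "image/png"})
        item = await images.fetch("logo")  # BlobItem(key=..., data=..., metadata=...)
        print(item.metadata, len(item.data))
        # dump() reports keys, metadata, and sizes; payload=True additionally
        # inlines each blob as a base64 string.
        manifest = await images.dump()


asyncio.run(main())
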
beaver/bridge.py
ADDED
@@ -0,0 +1,167 @@
+import asyncio
+import inspect
+from typing import Any, Iterator, AsyncIterator
+
+
+class _SyncIteratorBridge:
+    """
+    Helper class that wraps an AsyncIterator (or AsyncGenerator)
+    and exposes it as a standard synchronous Iterator.
+    """
+
+    def __init__(self, async_iter: AsyncIterator, loop: asyncio.AbstractEventLoop):
+        self._async_iter = async_iter
+        self._loop = loop
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        # Define a coroutine to fetch the next item safely on the background loop
+        async def step():
+            try:
+                # Use the builtin anext() (Python 3.10+)
+                return await anext(self._async_iter)
+            except StopAsyncIteration:
+                # Return a special sentinel to signal the caller to stop
+                return StopAsyncIteration
+
+        # Run the step on the reactor thread
+        future = asyncio.run_coroutine_threadsafe(step(), self._loop)
+        result = future.result()
+
+        # Propagate the stop signal as a synchronous StopIteration
+        if result is StopAsyncIteration:
+            raise StopIteration
+
+        return result
+
+
+class BeaverBridge:
+    """
+    A generic synchronous bridge that proxies access to an asynchronous object
+    running on a background asyncio loop.
+
+    This class enables the "Portal Pattern", allowing standard synchronous
+    Python code to interact with the async-first core of BeaverDB safely.
+    """
+
+    def __init__(self, async_obj: Any, loop: asyncio.AbstractEventLoop):
+        self._async_obj = async_obj
+        self._loop = loop
+
+    def _run(self, coro: Any) -> Any:
+        """
+        Helper to run a coroutine on the background loop and block
+        the calling thread until the result is ready.
+        """
+        if not inspect.iscoroutine(coro):
+            return coro
+
+        future = asyncio.run_coroutine_threadsafe(coro, self._loop)
+        return future.result()
+
+    def __getattr__(self, name: str) -> Any:
+        """
+        Dynamically intercepts method calls and properties.
+        """
+        try:
+            attr = getattr(self._async_obj, name)
+        except AttributeError:
+            raise AttributeError(
+                f"'{type(self._async_obj).__name__}' object has no attribute '{name}'"
+            )
+
+        # If it is a method, wrap it to handle coroutines AND async generators
+        if inspect.ismethod(attr) or inspect.isfunction(attr):
+
+            def wrapper(*args, **kwargs):
+                # 1. Call the method (this is fast/non-blocking for async defs)
+                result = attr(*args, **kwargs)
+
+                # 2. Check if it returned an async generator (e.g. .keys(), .live())
+                if inspect.isasyncgen(result):
+                    return _SyncIteratorBridge(result, self._loop)
+
+                # 3. Otherwise, run it (handles coroutines or regular values)
+                return self._run(result)
+
+            return wrapper
+
+        return attr
+
+    def __repr__(self) -> str:
+        async def safe_repr():
+            return repr(self._async_obj)
+
+        try:
+            return self._run(safe_repr())
+        except Exception:
+            return f"<BeaverBridge wrapping {type(self._async_obj).__name__}>"
+
+    # --- Context Manager ---
+
+    def __enter__(self):
+        if not hasattr(self._async_obj, "__aenter__"):
+            raise TypeError(
+                f"Object of type {type(self._async_obj).__name__} does not support the context manager protocol"
+            )
+
+        raw_result = self._run(self._async_obj.__aenter__())
+
+        if raw_result is self._async_obj:
+            return self
+
+        return BeaverBridge(raw_result, self._loop)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return self._run(self._async_obj.__aexit__(exc_type, exc_val, exc_tb))
+
+    # --- Magic Methods (Container Emulation) ---
+
+    def __len__(self) -> int:
+        if hasattr(self._async_obj, "count"):
+            return self._run(self._async_obj.count())
+        raise TypeError(f"Object of type {type(self._async_obj).__name__} has no len()")
+
+    def __getitem__(self, key: Any) -> Any:
+        if hasattr(self._async_obj, "get"):
+            return self._run(self._async_obj.get(key))
+        raise TypeError(
+            f"Object of type {type(self._async_obj).__name__} is not subscriptable"
+        )
+
+    def __setitem__(self, key: Any, value: Any):
+        if hasattr(self._async_obj, "set"):
+            return self._run(self._async_obj.set(key, value))
+        raise TypeError(
+            f"Object of type {type(self._async_obj).__name__} does not support item assignment"
+        )
+
+    def __delitem__(self, key: Any):
+        if hasattr(self._async_obj, "delete"):
+            return self._run(self._async_obj.delete(key))
+        raise TypeError(
+            f"Object of type {type(self._async_obj).__name__} does not support item deletion"
+        )
+
+    def __contains__(self, key: Any) -> bool:
+        if hasattr(self._async_obj, "contains"):
+            return self._run(self._async_obj.contains(key))
+        return False
+
+    def __iter__(self) -> Iterator[Any]:
+        """
+        Bridges AsyncIterator -> SyncIterator using the helper class.
+        """
+        if not hasattr(self._async_obj, "__aiter__"):
+            raise TypeError(
+                f"Object of type {type(self._async_obj).__name__} is not iterable"
+            )
+
+        # Create the async iterator on the background thread
+        async def get_iter():
+            return self._async_obj.__aiter__()
+
+        async_iter = self._run(get_iter())
+        return _SyncIteratorBridge(async_iter, self._loop)
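
A minimal, self-contained sketch of the portal pattern the bridge implements: a private event loop runs on a background thread, and BeaverBridge marshals every call onto it. AsyncKV is a toy stand-in for a BeaverDB store (anything exposing async get/set/count/__aiter__ works), and the loop setup mirrors what the library's synchronous entry point presumably does internally.

import asyncio
import threading

from beaver.bridge import BeaverBridge


class AsyncKV:
    """Toy async object exposing the duck-typed surface BeaverBridge expects."""

    def __init__(self):
        self._items: dict[str, int] = {}

    async def set(self, key: str, value: int):
        self._items[key] = value

    async def get(self, key: str) -> int:
        return self._items[key]

    async def count(self) -> int:
        return len(self._items)

    async def __aiter__(self):
        for key in self._items:
            yield key


# Run a private event loop on a daemon thread (the "reactor").
loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

kv = BeaverBridge(AsyncKV(), loop)
kv["a"] = 1               # __setitem__ awaits set("a", 1) on the loop
assert kv["a"] == 1       # __getitem__ awaits get("a")
assert len(kv) == 1       # __len__ awaits count()
assert list(kv) == ["a"]  # __iter__ wraps __aiter__ in _SyncIteratorBridge

Note that the wrapper produced by __getattr__ dispatches on the call's result (async generator vs. coroutine vs. plain value), which is what lets one proxy handle all three kinds of method transparently.
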
beaver/cache.py
ADDED
@@ -0,0 +1,274 @@
+import os
+import functools
+import threading
+import time
+from typing import Optional, Any, Protocol, NamedTuple
+
+
+class CacheStats(NamedTuple):
+    """Holds performance metrics for a cache instance."""
+
+    hits: int
+    misses: int
+    invalidations: int
+    sets: int
+    pops: int
+
+    @property
+    def reads(self) -> int:
+        return self.hits + self.misses
+
+    @property
+    def operations(self) -> int:
+        return self.hits + self.misses + self.sets + self.pops
+
+    @property
+    def hit_rate(self) -> float:
+        """Returns the cache hit rate (0.0 to 1.0)."""
+        if self.reads == 0:
+            return 0.0
+
+        return self.hits / self.reads
+
+    @property
+    def invalidation_rate(self) -> float:
+        """Returns the rate of invalidations per operation (0.0 to 1.0)."""
+        if self.reads == 0:
+            return 0.0
+
+        return self.invalidations / self.operations
+
+
+class ICache(Protocol):
+    """Defines the public interface for all cache objects."""
+
+    def get(self, key: str) -> Optional[Any]: ...
+    def set(self, key: Any, value: Any): ...
+    def pop(self, key: str): ...
+    def invalidate(self): ...
+    def stats(self) -> CacheStats: ...
+    def touch(self): ...
+
+
+class DummyCache:
+    """A cache object that does nothing. Used when caching is disabled."""
+
+    _stats = CacheStats(hits=0, misses=0, invalidations=0, sets=0, pops=0)
+
+    def get(self, key: str) -> Optional[Any]:
+        return None
+
+    def set(self, key: str, value: Any):
+        pass
+
+    def pop(self, key: str):
+        pass
+
+    def invalidate(self):
+        pass
+
+    def stats(self) -> CacheStats:
+        return self._stats
+
+    @classmethod
+    def singleton(cls) -> ICache:
+        if not hasattr(cls, "__instance"):
+            cls.__instance = cls()
+
+        return cls.__instance
+
+    def touch(self):
+        pass
+
+
+class LocalCache:
+    """
+    A thread-local cache that invalidates based on a central,
+    database-backed version number, checking only once per interval.
+    """
+
+    def __init__(self, db, cache_namespace: str, check_interval: float):
+        from .types import IDatabase
+
+        self._db: IDatabase = db
+        self._data: dict[str, Any] = {}
+        self._lock = threading.Lock()
+
+        self._version_key: str = cache_namespace  # e.g., "list:tasks"
+        self._local_version: int = -1
+        self._last_check_time: float = 0.0
+        self._min_check_interval: float = check_interval
+
+        # Statistics
+        self._hits = 0
+        self._misses = 0
+        self._invalidations = 0
+        self._sets = 0
+        self._pops = 0
+        self._clears = 0
+
+    def _get_global_version(self) -> int:
+        """Reads the 'source of truth' version from the DB."""
+        # This is a raw, direct DB call to avoid circular dependencies
+        cursor = self._db.connection.cursor()
+        cursor.execute(
+            "SELECT version FROM beaver_manager_versions WHERE namespace = ?",
+            (self._version_key,),
+        )
+        result = cursor.fetchone()
+        return int(result[0]) if result else 0
+
+    def _check_and_invalidate(self):
+        """
+        Checks whether the cache is stale, but only hits the DB
+        once per check_interval.
+        """
+        now = time.time()
+
+        # --- 1. The hot path (pure in-memory check) ---
+        if (now - self._last_check_time) < self._min_check_interval:
+            return
+
+        # --- 2. The "coalesced" DB check ---
+        with self._lock:
+            # Double-check inside the lock in case another thread just ran this
+            if (time.time() - self._last_check_time) < self._min_check_interval:
+                return
+
+            global_version = self._get_global_version()
+            self._last_check_time = time.time()  # Reset timer
+
+            if global_version != self._local_version:
+                self._data.clear()
+                self._local_version = global_version
+                self._invalidations += 1
+
+    def get(self, key: str) -> Optional[Any]:
+        # This check is now extremely fast
+        self._check_and_invalidate()
+
+        with self._lock:
+            value = self._data.get(key)
+
+            if value is not None:
+                self._hits += 1
+                return value
+
+            self._misses += 1
+
+        return None
+
+    def set(self, key: str, value: Any):
+        with self._lock:
+            self._data[key] = value
+            self._sets += 1
+
+    def pop(self, key: str):
+        with self._lock:
+            self._data.pop(key, None)
+            self._pops += 1
+
+    def invalidate(self):
+        with self._lock:
+            self._data.clear()
+            self._local_version = 0  # Must force a re-check
+            self._invalidations += 1
+            self._last_check_time = 0.0
+
+    def touch(self):
+        """
+        Atomically increments the cache version in the native SQL table
+        and syncs the cache's local version to avoid self-invalidation.
+
+        Only call this when you make a change that should invalidate
+        other caches of the same namespace in other processes,
+        but keep this cache valid.
+        """
+        with self._lock:
+            new_version = 0
+
+            with self._db.connection:
+                # This is a single, atomic, native SQL operation.
+                cursor = self._db.connection.execute(
+                    """
+                    INSERT INTO beaver_manager_versions (namespace, version)
+                    VALUES (?, 1)
+                    ON CONFLICT(namespace) DO UPDATE SET
+                        version = version + 1
+                    RETURNING version;
+                    """,
+                    (self._version_key,),
+                )
+                new_version = cursor.fetchone()[0]
+
+            # Keep the cache in sync to avoid self-invalidation
+            self._last_check_time = time.time()
+            self._local_version = new_version
+
+    def stats(self) -> CacheStats:
+        return CacheStats(
+            hits=self._hits,
+            misses=self._misses,
+            invalidations=self._invalidations,
+            sets=self._sets,
+            pops=self._pops,
+        )
+
+    def __repr__(self) -> str:
+        return f"<LocalCache namespace='{self._version_key}', version={self._local_version}>"
+
+
+def cached(key):
+    """
+    Decorator for read methods.
+    - Generates a cache key by calling key on the arguments.
+    - If the key is None, bypasses the cache.
+    - If the key is in the cache, returns the cached value.
+    - If the key is not in the cache, runs the decorated function,
+      stores the result, and returns it.
+    """
+    from .manager import AsyncBeaverBase
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(self: AsyncBeaverBase, *args, **kwargs):
+            cache = self.cache
+            cache_key = key(*args, **kwargs)
+
+            if cache_key is None:
+                return func(self, *args, **kwargs)
+
+            if not self.locked:
+                cached_value = cache.get(cache_key)
+
+                if cached_value is not None:
+                    return cached_value  # HIT
+
+            result = func(self, *args, **kwargs)
+            cache.set(cache_key, result)
+
+            return result
+
+        return wrapper
+
+    return decorator
+
+
+def invalidates_cache(func):
+    """
+    Decorator for write methods that need to invalidate the cache.
+    - Runs the decorated function.
+    - Clears the cache even if an exception is raised.
+    """
+    from .manager import AsyncBeaverBase
+
+    @functools.wraps(func)
+    def wrapper(self: AsyncBeaverBase, *args, **kwargs):
+        try:
+            result = func(self, *args, **kwargs)
+        finally:
+            self.cache.invalidate()
+
+        return result
+
+    return wrapper
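
The cached/invalidates_cache decorators only require that self expose a cache attribute satisfying ICache and a locked flag, which is presumably what AsyncBeaverBase provides. A toy sketch of the read/write contract under that assumption (ToyStore is illustrative, not part of the package):

from beaver.cache import DummyCache, cached, invalidates_cache


class ToyStore:
    """Stand-in for AsyncBeaverBase: the decorators only touch .cache and .locked."""

    def __init__(self):
        self.cache = DummyCache.singleton()  # swap in a LocalCache for real caching
        self.locked = False
        self._rows = {"a": 1}

    @cached(key=lambda name: f"row:{name}")
    def read(self, name):
        return self._rows.get(name)

    @invalidates_cache
    def write(self, name, value):
        self._rows[name] = value


store = ToyStore()
print(store.read("a"))  # DummyCache always misses, so read() runs; set() is a no-op
store.write("a", 2)     # cache.invalidate() runs in a finally:, even on exceptions
print(store.read("a"))  # -> 2

Cross-process coherence comes from the beaver_manager_versions table: LocalCache.touch() bumps the shared version atomically, and every other LocalCache on the same namespace notices the new version on its next interval-gated _check_and_invalidate() and clears itself.
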