tuft 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tuft/__init__.py +5 -2
- tuft/__main__.py +7 -0
- tuft/auth.py +35 -0
- tuft/backend.py +254 -0
- tuft/backends/__init__.py +10 -0
- tuft/backends/base_backend.py +112 -0
- tuft/backends/hf_training_model.py +404 -0
- tuft/backends/sampling_backend.py +253 -0
- tuft/backends/training_backend.py +327 -0
- tuft/checkpoints.py +193 -0
- tuft/cli.py +124 -0
- tuft/config.py +123 -0
- tuft/exceptions.py +138 -0
- tuft/futures.py +431 -0
- tuft/loss_fn/__init__.py +48 -0
- tuft/loss_fn/cispo.py +40 -0
- tuft/loss_fn/cross_entropy.py +26 -0
- tuft/loss_fn/dro.py +37 -0
- tuft/loss_fn/importance_sampling.py +33 -0
- tuft/loss_fn/ppo.py +43 -0
- tuft/persistence/__init__.py +32 -0
- tuft/persistence/file_redis.py +268 -0
- tuft/persistence/redis_store.py +488 -0
- tuft/sampling_controller.py +368 -0
- tuft/server.py +720 -0
- tuft/state.py +352 -0
- tuft/telemetry/__init__.py +17 -0
- tuft/telemetry/metrics.py +335 -0
- tuft/telemetry/provider.py +198 -0
- tuft/telemetry/tracing.py +43 -0
- tuft/training_controller.py +728 -0
- tuft-0.1.2.dist-info/METADATA +633 -0
- tuft-0.1.2.dist-info/RECORD +36 -0
- {tuft-0.1.0.dist-info → tuft-0.1.2.dist-info}/WHEEL +1 -2
- tuft-0.1.2.dist-info/entry_points.txt +2 -0
- {tuft-0.1.0.dist-info → tuft-0.1.2.dist-info}/licenses/LICENSE +2 -2
- tuft-0.1.0.dist-info/METADATA +0 -77
- tuft-0.1.0.dist-info/RECORD +0 -6
- tuft-0.1.0.dist-info/top_level.txt +0 -1
|
@@ -0,0 +1,488 @@
|
|
|
1
|
+
"""Simple Redis persistence module for TuFT.
|
|
2
|
+
|
|
3
|
+
This module provides direct Redis-based persistence using redis-py.
|
|
4
|
+
Each data record is stored as a separate Redis key with JSON serialization.
|
|
5
|
+
|
|
6
|
+
Key Design:
|
|
7
|
+
- Top-level records: {namespace}::{type}::{id}
|
|
8
|
+
- Nested records: {namespace}::{type}::{parent_id}::{nested_type}::{nested_id}
|
|
9
|
+
|
|
10
|
+
Persistence Modes:
|
|
11
|
+
- disabled: No persistence, all data is in-memory only
|
|
12
|
+
- redis_url: Use external Redis server via URL
|
|
13
|
+
- file_redis: Use file-backed storage for tests and demos
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import logging
|
|
19
|
+
import os
|
|
20
|
+
import threading
|
|
21
|
+
import time
|
|
22
|
+
from dataclasses import dataclass
|
|
23
|
+
from enum import Enum
|
|
24
|
+
from pathlib import Path
|
|
25
|
+
from typing import Any, TypeVar
|
|
26
|
+
|
|
27
|
+
from pydantic import BaseModel
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
# Module-level logger, following the standard getLogger(__name__) pattern.
logger = logging.getLogger(__name__)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _get_tracer():
    """Return the tracer for this module.

    The telemetry package is imported inside the function body rather than
    at module load time to avoid a circular import.
    """
    from tuft.telemetry.tracing import get_tracer as _tracer_factory

    return _tracer_factory("tuft.redis_store")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _get_metrics():
    """Return the metrics registry.

    The telemetry package is imported inside the function body rather than
    at module load time to avoid a circular import.
    """
    from tuft.telemetry.metrics import get_metrics as _metrics_factory

    return _metrics_factory()
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# Generic record type: any Pydantic model saved/loaded by this module.
T = TypeVar("T", bound=BaseModel)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class PersistenceMode(str, Enum):
    """Persistence mode options.

    Inherits from ``str`` so members compare equal to their raw string
    values (e.g. ``PersistenceMode.DISABLED == "disabled"``), which makes
    them safe to serialize directly into config/JSON.
    """

    DISABLED = "disabled"  # No persistence
    REDIS_URL = "redis_url"  # Use external Redis server
    FILE_REDIS = "file_redis"  # Use file-backed storage for tests/demos
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# Default TTL values in seconds.
# Future records are short-lived bookkeeping entries; expire them after 1 day
# so completed/abandoned futures do not accumulate in Redis.
DEFAULT_FUTURE_TTL_SECONDS = 24 * 3600  # 1 day for future records (short-lived)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@dataclass
class PersistenceConfig:
    """Configuration for Redis persistence.

    Attributes:
        mode: Persistence mode - disabled, redis_url, or file_redis.
        redis_url: Redis server URL (only used when mode=redis_url).
        file_path: JSON file path (only used when mode=file_redis).
        namespace: Key namespace prefix prepended to every Redis key.
        future_ttl_seconds: TTL for future records in seconds. None means no expiry.
    """

    mode: PersistenceMode = PersistenceMode.DISABLED
    redis_url: str = "redis://localhost:6379/0"
    file_path: Path | None = None
    namespace: str = "tuft"
    future_ttl_seconds: int | None = DEFAULT_FUTURE_TTL_SECONDS  # Futures expire after 1 day

    @property
    def enabled(self) -> bool:
        """Check if persistence is enabled (any mode other than DISABLED)."""
        return self.mode != PersistenceMode.DISABLED

    @classmethod
    def disabled(cls, namespace: str = "tuft") -> "PersistenceConfig":
        """Create a disabled persistence config (in-memory only)."""
        return cls(mode=PersistenceMode.DISABLED, namespace=namespace)

    @classmethod
    def from_redis_url(
        cls,
        redis_url: str,
        namespace: str = "tuft",
        future_ttl_seconds: int | None = DEFAULT_FUTURE_TTL_SECONDS,
    ) -> "PersistenceConfig":
        """Create a config using an external Redis server."""
        return cls(
            mode=PersistenceMode.REDIS_URL,
            redis_url=redis_url,
            namespace=namespace,
            future_ttl_seconds=future_ttl_seconds,
        )

    @classmethod
    def from_file_redis(
        cls,
        file_path: Path | None = None,
        namespace: str = "tuft",
        future_ttl_seconds: int | None = DEFAULT_FUTURE_TTL_SECONDS,
    ) -> "PersistenceConfig":
        """Create a config using file-backed storage (tests and demos).

        When *file_path* is None, a default path is chosen by the store at
        connection time (see ``RedisStore._create_redis_client``).
        """
        return cls(
            mode=PersistenceMode.FILE_REDIS,
            file_path=file_path,
            namespace=namespace,
            future_ttl_seconds=future_ttl_seconds,
        )
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
class RedisStore:
    """Global Redis connection and operation manager.

    Singleton that owns the process-wide client for whichever backend the
    installed ``PersistenceConfig`` selects.

    Supports two modes:
    - External Redis server (via redis-py)
    - No persistence (disabled mode)

    The client is created lazily per process (see ``_get_redis``): a PID
    check detects ``fork()`` so a child process opens its own connection
    instead of reusing the parent's socket.
    """

    # Singleton slot; creation and reconnection are guarded by _lock.
    _instance: "RedisStore | None" = None
    _lock = threading.Lock()

    def __init__(self) -> None:
        self._redis: Any = None  # active client, or None when disconnected/disabled
        self._config: PersistenceConfig | None = None  # None until configure()
        self._pid: int | None = None  # PID that opened self._redis (fork detection)

    @classmethod
    def get_instance(cls) -> "RedisStore":
        """Return the process-wide singleton, creating it on first use.

        Double-checked locking: the common path takes no lock.
        """
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = cls()
        return cls._instance

    def configure(self, config: PersistenceConfig) -> None:
        """Install a new config and drop any open connection.

        NOTE(review): this swap is not guarded by ``_lock``; a concurrent
        ``_get_redis()`` could observe a partially-applied state — confirm
        configure() is only called during single-threaded startup.
        """
        self._config = config
        self._close_connections()
        self._pid = None

    def _close_connections(self) -> None:
        """Close all Redis connections (best-effort; errors are logged)."""
        if self._redis is not None:
            try:
                self._redis.close()
            except Exception:
                logger.exception("Failed to close Redis connection")
            self._redis = None

    def _get_redis(self) -> Any:
        """Return the backend client, (re)connecting lazily.

        Returns None when persistence is disabled or the client could not
        be created. Reconnects when called from a different PID than the
        one that opened the current connection (fork safety).
        """
        if self._config is None or not self._config.enabled:
            return None

        current_pid = os.getpid()
        if self._redis is None or self._pid != current_pid:
            with self._lock:
                # Re-check under the lock: another thread may have connected.
                if self._redis is None or self._pid != current_pid:
                    self._close_connections()

                    if self._config.mode in (PersistenceMode.REDIS_URL, PersistenceMode.FILE_REDIS):
                        logger.info("Redis connection begin")
                        self._redis = self._create_redis_client()

                    if self._redis is not None:
                        self._pid = current_pid
                        logger.info("Redis connection established")

        return self._redis

    def _create_redis_client(self) -> Any:
        """Create a client for the configured persistence backend.

        Returns None when no config is set or the redis package is missing.
        """
        if self._config is None:
            return None
        try:
            if self._config.mode == PersistenceMode.FILE_REDIS:
                from .file_redis import FileRedis

                # Default file location when the config did not specify one.
                file_path = self._config.file_path or (
                    Path.home() / ".cache" / "tuft" / "file_redis.json"
                )
                return FileRedis(file_path=file_path)
            # REDIS_URL mode: redis-py is an optional dependency, imported lazily.
            import redis

            return redis.Redis.from_url(self._config.redis_url, decode_responses=True)
        except ImportError:
            # Degrade gracefully: callers treat a None client as "disabled".
            logger.warning("redis package not installed, persistence will be disabled")
            return None

    @property
    def is_enabled(self) -> bool:
        """True when a config is installed and its mode is not DISABLED."""
        return self._config is not None and self._config.enabled

    @property
    def namespace(self) -> str:
        """Key namespace prefix; falls back to "tuft" before configure()."""
        return self._config.namespace if self._config else "tuft"

    @property
    def future_ttl(self) -> int | None:
        """Get the TTL for future records in seconds (None = no expiry)."""
        return self._config.future_ttl_seconds if self._config else DEFAULT_FUTURE_TTL_SECONDS

    def close(self) -> None:
        """Close the connection but keep the current config installed."""
        self._close_connections()
        self._pid = None

    def reset(self) -> None:
        """Close the connection and forget the config (back to unconfigured)."""
        self.close()
        self._config = None

    def build_key(self, *parts: str) -> str:
        """Build a Redis key from parts using :: as separator.

        Separator sequences inside a part are escaped so user-supplied IDs
        cannot collide with the key structure.
        """
        escaped = [p.replace("::", "__SEP__") for p in parts]
        return "::".join([self.namespace] + escaped)

    def set(self, key: str, value: str, ttl_seconds: int | None = None) -> bool:
        """SET *key* to *value* (SETEX when *ttl_seconds* is given).

        Returns True on success; False when persistence is disabled or on
        any Redis error (errors are logged, never raised to callers).
        """
        redis = self._get_redis()
        if redis is None:
            return False

        start_time = time.perf_counter()
        tracer = _get_tracer()

        try:
            with tracer.start_as_current_span("redis.SET") as span:
                span.set_attribute("db.system", "redis")
                span.set_attribute("db.operation", "SET")
                if ttl_seconds is not None:
                    redis.setex(key, ttl_seconds, value)
                else:
                    redis.set(key, value)

            # Record metrics; warn when the round trip is unusually slow.
            duration = time.perf_counter() - start_time
            metrics = _get_metrics()
            metrics.redis_operation_duration.record(duration, {"operation": "SET"})
            if duration > 0.1:
                logger.warning("Redis operation slow: SET (%.3fs)", duration)

            return True
        except Exception:
            logger.exception("Failed to set key %s in Redis", key)
            logger.error("Redis connection failed")
            return False

    def get(self, key: str) -> str | None:
        """GET *key*; returns None when disabled, missing, or on error."""
        redis = self._get_redis()
        if redis is None:
            return None

        start_time = time.perf_counter()
        tracer = _get_tracer()

        try:
            with tracer.start_as_current_span("redis.GET") as span:
                span.set_attribute("db.system", "redis")
                span.set_attribute("db.operation", "GET")
                result = redis.get(key)

            # Record metrics; warn when the round trip is unusually slow.
            duration = time.perf_counter() - start_time
            metrics = _get_metrics()
            metrics.redis_operation_duration.record(duration, {"operation": "GET"})
            if duration > 0.1:
                logger.warning("Redis operation slow: GET (%.3fs)", duration)

            return result
        except Exception:
            logger.exception("Failed to get key %s from Redis", key)
            logger.error("Redis connection failed")
            return None

    def delete(self, key: str) -> bool:
        """DEL *key*; returns True on success, False when disabled/on error."""
        redis = self._get_redis()
        if redis is None:
            return False

        start_time = time.perf_counter()
        tracer = _get_tracer()

        try:
            with tracer.start_as_current_span("redis.DEL") as span:
                span.set_attribute("db.system", "redis")
                span.set_attribute("db.operation", "DEL")
                redis.delete(key)

            # Record metrics
            duration = time.perf_counter() - start_time
            metrics = _get_metrics()
            metrics.redis_operation_duration.record(duration, {"operation": "DEL"})

            return True
        except Exception:
            logger.exception("Failed to delete key %s from Redis", key)
            return False

    def keys(self, pattern: str) -> list[str]:
        """Get all keys matching the pattern using SCAN for better performance.

        SCAN is used instead of KEYS to avoid blocking the Redis server on
        large keyspaces. Returns [] when disabled or on error.
        """
        redis = self._get_redis()
        if redis is None:
            return []

        start_time = time.perf_counter()
        tracer = _get_tracer()

        try:
            with tracer.start_as_current_span("redis.SCAN") as span:
                span.set_attribute("db.system", "redis")
                span.set_attribute("db.operation", "SCAN")
                result = list(redis.scan_iter(match=pattern))

            # Record metrics
            duration = time.perf_counter() - start_time
            metrics = _get_metrics()
            metrics.redis_operation_duration.record(duration, {"operation": "SCAN"})

            return result
        except Exception:
            logger.exception("Failed to scan keys with pattern %s from Redis", pattern)
            return []

    def delete_pattern(self, pattern: str) -> int:
        """Delete every key matching *pattern*; returns the number deleted.

        NOTE(review): SCAN + DEL is not atomic — keys created between the
        scan and the delete survive; confirm callers tolerate this.
        """
        redis = self._get_redis()
        if redis is None:
            return 0
        try:
            keys = list(redis.scan_iter(match=pattern))
            if keys:
                return redis.delete(*keys)
            return 0
        except Exception:
            logger.exception("Failed to delete keys with pattern %s from Redis", pattern)
            return 0

    def exists(self, key: str) -> bool:
        """Check whether *key* exists; False when disabled or on error."""
        redis = self._get_redis()
        if redis is None:
            return False
        try:
            return redis.exists(key) > 0
        except Exception:
            logger.exception("Failed to check existence of key %s in Redis", key)
            return False

    def pipeline(self) -> "RedisPipeline":
        """Create a pipeline for atomic batch operations.

        Usage:
            with store.pipeline() as pipe:
                pipe.set("key1", "value1")
                pipe.set("key2", "value2")
                # All operations are executed atomically on context exit
        """
        return RedisPipeline(self)
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
class RedisPipeline:
    """Batches Redis commands into one MULTI/EXEC transaction.

    Intended for use as a context manager: commands queued via :meth:`set`
    and :meth:`delete` are sent together when the ``with`` block exits
    without an exception. When persistence is disabled (no underlying
    client) every queued operation is a silent no-op.
    """

    def __init__(self, store: RedisStore) -> None:
        self._store = store
        self._redis = store._get_redis()
        # A missing client leaves _pipe as None, turning the pipeline into a no-op.
        self._pipe: Any = (
            self._redis.pipeline(transaction=True) if self._redis is not None else None
        )

    def __enter__(self) -> "RedisPipeline":
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        # Commit only on a clean exit; EXEC failures are logged, not raised.
        if exc_type is not None or self._pipe is None:
            return
        try:
            self._pipe.execute()
        except Exception:
            logger.exception("Failed to execute Redis pipeline")

    def set(self, key: str, value: str, ttl_seconds: int | None = None) -> "RedisPipeline":
        """Queue a SET (SETEX when a TTL is given). Returns self for chaining."""
        pipe = self._pipe
        if pipe is not None:
            if ttl_seconds is None:
                pipe.set(key, value)
            else:
                pipe.setex(key, ttl_seconds, value)
        return self

    def delete(self, key: str) -> "RedisPipeline":
        """Queue a DEL. Returns self for chaining."""
        pipe = self._pipe
        if pipe is not None:
            pipe.delete(key)
        return self
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
def save_record(key: str, record: BaseModel, ttl_seconds: int | None = None) -> bool:
    """Serialize a Pydantic model to JSON and store it under *key*.

    Args:
        key: Redis key to store the record under.
        record: Pydantic BaseModel instance to serialize and store.
        ttl_seconds: Optional TTL in seconds for the key. If None, no expiry is set.

    Returns:
        True if the record was saved successfully; False when persistence
        is disabled or serialization/storage fails.
    """
    store = RedisStore.get_instance()
    if not store.is_enabled:
        return False
    try:
        # Pydantic handles JSON serialization of the full model.
        return store.set(key, record.model_dump_json(), ttl_seconds=ttl_seconds)
    except Exception:
        logger.exception("Failed to save record with key %s to Redis", key)
        return False
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
def save_records_atomic(
    records: list[tuple[str, BaseModel]], ttl_seconds: int | None = None
) -> bool:
    """Write several model records through one Redis MULTI/EXEC transaction.

    Args:
        records: List of (key, record) tuples.
        ttl_seconds: Optional TTL in seconds for all keys. If None, no expiry is set.

    Returns:
        True when the records were queued and the pipeline context exited
        cleanly; False when persistence is disabled or serialization fails.
        NOTE(review): the pipeline context logs EXEC failures instead of
        raising, so True means "queued", not necessarily "committed" —
        confirm this is the intended contract.
    """
    store = RedisStore.get_instance()
    if not store.is_enabled:
        return False
    try:
        with store.pipeline() as pipe:
            for key, record in records:
                pipe.set(key, record.model_dump_json(), ttl_seconds=ttl_seconds)
        return True
    except Exception:
        logger.exception("Failed to save records atomically to Redis")
        return False
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
def load_record(key: str, target_class: type[T]) -> T | None:
    """Fetch *key* from Redis and deserialize it into *target_class*.

    Args:
        key: Redis key to load from.
        target_class: The Pydantic model class to deserialize into.

    Returns:
        The deserialized record, or None when persistence is disabled, the
        key is missing, or deserialization fails.
    """
    store = RedisStore.get_instance()
    if not store.is_enabled:
        return None
    try:
        raw = store.get(key)
        return None if raw is None else target_class.model_validate_json(raw)
    except Exception:
        logger.exception("Failed to load record with key %s from Redis", key)
        return None
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
def delete_record(key: str) -> bool:
    """Delete the record stored under *key*; False when persistence is disabled."""
    store = RedisStore.get_instance()
    # Short-circuit: skip the round trip entirely when disabled.
    return store.is_enabled and store.delete(key)
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def is_persistence_enabled() -> bool:
    """Report whether the global Redis store currently persists data."""
    store = RedisStore.get_instance()
    return store.is_enabled
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
def get_redis_store() -> RedisStore:
    """Accessor for the process-wide :class:`RedisStore` singleton."""
    return RedisStore.get_instance()
|