fastapi-redis-utils 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,14 @@
1
+ """
2
+ FastAPI Redis Utils
3
+
4
+ Async Redis manager with FastAPI integration, connection pooling and retry mechanism.
5
+ """
6
+
7
+ __version__ = "1.0.2"
8
+
9
+ from .dependency import create_redis_client_dependencies
10
+ from .manager import RedisManager
11
+ from .models import BaseResultModel
12
+ from .repository import BaseRepository
13
+
14
+ __all__ = ["RedisManager", "create_redis_client_dependencies", "BaseRepository", "BaseResultModel"]
@@ -0,0 +1,24 @@
1
+ from collections.abc import Awaitable, Callable
2
+
3
+ import redis.asyncio as redis
4
+
5
+ from .manager import RedisManager
6
+
7
+
8
def create_redis_client_dependencies(redis_manager: RedisManager) -> Callable[[], Awaitable[redis.Redis]]:
    """
    Build a FastAPI dependency that yields a validated Redis client.

    Args:
        redis_manager: The RedisManager whose client the dependency hands out.

    Returns:
        An async callable suitable for ``Depends(...)``; each call ensures
        the manager is connected before returning its Redis client.
    """

    async def get_redis_client() -> redis.Redis:
        """Ensure the connection is alive, then return the shared client."""
        # Lazily (re)connect if the manager has not connected yet.
        await redis_manager.ensure_connection()
        return redis_manager.get_client()

    return get_redis_client
@@ -0,0 +1,137 @@
1
+ import asyncio
2
+ import logging
3
+ from collections.abc import Callable
4
+ from typing import Any
5
+
6
+ import redis.asyncio as redis
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
class RedisManager:
    """Async Redis manager with connection pooling and retry mechanism."""

    def __init__(
        self,
        dsn: str,
        max_connections: int = 20,
        retry_attempts: int = 3,
        retry_delay: float = 1.0,
        socket_connect_timeout: int = 5,
        socket_timeout: int = 5,
    ):
        """
        Initialize Redis manager.

        Args:
            dsn: Redis connection DSN
            max_connections: Maximum number of connections in the pool
            retry_attempts: Number of retry attempts for connection
            retry_delay: Base delay between retry attempts (grows linearly)
            socket_connect_timeout: Socket connection timeout in seconds
            socket_timeout: Socket operation timeout in seconds
        """
        self.dsn = dsn
        self.max_connections = max_connections
        self.retry_attempts = retry_attempts
        self.retry_delay = retry_delay
        self.socket_connect_timeout = socket_connect_timeout
        self.socket_timeout = socket_timeout

        self.redis_client: redis.Redis | None = None
        self._connection_pool: redis.ConnectionPool | None = None
        self._is_connected: bool = False

    async def connect(self) -> None:
        """Create Redis connection with retry mechanism.

        Raises:
            ConnectionError: If all retry attempts fail; chains the last
                underlying error as ``__cause__``.
        """
        for attempt in range(self.retry_attempts):
            try:
                logger.info("Attempting to connect to Redis (attempt %d/%d)", attempt + 1, self.retry_attempts)

                self._connection_pool = redis.ConnectionPool.from_url(
                    self.dsn,
                    decode_responses=True,
                    max_connections=self.max_connections,
                    retry_on_timeout=True,
                    socket_connect_timeout=self.socket_connect_timeout,
                    socket_timeout=self.socket_timeout,
                )

                self.redis_client = redis.Redis(connection_pool=self._connection_pool)

                # PING validates that the server is actually reachable.
                await self.redis_client.ping()
                self._is_connected = True

                logger.info("Successfully connected to Redis")
                return

            except Exception as e:
                logger.error("Failed to connect to Redis (attempt %d): %s", attempt + 1, e)
                if attempt < self.retry_attempts - 1:
                    # Linear backoff: delay grows with each failed attempt.
                    await asyncio.sleep(self.retry_delay * (attempt + 1))
                else:
                    self._is_connected = False
                    # Chain the original error so tracebacks stay useful.
                    raise ConnectionError(
                        f"Failed to connect to Redis after {self.retry_attempts} attempts: {e}"
                    ) from e

    async def ensure_connection(self) -> None:
        """Ensure Redis connection is available, connecting if needed."""
        if not self._is_connected:
            await self.connect()

    async def close(self) -> None:
        """Close Redis connection and cleanup pool resources."""
        self._is_connected = False
        if self.redis_client:
            try:
                await self.redis_client.aclose()
                logger.info("Redis connection closed")
            except Exception as e:
                logger.error("Error closing Redis connection: %s", e)
            finally:
                self.redis_client = None

        if self._connection_pool:
            try:
                await self._connection_pool.disconnect()
                logger.info("Redis connection pool closed")
            except Exception as e:
                logger.error("Error closing Redis connection pool: %s", e)
            finally:
                self._connection_pool = None

    async def health_check(self) -> bool:
        """
        Check if Redis connection is healthy.

        This method can be used for external monitoring/metrics collection.

        Returns:
            True when a PING succeeds, False otherwise (also flips the
            internal connected flag so the next use reconnects).
        """
        if not self.redis_client or not self._is_connected:
            return False

        try:
            await self.redis_client.ping()
            return True
        except Exception:
            logger.exception("Redis health check failed")
            self._is_connected = False
            return False

    def get_client(self) -> redis.Redis:
        """Get Redis client instance with connection validation.

        Raises:
            RuntimeError: If ``connect()`` has not succeeded yet.
        """
        if not self.redis_client or not self._is_connected:
            raise RuntimeError("Redis client not initialized or disconnected. Call connect() first.")
        return self.redis_client

    async def execute_with_retry(self, operation: Callable, *args: Any, **kwargs: Any) -> Any:
        """Execute Redis operation with retry mechanism.

        Args:
            operation: Async callable performing the Redis work.
            *args: Positional arguments forwarded to the operation.
            **kwargs: Keyword arguments forwarded to the operation.

        Returns:
            Whatever the operation returns.

        Raises:
            Exception: The last error if every attempt fails.
        """
        for attempt in range(self.retry_attempts):
            try:
                if not self._is_connected:
                    await self.connect()
                return await operation(*args, **kwargs)
            except Exception as e:
                # Transport-level failures mean the pool may be dead: mark
                # disconnected so the next attempt reconnects instead of
                # reusing broken sockets. Other errors keep the connection.
                if isinstance(e, (redis.ConnectionError, redis.TimeoutError)):
                    self._is_connected = False
                logger.warning("Redis operation failed (attempt %d): %s", attempt + 1, e)
                if attempt < self.retry_attempts - 1:
                    await asyncio.sleep(self.retry_delay * (attempt + 1))
                else:
                    raise
@@ -0,0 +1,11 @@
1
+ import logging
2
+ from abc import ABC, abstractmethod
3
+
4
+ from pydantic import BaseModel
5
+
6
+ logger = logging.getLogger(__name__)
7
+
8
+
9
class BaseResultModel(ABC, BaseModel):
    """Abstract base for repository result schemas.

    Result models returned by the repository layer must subclass this and
    implement ``set_id`` so the repository can attach the record's storage
    key to the instance after a read or write.
    """

    @abstractmethod
    def set_id(self, id: str) -> None:
        """Attach the record's storage key to this model instance."""
        ...
@@ -0,0 +1,180 @@
1
+ import logging
2
+ from typing import Generic, TypeVar
3
+
4
+ from pydantic import BaseModel, ValidationError
5
+
6
+ from .manager import RedisManager
7
+ from .models import BaseResultModel
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
+ T = TypeVar("T", bound=BaseModel)
13
+ CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
14
+ UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
15
+ ResultSchemaType = TypeVar("ResultSchemaType", bound=BaseResultModel)
16
+
17
+
18
class BaseRepository(Generic[CreateSchemaType, UpdateSchemaType, ResultSchemaType]):
    """Generic Redis-backed CRUD repository for Pydantic models.

    Records are serialized to JSON and stored under ``<key_prefix><key>``.
    Create and update operations use separate schemas; callers receive
    ``ResultSchemaType`` instances carrying the record key via ``set_id``.
    """

    def __init__(
        self,
        redis_manager: RedisManager,
        create_model: type[CreateSchemaType],
        update_model: type[UpdateSchemaType],
        result_model: type[ResultSchemaType],
        key_prefix: str | None = None,
        default_ttl: int | None = None,
    ):
        """
        Args:
            redis_manager: Manager providing the shared Redis client.
            create_model: Schema used for storage and create operations.
            update_model: Schema used for partial updates.
            result_model: Schema returned to callers (implements ``set_id``).
            key_prefix: Key namespace; defaults to "<create model name>:".
            default_ttl: TTL in seconds applied when a call passes no TTL.
        """
        self.redis_manager = redis_manager
        self.create_model = create_model
        self.update_model = update_model
        self.result_model = result_model
        self.key_prefix = key_prefix or f"{self.create_model.__name__.lower()}:"
        self.default_ttl = default_ttl

    def _make_key(self, key: str) -> str:
        """Return the fully prefixed Redis key."""
        return f"{self.key_prefix}{key}"

    def _serialize(self, data: T) -> str:
        """Serialize a Pydantic model to JSON.

        Raises:
            ValueError: If the model cannot be serialized.
        """
        try:
            return data.model_dump_json()
        except Exception as e:
            logger.error("Failed to serialize model: %s", e)
            raise ValueError("Failed to serialize model") from e

    def _deserialize(self, data: str, model: type[T]) -> T:
        """Parse stored JSON back into *model*.

        Raises:
            ValueError: If validation or parsing fails.
        """
        try:
            return model.model_validate_json(data)
        except ValidationError as e:
            logger.error("Failed to deserialize model: %s", e)
            raise ValueError("Failed to deserialize model") from e
        except Exception as e:
            logger.error("Unexpected error deserializing model: %s", e)
            raise ValueError("Failed to deserialize model") from e

    def _create_result_model(self, data: CreateSchemaType, key: str) -> ResultSchemaType:
        """Build the result schema from stored data and attach the key."""
        result_model = self.result_model(**data.model_dump())
        result_model.set_id(key)
        return result_model

    async def _store(self, full_key: str, serialized_data: str, ttl: int | None) -> None:
        """Write a serialized payload, honoring the effective TTL."""
        # NOTE: get_client() is synchronous; earlier versions awaited it,
        # which only worked because redis.Redis implements __await__.
        redis_client = self.redis_manager.get_client()
        effective_ttl = ttl if ttl is not None else self.default_ttl
        if effective_ttl is not None:
            await redis_client.setex(full_key, effective_ttl, serialized_data)
        else:
            await redis_client.set(full_key, serialized_data)

    async def create(self, key: str, data: CreateSchemaType, ttl: int | None = None) -> ResultSchemaType:
        """Create (or overwrite) a record under *key* and return the result model."""
        full_key = self._make_key(key)
        await self._store(full_key, self._serialize(data), ttl)
        logger.debug("Created record with key: %s", full_key)
        return self._create_result_model(data, key)

    async def get(self, key: str) -> ResultSchemaType | None:
        """Fetch a record by key; returns None when missing or unreadable."""
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        data = await redis_client.get(full_key)
        if data is None:
            return None

        try:
            stored_model = self._deserialize(data, self.create_model)
            return self._create_result_model(stored_model, key)
        except ValueError as e:
            logger.error("Failed to deserialize data for key %s: %s", full_key, e)
            return None

    async def update(
        self,
        key: str,
        data: UpdateSchemaType,
        ttl: int | None = None,
    ) -> ResultSchemaType | None:
        """Partially update a record; only fields set on *data* change.

        Returns:
            The updated result model, or None when the key does not exist
            or the stored payload cannot be processed.
        """
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        existing_data = await redis_client.get(full_key)
        if existing_data is None:
            return None

        try:
            existing_model = self._deserialize(existing_data, self.create_model)
            # exclude_unset keeps the partial-update contract: unset fields
            # (including explicit-None defaults) leave stored values alone.
            merged = existing_model.model_dump()
            merged.update(data.model_dump(exclude_unset=True))
            updated_model = self.create_model(**merged)
            await self._store(full_key, self._serialize(updated_model), ttl)
            logger.debug("Updated record with key: %s", full_key)
            return self._create_result_model(updated_model, key)
        except ValueError as e:
            logger.error("Failed to update data for key %s: %s", full_key, e)
            return None

    async def delete(self, key: str) -> bool:
        """Delete a record; returns True if something was removed."""
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        deleted: int = await redis_client.delete(full_key)
        logger.debug("Deleted record with key: %s", full_key)
        return deleted > 0

    async def exists(self, key: str) -> bool:
        """Return True when a record exists under *key*."""
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        return bool(await redis_client.exists(full_key))

    async def list(self, pattern: str = "*", limit: int | None = None) -> list[ResultSchemaType]:
        """List records whose keys match *pattern* (within this prefix).

        Args:
            pattern: Glob pattern appended to the key prefix.
            limit: Maximum number of records to return; 0 returns none.
        """
        redis_client = self.redis_manager.get_client()
        full_pattern = f"{self.key_prefix}{pattern}"
        # NOTE: KEYS scans the whole keyspace; acceptable for small data
        # sets, consider SCAN for large production deployments.
        keys = await redis_client.keys(full_pattern)
        if limit is not None:  # explicit None-check so limit=0 means "none"
            keys = keys[:limit]

        if not keys:
            return []

        # redis-py asyncio pipelines queue commands synchronously; only
        # execute() is awaited (the previous per-command awaits relied on
        # Pipeline.__await__ side effects).
        pipeline = redis_client.pipeline()
        for full_key in keys:
            pipeline.get(full_key)
        values = await pipeline.execute()

        result: list[ResultSchemaType] = []
        for full_key, value in zip(keys, values, strict=False):
            if value is None:
                continue
            clean_key = full_key.decode() if isinstance(full_key, bytes) else full_key
            # Strip the namespace so ids match the keys used by get()/create().
            clean_key = clean_key.removeprefix(self.key_prefix)
            try:
                stored_model = self._deserialize(value, self.create_model)
                result.append(self._create_result_model(stored_model, clean_key))
            except ValueError as e:
                logger.warning("Failed to deserialize data for key %s: %s", clean_key, e)
        return result

    async def count(self, pattern: str = "*") -> int:
        """Count records whose keys match *pattern* within this prefix."""
        redis_client = self.redis_manager.get_client()
        full_pattern = f"{self.key_prefix}{pattern}"
        keys = await redis_client.keys(full_pattern)
        return len(keys)

    async def set_ttl(self, key: str, ttl: int) -> bool:
        """Set a TTL on an existing record; returns True on success."""
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        return bool(await redis_client.expire(full_key, ttl))

    async def get_ttl(self, key: str) -> int | None:
        """Return the remaining TTL in seconds.

        Returns:
            None when the key does not exist (Redis TTL == -2); -1 is
            passed through, meaning the key exists without an expiry.
        """
        redis_client = self.redis_manager.get_client()
        full_key = self._make_key(key)
        ttl = await redis_client.ttl(full_key)
        return ttl if ttl != -2 else None

    async def clear(self, pattern: str = "*") -> int:
        """Delete all records matching *pattern*; returns the delete count."""
        redis_client = self.redis_manager.get_client()
        full_pattern = f"{self.key_prefix}{pattern}"
        keys = await redis_client.keys(full_pattern)
        if not keys:
            return 0
        deleted: int = await redis_client.delete(*keys)
        logger.info("Cleared %d records", deleted)
        return deleted
@@ -0,0 +1,394 @@
1
+ Metadata-Version: 2.4
2
+ Name: fastapi-redis-utils
3
+ Version: 1.0.2
4
+ Summary: Async Redis manager with FastAPI integration, connection pooling and retry mechanism
5
+ Project-URL: Homepage, https://github.com/serafinovsky/fastapi-redis-utils
6
+ Project-URL: Documentation, https://github.com/serafinovsky/fastapi-redis-utils#readme
7
+ Project-URL: Repository, https://github.com/serafinovsky/fastapi-redis-utils
8
+ Project-URL: Issues, https://github.com/serafinovsky/fastapi-redis-utils/issues
9
+ Author-email: Your Name <your.email@example.com>
10
+ License: MIT
11
+ License-File: LICENSE
12
+ Keywords: async,connection-pool,dependency-injection,fastapi,redis,utils
13
+ Classifier: Development Status :: 5 - Production/Stable
14
+ Classifier: Framework :: FastAPI
15
+ Classifier: Intended Audience :: Developers
16
+ Classifier: License :: OSI Approved :: MIT License
17
+ Classifier: Operating System :: OS Independent
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
22
+ Classifier: Topic :: Database
23
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
24
+ Requires-Python: >=3.11
25
+ Requires-Dist: pydantic>=2.11.7
26
+ Requires-Dist: redis>=6.2.0
27
+ Description-Content-Type: text/markdown
28
+
29
+ # FastAPI Redis Utils
30
+
31
+ Async Redis manager with FastAPI integration, connection pooling and retry mechanism.
32
+
33
+ ## Features
34
+
35
+ - 🔄 **Async-first** - Full async/await support
36
+ - 🏊 **Connection pooling** - Efficient connection management
37
+ - 🔁 **Retry mechanism** - Automatic retries on failures
38
+ - ⚡ **FastAPI integration** - Ready-to-use FastAPI dependencies
39
+ - 🏥 **Health check** - Built-in connection status monitoring
40
+ - 🛡️ **Type safety** - Full type hints support
41
+ - 📦 **BaseRepository** - Base repository class with Pydantic model support
42
+
43
+ ## Documentation
44
+
45
+ - 📖 **[Usage Guide](USAGE.md)** - Detailed usage examples and advanced features
46
+ - 🚀 **[FastAPI Integration Example](examples/fastapi_integration.py)** - Complete FastAPI application with Redis integration
47
+
48
+ ## Installation
49
+
50
+ ### From PyPI
51
+
52
+ ```bash
53
+ uv add fastapi-redis-utils
54
+ ```
55
+
56
+ ### From Git repository
57
+
58
+ ```bash
59
+ uv add git+https://github.com/serafinovsky/fastapi-redis-utils.git
60
+ ```
61
+
62
+ ### For development
63
+
64
+ ```bash
65
+ git clone https://github.com/serafinovsky/fastapi-redis-utils.git
66
+ cd fastapi-redis-utils
67
+ uv sync --dev
68
+ ```
69
+
70
+ ## Quick Start
71
+
72
+ ### Basic Usage
73
+
74
+ ```python
75
+ import asyncio
76
+ from fastapi_redis_utils import RedisManager
77
+
78
+ async def main():
79
+ # Create manager
80
+ redis_manager = RedisManager(
81
+ dsn="redis://localhost:6379",
82
+ max_connections=20,
83
+ retry_attempts=3
84
+ )
85
+
86
+ # Connect
87
+ await redis_manager.connect()
88
+
89
+ # Use
90
+ client = redis_manager.get_client()
91
+ await client.set("key", "value")
92
+ value = await client.get("key")
93
+
94
+ # Close
95
+ await redis_manager.close()
96
+
97
+ asyncio.run(main())
98
+ ```
99
+
100
+ ### FastAPI Integration
101
+
102
+ ```python
103
+ from fastapi import FastAPI, Depends
104
+ from fastapi_redis_utils import RedisManager, create_redis_client_dependencies
105
+ import redis.asyncio as redis
106
+
107
+ app = FastAPI()
108
+
109
+ # Create Redis manager
110
+ redis_manager = RedisManager(
111
+ dsn="redis://localhost:6379"
112
+ )
113
+
114
+ # Create FastAPI dependency
115
+ get_redis_client = create_redis_client_dependencies(redis_manager)
116
+
117
+ @app.on_event("startup")
118
+ async def startup_event():
119
+ """Connect to Redis on application startup"""
120
+ await redis_manager.connect()
121
+
122
+ @app.on_event("shutdown")
123
+ async def shutdown_event():
124
+ """Close connection on application shutdown"""
125
+ await redis_manager.close()
126
+
127
+ @app.get("/cache/{key}")
128
+ async def get_cached_data(key: str, redis_client: redis.Redis = Depends(get_redis_client)):
129
+ """Get data from cache"""
130
+ value = await redis_client.get(key)
131
+ return {"key": key, "value": value}
132
+
133
+ @app.post("/cache/{key}")
134
+ async def set_cached_data(
135
+ key: str,
136
+ value: str,
137
+ redis_client: redis.Redis = Depends(get_redis_client)
138
+ ):
139
+ """Save data to cache"""
140
+ await redis_client.set(key, value)
141
+ return {"key": key, "value": value, "status": "saved"}
142
+
143
+ @app.get("/health")
144
+ async def health_check():
145
+ """Check Redis connection status"""
146
+ is_healthy = await redis_manager.health_check()
147
+ return {"redis_healthy": is_healthy}
148
+ ```
149
+
150
+ ### Using BaseRepository with Separate Create and Update Schemas
151
+
152
+ ```python
153
+ import uuid
154
+ from uuid import UUID
155
+ from fastapi import HTTPException, status
156
+ from pydantic import BaseModel
157
+ from typing import Optional
158
+ from datetime import datetime
159
+ from fastapi_redis_utils import BaseRepository, RedisManager, BaseResultModel
160
+
161
+ class CreateDemoSchema(BaseModel):
162
+ field1: str
163
+ field2: str
164
+
165
+
166
+ class UpdateDemoSchema(BaseModel):
167
+ field1: str | None = None
168
+ field2: str | None = None
169
+
170
+
171
+ class DemoSchema(BaseResultModel):
172
+ id: str | None = None
173
+ field1: str
174
+ field2: str
175
+
176
+ def set_id(self, id: str) -> None:
177
+ self.id = id
178
+
179
+
180
+ class DemoRepository(BaseRepository[CreateDemoSchema, UpdateDemoSchema, DemoSchema]):
181
+ pass
182
+
183
+
184
+ demo_crud = DemoRepository(redis_manager, CreateDemoSchema, UpdateDemoSchema, DemoSchema)
185
+
186
+
187
+ @app.post("/repo/", response_model=DemoSchema, status_code=status.HTTP_201_CREATED)
188
+ async def create_demo(demo_model: CreateDemoSchema) -> DemoSchema:
189
+ """Create a new demo record."""
190
+ demo_id = str(uuid.uuid4())
191
+ return await demo_crud.create(demo_id, demo_model)
192
+
193
+
194
+ @app.get("/repo/{demo_id}", response_model=DemoSchema)
195
+ async def get_demo(demo_id: UUID) -> DemoSchema:
196
+ """Get a demo record by ID."""
197
+ demo = await demo_crud.get(str(demo_id))
198
+ if demo is None:
199
+ raise HTTPException(
200
+ status_code=status.HTTP_404_NOT_FOUND,
201
+ detail=f"Demo record with ID '{demo_id}' not found",
202
+ )
203
+
204
+ return demo
205
+
206
+
207
+ @app.get("/repo/", response_model=list[DemoSchema])
208
+ async def list_demos(limit: int = 100) -> list[DemoSchema]:
209
+ """List all demo records"""
210
+ return await demo_crud.list(limit=limit)
211
+
212
+
213
+ @app.put("/repo/{demo_id}", response_model=DemoSchema)
214
+ async def update_demo(demo_id: UUID, demo_update: UpdateDemoSchema) -> DemoSchema:
215
+ """Update a demo record."""
216
+ updated_demo = await demo_crud.update(str(demo_id), demo_update)
217
+ if updated_demo is None:
218
+ raise HTTPException(
219
+ status_code=status.HTTP_404_NOT_FOUND,
220
+ detail=f"Demo record with ID '{demo_id}' not found",
221
+ )
222
+ return updated_demo
223
+
224
+
225
+ @app.delete("/repo/{demo_id}")
226
+ async def delete_demo(demo_id: UUID) -> dict[str, UUID]:
227
+ """Delete a demo record."""
228
+ deleted = await demo_crud.delete(str(demo_id))
229
+ if not deleted:
230
+ raise HTTPException(
231
+ status_code=status.HTTP_404_NOT_FOUND,
232
+ detail=f"Demo record with ID '{demo_id}' not found",
233
+ )
234
+ return {"id": demo_id}
235
+
236
+
237
+ @app.get("/repo/{demo_id}/exists")
238
+ async def check_demo_exists(demo_id: UUID) -> dict[str, UUID | bool]:
239
+ """Check if a demo record exists."""
240
+ exists = await demo_crud.exists(str(demo_id))
241
+ return {"id": demo_id, "exists": exists}
242
+ ```
243
+
244
+ ### Executing Operations with Retry
245
+
246
+ ```python
247
+ async def complex_operation():
248
+ async def operation():
249
+ client = redis_manager.get_client()
250
+ # Complex Redis operation
251
+ result = await client.execute_command("COMPLEX_COMMAND")
252
+ return result
253
+
254
+ # Automatic retries on failures
255
+ result = await redis_manager.execute_with_retry(operation)
256
+ return result
257
+ ```
258
+
259
+ ## Configuration
260
+
261
+ ### RedisManager Parameters
262
+
263
+ | Parameter | Type | Default | Description |
264
+ | ------------------------ | ----- | ------- | ------------------------------------- |
265
+ | `dsn` | str | `-` | DSN for Redis connection |
266
+ | `max_connections` | int | `20` | Maximum number of connections in pool |
267
+ | `retry_attempts` | int | `3` | Number of reconnection attempts |
268
+ | `retry_delay` | float | `1.0` | Base delay between attempts (seconds) |
269
+ | `socket_connect_timeout` | int | `5` | Socket connection timeout (seconds) |
270
+ | `socket_timeout` | int | `5` | Socket operation timeout (seconds) |
271
+
272
+ ## API Reference
273
+
274
+ ### RedisManager
275
+
276
+ Main class for managing Redis connections.
277
+
278
+ #### Methods
279
+
280
+ - `connect()` - Connect to Redis with retry mechanism
281
+ - `ensure_connection()` - Ensure connection availability
282
+ - `close()` - Close connection and cleanup resources
283
+ - `health_check()` - Check connection status
284
+ - `get_client()` - Get Redis client
285
+ - `execute_with_retry()` - Execute operations with retry
286
+
287
+ ### create_redis_client_dependencies
288
+
289
+ Creates FastAPI dependency for getting Redis client.
290
+
291
+ ### BaseRepository
292
+
293
+ Base repository class for working with Pydantic models in Redis. Supports separate schemas for create, update, and result operations with partial updates.
294
+
295
+ #### Generic Parameters
296
+
297
+ - `CreateSchemaType` - Pydantic model for create operations
298
+ - `UpdateSchemaType` - Pydantic model for update operations (all fields optional)
299
+ - `ResultSchemaType` - Pydantic model for result operations (must inherit from BaseResultModel)
300
+
301
+ #### Core Methods
302
+
303
+ - `create(key, data: CreateSchemaType, ttl=None)` - Create record
304
+ - `get(key)` - Get record (returns ResultSchemaType)
305
+ - `update(key, data: UpdateSchemaType, ttl=None)` - Update record with partial update (only set fields)
306
+ - `delete(key)` - Delete record
307
+ - `exists(key)` - Check record existence
308
+ - `list(pattern="*", limit=None)` - Get list of records
309
+ - `count(pattern="*")` - Count records
310
+ - `set_ttl(key, ttl)` - Set TTL
311
+ - `get_ttl(key)` - Get TTL
312
+ - `clear(pattern="*")` - Clear records
313
+
314
+ #### Partial Update Feature
315
+
316
+ The `update` method performs partial updates - only fields that are set in the update schema will be modified. Fields with `None` values are ignored.
317
+
318
+ ## Development
319
+
320
+ ### Install development dependencies
321
+
322
+ ```bash
323
+ uv sync --dev
324
+ ```
325
+
326
+ ### Run tests
327
+
328
+ ```bash
329
+ uv run pytest
330
+ ```
331
+
332
+ ### Code checks
333
+
334
+ ```bash
335
+ uv run ruff check .
336
+ uv run mypy .
337
+ ```
338
+
339
+ ### Build package
340
+
341
+ ```bash
342
+ uv build
343
+ ```
344
+
345
+ ### Makefile Commands
346
+
347
+ The project includes convenient Makefile commands for development:
348
+
349
+ ```bash
350
+ # Main commands
351
+ make install # Install development dependencies
352
+ make test # Run tests
353
+ make lint # Check code with linters
354
+ make format # Format code
355
+ make build # Build package
356
+ make clean # Clean temporary files
357
+
358
+ # Version management
359
+ make version # Show current version
360
+
361
+ # Publishing
362
+ make publish # Create and push git tag with current version
363
+ make publish-dry-run # Show what would be done without creating tag
364
+ make release # Full release: build, test, tag and push
365
+
366
+ # Additional commands
367
+ make tags # List all git tags
368
+ make check # Full pre-commit check
369
+ make example-fastapi # Run FastAPI example
370
+ ```
371
+
372
+ ### Release Workflow
373
+
374
+ 1. Update version in `fastapi_redis_utils/__init__.py`
375
+ 2. Run full release: `make release`
376
+ 3. Or step by step:
377
+
378
+ ```bash
379
+ make test # Run tests
380
+ make build # Build package
381
+ make publish # Create and push tag
382
+ ```
383
+
384
+ ## License
385
+
386
+ MIT License - see [LICENSE](LICENSE) file for details.
387
+
388
+ ## Contributing
389
+
390
+ 1. Fork the repository
391
+ 2. Create your feature branch (`git checkout -b feature/amazing-feature`)
392
+ 3. Commit your changes (`git commit -m 'Add amazing feature'`)
393
+ 4. Push to the branch (`git push origin feature/amazing-feature`)
394
+ 5. Open a Pull Request
@@ -0,0 +1,9 @@
1
+ fastapi_redis_utils/__init__.py,sha256=nGOleNFxSV-3zpiWSxHyGJPuDyC16IxciBSlQBw2zjM,406
2
+ fastapi_redis_utils/dependency.py,sha256=8OW5BMNUew3-54T3GyC_d-n8wDpL606yMxHlqVNEaJ0,642
3
+ fastapi_redis_utils/manager.py,sha256=u2BLJm3MBlpXgzr2JGqfqWW0xEZklIZQbxH2HNXASmo,5158
4
+ fastapi_redis_utils/models.py,sha256=pCfaYUz6l9FTiMlwx7ixaSLZszxQrQdrp-rKhvzLhb8,225
5
+ fastapi_redis_utils/repository.py,sha256=pj0YIxhDfJEjgmLPJNbfxFnT6maHzH96BgGXMciG4VA,7295
6
+ fastapi_redis_utils-1.0.2.dist-info/METADATA,sha256=jKmFx_Hb5OIyrbGB2Akej4AoaiQJ21tpjaHvH6SIOic,11382
7
+ fastapi_redis_utils-1.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
8
+ fastapi_redis_utils-1.0.2.dist-info/licenses/LICENSE,sha256=KvgfMbLqWT7Xf9NBazC5Ip8y_llTK4_eb5rjUzuUKSo,1081
9
+ fastapi_redis_utils-1.0.2.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.27.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 FastAPI Redis Connection
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.