ipulse-shared-core-ftredge 12.0.1__tar.gz → 14.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- {ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-14.0.1}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/setup.py +1 -1
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/__init__.py +1 -1
- ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/cache/__init__.py +4 -0
- ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py +312 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +28 -40
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +1 -2
- ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/services/__init__.py +14 -0
- ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +169 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/credit_service.py +1 -1
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +6 -1
- ipulse_shared_core_ftredge-14.0.1/tests/test_cache_aware_service.py +233 -0
- ipulse_shared_core_ftredge-14.0.1/tests/test_shared_cache.py +146 -0
- ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/services/__init__.py +0 -6
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/README.md +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/pyproject.toml +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/setup.cfg +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
- {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 12.0.1
+Version: 14.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='ipulse_shared_core_ftredge',
-    version='12.0.1',
+    version='14.0.1',
     package_dir={'': 'src'}, # Specify the source directory
     packages=find_packages(where='src'), # Look for packages in 'src'
     install_requires=[
src/ipulse_shared_core_ftredge/cache/shared_cache.py (new file)
@@ -0,0 +1,312 @@
+"""Module for shared caching functionality that can be used across microservices."""
+import os
+import time
+import logging
+import traceback
+import inspect
+import asyncio
+import threading
+from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
+
+T = TypeVar('T')
+
+class SharedCache(Generic[T]):
+    """
+    Generic shared cache implementation that can be used across services.
+
+    Attributes:
+        name: The name of the cache for logging and identification.
+        ttl: Time-to-live in seconds for cached items.
+        enabled: Whether the cache is enabled.
+        logger: Logger for cache operations.
+        _cache: Dictionary holding cached values.
+        _timestamps: Dictionary holding timestamps for each cached item.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        ttl: float,
+        enabled: bool = True,
+        logger: Optional[logging.Logger] = None
+    ):
+        """Initialize the cache with name, TTL and enabled state."""
+        self.name = name
+        self.ttl = ttl
+        self.enabled = enabled
+        self.logger = logger or logging.getLogger(__name__)
+        self._cache: Dict[str, T] = {}
+        self._timestamps: Dict[str, float] = {}
+
+        # Thread-safe attributes
+        self.lock = threading.Lock()
+        self.hits = 0
+        self.misses = 0
+        self.sets = 0
+        self.evictions = 0
+
+        self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
+
+    def get(self, key: str) -> Optional[T]:
+        """
+        Get a value from the cache if it exists and hasn't expired.
+
+        Args:
+            key: The cache key to retrieve.
+
+        Returns:
+            The cached value if found and valid, None otherwise.
+        """
+        if not self.enabled:
+            return None
+
+        with self.lock:
+            try:
+                if key in self._cache:
+                    timestamp = self._timestamps.get(key, 0)
+                    if time.time() - timestamp < self.ttl:
+                        self.hits += 1
+                        self.logger.debug(f"Cache hit for {key} in {self.name}")
+                        return self._cache[key]
+                    else:
+                        # Expired item, remove it
+                        self.invalidate(key)
+                        self.logger.debug(f"Cache expired for {key} in {self.name}")
+                else:
+                    self.misses += 1
+            except Exception as e:
+                self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+
+        return None
+
+    def set(self, key: str, value: T) -> None:
+        """
+        Set a value in the cache.
+
+        Args:
+            key: The cache key to set.
+            value: The value to cache.
+        """
+        if not self.enabled:
+            return
+
+        with self.lock:
+            try:
+                if len(self._cache) >= 1000 and key not in self._cache:
+                    # Basic LRU-like eviction: remove the first item found (not true LRU)
+                    try:
+                        oldest_key = next(iter(self._cache))
+                        self.invalidate(oldest_key)
+                        self.evictions += 1
+                    except StopIteration:
+                        # Cache was empty, which shouldn't happen if len >= max_size > 0
+                        pass # Or log an error
+
+                self._cache[key] = value
+                self._timestamps[key] = time.time()
+                self.sets += 1
+                self.logger.debug(f"Cached item {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+
+    def invalidate(self, key: str) -> None:
+        """
+        Remove a specific key from the cache.
+
+        Args:
+            key: The cache key to invalidate.
+        """
+        with self.lock:
+            try:
+                self._cache.pop(key, None)
+                self._timestamps.pop(key, None)
+                self.evictions += 1
+                self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+
+    def invalidate_all(self) -> None:
+        """Clear all cached items."""
+        with self.lock:
+            try:
+                cache_size = len(self._cache)
+                self._cache.clear()
+                self._timestamps.clear()
+                self.evictions += cache_size
+                self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+            except Exception as e:
+                self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+
+    def get_or_set(
+        self,
+        key: str,
+        data_loader: Callable[[], T]
+    ) -> Tuple[T, bool]:
+        """
+        Get a value from cache or set it using the data_loader if missing or expired.
+
+        Args:
+            key: The cache key.
+            data_loader: Function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        try:
+            cached_data = self.get(key)
+            if cached_data is not None:
+                return cached_data, True
+
+            # Not in cache or expired, load the data
+            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")
+
+            # Check if the data_loader is a coroutine function
+            if inspect.iscoroutinefunction(data_loader):
+                self.logger.error(
+                    f"Error in get_or_set for {key} in {self.name}: "
+                    f"data_loader is a coroutine function which is not supported. "
+                    f"Use a regular function that returns a value, not a coroutine."
+                )
+                # Fall back to running the coroutine in the event loop if possible
+                try:
+                    loop = asyncio.get_event_loop()
+                    fresh_data = loop.run_until_complete(data_loader())
+                except Exception as coro_err:
+                    self.logger.error(f"Failed to execute coroutine data_loader: {str(coro_err)}")
+                    raise RuntimeError(f"Cannot use coroutine data_loader in cache: {str(coro_err)}")
+            else:
+                # Regular function, just call it
+                fresh_data = data_loader()
+
+            if fresh_data is not None: # Only cache if we got valid data
+                self.set(key, fresh_data)
+
+            if fresh_data is None:
+                raise ValueError(f"Data loader returned None for key {key} in {self.name}")
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+            # Since this is a critical function, re-raise the exception
+            # after logging it, but add context about the cache
+            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+    async def async_get_or_set(
+        self,
+        key: str,
+        async_data_loader: Callable[[], Awaitable[T]]
+    ) -> Tuple[T, bool]:
+        """
+        Async version of get_or_set for use with async data loaders.
+
+        Args:
+            key: The cache key.
+            async_data_loader: Async function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        if not self.enabled:
+            self.logger.debug(f"Cache {self.name} is disabled. Loading data directly for key {key}.")
+            try:
+                fresh_data = await async_data_loader()
+                if fresh_data is None:
+                    self.logger.error(f"Async data loader returned None for key {key} in disabled cache {self.name}")
+                    raise ValueError(f"Async data loader returned None for key {key} in {self.name} (cache disabled)")
+                return fresh_data, False
+            except Exception as e:
+                self.logger.error(f"Error in async_data_loader for key {key} in disabled cache {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                raise RuntimeError(f"Cache error (disabled) in {self.name} for key {key}: {str(e)}") from e
+
+        try:
+            cached_data = self.get(key) # self.get() is synchronous, assumed to be fast.
+            if cached_data is not None:
+                self.logger.debug(f"Cache HIT for key {key} in {self.name} (async_get_or_set)")
+                return cached_data, True
+
+            self.logger.debug(f"Cache MISS for key {key} in {self.name} (async_get_or_set). Loading data.")
+            # Not in cache or expired, load the data asynchronously
+            fresh_data = await async_data_loader()
+
+            if fresh_data is not None: # Only cache if we got valid data
+                self.set(key, fresh_data)
+            else:
+                # Log an error if data_loader returns None, as it's unexpected.
+                self.logger.error(f"Async data loader returned None for key {key} in {self.name}")
+                raise ValueError(f"Async data loader returned None for key {key} in {self.name}")
+
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in async_get_or_set for key {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+            # Re-raise the exception after logging, adding context
+            raise RuntimeError(f"Cache error in {self.name} for key {key} (async): {str(e)}") from e
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get statistics about the current cache state."""
+        with self.lock:
+            try:
+                # Clean up expired items before reporting size
+                current_time = time.time()
+                # Corrected: Use self._timestamps to find expired keys
+                expired_keys = [k for k, ts in self._timestamps.items() if current_time - ts >= self.ttl]
+                for k in expired_keys:
+                    self._cache.pop(k, None)
+                    self._timestamps.pop(k, None)
+                    self.evictions += 1
+
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache),
+                    "first_20_keys": list(self._cache.keys())[:20], # Limit to first 20 keys
+                    "total_keys": len(self._cache.keys()),
+                    "memory_usage_estimate_megabytes": round(
+                        sum(len(str(k)) + self._estimate_size(v) for k, v in self._cache.items()) / (1024 * 1024),
+                        3
+                    ),
+                    "hits": self.hits,
+                    "misses": self.misses,
+                    "sets": self.sets,
+                    "evictions": self.evictions,
+                    "default_ttl": self.ttl
+                }
+            except Exception as e:
+                self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "error": str(e),
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache) if self._cache else 0
+                }
+
+    def _estimate_size(self, obj: Any) -> int:
+        """Estimate the memory size of an object in bytes."""
+        try:
+            if obj is None:
+                return 0
+            if isinstance(obj, (str, bytes, bytearray)):
+                return len(obj)
+            if isinstance(obj, (int, float, bool)):
+                return 8
+            if isinstance(obj, dict):
+                return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
+            if isinstance(obj, (list, tuple, set)):
+                return sum(self._estimate_size(i) for i in obj)
+            # For other objects, use a rough approximation
+            return len(str(obj))
+        except Exception:
+            # If we can't estimate, return a reasonable default
+            return 100
+
+    def __str__(self) -> str:
+        return f"SharedCache(name='{self.name}', size={len(self._cache)}, max_size={self.max_size}, hits={self.hits}, misses={self.misses})"
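For orientation, here is a minimal usage sketch of the SharedCache API added above. It is not part of the package diff; the cache name, TTL, key, and loader function are illustrative only. Note also that, as reconstructed here, SharedCache.get() calls invalidate() while already holding its non-reentrant threading.Lock, so reads of expired keys may block; the sketch below only exercises fresh entries.

```python
# Minimal usage sketch (not part of the diff). Names, TTL and the loader are illustrative.
import logging

from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

logger = logging.getLogger("example")

# One SharedCache instance per data family; entries expire after `ttl` seconds.
user_status_cache = SharedCache[dict](name="UserStatusCache", ttl=300.0, enabled=True, logger=logger)

def load_user_status() -> dict:
    # Hypothetical synchronous loader; real code would call Firestore or another backend.
    return {"uid": "user-123", "status": "active"}

# get_or_set returns (value, was_cached); the loader runs only on a cache miss.
status, was_cached = user_status_cache.get_or_set("user-123", load_user_status)

# Explicit invalidation and statistics are also part of the API shown above.
user_status_cache.invalidate("user-123")
print(user_status_cache.get_stats()["item_count"])
```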
src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py
@@ -231,7 +231,7 @@ async def authorizeAPIRequest(
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
-    Enhanced with credit check information.
+    Enhanced with credit check information and proper exception handling.
 
     Args:
         request: The incoming request

@@ -241,6 +241,9 @@ async def authorizeAPIRequest(
 
     Returns:
         Authorization result containing decision details
+
+    Raises:
+        HTTPException: For authorization failures (403) or service errors (500)
     """
     opa_decision = None
     try:

@@ -251,9 +254,11 @@ async def authorizeAPIRequest(
         # Extract request context
         user_uid = request.state.user.get('uid')
         if not user_uid:
-
-
-
+            # Log authorization failures at DEBUG level, not ERROR
+            logger.debug(f"Authorization denied for {request.method} {request.url.path}: No user UID found")
+            raise HTTPException(
+                status_code=403,
+                detail="Not authorized to access this resource"
             )
 
         # Determine if we need fresh status

@@ -313,7 +318,7 @@ async def authorizeAPIRequest(
                 timeout=5.0 # 5 seconds timeout
             )
             logger.debug(f"OPA Response Status: {response.status_code}")
-            logger.debug(f"OPA Response Body: {response.text}")
+            # logger.debug(f"OPA Response Body: {response.text}")
 
             if response.status_code != 200:
                 logger.error(f"OPA authorization failed: {response.text}")

@@ -326,11 +331,9 @@ async def authorizeAPIRequest(
             logger.debug(f"Parsed OPA response: {result}")
 
             # Handle unusual OPA response formats
-            # Try to find "decision" field as an alternative
             if "result" in result:
                 opa_decision = result["result"]
             else:
-                # If we still don't have a result after all attempts, use default structure
                 logger.warning(f"OPA response missing 'result' field, using default")
                 raise HTTPException(
                     status_code=500,

@@ -340,31 +343,21 @@ async def authorizeAPIRequest(
             # Extract key fields from result with better default handling
             allow = opa_decision.get("allow", False)
 
-            # Handle authorization denial
+            # Handle authorization denial - log at DEBUG level, not ERROR
             if not allow:
-                logger.
-                raise
-
-
-                    "user_uid": user_uid,
-                    "resource_fields": request_resource_fields,
-                    "opa_decision": opa_decision, # Include the full OPA decision result
-                    # Include the raw result if it's different from the processed decision
-                    "raw_opa_response": result if result != {"result": opa_decision} else None
-                }
+                logger.debug(f"Authorization denied for {request.method} {request.url.path}: insufficient permissions")
+                raise HTTPException(
+                    status_code=403,
+                    detail=f"Not authorized to {request.method} {request.url.path}"
                 )
 
         except httpx.RequestError as e:
+            # Only log actual system errors at ERROR level
            logger.error(f"Failed to connect to OPA: {str(e)}")
-            raise
-
-
-
-                additional_info={
-                    "opa_url": opa_url,
-                    "connection_error": str(e)
-                }
-            ) from e
+            raise HTTPException(
+                status_code=500,
+                detail="Authorization service temporarily unavailable"
+            )
 
         # More descriptive metadata about the data freshness
         return {

@@ -374,21 +367,16 @@ async def authorizeAPIRequest(
             "opa_decision": opa_decision
         }
 
-    except
+    except HTTPException:
+        # Re-raise HTTPExceptions as-is (they're already properly formatted)
         raise
     except Exception as e:
-
-
-
-
-
-
-            "path": str(request.url),
-            "method": request.method,
-            "user_uid": request.state.user.get('uid'),
-            "resource_fields": request_resource_fields
-        }
-        ) from e
+        # Only log unexpected errors at ERROR level
+        logger.error(f"Unexpected error during authorization for {request.method} {request.url.path}: {str(e)}")
+        raise HTTPException(
+            status_code=500,
+            detail="Internal authorization error"
+        )
 
 def _should_force_fresh_status(request: Request) -> bool:
     """
src/ipulse_shared_core_ftredge/services/__init__.py (new file in 14.0.1)
@@ -0,0 +1,14 @@
+"""Service utilities for shared core."""
+# Import existing components
+from ipulse_shared_core_ftredge.services.base_service_exceptions import (
+    BaseServiceException, ServiceError, ValidationError, ResourceNotFoundError, AuthorizationError
+)
+from ipulse_shared_core_ftredge.services.servicemon import Servicemon
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService
+
+__all__ = [
+    'AuthorizationError', 'BaseServiceException', 'ServiceError', 'ValidationError',
+    'ResourceNotFoundError', 'BaseFirestoreService',
+    'CacheAwareFirestoreService'
+]
src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py (new file)
@@ -0,0 +1,169 @@
+"""Cache-aware Firestore service base class."""
+import time
+from typing import TypeVar, Generic, Dict, Any, List, Optional
+from google.cloud import firestore
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.base_service_exceptions import ResourceNotFoundError, ServiceError
+from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+from ipulse_shared_core_ftredge import BaseDataModel
+
+T = TypeVar('T', bound=BaseDataModel)
+
+class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
+    """
+    Base service class that adds caching capabilities to BaseFirestoreService.
+    Supports both document-level and collection-level caching.
+    """
+
+    def __init__(
+        self,
+        db: firestore.Client,
+        collection_name: str,
+        resource_type: str,
+        logger,
+        document_cache: Optional[SharedCache] = None,
+        collection_cache: Optional[SharedCache] = None,
+        timeout: float = 30.0
+    ):
+        super().__init__(db, collection_name, resource_type, logger)
+        self.document_cache = document_cache
+        self.collection_cache = collection_cache
+        self.timeout = timeout
+
+        # Log cache configuration
+        if self.document_cache:
+            self.logger.info(f"Document cache enabled for {resource_type}: {self.document_cache.name}")
+        if self.collection_cache:
+            self.logger.info(f"Collection cache enabled for {resource_type}: {self.collection_cache.name}")
+
+    async def get_document(self, doc_id: str) -> Dict[str, Any]:
+        """
+        Get a document with caching support.
+
+        Args:
+            doc_id: Document ID to fetch
+
+        Returns:
+            Document data as dictionary
+
+        Raises:
+            ResourceNotFoundError: If document doesn't exist
+        """
+        # Check cache first
+        if self.document_cache:
+            start_time = time.time()
+            cached_doc = self.document_cache.get(doc_id)
+            cache_check_time = (time.time() - start_time) * 1000
+
+            if cached_doc is not None:
+                self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
+                return cached_doc
+            else:
+                self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
+
+        # Fetch from Firestore
+        start_time = time.time()
+        doc_ref = self.db.collection(self.collection_name).document(doc_id)
+        doc = doc_ref.get(timeout=self.timeout)
+        firestore_time = (time.time() - start_time) * 1000
+
+        if not doc.exists:
+            self.logger.info(f"Document {doc_id} not found in Firestore after {firestore_time:.2f}ms")
+            raise ResourceNotFoundError(self.resource_type, doc_id)
+
+        doc_data = doc.to_dict()
+        self.logger.debug(f"Fetched document {doc_id} from Firestore in {firestore_time:.2f}ms")
+
+        # Cache the result
+        if self.document_cache and doc_data:
+            self.document_cache.set(doc_id, doc_data)
+            self.logger.debug(f"Cached document {doc_id}")
+
+        return doc_data
+
+    async def get_all_documents(self, cache_key: Optional[str] = None) -> List[Dict[str, Any]]:
+        """
+        Retrieves all documents from the collection.
+        Uses collection_cache if cache_key is provided and cache is available.
+        Also populates document_cache for each retrieved document.
+        """
+        if cache_key and self.collection_cache:
+            cached_collection_data = self.collection_cache.get(cache_key)
+            if cached_collection_data is not None:
+                self.logger.debug(f"Cache HIT for collection key '{cache_key}' in {self.collection_cache.name}")
+                # Ensure individual documents are also in document_cache if possible
+                if self.document_cache:
+                    for doc_data in cached_collection_data:
+                        if "id" in doc_data and not self.document_cache.get(doc_data["id"]):
+                            self._cache_document_data(doc_data["id"], doc_data)
+                return cached_collection_data
+            else:
+                self.logger.debug(f"Cache MISS for collection key '{cache_key}' in {self.collection_cache.name} - checking Firestore")
+
+        self.logger.info(f"Fetching all documents for {self.resource_type} from Firestore.")
+        start_time = time.time()
+
+        try:
+            docs_stream = self.db.collection(self.collection_name).stream(timeout=self.timeout)
+            docs_data_list = []
+            for doc in docs_stream:
+                doc_data = doc.to_dict()
+                if doc_data is not None:
+                    doc_data["id"] = doc.id # Ensure 'id' field is present
+                    docs_data_list.append(doc_data)
+
+            fetch_time = (time.time() - start_time) * 1000
+            self.logger.debug(f"Fetched {len(docs_data_list)} documents for {self.resource_type} from Firestore in {fetch_time:.2f}ms")
+
+            # Cache the entire collection if cache_key and collection_cache are available
+            if cache_key and self.collection_cache:
+                self.collection_cache.set(cache_key, docs_data_list)
+                self.logger.debug(f"Cached collection with key '{cache_key}' in {self.collection_cache.name}")
+
+            # Populate individual document cache
+            if self.document_cache:
+                self.logger.debug(f"Populating document cache ({self.document_cache.name}) with {len(docs_data_list)} items for {self.resource_type}.")
+                for doc_data in docs_data_list:
+                    # _cache_document_data expects 'id' to be in doc_data for keying
+                    self._cache_document_data(doc_data["id"], doc_data)
+
+            return docs_data_list
+
+        except Exception as e:
+            self.logger.error(f"Error fetching all documents for {self.resource_type}: {str(e)}", exc_info=True)
+            raise ServiceError(operation=f"fetching all {self.resource_type}s", error=e, resource_type=self.resource_type) from e
+
+    def _cache_document_data(self, doc_id: str, data: Dict[str, Any]):
+        """Helper to cache document data if document_cache is available."""
+        if self.document_cache:
+            self.document_cache.set(doc_id, data)
+            self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
+
+    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+        """Create document and invalidate cache."""
+        result = await super().create_document(doc_id, data, creator_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
+
+    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
+        """Update document and invalidate cache."""
+        result = await super().update_document(doc_id, update_data, updater_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
+
+    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
+        """Delete document and invalidate cache."""
+        await super().delete_document(doc_id)
+        self._invalidate_document_cache(doc_id)
+
+    def _invalidate_document_cache(self, doc_id: str) -> None:
+        """Invalidate document cache for a specific document."""
+        if self.document_cache:
+            self.document_cache.invalidate(doc_id)
+            self.logger.debug(f"Invalidated cache for document {doc_id}")
+
+    def _invalidate_collection_cache(self, cache_key: str) -> None:
+        """Invalidate collection cache for a specific cache key."""
+        if self.collection_cache:
+            self.collection_cache.invalidate(cache_key)
+            self.logger.debug(f"Invalidated collection cache for key {cache_key}")
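As a sketch of how the new base class might be wired up (again, not part of the diff): a concrete service subclasses CacheAwareFirestoreService with a model type and passes in its two SharedCache instances. The model class, import path of the model, collection name, and TTLs below are assumptions for illustration.

```python
# Illustrative subclass (not part of the diff). UserProfile, its import path, the collection
# name and the TTLs are assumptions; any BaseDataModel subclass would play the same role.
import logging

from google.cloud import firestore

from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
from ipulse_shared_core_ftredge.services import CacheAwareFirestoreService
from ipulse_shared_core_ftredge.models.user_profile import UserProfile  # assumed import path

logger = logging.getLogger("userprofile_service")

class UserProfileService(CacheAwareFirestoreService[UserProfile]):
    def __init__(self, db: firestore.Client):
        super().__init__(
            db=db,
            collection_name="userprofiles",  # illustrative collection name
            resource_type="userprofile",
            logger=logger,
            document_cache=SharedCache[dict](name="UserProfileDocCache", ttl=600.0, logger=logger),
            collection_cache=SharedCache[list](name="UserProfileCollectionCache", ttl=600.0, logger=logger),
        )

# Reads go through the caches; create/update/delete invalidate the per-document entry:
#   profile = await service.get_document("user-123")
#   await service.update_document("user-123", {"display_name": "New Name"}, updater_uid="admin-1")
```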
src/ipulse_shared_core_ftredge/services/credit_service.py
@@ -135,7 +135,7 @@ class CreditService:
                 error=e,
                 resource_type="user_credits",
                 resource_id=user_uid,
-                additional_info={"credits_to_charge":
+                additional_info={"credits_to_charge": required_credits_for_resource}
             ) from e
 
     async def charge_credits(self, user_uid: str, credits_to_charge: Optional[float], operation_details: str) -> bool:
src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 12.0.1
+Version: 14.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
@@ -8,6 +8,8 @@ src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
 src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
 src/ipulse_shared_core_ftredge.egg-info/requires.txt
 src/ipulse_shared_core_ftredge.egg-info/top_level.txt
+src/ipulse_shared_core_ftredge/cache/__init__.py
+src/ipulse_shared_core_ftredge/cache/shared_cache.py
 src/ipulse_shared_core_ftredge/dependencies/__init__.py
 src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py
 src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py

@@ -25,9 +27,12 @@ src/ipulse_shared_core_ftredge/models/user_status.py
 src/ipulse_shared_core_ftredge/services/__init__.py
 src/ipulse_shared_core_ftredge/services/base_firestore_service.py
 src/ipulse_shared_core_ftredge/services/base_service_exceptions.py
+src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py
 src/ipulse_shared_core_ftredge/services/credit_service.py
 src/ipulse_shared_core_ftredge/services/fastapiservicemon.py
 src/ipulse_shared_core_ftredge/services/servicemon.py
 src/ipulse_shared_core_ftredge/utils/__init__.py
 src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py
-src/ipulse_shared_core_ftredge/utils/json_encoder.py
+src/ipulse_shared_core_ftredge/utils/json_encoder.py
+tests/test_cache_aware_service.py
+tests/test_shared_cache.py
tests/test_cache_aware_service.py (new file)
@@ -0,0 +1,233 @@
+"""Tests for the CacheAwareFirestoreService."""
+
+import unittest
+import logging
+from unittest.mock import MagicMock, patch, AsyncMock
+from pydantic import BaseModel
+from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+from ipulse_shared_core_ftredge.services import CacheAwareFirestoreService
+
+
+# Configure logging for tests
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+# Create a simple model for testing
+class TestModel(BaseModel):
+    id: str
+    name: str
+    description: str
+
+
+class TestCacheAwareFirestoreService(unittest.TestCase):
+    """Test cases for CacheAwareFirestoreService."""
+
+    def setUp(self):
+        """Set up test fixtures."""
+        # Create mock Firestore client
+        self.db_mock = MagicMock()
+
+        # Create mock caches
+        self.document_cache = SharedCache[dict](
+            name="TestDocCache",
+            ttl=1.0,
+            enabled=True,
+            logger=logger
+        )
+
+        self.collection_cache = SharedCache[list](
+            name="TestCollectionCache",
+            ttl=1.0,
+            enabled=True,
+            logger=logger
+        )
+
+        # Create service instance with mocks
+        self.service = CacheAwareFirestoreService[TestModel](
+            db=self.db_mock,
+            collection_name="test_collection",
+            resource_type="test_resource",
+            logger=logger,
+            document_cache=self.document_cache,
+            collection_cache=self.collection_cache,
+            timeout=5.0
+        )
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_document')
+    async def test_get_document_cache_hit(self, mock_get_document):
+        """Test get_document with cache hit."""
+        # Prepare cached data
+        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
+        self.document_cache.set("doc123", test_data)
+
+        # Execute get_document
+        result = await self.service.get_document("doc123")
+
+        # Verify result comes from cache
+        self.assertEqual(result, test_data)
+
+        # Verify Firestore was not called
+        mock_get_document.assert_not_called()
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_document')
+    async def test_get_document_cache_miss(self, mock_get_document):
+        """Test get_document with cache miss."""
+        # Configure mock to return data
+        mock_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
+        mock_get_document.return_value = mock_data
+
+        # Execute get_document
+        result = await self.service.get_document("doc123")
+
+        # Verify Firestore was called
+        mock_get_document.assert_called_once_with("doc123")
+
+        # Verify result is correct
+        self.assertEqual(result, mock_data)
+
+        # Verify data was cached
+        cached_data = self.document_cache.get("doc123")
+        self.assertEqual(cached_data, mock_data)
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_all_documents')
+    async def test_get_all_documents_cache_hit(self, mock_get_all):
+        """Test get_all_documents with cache hit."""
+        # Prepare cached data
+        test_docs = [
+            {"id": "doc1", "name": "Doc 1", "description": "First doc"},
+            {"id": "doc2", "name": "Doc 2", "description": "Second doc"}
+        ]
+        self.collection_cache.set("test_cache_key", test_docs)
+
+        # Execute get_all_documents
+        result = await self.service.get_all_documents("test_cache_key")
+
+        # Verify result comes from cache
+        self.assertEqual(result, test_docs)
+
+        # Verify Firestore was not called
+        mock_get_all.assert_not_called()
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_all_documents')
+    async def test_get_all_documents_cache_miss(self, mock_get_all):
+        """Test get_all_documents with cache miss."""
+        # Configure mock to return data
+        mock_docs = [
+            {"id": "doc1", "name": "Doc 1", "description": "First doc"},
+            {"id": "doc2", "name": "Doc 2", "description": "Second doc"}
+        ]
+        mock_get_all.return_value = mock_docs
+
+        # Execute get_all_documents
+        result = await self.service.get_all_documents("test_cache_key")
+
+        # Verify Firestore was called
+        mock_get_all.assert_called_once()
+
+        # Verify result is correct
+        self.assertEqual(result, mock_docs)
+
+        # Verify data was cached
+        cached_docs = self.collection_cache.get("test_cache_key")
+        self.assertEqual(cached_docs, mock_docs)
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.update_document')
+    async def test_update_document_invalidates_cache(self, mock_update):
+        """Test that update_document invalidates cache."""
+        # Prepare cached data
+        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
+        self.document_cache.set("doc123", test_data)
+
+        # Configure mock to return updated data
+        updated_data = {"id": "doc123", "name": "Updated Doc", "description": "This was updated"}
+        mock_update.return_value = updated_data
+
+        # Execute update_document
+        await self.service.update_document("doc123", {"name": "Updated Doc"}, "user123")
+
+        # Verify cache was invalidated
+        self.assertIsNone(self.document_cache.get("doc123"))
+
+        # Verify collection cache was also invalidated
+        if self.collection_cache.get("all_documents"):
+            self.fail("Collection cache was not invalidated")
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.create_document')
+    async def test_create_document_invalidates_cache(self, mock_create):
+        """Test that create_document invalidates cache."""
+        # Prepare collection cache data
+        test_docs = [{"id": "doc1", "name": "Doc 1"}]
+        self.collection_cache.set("all_documents", test_docs)
+
+        # Configure mock to return created data
+        new_data = {"id": "doc2", "name": "New Doc", "description": "Newly created"}
+        mock_create.return_value = new_data
+
+        # Create model instance
+        new_model = TestModel(id="doc2", name="New Doc", description="Newly created")
+
+        # Execute create_document
+        await self.service.create_document("doc2", new_model, "user123")
+
+        # Verify collection cache was invalidated
+        self.assertIsNone(self.collection_cache.get("all_documents"))
+
+    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.delete_document')
+    async def test_delete_document_invalidates_cache(self, mock_delete):
+        """Test that delete_document invalidates cache."""
+        # Prepare cached data
+        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
+        self.document_cache.set("doc123", test_data)
+
+        test_docs = [test_data]
+        self.collection_cache.set("all_documents", test_docs)
+
+        # Execute delete_document
+        await self.service.delete_document("doc123")
+
+        # Verify document cache was invalidated
+        self.assertIsNone(self.document_cache.get("doc123"))
+
+        # Verify collection cache was also invalidated
+        self.assertIsNone(self.collection_cache.get("all_documents"))
+
+    def test_invalidate_document_cache(self):
+        """Test _invalidate_document_cache method."""
+        # Prepare cached data
+        test_data = {"id": "doc123", "name": "Test Doc"}
+        self.document_cache.set("doc123", test_data)
+
+        # Execute invalidation
+        self.service._invalidate_document_cache("doc123")
+
+        # Verify cache was invalidated
+        self.assertIsNone(self.document_cache.get("doc123"))
+
+    def test_invalidate_collection_cache(self):
+        """Test _invalidate_collection_cache method."""
+        # Prepare cached data
+        test_docs = [{"id": "doc1", "name": "Doc 1"}, {"id": "doc2", "name": "Doc 2"}]
+        self.collection_cache.set("all_documents", test_docs)
+
+        # Execute invalidation
+        self.service._invalidate_collection_cache()
+
+        # Verify cache was invalidated
+        self.assertIsNone(self.collection_cache.get("all_documents"))
+
+    def test_invalidate_collection_cache_custom_key(self):
+        """Test _invalidate_collection_cache method with custom key."""
+        # Prepare cached data
+        test_docs = [{"id": "doc1", "name": "Doc 1"}, {"id": "doc2", "name": "Doc 2"}]
+        self.collection_cache.set("custom_key", test_docs)
+
+        # Execute invalidation
+        self.service._invalidate_collection_cache("custom_key")
+
+        # Verify cache was invalidated
+        self.assertIsNone(self.collection_cache.get("custom_key"))
+
+
+if __name__ == "__main__":
+    unittest.main()
tests/test_shared_cache.py (new file)
@@ -0,0 +1,146 @@
+"""Tests for the SharedCache implementation."""
+
+import time
+import unittest
+import logging
+from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+
+# Configure logging for tests
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class TestSharedCache(unittest.TestCase):
+    """Test cases for SharedCache."""
+
+    def setUp(self):
+        """Set up test fixtures."""
+        self.cache = SharedCache[str](
+            name="TestCache",
+            ttl=0.5, # Short TTL for faster testing
+            enabled=True,
+            logger=logger
+        )
+
+    def test_cache_set_get(self):
+        """Test basic cache set and get operations."""
+        # Set a value
+        self.cache.set("test_key", "test_value")
+
+        # Get the value
+        cached_value = self.cache.get("test_key")
+
+        # Verify value was cached
+        self.assertEqual(cached_value, "test_value")
+
+    def test_cache_ttl_expiration(self):
+        """Test cache TTL expiration."""
+        # Set a value
+        self.cache.set("expiring_key", "expiring_value")
+
+        # Verify it's initially cached
+        self.assertEqual(self.cache.get("expiring_key"), "expiring_value")
+
+        # Wait for TTL to expire
+        time.sleep(0.6) # Slightly longer than TTL
+
+        # Verify value is no longer cached
+        self.assertIsNone(self.cache.get("expiring_key"))
+
+    def test_cache_invalidate(self):
+        """Test cache invalidation."""
+        # Set multiple values
+        self.cache.set("key1", "value1")
+        self.cache.set("key2", "value2")
+
+        # Invalidate specific key
+        self.cache.invalidate("key1")
+
+        # Verify key1 is gone but key2 remains
+        self.assertIsNone(self.cache.get("key1"))
+        self.assertEqual(self.cache.get("key2"), "value2")
+
+    def test_cache_invalidate_all(self):
+        """Test invalidating all cache entries."""
+        # Set multiple values
+        self.cache.set("key1", "value1")
+        self.cache.set("key2", "value2")
+
+        # Invalidate all
+        self.cache.invalidate_all()
+
+        # Verify both keys are gone
+        self.assertIsNone(self.cache.get("key1"))
+        self.assertIsNone(self.cache.get("key2"))
+
+    def test_cache_get_or_set(self):
+        """Test get_or_set functionality."""
+        # Define a counter to verify how many times the loader is called
+        counter = [0]
+
+        def data_loader():
+            counter[0] += 1
+            return f"loaded_value_{counter[0]}"
+
+        # First call should use data_loader
+        value1, was_cached1 = self.cache.get_or_set("loader_key", data_loader)
+
+        # Second call should use cached value
+        value2, was_cached2 = self.cache.get_or_set("loader_key", data_loader)
+
+        # Verify results
+        self.assertEqual(value1, "loaded_value_1")
+        self.assertEqual(value2, "loaded_value_1") # Same value from cache
+        self.assertFalse(was_cached1) # First call was not cached
+        self.assertTrue(was_cached2) # Second call was cached
+        self.assertEqual(counter[0], 1) # Loader called exactly once
+
+    def test_cache_disabled(self):
+        """Test cache behavior when disabled."""
+        # Create disabled cache
+        disabled_cache = SharedCache[str](
+            name="DisabledCache",
+            ttl=1.0,
+            enabled=False,
+            logger=logger
+        )
+
+        # Set a value
+        disabled_cache.set("disabled_key", "disabled_value")
+
+        # Attempt to get - should return None since cache is disabled
+        cached_value = disabled_cache.get("disabled_key")
+        self.assertIsNone(cached_value)
+
+    def test_cache_generic_typing(self):
+        """Test cache with different data types."""
+        # Integer cache
+        int_cache = SharedCache[int](name="IntCache", ttl=1.0, enabled=True)
+        int_cache.set("int_key", 123)
+        self.assertEqual(int_cache.get("int_key"), 123)
+
+        # Dictionary cache
+        dict_cache = SharedCache[dict](name="DictCache", ttl=1.0, enabled=True)
+        dict_cache.set("dict_key", {"a": 1, "b": 2})
+        self.assertEqual(dict_cache.get("dict_key"), {"a": 1, "b": 2})
+
+    def test_cache_stats(self):
+        """Test cache statistics."""
+        # Add some data
+        self.cache.set("stats_key1", "stats_value1")
+        self.cache.set("stats_key2", "stats_value2")
+
+        # Get stats
+        stats = self.cache.get_stats()
+
+        # Verify stats
+        self.assertEqual(stats["name"], "TestCache")
+        self.assertEqual(stats["enabled"], True)
+        self.assertEqual(stats["ttl_seconds"], 0.5)
+        self.assertEqual(stats["item_count"], 2)
+        self.assertIn("stats_key1", stats["keys"])
+        self.assertIn("stats_key2", stats["keys"])
+
+
+if __name__ == "__main__":
+    unittest.main()
src/ipulse_shared_core_ftredge/services/__init__.py (removed from 12.0.1)
@@ -1,6 +0,0 @@
-from .base_firestore_service import BaseFirestoreService
-
-from .base_service_exceptions import (BaseServiceException, ResourceNotFoundError, AuthorizationError,
-                                      ValidationError ,ServiceError)
-from .servicemon import Servicemon
-from .fastapiservicemon import FastAPIServiceMon
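One migration note worth flagging (an observation about this diff, not part of it): the removed services/__init__.py above re-exported FastAPIServiceMon, while the new services/__init__.py introduced in 14.0.1 does not. Consumers that relied on the old package-level import would presumably switch to the module-level import instead:

```python
# Assumed replacement import; fastapiservicemon.py itself is listed as unchanged in this release.
from ipulse_shared_core_ftredge.services.fastapiservicemon import FastAPIServiceMon
```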
All other files listed above are unchanged between 12.0.1 and 14.0.1.