ipulse-shared-core-ftredge 13.0.1-py3-none-any.whl → 14.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ipulse_shared_core_ftredge/__init__.py +1 -1
- ipulse_shared_core_ftredge/cache/shared_cache.py +127 -64
- ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +28 -40
- ipulse_shared_core_ftredge/models/__init__.py +1 -2
- ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +120 -122
- {ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/METADATA +1 -1
- {ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/RECORD +10 -10
- {ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/WHEEL +1 -1
- {ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/licenses/LICENCE +0 -0
- {ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/top_level.txt +0 -0
ipulse_shared_core_ftredge/cache/shared_cache.py
CHANGED

@@ -5,6 +5,7 @@ import logging
 import traceback
 import inspect
 import asyncio
+import threading
 from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
 
 T = TypeVar('T')

@@ -37,6 +38,13 @@ class SharedCache(Generic[T]):
         self._cache: Dict[str, T] = {}
         self._timestamps: Dict[str, float] = {}
 
+        # Thread-safe attributes
+        self.lock = threading.Lock()
+        self.hits = 0
+        self.misses = 0
+        self.sets = 0
+        self.evictions = 0
+
         self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
 
     def get(self, key: str) -> Optional[T]:

@@ -52,21 +60,25 @@ class SharedCache(Generic[T]):
         if not self.enabled:
             return None
 
-
-
-
-
-
-
+        with self.lock:
+            try:
+                if key in self._cache:
+                    timestamp = self._timestamps.get(key, 0)
+                    if time.time() - timestamp < self.ttl:
+                        self.hits += 1
+                        self.logger.debug(f"Cache hit for {key} in {self.name}")
+                        return self._cache[key]
+                    else:
+                        # Expired item, remove it
+                        self.invalidate(key)
+                        self.logger.debug(f"Cache expired for {key} in {self.name}")
                 else:
-
-
-
-
-            self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
-            self.logger.error(traceback.format_exc())
+                    self.misses += 1
+            except Exception as e:
+                self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
-
+        return None
 
     def set(self, key: str, value: T) -> None:
         """

@@ -79,13 +91,25 @@ class SharedCache(Generic[T]):
         if not self.enabled:
             return
 
-
-
-
-
-
-
-
+        with self.lock:
+            try:
+                if len(self._cache) >= 1000 and key not in self._cache:
+                    # Basic LRU-like eviction: remove the first item found (not true LRU)
+                    try:
+                        oldest_key = next(iter(self._cache))
+                        self.invalidate(oldest_key)
+                        self.evictions += 1
+                    except StopIteration:
+                        # Cache was empty, which shouldn't happen if len >= max_size > 0
+                        pass  # Or log an error
+
+                self._cache[key] = value
+                self._timestamps[key] = time.time()
+                self.sets += 1
+                self.logger.debug(f"Cached item {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def invalidate(self, key: str) -> None:
         """
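Editor's note on the locking added above: as reconstructed here, get() and set() call self.invalidate() from inside a "with self.lock:" block, and invalidate() (next hunk) acquires the same lock. threading.Lock is not reentrant, so the expiry path in get() and the eviction path in set() would block on the second acquire. A minimal sketch of the same pattern built on threading.RLock; the class and names are illustrative only, not part of the package:

import threading
import time
from typing import Any, Dict, Optional

class TTLCacheSketch:
    """Minimal TTL cache; the reentrant lock lets invalidate() run under get()."""

    def __init__(self, ttl: float = 60.0):
        self.ttl = ttl
        self._lock = threading.RLock()  # reentrant, unlike threading.Lock
        self._data: Dict[str, Any] = {}
        self._stamps: Dict[str, float] = {}

    def get(self, key: str) -> Optional[Any]:
        with self._lock:
            if key in self._data:
                if time.time() - self._stamps[key] < self.ttl:
                    return self._data[key]
                # Expired: calling invalidate() here is safe because an RLock
                # can be re-acquired by the thread that already holds it.
                self.invalidate(key)
            return None

    def set(self, key: str, value: Any) -> None:
        with self._lock:
            self._data[key] = value
            self._stamps[key] = time.time()

    def invalidate(self, key: str) -> None:
        with self._lock:
            self._data.pop(key, None)
            self._stamps.pop(key, None)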
@@ -94,24 +118,28 @@ class SharedCache(Generic[T]):
         Args:
             key: The cache key to invalidate.
         """
-
-
-
-
-
-
-
+        with self.lock:
+            try:
+                self._cache.pop(key, None)
+                self._timestamps.pop(key, None)
+                self.evictions += 1
+                self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def invalidate_all(self) -> None:
         """Clear all cached items."""
-
-
-
-
-
-
-
-
+        with self.lock:
+            try:
+                cache_size = len(self._cache)
+                self._cache.clear()
+                self._timestamps.clear()
+                self.evictions += cache_size
+                self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+            except Exception as e:
+                self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def get_or_set(
         self,

@@ -183,51 +211,83 @@ class SharedCache(Generic[T]):
         Returns:
             Tuple of (data, was_cached) where was_cached indicates if from cache.
         """
+        if not self.enabled:
+            self.logger.debug(f"Cache {self.name} is disabled. Loading data directly for key {key}.")
+            try:
+                fresh_data = await async_data_loader()
+                if fresh_data is None:
+                    self.logger.error(f"Async data loader returned None for key {key} in disabled cache {self.name}")
+                    raise ValueError(f"Async data loader returned None for key {key} in {self.name} (cache disabled)")
+                return fresh_data, False
+            except Exception as e:
+                self.logger.error(f"Error in async_data_loader for key {key} in disabled cache {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                raise RuntimeError(f"Cache error (disabled) in {self.name} for key {key}: {str(e)}") from e
+
         try:
-            cached_data = self.get(key)
+            cached_data = self.get(key)  # self.get() is synchronous, assumed to be fast.
             if cached_data is not None:
+                self.logger.debug(f"Cache HIT for key {key} in {self.name} (async_get_or_set)")
                 return cached_data, True
 
+            self.logger.debug(f"Cache MISS for key {key} in {self.name} (async_get_or_set). Loading data.")
             # Not in cache or expired, load the data asynchronously
-            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")
-
-            # Execute the async data loader
             fresh_data = await async_data_loader()
 
             if fresh_data is not None:  # Only cache if we got valid data
                 self.set(key, fresh_data)
+            else:
+                # Log an error if data_loader returns None, as it's unexpected.
+                self.logger.error(f"Async data loader returned None for key {key} in {self.name}")
+                raise ValueError(f"Async data loader returned None for key {key} in {self.name}")
 
             return fresh_data, False
         except Exception as e:
-            self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(f"Error in async_get_or_set for key {key} in {self.name}: {str(e)}")
             self.logger.error(traceback.format_exc())
-            raise
+            # Re-raise the exception after logging, adding context
+            raise RuntimeError(f"Cache error in {self.name} for key {key} (async): {str(e)}") from e
 
     def get_stats(self) -> Dict[str, Any]:
         """Get statistics about the current cache state."""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        with self.lock:
+            try:
+                # Clean up expired items before reporting size
+                current_time = time.time()
+                # Corrected: Use self._timestamps to find expired keys
+                expired_keys = [k for k, ts in self._timestamps.items() if current_time - ts >= self.ttl]
+                for k in expired_keys:
+                    self._cache.pop(k, None)
+                    self._timestamps.pop(k, None)
+                    self.evictions += 1
+
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache),
+                    "first_20_keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
+                    "total_keys": len(self._cache.keys()),
+                    "memory_usage_estimate_megabytes": round(
+                        sum(len(str(k)) + self._estimate_size(v) for k, v in self._cache.items()) / (1024 * 1024),
+                        3
+                    ),
+                    "hits": self.hits,
+                    "misses": self.misses,
+                    "sets": self.sets,
+                    "evictions": self.evictions,
+                    "default_ttl": self.ttl
+                }
+            except Exception as e:
+                self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "error": str(e),
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache) if self._cache else 0
+                }
 
     def _estimate_size(self, obj: Any) -> int:
         """Estimate the memory size of an object in bytes."""

@@ -247,3 +307,6 @@ class SharedCache(Generic[T]):
         except Exception:
             # If we can't estimate, return a reasonable default
             return 100
+
+    def __str__(self) -> str:
+        return f"SharedCache(name='{self.name}', size={len(self._cache)}, max_size={self.max_size}, hits={self.hits}, misses={self.misses})"
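async_get_or_set() also tightens its contract in 14.0.1: a loader that returns None is treated as an error rather than passed through, and every failure surfaces as a RuntimeError chained to the original exception (the internal ValueError included). A short usage sketch; the SharedCache constructor arguments below are inferred from the __init__ logging above and are not confirmed by this diff:

import asyncio
import logging

from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

async def load_profile() -> dict:
    # Stand-in for a real Firestore or HTTP fetch.
    await asyncio.sleep(0.1)
    return {"uid": "uid_123", "plan": "pro"}

async def main() -> None:
    # Hypothetical constructor arguments; only name, enabled and ttl are visible in the diff.
    cache: SharedCache[dict] = SharedCache(name="profiles", enabled=True, ttl=60, logger=logging.getLogger("cache"))
    try:
        profile, was_cached = await cache.async_get_or_set("uid_123", load_profile)
        print(profile, was_cached)
    except RuntimeError as e:
        # Raised for any loader failure, including a loader that returned None;
        # the original exception is available as e.__cause__.
        logging.error("profile load failed: %s", e)

asyncio.run(main())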
ipulse_shared_core_ftredge/dependencies/authz_for_apis.py
CHANGED

@@ -231,7 +231,7 @@ async def authorizeAPIRequest(
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
-    Enhanced with credit check information.
+    Enhanced with credit check information and proper exception handling.
 
     Args:
         request: The incoming request

@@ -241,6 +241,9 @@ async def authorizeAPIRequest(
 
     Returns:
         Authorization result containing decision details
+
+    Raises:
+        HTTPException: For authorization failures (403) or service errors (500)
     """
     opa_decision = None
     try:

@@ -251,9 +254,11 @@ async def authorizeAPIRequest(
         # Extract request context
         user_uid = request.state.user.get('uid')
         if not user_uid:
-
-
-
+            # Log authorization failures at DEBUG level, not ERROR
+            logger.debug(f"Authorization denied for {request.method} {request.url.path}: No user UID found")
+            raise HTTPException(
+                status_code=403,
+                detail="Not authorized to access this resource"
             )
 
         # Determine if we need fresh status

@@ -313,7 +318,7 @@ async def authorizeAPIRequest(
                 timeout=5.0  # 5 seconds timeout
             )
             logger.debug(f"OPA Response Status: {response.status_code}")
-            logger.debug(f"OPA Response Body: {response.text}")
+            # logger.debug(f"OPA Response Body: {response.text}")
 
             if response.status_code != 200:
                 logger.error(f"OPA authorization failed: {response.text}")

@@ -326,11 +331,9 @@ async def authorizeAPIRequest(
             logger.debug(f"Parsed OPA response: {result}")
 
             # Handle unusual OPA response formats
-            # Try to find "decision" field as an alternative
             if "result" in result:
                 opa_decision = result["result"]
             else:
-                # If we still don't have a result after all attempts, use default structure
                 logger.warning(f"OPA response missing 'result' field, using default")
                 raise HTTPException(
                     status_code=500,

@@ -340,31 +343,21 @@ async def authorizeAPIRequest(
             # Extract key fields from result with better default handling
             allow = opa_decision.get("allow", False)
 
-            # Handle authorization denial
+            # Handle authorization denial - log at DEBUG level, not ERROR
             if not allow:
-                logger.
-                raise
-
-
-                    "user_uid": user_uid,
-                    "resource_fields": request_resource_fields,
-                    "opa_decision": opa_decision,  # Include the full OPA decision result
-                    # Include the raw result if it's different from the processed decision
-                    "raw_opa_response": result if result != {"result": opa_decision} else None
-                }
+                logger.debug(f"Authorization denied for {request.method} {request.url.path}: insufficient permissions")
+                raise HTTPException(
+                    status_code=403,
+                    detail=f"Not authorized to {request.method} {request.url.path}"
                 )
 
         except httpx.RequestError as e:
+            # Only log actual system errors at ERROR level
             logger.error(f"Failed to connect to OPA: {str(e)}")
-            raise
-
-
-
-                additional_info={
-                    "opa_url": opa_url,
-                    "connection_error": str(e)
-                }
-            ) from e
+            raise HTTPException(
+                status_code=500,
+                detail="Authorization service temporarily unavailable"
+            )
 
         # More descriptive metadata about the data freshness
         return {

@@ -374,21 +367,16 @@ async def authorizeAPIRequest(
             "opa_decision": opa_decision
         }
 
-    except
+    except HTTPException:
+        # Re-raise HTTPExceptions as-is (they're already properly formatted)
         raise
     except Exception as e:
-
-
-
-
-
-
-            "path": str(request.url),
-            "method": request.method,
-            "user_uid": request.state.user.get('uid'),
-            "resource_fields": request_resource_fields
-        }
-    ) from e
+        # Only log unexpected errors at ERROR level
+        logger.error(f"Unexpected error during authorization for {request.method} {request.url.path}: {str(e)}")
+        raise HTTPException(
+            status_code=500,
+            detail="Internal authorization error"
+        )
 
 def _should_force_fresh_status(request: Request) -> bool:
     """
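The net effect of these changes is one consistent error contract: denials surface as 403s logged at DEBUG, OPA transport failures and unexpected errors surface as 500s, and HTTPExceptions raised along the way pass through unchanged. A condensed sketch of that shape, with a simplified signature and payload; authorize_sketch is a hypothetical stand-in, not the package's actual function:

import httpx
from fastapi import HTTPException, Request

async def authorize_sketch(request: Request, opa_url: str) -> dict:
    try:
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    opa_url,
                    json={"input": {"method": request.method, "path": request.url.path}},
                    timeout=5.0,
                )
        except httpx.RequestError:
            # Transport failure is a service problem, not a denial.
            raise HTTPException(status_code=500, detail="Authorization service temporarily unavailable")

        decision = response.json().get("result", {})
        if not decision.get("allow", False):
            # Denials are expected traffic: 403, logged at DEBUG in the real code.
            raise HTTPException(status_code=403, detail=f"Not authorized to {request.method} {request.url.path}")
        return decision
    except HTTPException:
        raise  # already properly formatted
    except Exception:
        raise HTTPException(status_code=500, detail="Internal authorization error")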
ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py
CHANGED

@@ -1,18 +1,18 @@
-"""
-
-from typing import Dict, Any, List, Optional
+"""Cache-aware Firestore service base class."""
+import time
+from typing import TypeVar, Generic, Dict, Any, List, Optional
 from google.cloud import firestore
-from
-import
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.base_service_exceptions import ResourceNotFoundError, ServiceError
 from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
-from ipulse_shared_core_ftredge
+from ipulse_shared_core_ftredge import BaseDataModel
 
-T = TypeVar('T', bound=
+T = TypeVar('T', bound=BaseDataModel)
 
-class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
+class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
     """
-    Base service class that
+    Base service class that adds caching capabilities to BaseFirestoreService.
+    Supports both document-level and collection-level caching.
     """
 
     def __init__(

@@ -20,152 +20,150 @@ class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
         db: firestore.Client,
         collection_name: str,
         resource_type: str,
-        logger
+        logger,
         document_cache: Optional[SharedCache] = None,
         collection_cache: Optional[SharedCache] = None,
-        timeout: float =
+        timeout: float = 30.0
     ):
-        """
-        Initialize the service with optional cache instances.
-
-        Args:
-            db: Firestore client
-            collection_name: Firestore collection name
-            resource_type: Resource type for error messages
-            logger: Logger instance
-            document_cache: Cache for individual documents (optional)
-            collection_cache: Cache for collection-level queries (optional)
-            timeout: Firestore operation timeout in seconds
-        """
-        super().__init__(
-            db=db,
-            collection_name=collection_name,
-            resource_type=resource_type,
-            logger=logger,
-            timeout=timeout
-        )
+        super().__init__(db, collection_name, resource_type, logger)
         self.document_cache = document_cache
         self.collection_cache = collection_cache
+        self.timeout = timeout
 
         # Log cache configuration
-        if document_cache:
-            self.logger.info(f"Document cache enabled for {resource_type}: {document_cache.name}")
-        if collection_cache:
-            self.logger.info(f"Collection cache enabled for {resource_type}: {collection_cache.name}")
-
-    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
-        """Create a document and invalidate relevant caches."""
-        result = await super().create_document(doc_id, data, creator_uid)
-
-        # Invalidate document cache if it exists
-        self._invalidate_document_cache(doc_id)
-
-        # Invalidate collection cache if it exists
-        self._invalidate_collection_cache()
-
-        return result
-
-    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
-        """Update a document and invalidate relevant caches."""
-        result = await super().update_document(doc_id, update_data, updater_uid)
-
-        # Invalidate document cache if it exists
-        self._invalidate_document_cache(doc_id)
-
-        # Invalidate collection cache if it exists
-        self._invalidate_collection_cache()
-
-        return result
-
-    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
-        """Delete a document and invalidate relevant caches."""
-        # Invalidate caches before deletion to handle potential failures
-        self._invalidate_document_cache(doc_id)
-        self._invalidate_collection_cache()
-
-        # Delete the document
-        await super().delete_document(doc_id)
+        if self.document_cache:
+            self.logger.info(f"Document cache enabled for {resource_type}: {self.document_cache.name}")
+        if self.collection_cache:
+            self.logger.info(f"Collection cache enabled for {resource_type}: {self.collection_cache.name}")
 
     async def get_document(self, doc_id: str) -> Dict[str, Any]:
         """
-        Get a document
+        Get a document with caching support.
 
         Args:
-            doc_id:
+            doc_id: Document ID to fetch
 
         Returns:
-
+            Document data as dictionary
+
+        Raises:
+            ResourceNotFoundError: If document doesn't exist
         """
-        # Check
+        # Check cache first
         if self.document_cache:
-
-
-
-
-
-
-
-
-
+            start_time = time.time()
+            cached_doc = self.document_cache.get(doc_id)
+            cache_check_time = (time.time() - start_time) * 1000
+
+            if cached_doc is not None:
+                self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
+                return cached_doc
+            else:
+                self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
+
+        # Fetch from Firestore
+        start_time = time.time()
+        doc_ref = self.db.collection(self.collection_name).document(doc_id)
+        doc = doc_ref.get(timeout=self.timeout)
+        firestore_time = (time.time() - start_time) * 1000
+
+        if not doc.exists:
+            self.logger.info(f"Document {doc_id} not found in Firestore after {firestore_time:.2f}ms")
+            raise ResourceNotFoundError(self.resource_type, doc_id)
+
+        doc_data = doc.to_dict()
+        self.logger.debug(f"Fetched document {doc_id} from Firestore in {firestore_time:.2f}ms")
+
+        # Cache the result
         if self.document_cache and doc_data:
-            # Make sure ID is included in the cached data
-            if 'id' not in doc_data:
-                doc_data['id'] = doc_id
             self.document_cache.set(doc_id, doc_data)
             self.logger.debug(f"Cached document {doc_id}")
 
         return doc_data
 
-    async def get_all_documents(self, cache_key: str =
+    async def get_all_documents(self, cache_key: Optional[str] = None) -> List[Dict[str, Any]]:
         """
-
-
-
-            cache_key: The key to use for caching the full collection
-
-        Returns:
-            List of all documents in the collection
+        Retrieves all documents from the collection.
+        Uses collection_cache if cache_key is provided and cache is available.
+        Also populates document_cache for each retrieved document.
         """
-
-
-
-
-
-
-
-
-
-
+        if cache_key and self.collection_cache:
+            cached_collection_data = self.collection_cache.get(cache_key)
+            if cached_collection_data is not None:
+                self.logger.debug(f"Cache HIT for collection key '{cache_key}' in {self.collection_cache.name}")
+                # Ensure individual documents are also in document_cache if possible
+                if self.document_cache:
+                    for doc_data in cached_collection_data:
+                        if "id" in doc_data and not self.document_cache.get(doc_data["id"]):
+                            self._cache_document_data(doc_data["id"], doc_data)
+                return cached_collection_data
+            else:
+                self.logger.debug(f"Cache MISS for collection key '{cache_key}' in {self.collection_cache.name} - checking Firestore")
+
+        self.logger.info(f"Fetching all documents for {self.resource_type} from Firestore.")
+        start_time = time.time()
+
+        try:
+            docs_stream = self.db.collection(self.collection_name).stream(timeout=self.timeout)
+            docs_data_list = []
+            for doc in docs_stream:
+                doc_data = doc.to_dict()
+                if doc_data is not None:
+                    doc_data["id"] = doc.id  # Ensure 'id' field is present
+                    docs_data_list.append(doc_data)
+
+            fetch_time = (time.time() - start_time) * 1000
+            self.logger.debug(f"Fetched {len(docs_data_list)} documents for {self.resource_type} from Firestore in {fetch_time:.2f}ms")
+
+            # Cache the entire collection if cache_key and collection_cache are available
+            if cache_key and self.collection_cache:
+                self.collection_cache.set(cache_key, docs_data_list)
+                self.logger.debug(f"Cached collection with key '{cache_key}' in {self.collection_cache.name}")
+
+            # Populate individual document cache
+            if self.document_cache:
+                self.logger.debug(f"Populating document cache ({self.document_cache.name}) with {len(docs_data_list)} items for {self.resource_type}.")
+                for doc_data in docs_data_list:
+                    # _cache_document_data expects 'id' to be in doc_data for keying
+                    self._cache_document_data(doc_data["id"], doc_data)
 
-
-            doc_data = doc.to_dict()
+            return docs_data_list
 
-
-
-
+        except Exception as e:
+            self.logger.error(f"Error fetching all documents for {self.resource_type}: {str(e)}", exc_info=True)
+            raise ServiceError(operation=f"fetching all {self.resource_type}s", error=e, resource_type=self.resource_type) from e
 
-
-
-
+    def _cache_document_data(self, doc_id: str, data: Dict[str, Any]):
+        """Helper to cache document data if document_cache is available."""
+        if self.document_cache:
+            self.document_cache.set(doc_id, data)
+            self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
 
-
+    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+        """Create document and invalidate cache."""
+        result = await super().create_document(doc_id, data, creator_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
 
-
-
-
-
+    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
+        """Update document and invalidate cache."""
+        result = await super().update_document(doc_id, update_data, updater_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
 
-
+    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
+        """Delete document and invalidate cache."""
+        await super().delete_document(doc_id)
+        self._invalidate_document_cache(doc_id)
 
     def _invalidate_document_cache(self, doc_id: str) -> None:
-        """Invalidate
+        """Invalidate document cache for a specific document."""
         if self.document_cache:
            self.document_cache.invalidate(doc_id)
-            self.logger.debug(f"Invalidated
+            self.logger.debug(f"Invalidated cache for document {doc_id}")
 
-    def _invalidate_collection_cache(self, cache_key: str
-        """Invalidate
+    def _invalidate_collection_cache(self, cache_key: str) -> None:
+        """Invalidate collection cache for a specific cache key."""
         if self.collection_cache:
-            # For single key collection cache
             self.collection_cache.invalidate(cache_key)
-            self.logger.debug(f"Invalidated collection cache
+            self.logger.debug(f"Invalidated collection cache for key {cache_key}")
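Worth noting from this rewrite: the 14.0.1 write paths (create/update/delete) invalidate only the document cache, and _invalidate_collection_cache() now requires an explicit cache_key, so collection-level entries are left to expire via their TTL unless a caller invalidates them. A wiring sketch under assumed names; the SharedCache constructor is not shown in this diff, so its arguments here are guesses:

import logging
from google.cloud import firestore
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService

logger = logging.getLogger("user_service")
db = firestore.Client()

# Hypothetical constructor arguments, inferred from the __init__ logging in shared_cache.py.
document_cache = SharedCache(name="user_doc_cache", enabled=True, ttl=300, logger=logger)
collection_cache = SharedCache(name="user_collection_cache", enabled=True, ttl=300, logger=logger)

service = CacheAwareFirestoreService(
    db=db,
    collection_name="users",
    resource_type="user",
    logger=logger,
    document_cache=document_cache,
    collection_cache=collection_cache,
    timeout=30.0,
)

# First call hits Firestore and fills both caches; repeat calls within the TTL are served from memory.
# users = await service.get_all_documents(cache_key="users:all")
# user = await service.get_document("uid_123")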
{ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 13.0.1
+Version: 14.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
{ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/RECORD
RENAMED

@@ -1,12 +1,12 @@
-ipulse_shared_core_ftredge/__init__.py,sha256=
+ipulse_shared_core_ftredge/__init__.py,sha256=b7hQEEfgIhLyLycNaM5vrCNVfiCrFoUkVdAGCP0nsbM,516
 ipulse_shared_core_ftredge/cache/__init__.py,sha256=i2fPojmZiBwAoY5ovnnnME9USl4bi8MRPYkAgEfACfI,136
-ipulse_shared_core_ftredge/cache/shared_cache.py,sha256=
+ipulse_shared_core_ftredge/cache/shared_cache.py,sha256=-B7Cv-c2jVppTvbk4hbGVcrYmBeejfySPXv2hYC9frI,12923
 ipulse_shared_core_ftredge/dependencies/__init__.py,sha256=HGsR8HUguKTfjz_BorCILS4izX8CAjG-apE0kIPE0Yo,68
 ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py,sha256=EFWyhoVOI0tGYOWqN5St4JNIy4cMwpxeBhKdjOwEfbg,1888
 ipulse_shared_core_ftredge/dependencies/auth_protected_router.py,sha256=em5D5tE7OkgZmuCtYCKuUAnIZCgRJhCF8Ye5QmtGWlk,1807
-ipulse_shared_core_ftredge/dependencies/authz_for_apis.py,sha256=
+ipulse_shared_core_ftredge/dependencies/authz_for_apis.py,sha256=Z2ISjyLM7p63YhCCkFAEtAv5ekjExIFM1m2UGUQCUrY,15512
 ipulse_shared_core_ftredge/dependencies/firestore_client.py,sha256=VbTb121nsc9EZPd1RDEsHBLW5pIiVw6Wdo2JFL4afMg,714
-ipulse_shared_core_ftredge/models/__init__.py,sha256=
+ipulse_shared_core_ftredge/models/__init__.py,sha256=KACWEIj5IFfJE7L4k_csRORUSdD39zslkR3fFEzoKkc,385
 ipulse_shared_core_ftredge/models/base_api_response.py,sha256=WOHxtv_FEk5MKzXORgIsp-sKP4O5WJCgrJMI6tYph4U,1880
 ipulse_shared_core_ftredge/models/base_data_model.py,sha256=frvUDiKnjMGPXIQX_qdpNgGcm3SauCth6GiRuabmD5s,2509
 ipulse_shared_core_ftredge/models/organization_profile.py,sha256=OnjsSVcp_LSB65F9Tl9udwNgqMg7gjSpv38eArpVXPc,3668

@@ -18,15 +18,15 @@ ipulse_shared_core_ftredge/models/user_status.py,sha256=rAx8l5GrB8TN7RvZ1eIMskph
 ipulse_shared_core_ftredge/services/__init__.py,sha256=iwbBlviqOxVPmJC9tRsOyU6zzQlAn7Do0Gc3WKRi4Ao,697
 ipulse_shared_core_ftredge/services/base_firestore_service.py,sha256=n1lymQEFcu6zHkdscNNCNIzTIVmja8cBtNy2yi5vfTE,9817
 ipulse_shared_core_ftredge/services/base_service_exceptions.py,sha256=Bi0neeMY0YncWDeqUavu5JUslkjJ6QcDVRU32Ipjc08,4294
-ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py,sha256=
+ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py,sha256=rPaE2gZ05iAo5TKfIqc0yuyiVJqfbd7TQBFhWdUHJNc,7870
 ipulse_shared_core_ftredge/services/credit_service.py,sha256=C07rOr58LsK4udznu64mQFUSBxY8AdfRaxw_9Pw_AOI,12038
 ipulse_shared_core_ftredge/services/fastapiservicemon.py,sha256=27clTZXH32mbju8o-HLO_8VrmugmpXwHLuX-OOoIAew,5308
 ipulse_shared_core_ftredge/services/servicemon.py,sha256=wWhsLwU1_07emaEyCNziZA1bDQVLxcfvQj0OseTLSTI,7969
 ipulse_shared_core_ftredge/utils/__init__.py,sha256=JnxUb8I2MRjJC7rBPXSrpwBIQDEOku5O9JsiTi3oun8,56
 ipulse_shared_core_ftredge/utils/custom_json_encoder.py,sha256=DblQLD0KOSNDyQ58wQRogBrShIXzPIZUw_oGOBATnJY,1366
 ipulse_shared_core_ftredge/utils/json_encoder.py,sha256=QkcaFneVv3-q-s__Dz4OiUWYnM6jgHDJrDMdPv09RCA,2093
-ipulse_shared_core_ftredge-13.0.1.dist-info/licenses/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
-ipulse_shared_core_ftredge-13.0.1.dist-info/METADATA,sha256=
-ipulse_shared_core_ftredge-13.0.1.dist-info/WHEEL,sha256=
-ipulse_shared_core_ftredge-13.0.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
-ipulse_shared_core_ftredge-13.0.1.dist-info/RECORD,,
+ipulse_shared_core_ftredge-14.0.1.dist-info/licenses/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ipulse_shared_core_ftredge-14.0.1.dist-info/METADATA,sha256=kVoc68wS8z5W2WmIyskfb9uCijskolt6LoVo81K0CUI,803
+ipulse_shared_core_ftredge-14.0.1.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ipulse_shared_core_ftredge-14.0.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ipulse_shared_core_ftredge-14.0.1.dist-info/RECORD,,

{ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/licenses/LICENCE
RENAMED
File without changes

{ipulse_shared_core_ftredge-13.0.1.dist-info → ipulse_shared_core_ftredge-14.0.1.dist-info}/top_level.txt
RENAMED
File without changes