ipulse-shared-core-ftredge 13.0.1-py3-none-any.whl → 14.0.1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -2,7 +2,7 @@
 from .models import ( UserAuth, UserProfile,Subscription,
                       UserStatus, IAMUnitRefAssignment, UserProfileUpdate,
                       OrganizationProfile, BaseAPIResponse,
-                      CustomJSONResponse )
+                      CustomJSONResponse, BaseDataModel )
 
 
 
@@ -5,6 +5,7 @@ import logging
 import traceback
 import inspect
 import asyncio
+import threading
 from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
 
 T = TypeVar('T')
@@ -37,6 +38,13 @@ class SharedCache(Generic[T]):
         self._cache: Dict[str, T] = {}
         self._timestamps: Dict[str, float] = {}
 
+        # Thread-safe attributes
+        self.lock = threading.Lock()
+        self.hits = 0
+        self.misses = 0
+        self.sets = 0
+        self.evictions = 0
+
         self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
 
     def get(self, key: str) -> Optional[T]:
@@ -52,21 +60,25 @@ class SharedCache(Generic[T]):
         if not self.enabled:
             return None
 
-        try:
-            if key in self._cache:
-                timestamp = self._timestamps.get(key, 0)
-                if time.time() - timestamp < self.ttl:
-                    self.logger.debug(f"Cache hit for {key} in {self.name}")
-                    return self._cache[key]
+        with self.lock:
+            try:
+                if key in self._cache:
+                    timestamp = self._timestamps.get(key, 0)
+                    if time.time() - timestamp < self.ttl:
+                        self.hits += 1
+                        self.logger.debug(f"Cache hit for {key} in {self.name}")
+                        return self._cache[key]
+                    else:
+                        # Expired item, remove it
+                        self.invalidate(key)
+                        self.logger.debug(f"Cache expired for {key} in {self.name}")
                 else:
-                    # Expired item, remove it
-                    self.invalidate(key)
-                    self.logger.debug(f"Cache expired for {key} in {self.name}")
-        except Exception as e:
-            self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
-            self.logger.error(traceback.format_exc())
+                    self.misses += 1
+            except Exception as e:
+                self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
-        return None
+            return None
 
     def set(self, key: str, value: T) -> None:
         """
@@ -79,13 +91,25 @@ class SharedCache(Generic[T]):
         if not self.enabled:
             return
 
-        try:
-            self._cache[key] = value
-            self._timestamps[key] = time.time()
-            self.logger.debug(f"Cached item {key} in {self.name}")
-        except Exception as e:
-            self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
-            self.logger.error(traceback.format_exc())
+        with self.lock:
+            try:
+                if len(self._cache) >= 1000 and key not in self._cache:
+                    # Basic LRU-like eviction: remove the first item found (not true LRU)
+                    try:
+                        oldest_key = next(iter(self._cache))
+                        self.invalidate(oldest_key)
+                        self.evictions += 1
+                    except StopIteration:
+                        # Cache was empty, which shouldn't happen if len >= max_size > 0
+                        pass  # Or log an error
+
+                self._cache[key] = value
+                self._timestamps[key] = time.time()
+                self.sets += 1
+                self.logger.debug(f"Cached item {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def invalidate(self, key: str) -> None:
         """
@@ -94,24 +118,28 @@ class SharedCache(Generic[T]):
         Args:
             key: The cache key to invalidate.
         """
-        try:
-            self._cache.pop(key, None)
-            self._timestamps.pop(key, None)
-            self.logger.debug(f"Invalidated cache for {key} in {self.name}")
-        except Exception as e:
-            self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
-            self.logger.error(traceback.format_exc())
+        with self.lock:
+            try:
+                self._cache.pop(key, None)
+                self._timestamps.pop(key, None)
+                self.evictions += 1
+                self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+            except Exception as e:
+                self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def invalidate_all(self) -> None:
         """Clear all cached items."""
-        try:
-            cache_size = len(self._cache)
-            self._cache.clear()
-            self._timestamps.clear()
-            self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
-        except Exception as e:
-            self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
-            self.logger.error(traceback.format_exc())
+        with self.lock:
+            try:
+                cache_size = len(self._cache)
+                self._cache.clear()
+                self._timestamps.clear()
+                self.evictions += cache_size
+                self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+            except Exception as e:
+                self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
 
     def get_or_set(
         self,
@@ -183,51 +211,83 @@ class SharedCache(Generic[T]):
         Returns:
             Tuple of (data, was_cached) where was_cached indicates if from cache.
         """
+        if not self.enabled:
+            self.logger.debug(f"Cache {self.name} is disabled. Loading data directly for key {key}.")
+            try:
+                fresh_data = await async_data_loader()
+                if fresh_data is None:
+                    self.logger.error(f"Async data loader returned None for key {key} in disabled cache {self.name}")
+                    raise ValueError(f"Async data loader returned None for key {key} in {self.name} (cache disabled)")
+                return fresh_data, False
+            except Exception as e:
+                self.logger.error(f"Error in async_data_loader for key {key} in disabled cache {self.name}: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                raise RuntimeError(f"Cache error (disabled) in {self.name} for key {key}: {str(e)}") from e
+
         try:
-            cached_data = self.get(key)
+            cached_data = self.get(key)  # self.get() is synchronous, assumed to be fast.
             if cached_data is not None:
+                self.logger.debug(f"Cache HIT for key {key} in {self.name} (async_get_or_set)")
                 return cached_data, True
 
+            self.logger.debug(f"Cache MISS for key {key} in {self.name} (async_get_or_set). Loading data.")
             # Not in cache or expired, load the data asynchronously
-            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")
-
-            # Execute the async data loader
             fresh_data = await async_data_loader()
 
            if fresh_data is not None:  # Only cache if we got valid data
                 self.set(key, fresh_data)
+            else:
+                # Log an error if data_loader returns None, as it's unexpected.
+                self.logger.error(f"Async data loader returned None for key {key} in {self.name}")
+                raise ValueError(f"Async data loader returned None for key {key} in {self.name}")
 
             return fresh_data, False
         except Exception as e:
-            self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(f"Error in async_get_or_set for key {key} in {self.name}: {str(e)}")
             self.logger.error(traceback.format_exc())
-            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+            # Re-raise the exception after logging, adding context
+            raise RuntimeError(f"Cache error in {self.name} for key {key} (async): {str(e)}") from e
 
     def get_stats(self) -> Dict[str, Any]:
         """Get statistics about the current cache state."""
-        try:
-            return {
-                "name": self.name,
-                "enabled": self.enabled,
-                "ttl_seconds": self.ttl,
-                "item_count": len(self._cache),
-                "keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
-                "has_more_keys": len(self._cache.keys()) > 20,
-                "memory_usage_estimate_bytes": sum(
-                    len(str(k)) + self._estimate_size(v)
-                    for k, v in self._cache.items()
-                )
-            }
-        except Exception as e:
-            self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
-            self.logger.error(traceback.format_exc())
-            return {
-                "name": self.name,
-                "enabled": self.enabled,
-                "error": str(e),
-                "ttl_seconds": self.ttl,
-                "item_count": len(self._cache) if self._cache else 0
-            }
+        with self.lock:
+            try:
+                # Clean up expired items before reporting size
+                current_time = time.time()
+                # Corrected: Use self._timestamps to find expired keys
+                expired_keys = [k for k, ts in self._timestamps.items() if current_time - ts >= self.ttl]
+                for k in expired_keys:
+                    self._cache.pop(k, None)
+                    self._timestamps.pop(k, None)
+                    self.evictions += 1
+
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache),
+                    "first_20_keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
+                    "total_keys": len(self._cache.keys()),
+                    "memory_usage_estimate_megabytes": round(
+                        sum(len(str(k)) + self._estimate_size(v) for k, v in self._cache.items()) / (1024 * 1024),
+                        3
+                    ),
+                    "hits": self.hits,
+                    "misses": self.misses,
+                    "sets": self.sets,
+                    "evictions": self.evictions,
+                    "default_ttl": self.ttl
+                }
+            except Exception as e:
+                self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+                self.logger.error(traceback.format_exc())
+                return {
+                    "name": self.name,
+                    "enabled": self.enabled,
+                    "error": str(e),
+                    "ttl_seconds": self.ttl,
+                    "item_count": len(self._cache) if self._cache else 0
+                }
 
     def _estimate_size(self, obj: Any) -> int:
         """Estimate the memory size of an object in bytes."""
@@ -247,3 +307,6 @@ class SharedCache(Generic[T]):
         except Exception:
             # If we can't estimate, return a reasonable default
             return 100
+
+    def __str__(self) -> str:
+        return f"SharedCache(name='{self.name}', size={len(self._cache)}, max_size={self.max_size}, hits={self.hits}, misses={self.misses})"
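Below is a minimal usage sketch of the updated cache. It assumes the constructor accepts name, ttl, and enabled keyword arguments (inferred from the initialization log line; the real signature may also take a logger or max_size) and that async_get_or_set takes the key followed by the async loader. Note that, as diffed, get() and set() call invalidate() while still holding self.lock; with a non-reentrant threading.Lock that path would deadlock on expiry or eviction, so the sketch stays on the plain hit/miss path.

    import asyncio

    from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

    # Hypothetical construction; argument names are assumptions, not the confirmed API.
    user_cache: SharedCache[dict] = SharedCache(name="user_status", ttl=300, enabled=True)

    async def load_user_status() -> dict:
        # Hypothetical loader; in the package this would typically read Firestore.
        return {"uid": "abc123", "status": "active"}

    async def main() -> None:
        # First call misses and awaits the loader; the second is served from the cache.
        _, was_cached_first = await user_cache.async_get_or_set("abc123", load_user_status)
        _, was_cached_second = await user_cache.async_get_or_set("abc123", load_user_status)
        print(was_cached_first, was_cached_second)   # expected: False True (within the TTL)
        stats = user_cache.get_stats()
        print(stats["hits"], stats["misses"], stats["sets"])

    asyncio.run(main())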
@@ -231,7 +231,7 @@ async def authorizeAPIRequest(
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
-    Enhanced with credit check information.
+    Enhanced with credit check information and proper exception handling.
 
     Args:
         request: The incoming request
@@ -241,6 +241,9 @@ async def authorizeAPIRequest(
 
     Returns:
         Authorization result containing decision details
+
+    Raises:
+        HTTPException: For authorization failures (403) or service errors (500)
     """
     opa_decision = None
     try:
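With the Raises contract documented above, and the HTTPException-based handling introduced in the hunks below, a route no longer needs to translate library-specific authorization errors itself. The following sketch is hedged: the route path is hypothetical and the diff does not show authorizeAPIRequest's full parameter list (e.g. request_resource_fields), so calling it with only the request is an assumption.

    from fastapi import APIRouter, Request

    from ipulse_shared_core_ftredge.dependencies.authz_for_apis import authorizeAPIRequest

    router = APIRouter()

    @router.get("/portfolio")   # hypothetical route
    async def get_portfolio(request: Request):
        # HTTPException(403) on denial and HTTPException(500) on OPA or unexpected
        # failures propagate out of authorizeAPIRequest and are rendered by FastAPI
        # as JSON error responses, so no try/except is needed here.
        authz = await authorizeAPIRequest(request)
        return {"authorized": True, "opa_decision": authz.get("opa_decision")}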
@@ -251,9 +254,11 @@ async def authorizeAPIRequest(
         # Extract request context
         user_uid = request.state.user.get('uid')
         if not user_uid:
-            raise AuthorizationError(
-                action="access API",
-                additional_info={"path": str(request.url)}
+            # Log authorization failures at DEBUG level, not ERROR
+            logger.debug(f"Authorization denied for {request.method} {request.url.path}: No user UID found")
+            raise HTTPException(
+                status_code=403,
+                detail="Not authorized to access this resource"
             )
 
         # Determine if we need fresh status
@@ -313,7 +318,7 @@ async def authorizeAPIRequest(
                 timeout=5.0  # 5 seconds timeout
             )
             logger.debug(f"OPA Response Status: {response.status_code}")
-            logger.debug(f"OPA Response Body: {response.text}")
+            # logger.debug(f"OPA Response Body: {response.text}")
 
             if response.status_code != 200:
                 logger.error(f"OPA authorization failed: {response.text}")
@@ -326,11 +331,9 @@ async def authorizeAPIRequest(
             logger.debug(f"Parsed OPA response: {result}")
 
             # Handle unusual OPA response formats
-            # Try to find "decision" field as an alternative
             if "result" in result:
                 opa_decision = result["result"]
             else:
-                # If we still don't have a result after all attempts, use default structure
                 logger.warning(f"OPA response missing 'result' field, using default")
                 raise HTTPException(
                     status_code=500,
340
343
  # Extract key fields from result with better default handling
341
344
  allow = opa_decision.get("allow", False)
342
345
 
343
- # Handle authorization denial
346
+ # Handle authorization denial - log at DEBUG level, not ERROR
344
347
  if not allow:
345
- logger.error(f"Authorization denied: {result}")
346
- raise AuthorizationError(
347
- action=f"{request.method} {request.url.path}",
348
- additional_info={
349
- "user_uid": user_uid,
350
- "resource_fields": request_resource_fields,
351
- "opa_decision": opa_decision, # Include the full OPA decision result
352
- # Include the raw result if it's different from the processed decision
353
- "raw_opa_response": result if result != {"result": opa_decision} else None
354
- }
348
+ logger.debug(f"Authorization denied for {request.method} {request.url.path}: insufficient permissions")
349
+ raise HTTPException(
350
+ status_code=403,
351
+ detail=f"Not authorized to {request.method} {request.url.path}"
355
352
  )
356
353
 
357
354
  except httpx.RequestError as e:
355
+ # Only log actual system errors at ERROR level
358
356
  logger.error(f"Failed to connect to OPA: {str(e)}")
359
- raise ServiceError(
360
- operation="API authorization",
361
- error=e,
362
- resource_type="authorization",
363
- additional_info={
364
- "opa_url": opa_url,
365
- "connection_error": str(e)
366
- }
367
- ) from e
357
+ raise HTTPException(
358
+ status_code=500,
359
+ detail="Authorization service temporarily unavailable"
360
+ )
368
361
 
369
362
  # More descriptive metadata about the data freshness
370
363
  return {
@@ -374,21 +367,16 @@ async def authorizeAPIRequest(
             "opa_decision": opa_decision
         }
 
-    except (AuthorizationError, ResourceNotFoundError):
+    except HTTPException:
+        # Re-raise HTTPExceptions as-is (they're already properly formatted)
         raise
     except Exception as e:
-        logger.exception(f"Exception in authorizeAPIRequest: {e}")
-        raise ServiceError(
-            operation="API authorization",
-            error=e,
-            resource_type="authorization",
-            additional_info={
-                "path": str(request.url),
-                "method": request.method,
-                "user_uid": request.state.user.get('uid'),
-                "resource_fields": request_resource_fields
-            }
-        ) from e
+        # Only log unexpected errors at ERROR level
+        logger.error(f"Unexpected error during authorization for {request.method} {request.url.path}: {str(e)}")
+        raise HTTPException(
+            status_code=500,
+            detail="Internal authorization error"
+        )
 
 
 def _should_force_fresh_status(request: Request) -> bool:
     """
@@ -5,7 +5,6 @@ from .user_profile_update import UserProfileUpdate
 from .user_auth import UserAuth
 from .organization_profile import OrganizationProfile
 from .base_api_response import BaseAPIResponse , CustomJSONResponse
-
-
+from .base_data_model import BaseDataModel
 
 
@@ -1,18 +1,18 @@
-"""Base service with built-in cache awareness for Firestore operations."""
-
-from typing import Dict, Any, List, Optional, TypeVar, Generic
+"""Cache-aware Firestore service base class."""
+import time
+from typing import TypeVar, Generic, Dict, Any, List, Optional
 from google.cloud import firestore
-from pydantic import BaseModel
-import logging
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.base_service_exceptions import ResourceNotFoundError, ServiceError
 from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
-from ipulse_shared_core_ftredge.services import BaseFirestoreService
+from ipulse_shared_core_ftredge import BaseDataModel
 
-T = TypeVar('T', bound=BaseModel)
+T = TypeVar('T', bound=BaseDataModel)
 
-class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
+class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
     """
-    Base service class that integrates caching with Firestore operations.
-    This allows services to inherit cache-aware CRUD methods without reimplementing them.
+    Base service class that adds caching capabilities to BaseFirestoreService.
+    Supports both document-level and collection-level caching.
     """
 
     def __init__(
@@ -20,152 +20,150 @@ class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
         db: firestore.Client,
         collection_name: str,
         resource_type: str,
-        logger: logging.Logger,
+        logger,
         document_cache: Optional[SharedCache] = None,
         collection_cache: Optional[SharedCache] = None,
-        timeout: float = 15.0
+        timeout: float = 30.0
     ):
-        """
-        Initialize the service with optional cache instances.
-
-        Args:
-            db: Firestore client
-            collection_name: Firestore collection name
-            resource_type: Resource type for error messages
-            logger: Logger instance
-            document_cache: Cache for individual documents (optional)
-            collection_cache: Cache for collection-level queries (optional)
-            timeout: Firestore operation timeout in seconds
-        """
-        super().__init__(
-            db=db,
-            collection_name=collection_name,
-            resource_type=resource_type,
-            logger=logger,
-            timeout=timeout
-        )
+        super().__init__(db, collection_name, resource_type, logger)
         self.document_cache = document_cache
         self.collection_cache = collection_cache
+        self.timeout = timeout
 
         # Log cache configuration
-        if document_cache:
-            self.logger.info(f"Document cache enabled for {resource_type}: {document_cache.name}")
-        if collection_cache:
-            self.logger.info(f"Collection cache enabled for {resource_type}: {collection_cache.name}")
-
-    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
-        """Create a document and invalidate relevant caches."""
-        result = await super().create_document(doc_id, data, creator_uid)
-
-        # Invalidate document cache if it exists
-        self._invalidate_document_cache(doc_id)
-
-        # Invalidate collection cache if it exists
-        self._invalidate_collection_cache()
-
-        return result
-
-    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
-        """Update a document and invalidate relevant caches."""
-        result = await super().update_document(doc_id, update_data, updater_uid)
-
-        # Invalidate document cache if it exists
-        self._invalidate_document_cache(doc_id)
-
-        # Invalidate collection cache if it exists
-        self._invalidate_collection_cache()
-
-        return result
-
-    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
-        """Delete a document and invalidate relevant caches."""
-        # Invalidate caches before deletion to handle potential failures
-        self._invalidate_document_cache(doc_id)
-        self._invalidate_collection_cache()
-
-        # Delete the document
-        await super().delete_document(doc_id)
+        if self.document_cache:
+            self.logger.info(f"Document cache enabled for {resource_type}: {self.document_cache.name}")
+        if self.collection_cache:
+            self.logger.info(f"Collection cache enabled for {resource_type}: {self.collection_cache.name}")
 
     async def get_document(self, doc_id: str) -> Dict[str, Any]:
         """
-        Get a document by ID with caching if available.
+        Get a document with caching support.
 
         Args:
-            doc_id: The document ID to fetch
+            doc_id: Document ID to fetch
 
         Returns:
-            The document data
+            Document data as dictionary
+
+        Raises:
+            ResourceNotFoundError: If document doesn't exist
         """
-        # Check document cache first if available
+        # Check cache first
         if self.document_cache:
-            cached_data = self.document_cache.get(doc_id)
-            if cached_data is not None:
-                self.logger.debug(f"Cache hit for document {doc_id}")
-                return cached_data
-
-        # Cache miss or no cache configured, fetch from Firestore
-        doc_data = await super().get_document(doc_id)
-
-        # Store in cache if available
+            start_time = time.time()
+            cached_doc = self.document_cache.get(doc_id)
+            cache_check_time = (time.time() - start_time) * 1000
+
+            if cached_doc is not None:
+                self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
+                return cached_doc
+            else:
+                self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
+
+        # Fetch from Firestore
+        start_time = time.time()
+        doc_ref = self.db.collection(self.collection_name).document(doc_id)
+        doc = doc_ref.get(timeout=self.timeout)
+        firestore_time = (time.time() - start_time) * 1000
+
+        if not doc.exists:
+            self.logger.info(f"Document {doc_id} not found in Firestore after {firestore_time:.2f}ms")
+            raise ResourceNotFoundError(self.resource_type, doc_id)
+
+        doc_data = doc.to_dict()
+        self.logger.debug(f"Fetched document {doc_id} from Firestore in {firestore_time:.2f}ms")
+
+        # Cache the result
         if self.document_cache and doc_data:
-            # Make sure ID is included in the cached data
-            if 'id' not in doc_data:
-                doc_data['id'] = doc_id
             self.document_cache.set(doc_id, doc_data)
             self.logger.debug(f"Cached document {doc_id}")
 
         return doc_data
 
-    async def get_all_documents(self, cache_key: str = "all_documents") -> List[Dict[str, Any]]:
+    async def get_all_documents(self, cache_key: Optional[str] = None) -> List[Dict[str, Any]]:
         """
-        Get all documents in the collection with caching.
-
-        Args:
-            cache_key: The key to use for caching the full collection
-
-        Returns:
-            List of all documents in the collection
+        Retrieves all documents from the collection.
+        Uses collection_cache if cache_key is provided and cache is available.
+        Also populates document_cache for each retrieved document.
         """
-        # Check collection cache first if available
-        if self.collection_cache:
-            cached_data = self.collection_cache.get(cache_key)
-            if cached_data is not None:
-                self.logger.debug(f"Cache hit for collection query: {cache_key}")
-                return cached_data
-
-        # Cache miss or no cache configured, fetch from Firestore
-        query = self.db.collection(self.collection_name).stream(timeout=self.timeout)
-        documents = []
+        if cache_key and self.collection_cache:
+            cached_collection_data = self.collection_cache.get(cache_key)
+            if cached_collection_data is not None:
+                self.logger.debug(f"Cache HIT for collection key '{cache_key}' in {self.collection_cache.name}")
+                # Ensure individual documents are also in document_cache if possible
+                if self.document_cache:
+                    for doc_data in cached_collection_data:
+                        if "id" in doc_data and not self.document_cache.get(doc_data["id"]):
+                            self._cache_document_data(doc_data["id"], doc_data)
+                return cached_collection_data
+            else:
+                self.logger.debug(f"Cache MISS for collection key '{cache_key}' in {self.collection_cache.name} - checking Firestore")
+
+        self.logger.info(f"Fetching all documents for {self.resource_type} from Firestore.")
+        start_time = time.time()
+
+        try:
+            docs_stream = self.db.collection(self.collection_name).stream(timeout=self.timeout)
+            docs_data_list = []
+            for doc in docs_stream:
+                doc_data = doc.to_dict()
+                if doc_data is not None:
+                    doc_data["id"] = doc.id  # Ensure 'id' field is present
+                    docs_data_list.append(doc_data)
+
+            fetch_time = (time.time() - start_time) * 1000
+            self.logger.debug(f"Fetched {len(docs_data_list)} documents for {self.resource_type} from Firestore in {fetch_time:.2f}ms")
+
+            # Cache the entire collection if cache_key and collection_cache are available
+            if cache_key and self.collection_cache:
+                self.collection_cache.set(cache_key, docs_data_list)
+                self.logger.debug(f"Cached collection with key '{cache_key}' in {self.collection_cache.name}")
+
+            # Populate individual document cache
+            if self.document_cache:
+                self.logger.debug(f"Populating document cache ({self.document_cache.name}) with {len(docs_data_list)} items for {self.resource_type}.")
+                for doc_data in docs_data_list:
+                    # _cache_document_data expects 'id' to be in doc_data for keying
+                    self._cache_document_data(doc_data["id"], doc_data)
 
-        for doc in query:
-            doc_data = doc.to_dict()
+            return docs_data_list
 
-            # Make sure ID is included in the data
-            if 'id' not in doc_data:
-                doc_data['id'] = doc.id
+        except Exception as e:
+            self.logger.error(f"Error fetching all documents for {self.resource_type}: {str(e)}", exc_info=True)
+            raise ServiceError(operation=f"fetching all {self.resource_type}s", error=e, resource_type=self.resource_type) from e
 
-            # Also update the document cache if configured
-            if self.document_cache:
-                self.document_cache.set(doc.id, doc_data)
+    def _cache_document_data(self, doc_id: str, data: Dict[str, Any]):
+        """Helper to cache document data if document_cache is available."""
+        if self.document_cache:
+            self.document_cache.set(doc_id, data)
+            self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
 
-            documents.append(doc_data)
+    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+        """Create document and invalidate cache."""
+        result = await super().create_document(doc_id, data, creator_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
 
-        # Store in collection cache if available
-        if self.collection_cache:
-            self.collection_cache.set(cache_key, documents)
-            self.logger.debug(f"Cached collection query result: {cache_key} with {len(documents)} documents")
+    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
+        """Update document and invalidate cache."""
+        result = await super().update_document(doc_id, update_data, updater_uid)
+        self._invalidate_document_cache(doc_id)
+        return result
 
-        return documents
+    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
+        """Delete document and invalidate cache."""
+        await super().delete_document(doc_id)
+        self._invalidate_document_cache(doc_id)
 
     def _invalidate_document_cache(self, doc_id: str) -> None:
-        """Invalidate the document cache for a specific document ID."""
+        """Invalidate document cache for a specific document."""
         if self.document_cache:
             self.document_cache.invalidate(doc_id)
-            self.logger.debug(f"Invalidated document cache for {doc_id}")
+            self.logger.debug(f"Invalidated cache for document {doc_id}")
 
-    def _invalidate_collection_cache(self, cache_key: str = "all_documents") -> None:
-        """Invalidate the collection cache."""
+    def _invalidate_collection_cache(self, cache_key: str) -> None:
+        """Invalidate collection cache for a specific cache key."""
         if self.collection_cache:
-            # For single key collection cache
             self.collection_cache.invalidate(cache_key)
-            self.logger.debug(f"Invalidated collection cache: {cache_key}")
+            self.logger.debug(f"Invalidated collection cache for key {cache_key}")
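Taken together, the rewritten service reads individual documents and full collections through the shared caches and invalidates the per-document entry on writes. A minimal subclassing sketch follows; the collection name, the model choice, and the SharedCache constructor arguments are assumptions for illustration, not the confirmed API.

    import logging

    from google.cloud import firestore

    from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
    from ipulse_shared_core_ftredge.models import UserStatus  # assumed to be a BaseDataModel subclass
    from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService

    logger = logging.getLogger("user_status_service")

    class UserStatusService(CacheAwareFirestoreService[UserStatus]):
        """Cache-aware CRUD over a hypothetical 'user_statuses' collection."""

        def __init__(self, db: firestore.Client):
            super().__init__(
                db=db,
                collection_name="user_statuses",   # hypothetical collection name
                resource_type="UserStatus",
                logger=logger,
                document_cache=SharedCache(name="user_status_docs", ttl=300, enabled=True),
                collection_cache=SharedCache(name="user_status_collection", ttl=300, enabled=True),
            )

    # Reads go through the caches; writes invalidate the per-document entry:
    #   service = UserStatusService(firestore.Client())
    #   status = await service.get_document("user-123")
    #   everyone = await service.get_all_documents(cache_key="all_user_statuses")

One behavioural point visible in the diff: create_document, update_document, and delete_document now invalidate only the document cache, so a collection-level entry cached under a given cache_key stays stale until its TTL expires unless _invalidate_collection_cache(cache_key) is called explicitly.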
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 13.0.1
+Version: 14.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -1,12 +1,12 @@
-ipulse_shared_core_ftredge/__init__.py,sha256=Bj1WgZq6EmiZeFC-3gYludUpoWgsUrRq1NME5nMN22Q,501
+ipulse_shared_core_ftredge/__init__.py,sha256=b7hQEEfgIhLyLycNaM5vrCNVfiCrFoUkVdAGCP0nsbM,516
 ipulse_shared_core_ftredge/cache/__init__.py,sha256=i2fPojmZiBwAoY5ovnnnME9USl4bi8MRPYkAgEfACfI,136
-ipulse_shared_core_ftredge/cache/shared_cache.py,sha256=pDHJuMRU6zkqbykaK2ldpyVmUHLa0TAI4Xu3P9M-0B0,9454
+ipulse_shared_core_ftredge/cache/shared_cache.py,sha256=-B7Cv-c2jVppTvbk4hbGVcrYmBeejfySPXv2hYC9frI,12923
 ipulse_shared_core_ftredge/dependencies/__init__.py,sha256=HGsR8HUguKTfjz_BorCILS4izX8CAjG-apE0kIPE0Yo,68
 ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py,sha256=EFWyhoVOI0tGYOWqN5St4JNIy4cMwpxeBhKdjOwEfbg,1888
 ipulse_shared_core_ftredge/dependencies/auth_protected_router.py,sha256=em5D5tE7OkgZmuCtYCKuUAnIZCgRJhCF8Ye5QmtGWlk,1807
-ipulse_shared_core_ftredge/dependencies/authz_for_apis.py,sha256=e_UZ_jY1xhC3XX8r3xffb06t6buLfXT2SQf46-DXVoo,15977
+ipulse_shared_core_ftredge/dependencies/authz_for_apis.py,sha256=Z2ISjyLM7p63YhCCkFAEtAv5ekjExIFM1m2UGUQCUrY,15512
 ipulse_shared_core_ftredge/dependencies/firestore_client.py,sha256=VbTb121nsc9EZPd1RDEsHBLW5pIiVw6Wdo2JFL4afMg,714
-ipulse_shared_core_ftredge/models/__init__.py,sha256=cf2P65BXRQoOrcuvlbmT6yW50U7wyj8ZNvHTuTvlETo,344
+ipulse_shared_core_ftredge/models/__init__.py,sha256=KACWEIj5IFfJE7L4k_csRORUSdD39zslkR3fFEzoKkc,385
 ipulse_shared_core_ftredge/models/base_api_response.py,sha256=WOHxtv_FEk5MKzXORgIsp-sKP4O5WJCgrJMI6tYph4U,1880
 ipulse_shared_core_ftredge/models/base_data_model.py,sha256=frvUDiKnjMGPXIQX_qdpNgGcm3SauCth6GiRuabmD5s,2509
 ipulse_shared_core_ftredge/models/organization_profile.py,sha256=OnjsSVcp_LSB65F9Tl9udwNgqMg7gjSpv38eArpVXPc,3668
@@ -18,15 +18,15 @@ ipulse_shared_core_ftredge/models/user_status.py,sha256=rAx8l5GrB8TN7RvZ1eIMskph
 ipulse_shared_core_ftredge/services/__init__.py,sha256=iwbBlviqOxVPmJC9tRsOyU6zzQlAn7Do0Gc3WKRi4Ao,697
 ipulse_shared_core_ftredge/services/base_firestore_service.py,sha256=n1lymQEFcu6zHkdscNNCNIzTIVmja8cBtNy2yi5vfTE,9817
 ipulse_shared_core_ftredge/services/base_service_exceptions.py,sha256=Bi0neeMY0YncWDeqUavu5JUslkjJ6QcDVRU32Ipjc08,4294
-ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py,sha256=DLNS1BegJUPSHs41j5jkP3g6w2tGSDUIurIWjI__xf4,6486
+ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py,sha256=rPaE2gZ05iAo5TKfIqc0yuyiVJqfbd7TQBFhWdUHJNc,7870
 ipulse_shared_core_ftredge/services/credit_service.py,sha256=C07rOr58LsK4udznu64mQFUSBxY8AdfRaxw_9Pw_AOI,12038
 ipulse_shared_core_ftredge/services/fastapiservicemon.py,sha256=27clTZXH32mbju8o-HLO_8VrmugmpXwHLuX-OOoIAew,5308
 ipulse_shared_core_ftredge/services/servicemon.py,sha256=wWhsLwU1_07emaEyCNziZA1bDQVLxcfvQj0OseTLSTI,7969
 ipulse_shared_core_ftredge/utils/__init__.py,sha256=JnxUb8I2MRjJC7rBPXSrpwBIQDEOku5O9JsiTi3oun8,56
 ipulse_shared_core_ftredge/utils/custom_json_encoder.py,sha256=DblQLD0KOSNDyQ58wQRogBrShIXzPIZUw_oGOBATnJY,1366
 ipulse_shared_core_ftredge/utils/json_encoder.py,sha256=QkcaFneVv3-q-s__Dz4OiUWYnM6jgHDJrDMdPv09RCA,2093
-ipulse_shared_core_ftredge-13.0.1.dist-info/licenses/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
-ipulse_shared_core_ftredge-13.0.1.dist-info/METADATA,sha256=OYQw5kTq7LHJinYen7EVx-NryTit726UoMoNm6PxtMA,803
-ipulse_shared_core_ftredge-13.0.1.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
-ipulse_shared_core_ftredge-13.0.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
-ipulse_shared_core_ftredge-13.0.1.dist-info/RECORD,,
+ipulse_shared_core_ftredge-14.0.1.dist-info/licenses/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ipulse_shared_core_ftredge-14.0.1.dist-info/METADATA,sha256=kVoc68wS8z5W2WmIyskfb9uCijskolt6LoVo81K0CUI,803
+ipulse_shared_core_ftredge-14.0.1.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ipulse_shared_core_ftredge-14.0.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ipulse_shared_core_ftredge-14.0.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.7.1)
+Generator: setuptools (80.8.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 