ipulse-shared-core-ftredge 13.0.1.tar.gz → 14.0.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (42)
  1. {ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-14.0.1}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/setup.py +1 -1
  3. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/__init__.py +1 -1
  4. ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py +312 -0
  5. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +28 -40
  6. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +1 -2
  7. ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +169 -0
  8. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  9. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py +0 -249
  10. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +0 -171
  11. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/LICENCE +0 -0
  12. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/README.md +0 -0
  13. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/pyproject.toml +0 -0
  14. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/setup.cfg +0 -0
  15. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/cache/__init__.py +0 -0
  16. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  17. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  18. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  19. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  20. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +0 -0
  21. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -0
  22. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
  23. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
  24. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  25. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  26. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  27. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  28. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/__init__.py +0 -0
  29. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +0 -0
  30. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
  31. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/credit_service.py +0 -0
  32. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
  33. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
  34. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
  35. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  36. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
  37. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +0 -0
  38. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  39. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  40. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
  41. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/tests/test_cache_aware_service.py +0 -0
  42. {ipulse_shared_core_ftredge-13.0.1 → ipulse_shared_core_ftredge-14.0.1}/tests/test_shared_cache.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ipulse_shared_core_ftredge
- Version: 13.0.1
+ Version: 14.0.1
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
  Author: Russlan Ramdowar
setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages

  setup(
      name='ipulse_shared_core_ftredge',
-     version='13.0.1',
+     version='14.0.1',
      package_dir={'': 'src'},  # Specify the source directory
      packages=find_packages(where='src'),  # Look for packages in 'src'
      install_requires=[
src/ipulse_shared_core_ftredge/__init__.py
@@ -2,7 +2,7 @@
  from .models import ( UserAuth, UserProfile,Subscription,
                        UserStatus, IAMUnitRefAssignment, UserProfileUpdate,
                        OrganizationProfile, BaseAPIResponse,
-                       CustomJSONResponse )
+                       CustomJSONResponse, BaseDataModel )

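For consumers of the package, the visible effect of this change is that BaseDataModel joins the models re-exported at the package root. A minimal import sketch follows; the subclass and its fields are hypothetical, and it assumes BaseDataModel is a Pydantic-style base class like the other shared models:

    from ipulse_shared_core_ftredge import BaseDataModel, UserProfile

    class PortfolioSnapshot(BaseDataModel):
        # Hypothetical model for illustration only; field names are not from the package.
        owner_uid: str
        total_value: float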
src/ipulse_shared_core_ftredge/cache/shared_cache.py (new file)
@@ -0,0 +1,312 @@
+ """Module for shared caching functionality that can be used across microservices."""
+ import os
+ import time
+ import logging
+ import traceback
+ import inspect
+ import asyncio
+ import threading
+ from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
+
+ T = TypeVar('T')
+
+ class SharedCache(Generic[T]):
+     """
+     Generic shared cache implementation that can be used across services.
+
+     Attributes:
+         name: The name of the cache for logging and identification.
+         ttl: Time-to-live in seconds for cached items.
+         enabled: Whether the cache is enabled.
+         logger: Logger for cache operations.
+         _cache: Dictionary holding cached values.
+         _timestamps: Dictionary holding timestamps for each cached item.
+     """
+
+     def __init__(
+         self,
+         name: str,
+         ttl: float,
+         enabled: bool = True,
+         logger: Optional[logging.Logger] = None
+     ):
+         """Initialize the cache with name, TTL and enabled state."""
+         self.name = name
+         self.ttl = ttl
+         self.enabled = enabled
+         self.logger = logger or logging.getLogger(__name__)
+         self._cache: Dict[str, T] = {}
+         self._timestamps: Dict[str, float] = {}
+
+         # Thread-safe attributes
+         self.lock = threading.Lock()
+         self.hits = 0
+         self.misses = 0
+         self.sets = 0
+         self.evictions = 0
+
+         self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
+
+     def get(self, key: str) -> Optional[T]:
+         """
+         Get a value from the cache if it exists and hasn't expired.
+
+         Args:
+             key: The cache key to retrieve.
+
+         Returns:
+             The cached value if found and valid, None otherwise.
+         """
+         if not self.enabled:
+             return None
+
+         with self.lock:
+             try:
+                 if key in self._cache:
+                     timestamp = self._timestamps.get(key, 0)
+                     if time.time() - timestamp < self.ttl:
+                         self.hits += 1
+                         self.logger.debug(f"Cache hit for {key} in {self.name}")
+                         return self._cache[key]
+                     else:
+                         # Expired item, remove it
+                         self.invalidate(key)
+                         self.logger.debug(f"Cache expired for {key} in {self.name}")
+                 else:
+                     self.misses += 1
+             except Exception as e:
+                 self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+
+         return None
+
+     def set(self, key: str, value: T) -> None:
+         """
+         Set a value in the cache.
+
+         Args:
+             key: The cache key to set.
+             value: The value to cache.
+         """
+         if not self.enabled:
+             return
+
+         with self.lock:
+             try:
+                 if len(self._cache) >= 1000 and key not in self._cache:
+                     # Basic LRU-like eviction: remove the first item found (not true LRU)
+                     try:
+                         oldest_key = next(iter(self._cache))
+                         self.invalidate(oldest_key)
+                         self.evictions += 1
+                     except StopIteration:
+                         # Cache was empty, which shouldn't happen if len >= max_size > 0
+                         pass  # Or log an error
+
+                 self._cache[key] = value
+                 self._timestamps[key] = time.time()
+                 self.sets += 1
+                 self.logger.debug(f"Cached item {key} in {self.name}")
+             except Exception as e:
+                 self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+
+     def invalidate(self, key: str) -> None:
+         """
+         Remove a specific key from the cache.
+
+         Args:
+             key: The cache key to invalidate.
+         """
+         with self.lock:
+             try:
+                 self._cache.pop(key, None)
+                 self._timestamps.pop(key, None)
+                 self.evictions += 1
+                 self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+             except Exception as e:
+                 self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+
+     def invalidate_all(self) -> None:
+         """Clear all cached items."""
+         with self.lock:
+             try:
+                 cache_size = len(self._cache)
+                 self._cache.clear()
+                 self._timestamps.clear()
+                 self.evictions += cache_size
+                 self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+             except Exception as e:
+                 self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+
+     def get_or_set(
+         self,
+         key: str,
+         data_loader: Callable[[], T]
+     ) -> Tuple[T, bool]:
+         """
+         Get a value from cache or set it using the data_loader if missing or expired.
+
+         Args:
+             key: The cache key.
+             data_loader: Function to load data if not in cache.
+
+         Returns:
+             Tuple of (data, was_cached) where was_cached indicates if from cache.
+         """
+         try:
+             cached_data = self.get(key)
+             if cached_data is not None:
+                 return cached_data, True
+
+             # Not in cache or expired, load the data
+             self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")
+
+             # Check if the data_loader is a coroutine function
+             if inspect.iscoroutinefunction(data_loader):
+                 self.logger.error(
+                     f"Error in get_or_set for {key} in {self.name}: "
+                     f"data_loader is a coroutine function which is not supported. "
+                     f"Use a regular function that returns a value, not a coroutine."
+                 )
+                 # Fall back to running the coroutine in the event loop if possible
+                 try:
+                     loop = asyncio.get_event_loop()
+                     fresh_data = loop.run_until_complete(data_loader())
+                 except Exception as coro_err:
+                     self.logger.error(f"Failed to execute coroutine data_loader: {str(coro_err)}")
+                     raise RuntimeError(f"Cannot use coroutine data_loader in cache: {str(coro_err)}")
+             else:
+                 # Regular function, just call it
+                 fresh_data = data_loader()
+
+             if fresh_data is not None:  # Only cache if we got valid data
+                 self.set(key, fresh_data)
+
+             if fresh_data is None:
+                 raise ValueError(f"Data loader returned None for key {key} in {self.name}")
+             return fresh_data, False
+         except Exception as e:
+             self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
+             self.logger.error(traceback.format_exc())
+
+             # Since this is a critical function, re-raise the exception
+             # after logging it, but add context about the cache
+             raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+     async def async_get_or_set(
+         self,
+         key: str,
+         async_data_loader: Callable[[], Awaitable[T]]
+     ) -> Tuple[T, bool]:
+         """
+         Async version of get_or_set for use with async data loaders.
+
+         Args:
+             key: The cache key.
+             async_data_loader: Async function to load data if not in cache.
+
+         Returns:
+             Tuple of (data, was_cached) where was_cached indicates if from cache.
+         """
+         if not self.enabled:
+             self.logger.debug(f"Cache {self.name} is disabled. Loading data directly for key {key}.")
+             try:
+                 fresh_data = await async_data_loader()
+                 if fresh_data is None:
+                     self.logger.error(f"Async data loader returned None for key {key} in disabled cache {self.name}")
+                     raise ValueError(f"Async data loader returned None for key {key} in {self.name} (cache disabled)")
+                 return fresh_data, False
+             except Exception as e:
+                 self.logger.error(f"Error in async_data_loader for key {key} in disabled cache {self.name}: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+                 raise RuntimeError(f"Cache error (disabled) in {self.name} for key {key}: {str(e)}") from e
+
+         try:
+             cached_data = self.get(key)  # self.get() is synchronous, assumed to be fast.
+             if cached_data is not None:
+                 self.logger.debug(f"Cache HIT for key {key} in {self.name} (async_get_or_set)")
+                 return cached_data, True
+
+             self.logger.debug(f"Cache MISS for key {key} in {self.name} (async_get_or_set). Loading data.")
+             # Not in cache or expired, load the data asynchronously
+             fresh_data = await async_data_loader()
+
+             if fresh_data is not None:  # Only cache if we got valid data
+                 self.set(key, fresh_data)
+             else:
+                 # Log an error if data_loader returns None, as it's unexpected.
+                 self.logger.error(f"Async data loader returned None for key {key} in {self.name}")
+                 raise ValueError(f"Async data loader returned None for key {key} in {self.name}")
+
+             return fresh_data, False
+         except Exception as e:
+             self.logger.error(f"Error in async_get_or_set for key {key} in {self.name}: {str(e)}")
+             self.logger.error(traceback.format_exc())
+             # Re-raise the exception after logging, adding context
+             raise RuntimeError(f"Cache error in {self.name} for key {key} (async): {str(e)}") from e
+
+     def get_stats(self) -> Dict[str, Any]:
+         """Get statistics about the current cache state."""
+         with self.lock:
+             try:
+                 # Clean up expired items before reporting size
+                 current_time = time.time()
+                 # Corrected: Use self._timestamps to find expired keys
+                 expired_keys = [k for k, ts in self._timestamps.items() if current_time - ts >= self.ttl]
+                 for k in expired_keys:
+                     self._cache.pop(k, None)
+                     self._timestamps.pop(k, None)
+                     self.evictions += 1
+
+                 return {
+                     "name": self.name,
+                     "enabled": self.enabled,
+                     "ttl_seconds": self.ttl,
+                     "item_count": len(self._cache),
+                     "first_20_keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
+                     "total_keys": len(self._cache.keys()),
+                     "memory_usage_estimate_megabytes": round(
+                         sum(len(str(k)) + self._estimate_size(v) for k, v in self._cache.items()) / (1024 * 1024),
+                         3
+                     ),
+                     "hits": self.hits,
+                     "misses": self.misses,
+                     "sets": self.sets,
+                     "evictions": self.evictions,
+                     "default_ttl": self.ttl
+                 }
+             except Exception as e:
+                 self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+                 self.logger.error(traceback.format_exc())
+                 return {
+                     "name": self.name,
+                     "enabled": self.enabled,
+                     "error": str(e),
+                     "ttl_seconds": self.ttl,
+                     "item_count": len(self._cache) if self._cache else 0
+                 }
+
+     def _estimate_size(self, obj: Any) -> int:
+         """Estimate the memory size of an object in bytes."""
+         try:
+             if obj is None:
+                 return 0
+             if isinstance(obj, (str, bytes, bytearray)):
+                 return len(obj)
+             if isinstance(obj, (int, float, bool)):
+                 return 8
+             if isinstance(obj, dict):
+                 return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
+             if isinstance(obj, (list, tuple, set)):
+                 return sum(self._estimate_size(i) for i in obj)
+             # For other objects, use a rough approximation
+             return len(str(obj))
+         except Exception:
+             # If we can't estimate, return a reasonable default
+             return 100
+
+     def __str__(self) -> str:
+         return f"SharedCache(name='{self.name}', size={len(self._cache)}, max_size={self.max_size}, hits={self.hits}, misses={self.misses})"
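To illustrate how the new SharedCache is typically wired into a service, here is a minimal usage sketch. It only exercises the public methods shown in the diff above (get_or_set, async_get_or_set, get_stats); the cache name, TTL value, and the loader functions are hypothetical placeholders, not part of the package.

    import asyncio
    from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

    # Hypothetical example: cache user status documents for 5 minutes.
    user_status_cache: SharedCache[dict] = SharedCache(name="user_status", ttl=300.0)

    def load_user_status() -> dict:
        # Placeholder loader; in practice this would hit Firestore or an API.
        return {"uid": "user-123", "tier": "premium"}

    # Synchronous path: loader is a plain function, never a coroutine
    # (the class explicitly warns against passing coroutine functions here).
    status, was_cached = user_status_cache.get_or_set("user-123", load_user_status)

    async def load_user_status_async() -> dict:
        return {"uid": "user-123", "tier": "premium"}

    async def main() -> None:
        # Async path: use the awaitable variant with an async loader.
        status, was_cached = await user_status_cache.async_get_or_set(
            "user-123", load_user_status_async
        )
        print(user_status_cache.get_stats()["hits"])

    asyncio.run(main())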
src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py
@@ -231,7 +231,7 @@ async def authorizeAPIRequest(
  ) -> Dict[str, Any]:
      """
      Authorize API request based on user status and OPA policies.
-     Enhanced with credit check information.
+     Enhanced with credit check information and proper exception handling.

      Args:
          request: The incoming request
@@ -241,6 +241,9 @@ async def authorizeAPIRequest(

      Returns:
          Authorization result containing decision details
+
+     Raises:
+         HTTPException: For authorization failures (403) or service errors (500)
      """
      opa_decision = None
      try:
@@ -251,9 +254,11 @@ async def authorizeAPIRequest(
          # Extract request context
          user_uid = request.state.user.get('uid')
          if not user_uid:
-             raise AuthorizationError(
-                 action="access API",
-                 additional_info={"path": str(request.url)}
+             # Log authorization failures at DEBUG level, not ERROR
+             logger.debug(f"Authorization denied for {request.method} {request.url.path}: No user UID found")
+             raise HTTPException(
+                 status_code=403,
+                 detail="Not authorized to access this resource"
              )

          # Determine if we need fresh status
@@ -313,7 +318,7 @@ async def authorizeAPIRequest(
                  timeout=5.0  # 5 seconds timeout
              )
              logger.debug(f"OPA Response Status: {response.status_code}")
-             logger.debug(f"OPA Response Body: {response.text}")
+             # logger.debug(f"OPA Response Body: {response.text}")

              if response.status_code != 200:
                  logger.error(f"OPA authorization failed: {response.text}")
@@ -326,11 +331,9 @@ async def authorizeAPIRequest(
              logger.debug(f"Parsed OPA response: {result}")

              # Handle unusual OPA response formats
-             # Try to find "decision" field as an alternative
              if "result" in result:
                  opa_decision = result["result"]
              else:
-                 # If we still don't have a result after all attempts, use default structure
                  logger.warning(f"OPA response missing 'result' field, using default")
                  raise HTTPException(
                      status_code=500,
@@ -340,31 +343,21 @@ async def authorizeAPIRequest(
              # Extract key fields from result with better default handling
              allow = opa_decision.get("allow", False)

-             # Handle authorization denial
+             # Handle authorization denial - log at DEBUG level, not ERROR
              if not allow:
-                 logger.error(f"Authorization denied: {result}")
-                 raise AuthorizationError(
-                     action=f"{request.method} {request.url.path}",
-                     additional_info={
-                         "user_uid": user_uid,
-                         "resource_fields": request_resource_fields,
-                         "opa_decision": opa_decision,  # Include the full OPA decision result
-                         # Include the raw result if it's different from the processed decision
-                         "raw_opa_response": result if result != {"result": opa_decision} else None
-                     }
+                 logger.debug(f"Authorization denied for {request.method} {request.url.path}: insufficient permissions")
+                 raise HTTPException(
+                     status_code=403,
+                     detail=f"Not authorized to {request.method} {request.url.path}"
                  )

          except httpx.RequestError as e:
+             # Only log actual system errors at ERROR level
              logger.error(f"Failed to connect to OPA: {str(e)}")
-             raise ServiceError(
-                 operation="API authorization",
-                 error=e,
-                 resource_type="authorization",
-                 additional_info={
-                     "opa_url": opa_url,
-                     "connection_error": str(e)
-                 }
-             ) from e
+             raise HTTPException(
+                 status_code=500,
+                 detail="Authorization service temporarily unavailable"
+             )

          # More descriptive metadata about the data freshness
          return {
@@ -374,21 +367,16 @@ async def authorizeAPIRequest(
              "opa_decision": opa_decision
          }

-     except (AuthorizationError, ResourceNotFoundError):
+     except HTTPException:
+         # Re-raise HTTPExceptions as-is (they're already properly formatted)
          raise
      except Exception as e:
-         logger.exception(f"Exception in authorizeAPIRequest: {e}")
-         raise ServiceError(
-             operation="API authorization",
-             error=e,
-             resource_type="authorization",
-             additional_info={
-                 "path": str(request.url),
-                 "method": request.method,
-                 "user_uid": request.state.user.get('uid'),
-                 "resource_fields": request_resource_fields
-             }
-         ) from e
+         # Only log unexpected errors at ERROR level
+         logger.error(f"Unexpected error during authorization for {request.method} {request.url.path}: {str(e)}")
+         raise HTTPException(
+             status_code=500,
+             detail="Internal authorization error"
+         )

  def _should_force_fresh_status(request: Request) -> bool:
      """
src/ipulse_shared_core_ftredge/models/__init__.py
@@ -5,7 +5,6 @@ from .user_profile_update import UserProfileUpdate
  from .user_auth import UserAuth
  from .organization_profile import OrganizationProfile
  from .base_api_response import BaseAPIResponse , CustomJSONResponse
-
-
+ from .base_data_model import BaseDataModel

src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py (new file)
@@ -0,0 +1,169 @@
+ """Cache-aware Firestore service base class."""
+ import time
+ from typing import TypeVar, Generic, Dict, Any, List, Optional
+ from google.cloud import firestore
+ from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+ from ipulse_shared_core_ftredge.services.base_service_exceptions import ResourceNotFoundError, ServiceError
+ from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+ from ipulse_shared_core_ftredge import BaseDataModel
+
+ T = TypeVar('T', bound=BaseDataModel)
+
+ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
+     """
+     Base service class that adds caching capabilities to BaseFirestoreService.
+     Supports both document-level and collection-level caching.
+     """
+
+     def __init__(
+         self,
+         db: firestore.Client,
+         collection_name: str,
+         resource_type: str,
+         logger,
+         document_cache: Optional[SharedCache] = None,
+         collection_cache: Optional[SharedCache] = None,
+         timeout: float = 30.0
+     ):
+         super().__init__(db, collection_name, resource_type, logger)
+         self.document_cache = document_cache
+         self.collection_cache = collection_cache
+         self.timeout = timeout
+
+         # Log cache configuration
+         if self.document_cache:
+             self.logger.info(f"Document cache enabled for {resource_type}: {self.document_cache.name}")
+         if self.collection_cache:
+             self.logger.info(f"Collection cache enabled for {resource_type}: {self.collection_cache.name}")
+
+     async def get_document(self, doc_id: str) -> Dict[str, Any]:
+         """
+         Get a document with caching support.
+
+         Args:
+             doc_id: Document ID to fetch
+
+         Returns:
+             Document data as dictionary
+
+         Raises:
+             ResourceNotFoundError: If document doesn't exist
+         """
+         # Check cache first
+         if self.document_cache:
+             start_time = time.time()
+             cached_doc = self.document_cache.get(doc_id)
+             cache_check_time = (time.time() - start_time) * 1000
+
+             if cached_doc is not None:
+                 self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
+                 return cached_doc
+             else:
+                 self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
+
+         # Fetch from Firestore
+         start_time = time.time()
+         doc_ref = self.db.collection(self.collection_name).document(doc_id)
+         doc = doc_ref.get(timeout=self.timeout)
+         firestore_time = (time.time() - start_time) * 1000
+
+         if not doc.exists:
+             self.logger.info(f"Document {doc_id} not found in Firestore after {firestore_time:.2f}ms")
+             raise ResourceNotFoundError(self.resource_type, doc_id)
+
+         doc_data = doc.to_dict()
+         self.logger.debug(f"Fetched document {doc_id} from Firestore in {firestore_time:.2f}ms")
+
+         # Cache the result
+         if self.document_cache and doc_data:
+             self.document_cache.set(doc_id, doc_data)
+             self.logger.debug(f"Cached document {doc_id}")
+
+         return doc_data
+
+     async def get_all_documents(self, cache_key: Optional[str] = None) -> List[Dict[str, Any]]:
+         """
+         Retrieves all documents from the collection.
+         Uses collection_cache if cache_key is provided and cache is available.
+         Also populates document_cache for each retrieved document.
+         """
+         if cache_key and self.collection_cache:
+             cached_collection_data = self.collection_cache.get(cache_key)
+             if cached_collection_data is not None:
+                 self.logger.debug(f"Cache HIT for collection key '{cache_key}' in {self.collection_cache.name}")
+                 # Ensure individual documents are also in document_cache if possible
+                 if self.document_cache:
+                     for doc_data in cached_collection_data:
+                         if "id" in doc_data and not self.document_cache.get(doc_data["id"]):
+                             self._cache_document_data(doc_data["id"], doc_data)
+                 return cached_collection_data
+             else:
+                 self.logger.debug(f"Cache MISS for collection key '{cache_key}' in {self.collection_cache.name} - checking Firestore")
+
+         self.logger.info(f"Fetching all documents for {self.resource_type} from Firestore.")
+         start_time = time.time()
+
+         try:
+             docs_stream = self.db.collection(self.collection_name).stream(timeout=self.timeout)
+             docs_data_list = []
+             for doc in docs_stream:
+                 doc_data = doc.to_dict()
+                 if doc_data is not None:
+                     doc_data["id"] = doc.id  # Ensure 'id' field is present
+                     docs_data_list.append(doc_data)
+
+             fetch_time = (time.time() - start_time) * 1000
+             self.logger.debug(f"Fetched {len(docs_data_list)} documents for {self.resource_type} from Firestore in {fetch_time:.2f}ms")
+
+             # Cache the entire collection if cache_key and collection_cache are available
+             if cache_key and self.collection_cache:
+                 self.collection_cache.set(cache_key, docs_data_list)
+                 self.logger.debug(f"Cached collection with key '{cache_key}' in {self.collection_cache.name}")
+
+             # Populate individual document cache
+             if self.document_cache:
+                 self.logger.debug(f"Populating document cache ({self.document_cache.name}) with {len(docs_data_list)} items for {self.resource_type}.")
+                 for doc_data in docs_data_list:
+                     # _cache_document_data expects 'id' to be in doc_data for keying
+                     self._cache_document_data(doc_data["id"], doc_data)
+
+             return docs_data_list
+
+         except Exception as e:
+             self.logger.error(f"Error fetching all documents for {self.resource_type}: {str(e)}", exc_info=True)
+             raise ServiceError(operation=f"fetching all {self.resource_type}s", error=e, resource_type=self.resource_type) from e
+
+     def _cache_document_data(self, doc_id: str, data: Dict[str, Any]):
+         """Helper to cache document data if document_cache is available."""
+         if self.document_cache:
+             self.document_cache.set(doc_id, data)
+             self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
+
+     async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+         """Create document and invalidate cache."""
+         result = await super().create_document(doc_id, data, creator_uid)
+         self._invalidate_document_cache(doc_id)
+         return result
+
+     async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
+         """Update document and invalidate cache."""
+         result = await super().update_document(doc_id, update_data, updater_uid)
+         self._invalidate_document_cache(doc_id)
+         return result
+
+     async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
+         """Delete document and invalidate cache."""
+         await super().delete_document(doc_id)
+         self._invalidate_document_cache(doc_id)
+
+     def _invalidate_document_cache(self, doc_id: str) -> None:
+         """Invalidate document cache for a specific document."""
+         if self.document_cache:
+             self.document_cache.invalidate(doc_id)
+             self.logger.debug(f"Invalidated cache for document {doc_id}")
+
+     def _invalidate_collection_cache(self, cache_key: str) -> None:
+         """Invalidate collection cache for a specific cache key."""
+         if self.collection_cache:
+             self.collection_cache.invalidate(cache_key)
+             self.logger.debug(f"Invalidated collection cache for key {cache_key}")
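A minimal sketch of how a concrete service might combine the two new modules, wiring SharedCache instances into CacheAwareFirestoreService. The UserStatusService name, collection name, cache names, and TTLs below are illustrative assumptions, not part of the package; only the constructor parameters shown in the diff above are relied on:

    import logging
    from google.cloud import firestore
    from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
    from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService

    logger = logging.getLogger("user_status_service")

    # Hypothetical concrete service; collection and resource names are examples.
    class UserStatusService(CacheAwareFirestoreService):
        def __init__(self, db: firestore.Client):
            super().__init__(
                db=db,
                collection_name="user_statuses",
                resource_type="user_status",
                logger=logger,
                document_cache=SharedCache(name="user_status_docs", ttl=300.0, logger=logger),
                collection_cache=SharedCache(name="user_status_collection", ttl=60.0, logger=logger),
            )

    # Usage (inside an async context):
    #     service = UserStatusService(firestore.Client())
    #     status = await service.get_document("user-123")         # cached on repeat calls
    #     everyone = await service.get_all_documents("all_users")  # collection-level cache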
src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ipulse_shared_core_ftredge
- Version: 13.0.1
+ Version: 14.0.1
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
  Author: Russlan Ramdowar
ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py (removed)
@@ -1,249 +0,0 @@
- """Module for shared caching functionality that can be used across microservices."""
- import os
- import time
- import logging
- import traceback
- import inspect
- import asyncio
- from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
-
- T = TypeVar('T')
-
- class SharedCache(Generic[T]):
-     """
-     Generic shared cache implementation that can be used across services.
-
-     Attributes:
-         name: The name of the cache for logging and identification.
-         ttl: Time-to-live in seconds for cached items.
-         enabled: Whether the cache is enabled.
-         logger: Logger for cache operations.
-         _cache: Dictionary holding cached values.
-         _timestamps: Dictionary holding timestamps for each cached item.
-     """
-
-     def __init__(
-         self,
-         name: str,
-         ttl: float,
-         enabled: bool = True,
-         logger: Optional[logging.Logger] = None
-     ):
-         """Initialize the cache with name, TTL and enabled state."""
-         self.name = name
-         self.ttl = ttl
-         self.enabled = enabled
-         self.logger = logger or logging.getLogger(__name__)
-         self._cache: Dict[str, T] = {}
-         self._timestamps: Dict[str, float] = {}
-
-         self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
-
-     def get(self, key: str) -> Optional[T]:
-         """
-         Get a value from the cache if it exists and hasn't expired.
-
-         Args:
-             key: The cache key to retrieve.
-
-         Returns:
-             The cached value if found and valid, None otherwise.
-         """
-         if not self.enabled:
-             return None
-
-         try:
-             if key in self._cache:
-                 timestamp = self._timestamps.get(key, 0)
-                 if time.time() - timestamp < self.ttl:
-                     self.logger.debug(f"Cache hit for {key} in {self.name}")
-                     return self._cache[key]
-                 else:
-                     # Expired item, remove it
-                     self.invalidate(key)
-                     self.logger.debug(f"Cache expired for {key} in {self.name}")
-         except Exception as e:
-             self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-
-         return None
-
-     def set(self, key: str, value: T) -> None:
-         """
-         Set a value in the cache.
-
-         Args:
-             key: The cache key to set.
-             value: The value to cache.
-         """
-         if not self.enabled:
-             return
-
-         try:
-             self._cache[key] = value
-             self._timestamps[key] = time.time()
-             self.logger.debug(f"Cached item {key} in {self.name}")
-         except Exception as e:
-             self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-
-     def invalidate(self, key: str) -> None:
-         """
-         Remove a specific key from the cache.
-
-         Args:
-             key: The cache key to invalidate.
-         """
-         try:
-             self._cache.pop(key, None)
-             self._timestamps.pop(key, None)
-             self.logger.debug(f"Invalidated cache for {key} in {self.name}")
-         except Exception as e:
-             self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-
-     def invalidate_all(self) -> None:
-         """Clear all cached items."""
-         try:
-             cache_size = len(self._cache)
-             self._cache.clear()
-             self._timestamps.clear()
-             self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
-         except Exception as e:
-             self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-
-     def get_or_set(
-         self,
-         key: str,
-         data_loader: Callable[[], T]
-     ) -> Tuple[T, bool]:
-         """
-         Get a value from cache or set it using the data_loader if missing or expired.
-
-         Args:
-             key: The cache key.
-             data_loader: Function to load data if not in cache.
-
-         Returns:
-             Tuple of (data, was_cached) where was_cached indicates if from cache.
-         """
-         try:
-             cached_data = self.get(key)
-             if cached_data is not None:
-                 return cached_data, True
-
-             # Not in cache or expired, load the data
-             self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")
-
-             # Check if the data_loader is a coroutine function
-             if inspect.iscoroutinefunction(data_loader):
-                 self.logger.error(
-                     f"Error in get_or_set for {key} in {self.name}: "
-                     f"data_loader is a coroutine function which is not supported. "
-                     f"Use a regular function that returns a value, not a coroutine."
-                 )
-                 # Fall back to running the coroutine in the event loop if possible
-                 try:
-                     loop = asyncio.get_event_loop()
-                     fresh_data = loop.run_until_complete(data_loader())
-                 except Exception as coro_err:
-                     self.logger.error(f"Failed to execute coroutine data_loader: {str(coro_err)}")
-                     raise RuntimeError(f"Cannot use coroutine data_loader in cache: {str(coro_err)}")
-             else:
-                 # Regular function, just call it
-                 fresh_data = data_loader()
-
-             if fresh_data is not None:  # Only cache if we got valid data
-                 self.set(key, fresh_data)
-
-             if fresh_data is None:
-                 raise ValueError(f"Data loader returned None for key {key} in {self.name}")
-             return fresh_data, False
-         except Exception as e:
-             self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-
-             # Since this is a critical function, re-raise the exception
-             # after logging it, but add context about the cache
-             raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
-
-     async def async_get_or_set(
-         self,
-         key: str,
-         async_data_loader: Callable[[], Awaitable[T]]
-     ) -> Tuple[T, bool]:
-         """
-         Async version of get_or_set for use with async data loaders.
-
-         Args:
-             key: The cache key.
-             async_data_loader: Async function to load data if not in cache.
-
-         Returns:
-             Tuple of (data, was_cached) where was_cached indicates if from cache.
-         """
-         try:
-             cached_data = self.get(key)
-             if cached_data is not None:
-                 return cached_data, True
-
-             # Not in cache or expired, load the data asynchronously
-             self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")
-
-             # Execute the async data loader
-             fresh_data = await async_data_loader()
-
-             if fresh_data is not None:  # Only cache if we got valid data
-                 self.set(key, fresh_data)
-
-             return fresh_data, False
-         except Exception as e:
-             self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
-             self.logger.error(traceback.format_exc())
-             raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
-
-     def get_stats(self) -> Dict[str, Any]:
-         """Get statistics about the current cache state."""
-         try:
-             return {
-                 "name": self.name,
-                 "enabled": self.enabled,
-                 "ttl_seconds": self.ttl,
-                 "item_count": len(self._cache),
-                 "keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
-                 "has_more_keys": len(self._cache.keys()) > 20,
-                 "memory_usage_estimate_bytes": sum(
-                     len(str(k)) + self._estimate_size(v)
-                     for k, v in self._cache.items()
-                 )
-             }
-         except Exception as e:
-             self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
-             self.logger.error(traceback.format_exc())
-             return {
-                 "name": self.name,
-                 "enabled": self.enabled,
-                 "error": str(e),
-                 "ttl_seconds": self.ttl,
-                 "item_count": len(self._cache) if self._cache else 0
-             }
-
-     def _estimate_size(self, obj: Any) -> int:
-         """Estimate the memory size of an object in bytes."""
-         try:
-             if obj is None:
-                 return 0
-             if isinstance(obj, (str, bytes, bytearray)):
-                 return len(obj)
-             if isinstance(obj, (int, float, bool)):
-                 return 8
-             if isinstance(obj, dict):
-                 return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
-             if isinstance(obj, (list, tuple, set)):
-                 return sum(self._estimate_size(i) for i in obj)
-             # For other objects, use a rough approximation
-             return len(str(obj))
-         except Exception:
-             # If we can't estimate, return a reasonable default
-             return 100
ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py (removed)
@@ -1,171 +0,0 @@
- """Base service with built-in cache awareness for Firestore operations."""
-
- from typing import Dict, Any, List, Optional, TypeVar, Generic
- from google.cloud import firestore
- from pydantic import BaseModel
- import logging
- from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
- from ipulse_shared_core_ftredge.services import BaseFirestoreService
-
- T = TypeVar('T', bound=BaseModel)
-
- class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
-     """
-     Base service class that integrates caching with Firestore operations.
-     This allows services to inherit cache-aware CRUD methods without reimplementing them.
-     """
-
-     def __init__(
-         self,
-         db: firestore.Client,
-         collection_name: str,
-         resource_type: str,
-         logger: logging.Logger,
-         document_cache: Optional[SharedCache] = None,
-         collection_cache: Optional[SharedCache] = None,
-         timeout: float = 15.0
-     ):
-         """
-         Initialize the service with optional cache instances.
-
-         Args:
-             db: Firestore client
-             collection_name: Firestore collection name
-             resource_type: Resource type for error messages
-             logger: Logger instance
-             document_cache: Cache for individual documents (optional)
-             collection_cache: Cache for collection-level queries (optional)
-             timeout: Firestore operation timeout in seconds
-         """
-         super().__init__(
-             db=db,
-             collection_name=collection_name,
-             resource_type=resource_type,
-             logger=logger,
-             timeout=timeout
-         )
-         self.document_cache = document_cache
-         self.collection_cache = collection_cache
-
-         # Log cache configuration
-         if document_cache:
-             self.logger.info(f"Document cache enabled for {resource_type}: {document_cache.name}")
-         if collection_cache:
-             self.logger.info(f"Collection cache enabled for {resource_type}: {collection_cache.name}")
-
-     async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
-         """Create a document and invalidate relevant caches."""
-         result = await super().create_document(doc_id, data, creator_uid)
-
-         # Invalidate document cache if it exists
-         self._invalidate_document_cache(doc_id)
-
-         # Invalidate collection cache if it exists
-         self._invalidate_collection_cache()
-
-         return result
-
-     async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
-         """Update a document and invalidate relevant caches."""
-         result = await super().update_document(doc_id, update_data, updater_uid)
-
-         # Invalidate document cache if it exists
-         self._invalidate_document_cache(doc_id)
-
-         # Invalidate collection cache if it exists
-         self._invalidate_collection_cache()
-
-         return result
-
-     async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
-         """Delete a document and invalidate relevant caches."""
-         # Invalidate caches before deletion to handle potential failures
-         self._invalidate_document_cache(doc_id)
-         self._invalidate_collection_cache()
-
-         # Delete the document
-         await super().delete_document(doc_id)
-
-     async def get_document(self, doc_id: str) -> Dict[str, Any]:
-         """
-         Get a document by ID with caching if available.
-
-         Args:
-             doc_id: The document ID to fetch
-
-         Returns:
-             The document data
-         """
-         # Check document cache first if available
-         if self.document_cache:
-             cached_data = self.document_cache.get(doc_id)
-             if cached_data is not None:
-                 self.logger.debug(f"Cache hit for document {doc_id}")
-                 return cached_data
-
-         # Cache miss or no cache configured, fetch from Firestore
-         doc_data = await super().get_document(doc_id)
-
-         # Store in cache if available
-         if self.document_cache and doc_data:
-             # Make sure ID is included in the cached data
-             if 'id' not in doc_data:
-                 doc_data['id'] = doc_id
-             self.document_cache.set(doc_id, doc_data)
-             self.logger.debug(f"Cached document {doc_id}")
-
-         return doc_data
-
-     async def get_all_documents(self, cache_key: str = "all_documents") -> List[Dict[str, Any]]:
-         """
-         Get all documents in the collection with caching.
-
-         Args:
-             cache_key: The key to use for caching the full collection
-
-         Returns:
-             List of all documents in the collection
-         """
-         # Check collection cache first if available
-         if self.collection_cache:
-             cached_data = self.collection_cache.get(cache_key)
-             if cached_data is not None:
-                 self.logger.debug(f"Cache hit for collection query: {cache_key}")
-                 return cached_data
-
-         # Cache miss or no cache configured, fetch from Firestore
-         query = self.db.collection(self.collection_name).stream(timeout=self.timeout)
-         documents = []
-
-         for doc in query:
-             doc_data = doc.to_dict()
-
-             # Make sure ID is included in the data
-             if 'id' not in doc_data:
-                 doc_data['id'] = doc.id
-
-             # Also update the document cache if configured
-             if self.document_cache:
-                 self.document_cache.set(doc.id, doc_data)
-
-             documents.append(doc_data)
-
-         # Store in collection cache if available
-         if self.collection_cache:
-             self.collection_cache.set(cache_key, documents)
-             self.logger.debug(f"Cached collection query result: {cache_key} with {len(documents)} documents")
-
-         return documents
-
-     def _invalidate_document_cache(self, doc_id: str) -> None:
-         """Invalidate the document cache for a specific document ID."""
-         if self.document_cache:
-             self.document_cache.invalidate(doc_id)
-             self.logger.debug(f"Invalidated document cache for {doc_id}")
-
-     def _invalidate_collection_cache(self, cache_key: str = "all_documents") -> None:
-         """Invalidate the collection cache."""
-         if self.collection_cache:
-             # For single key collection cache
-             self.collection_cache.invalidate(cache_key)
-             self.logger.debug(f"Invalidated collection cache: {cache_key}")