ipulse-shared-core-ftredge 12.0.1__tar.gz → 13.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.

Files changed (41)
  1. {ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-13.0.1}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/setup.py +1 -1
  3. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/__init__.py +4 -0
  4. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py +249 -0
  5. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/__init__.py +14 -0
  6. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +171 -0
  7. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/credit_service.py +1 -1
  8. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  9. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +6 -1
  10. ipulse_shared_core_ftredge-13.0.1/tests/test_cache_aware_service.py +233 -0
  11. ipulse_shared_core_ftredge-13.0.1/tests/test_shared_cache.py +146 -0
  12. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/services/__init__.py +0 -6
  13. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/LICENCE +0 -0
  14. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/README.md +0 -0
  15. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/pyproject.toml +0 -0
  16. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/setup.cfg +0 -0
  17. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/__init__.py +0 -0
  18. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  19. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  20. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  21. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +0 -0
  22. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  23. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  24. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +0 -0
  25. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -0
  26. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
  27. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
  28. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  29. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  30. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  31. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  32. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +0 -0
  33. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
  34. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
  35. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
  36. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
  37. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  38. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
  39. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  40. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  41. {ipulse_shared_core_ftredge-12.0.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 12.0.1
3
+ Version: 13.0.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
 
4
4
  setup(
5
5
  name='ipulse_shared_core_ftredge',
6
- version='12.0.1',
6
+ version='13.0.1',
7
7
  package_dir={'': 'src'}, # Specify the source directory
8
8
  packages=find_packages(where='src'), # Look for packages in 'src'
9
9
  install_requires=[
@@ -0,0 +1,4 @@
"""Cache utilities for shared core."""
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

# Public API of the cache sub-package.
__all__ = ["SharedCache"]
@@ -0,0 +1,249 @@
"""Module for shared caching functionality that can be used across microservices."""
import os
import time
import logging
import traceback
import inspect
import asyncio
from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable

T = TypeVar('T')

class SharedCache(Generic[T]):
    """
    Generic in-process TTL cache that can be shared across services.

    Entries expire lazily: an item is only evicted when a read finds it
    older than ``ttl`` seconds. All operations swallow and log unexpected
    errors except the ``get_or_set`` variants, which re-raise with context.

    Attributes:
        name: The name of the cache for logging and identification.
        ttl: Time-to-live in seconds for cached items.
        enabled: Whether the cache is enabled. When False, get/set are no-ops.
        logger: Logger for cache operations.
        _cache: Dictionary holding cached values.
        _timestamps: Dictionary holding insertion timestamps for each cached item.
    """

    def __init__(
        self,
        name: str,
        ttl: float,
        enabled: bool = True,
        logger: Optional[logging.Logger] = None
    ):
        """Initialize the cache with name, TTL and enabled state."""
        self.name = name
        self.ttl = ttl
        self.enabled = enabled
        self.logger = logger or logging.getLogger(__name__)
        self._cache: Dict[str, T] = {}
        self._timestamps: Dict[str, float] = {}

        self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")

    def get(self, key: str) -> Optional[T]:
        """
        Get a value from the cache if it exists and hasn't expired.

        Args:
            key: The cache key to retrieve.

        Returns:
            The cached value if found and valid, None otherwise.
            NOTE: a stored value of None is indistinguishable from a miss.
        """
        if not self.enabled:
            return None

        try:
            if key in self._cache:
                timestamp = self._timestamps.get(key, 0)
                if time.time() - timestamp < self.ttl:
                    self.logger.debug(f"Cache hit for {key} in {self.name}")
                    return self._cache[key]
                else:
                    # Expired item, remove it lazily on read.
                    self.invalidate(key)
                    self.logger.debug(f"Cache expired for {key} in {self.name}")
        except Exception as e:
            self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
            self.logger.error(traceback.format_exc())

        return None

    def set(self, key: str, value: T) -> None:
        """
        Set a value in the cache (no-op when the cache is disabled).

        Args:
            key: The cache key to set.
            value: The value to cache.
        """
        if not self.enabled:
            return

        try:
            self._cache[key] = value
            self._timestamps[key] = time.time()
            self.logger.debug(f"Cached item {key} in {self.name}")
        except Exception as e:
            self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
            self.logger.error(traceback.format_exc())

    def invalidate(self, key: str) -> None:
        """
        Remove a specific key from the cache. Missing keys are ignored.

        Args:
            key: The cache key to invalidate.
        """
        try:
            self._cache.pop(key, None)
            self._timestamps.pop(key, None)
            self.logger.debug(f"Invalidated cache for {key} in {self.name}")
        except Exception as e:
            self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
            self.logger.error(traceback.format_exc())

    def invalidate_all(self) -> None:
        """Clear all cached items."""
        try:
            cache_size = len(self._cache)
            self._cache.clear()
            self._timestamps.clear()
            self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
        except Exception as e:
            self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
            self.logger.error(traceback.format_exc())

    def get_or_set(
        self,
        key: str,
        data_loader: Callable[[], T]
    ) -> Tuple[T, bool]:
        """
        Get a value from cache or set it using the data_loader if missing or expired.

        Args:
            key: The cache key.
            data_loader: Synchronous function to load data if not in cache.
                Passing a coroutine function is unsupported; as a best-effort
                fallback it is executed with asyncio.run() when no event loop
                is running, otherwise a RuntimeError is raised.

        Returns:
            Tuple of (data, was_cached) where was_cached indicates if from cache.

        Raises:
            RuntimeError: If loading fails or the loader returns None
                (wrapped with cache context; the original error is chained).
        """
        try:
            cached_data = self.get(key)
            if cached_data is not None:
                return cached_data, True

            # Not in cache or expired, load the data
            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")

            if inspect.iscoroutinefunction(data_loader):
                self.logger.error(
                    f"Error in get_or_set for {key} in {self.name}: "
                    f"data_loader is a coroutine function which is not supported. "
                    f"Use a regular function that returns a value, not a coroutine."
                )
                # Best-effort fallback: only drive the coroutine ourselves when
                # no event loop is running (calling run_until_complete on a
                # running loop would raise anyway, and get_event_loop() is
                # deprecated for this purpose).
                try:
                    asyncio.get_running_loop()
                except RuntimeError:
                    fresh_data = asyncio.run(data_loader())
                else:
                    raise RuntimeError(
                        "Cannot use coroutine data_loader in cache: an event loop "
                        "is already running; use async_get_or_set instead"
                    )
            else:
                # Regular function, just call it
                fresh_data = data_loader()

            if fresh_data is None:
                raise ValueError(f"Data loader returned None for key {key} in {self.name}")

            # Only cache valid (non-None) data.
            self.set(key, fresh_data)
            return fresh_data, False
        except Exception as e:
            self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
            self.logger.error(traceback.format_exc())

            # Since this is a critical function, re-raise the exception
            # after logging it, but add context about the cache
            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e

    async def async_get_or_set(
        self,
        key: str,
        async_data_loader: Callable[[], Awaitable[T]]
    ) -> Tuple[T, bool]:
        """
        Async version of get_or_set for use with async data loaders.

        Unlike get_or_set, a None result is returned as (None, False)
        without being cached and without raising.

        Args:
            key: The cache key.
            async_data_loader: Async function to load data if not in cache.

        Returns:
            Tuple of (data, was_cached) where was_cached indicates if from cache.

        Raises:
            RuntimeError: If the loader raises (wrapped with cache context).
        """
        try:
            cached_data = self.get(key)
            if cached_data is not None:
                return cached_data, True

            # Not in cache or expired, load the data asynchronously
            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")

            fresh_data = await async_data_loader()

            if fresh_data is not None:  # Only cache if we got valid data
                self.set(key, fresh_data)

            return fresh_data, False
        except Exception as e:
            self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
            self.logger.error(traceback.format_exc())
            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e

    def get_stats(self) -> Dict[str, Any]:
        """Get statistics about the current cache state (for diagnostics)."""
        try:
            return {
                "name": self.name,
                "enabled": self.enabled,
                "ttl_seconds": self.ttl,
                "item_count": len(self._cache),
                "keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
                "has_more_keys": len(self._cache) > 20,
                "memory_usage_estimate_bytes": sum(
                    len(str(k)) + self._estimate_size(v)
                    for k, v in self._cache.items()
                )
            }
        except Exception as e:
            self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
            self.logger.error(traceback.format_exc())
            return {
                "name": self.name,
                "enabled": self.enabled,
                "error": str(e),
                "ttl_seconds": self.ttl,
                "item_count": len(self._cache) if self._cache else 0
            }

    def _estimate_size(self, obj: Any) -> int:
        """Roughly estimate the memory size of an object in bytes (heuristic, recursive)."""
        try:
            if obj is None:
                return 0
            if isinstance(obj, (str, bytes, bytearray)):
                return len(obj)
            if isinstance(obj, (int, float, bool)):
                return 8
            if isinstance(obj, dict):
                return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
            if isinstance(obj, (list, tuple, set)):
                return sum(self._estimate_size(i) for i in obj)
            # For other objects, use a rough approximation
            return len(str(obj))
        except Exception:
            # If we can't estimate, return a reasonable default
            return 100
@@ -0,0 +1,14 @@
"""Service utilities for shared core."""
# Import existing components
from ipulse_shared_core_ftredge.services.base_service_exceptions import (
    BaseServiceException, ServiceError, ValidationError, ResourceNotFoundError, AuthorizationError
)
from ipulse_shared_core_ftredge.services.servicemon import Servicemon
from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService

# Servicemon is imported above and part of the public API, so it belongs in
# __all__ as well (it was previously imported but not exported).
__all__ = [
    'AuthorizationError', 'BaseServiceException', 'ServiceError', 'ValidationError',
    'ResourceNotFoundError', 'BaseFirestoreService',
    'CacheAwareFirestoreService', 'Servicemon'
]
@@ -0,0 +1,171 @@
"""Base service with built-in cache awareness for Firestore operations."""

import logging
from typing import Dict, Any, List, Optional, TypeVar, Generic
from google.cloud import firestore
from pydantic import BaseModel
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
# Import the base class from its defining module rather than from the package
# __init__: the services __init__ imports *this* module, so importing back
# through it creates a circular import that only works by import-order luck.
from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService

T = TypeVar('T', bound=BaseModel)

class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
    """
    Base service class that integrates caching with Firestore operations.

    Reads consult the optional document/collection caches first; writes
    invalidate them. This allows services to inherit cache-aware CRUD
    methods without reimplementing them.
    """

    def __init__(
        self,
        db: firestore.Client,
        collection_name: str,
        resource_type: str,
        logger: logging.Logger,
        document_cache: Optional[SharedCache] = None,
        collection_cache: Optional[SharedCache] = None,
        timeout: float = 15.0
    ):
        """
        Initialize the service with optional cache instances.

        Args:
            db: Firestore client
            collection_name: Firestore collection name
            resource_type: Resource type for error messages
            logger: Logger instance
            document_cache: Cache for individual documents (optional)
            collection_cache: Cache for collection-level queries (optional)
            timeout: Firestore operation timeout in seconds
        """
        super().__init__(
            db=db,
            collection_name=collection_name,
            resource_type=resource_type,
            logger=logger,
            timeout=timeout
        )
        self.document_cache = document_cache
        self.collection_cache = collection_cache

        # Log cache configuration so deployments can verify caching is active.
        if document_cache:
            self.logger.info(f"Document cache enabled for {resource_type}: {document_cache.name}")
        if collection_cache:
            self.logger.info(f"Collection cache enabled for {resource_type}: {collection_cache.name}")

    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
        """Create a document and invalidate relevant caches."""
        result = await super().create_document(doc_id, data, creator_uid)

        # A new document makes both the per-document entry and any cached
        # collection listing stale.
        self._invalidate_document_cache(doc_id)
        self._invalidate_collection_cache()

        return result

    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
        """Update a document and invalidate relevant caches."""
        result = await super().update_document(doc_id, update_data, updater_uid)

        self._invalidate_document_cache(doc_id)
        self._invalidate_collection_cache()

        return result

    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
        """Delete a document and invalidate relevant caches.

        Note: deleter_uid is accepted for interface symmetry but is not
        forwarded to the base implementation.
        """
        # Invalidate caches before deletion so that even if the delete fails
        # mid-way, no stale entry survives.
        self._invalidate_document_cache(doc_id)
        self._invalidate_collection_cache()

        await super().delete_document(doc_id)

    async def get_document(self, doc_id: str) -> Dict[str, Any]:
        """
        Get a document by ID with caching if available.

        Args:
            doc_id: The document ID to fetch

        Returns:
            The document data
        """
        # Check document cache first if available
        if self.document_cache:
            cached_data = self.document_cache.get(doc_id)
            if cached_data is not None:
                self.logger.debug(f"Cache hit for document {doc_id}")
                return cached_data

        # Cache miss or no cache configured, fetch from Firestore
        doc_data = await super().get_document(doc_id)

        # Store in cache if available
        if self.document_cache and doc_data:
            # Make sure ID is included in the cached data
            if 'id' not in doc_data:
                doc_data['id'] = doc_id
            self.document_cache.set(doc_id, doc_data)
            self.logger.debug(f"Cached document {doc_id}")

        return doc_data

    async def get_all_documents(self, cache_key: str = "all_documents") -> List[Dict[str, Any]]:
        """
        Get all documents in the collection with caching.

        Args:
            cache_key: The key to use for caching the full collection

        Returns:
            List of all documents in the collection
        """
        # Check collection cache first if available
        if self.collection_cache:
            cached_data = self.collection_cache.get(cache_key)
            if cached_data is not None:
                self.logger.debug(f"Cache hit for collection query: {cache_key}")
                return cached_data

        # Cache miss or no cache configured, fetch from Firestore
        query = self.db.collection(self.collection_name).stream(timeout=self.timeout)
        documents = []

        for doc in query:
            doc_data = doc.to_dict()

            # Make sure ID is included in the data
            if 'id' not in doc_data:
                doc_data['id'] = doc.id

            # Also warm the per-document cache while we have the data.
            if self.document_cache:
                self.document_cache.set(doc.id, doc_data)

            documents.append(doc_data)

        # Store in collection cache if available
        if self.collection_cache:
            self.collection_cache.set(cache_key, documents)
            self.logger.debug(f"Cached collection query result: {cache_key} with {len(documents)} documents")

        return documents

    def _invalidate_document_cache(self, doc_id: str) -> None:
        """Invalidate the document cache for a specific document ID (no-op if no cache)."""
        if self.document_cache:
            self.document_cache.invalidate(doc_id)
            self.logger.debug(f"Invalidated document cache for {doc_id}")

    def _invalidate_collection_cache(self, cache_key: str = "all_documents") -> None:
        """Invalidate the collection cache for the given key (no-op if no cache)."""
        if self.collection_cache:
            # For single key collection cache
            self.collection_cache.invalidate(cache_key)
            self.logger.debug(f"Invalidated collection cache: {cache_key}")
@@ -135,7 +135,7 @@ class CreditService:
135
135
  error=e,
136
136
  resource_type="user_credits",
137
137
  resource_id=user_uid,
138
- additional_info={"credits_to_charge": credits_to_charge}
138
+ additional_info={"credits_to_charge": required_credits_for_resource}
139
139
  ) from e
140
140
 
141
141
  async def charge_credits(self, user_uid: str, credits_to_charge: Optional[float], operation_details: str) -> bool:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 12.0.1
3
+ Version: 13.0.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -8,6 +8,8 @@ src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
8
8
  src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
9
9
  src/ipulse_shared_core_ftredge.egg-info/requires.txt
10
10
  src/ipulse_shared_core_ftredge.egg-info/top_level.txt
11
+ src/ipulse_shared_core_ftredge/cache/__init__.py
12
+ src/ipulse_shared_core_ftredge/cache/shared_cache.py
11
13
  src/ipulse_shared_core_ftredge/dependencies/__init__.py
12
14
  src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py
13
15
  src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py
@@ -25,9 +27,12 @@ src/ipulse_shared_core_ftredge/models/user_status.py
25
27
  src/ipulse_shared_core_ftredge/services/__init__.py
26
28
  src/ipulse_shared_core_ftredge/services/base_firestore_service.py
27
29
  src/ipulse_shared_core_ftredge/services/base_service_exceptions.py
30
+ src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py
28
31
  src/ipulse_shared_core_ftredge/services/credit_service.py
29
32
  src/ipulse_shared_core_ftredge/services/fastapiservicemon.py
30
33
  src/ipulse_shared_core_ftredge/services/servicemon.py
31
34
  src/ipulse_shared_core_ftredge/utils/__init__.py
32
35
  src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py
33
- src/ipulse_shared_core_ftredge/utils/json_encoder.py
36
+ src/ipulse_shared_core_ftredge/utils/json_encoder.py
37
+ tests/test_cache_aware_service.py
38
+ tests/test_shared_cache.py
@@ -0,0 +1,233 @@
"""Tests for the CacheAwareFirestoreService."""

import unittest
import logging
from unittest.mock import MagicMock, patch, AsyncMock
from pydantic import BaseModel
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
from ipulse_shared_core_ftredge.services import CacheAwareFirestoreService


# Configure logging for tests
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


# Create a simple model for testing
class TestModel(BaseModel):
    id: str
    name: str
    description: str


# NOTE: the async test methods below were previously defined on a plain
# unittest.TestCase, which never awaits coroutine test methods — they all
# passed vacuously. IsolatedAsyncioTestCase actually runs them.
class TestCacheAwareFirestoreService(unittest.IsolatedAsyncioTestCase):
    """Test cases for CacheAwareFirestoreService."""

    def setUp(self):
        """Set up test fixtures."""
        # Create mock Firestore client
        self.db_mock = MagicMock()

        # Create mock caches
        self.document_cache = SharedCache[dict](
            name="TestDocCache",
            ttl=1.0,
            enabled=True,
            logger=logger
        )

        self.collection_cache = SharedCache[list](
            name="TestCollectionCache",
            ttl=1.0,
            enabled=True,
            logger=logger
        )

        # Create service instance with mocks
        self.service = CacheAwareFirestoreService[TestModel](
            db=self.db_mock,
            collection_name="test_collection",
            resource_type="test_resource",
            logger=logger,
            document_cache=self.document_cache,
            collection_cache=self.collection_cache,
            timeout=5.0
        )

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_document')
    async def test_get_document_cache_hit(self, mock_get_document):
        """Test get_document with cache hit."""
        # Prepare cached data
        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
        self.document_cache.set("doc123", test_data)

        # Execute get_document
        result = await self.service.get_document("doc123")

        # Verify result comes from cache
        self.assertEqual(result, test_data)

        # Verify Firestore was not called
        mock_get_document.assert_not_called()

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_document')
    async def test_get_document_cache_miss(self, mock_get_document):
        """Test get_document with cache miss."""
        # Configure mock to return data
        mock_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
        mock_get_document.return_value = mock_data

        # Execute get_document
        result = await self.service.get_document("doc123")

        # Verify Firestore was called
        mock_get_document.assert_called_once_with("doc123")

        # Verify result is correct
        self.assertEqual(result, mock_data)

        # Verify data was cached
        cached_data = self.document_cache.get("doc123")
        self.assertEqual(cached_data, mock_data)

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_all_documents')
    async def test_get_all_documents_cache_hit(self, mock_get_all):
        """Test get_all_documents with cache hit."""
        # Prepare cached data
        test_docs = [
            {"id": "doc1", "name": "Doc 1", "description": "First doc"},
            {"id": "doc2", "name": "Doc 2", "description": "Second doc"}
        ]
        self.collection_cache.set("test_cache_key", test_docs)

        # Execute get_all_documents
        result = await self.service.get_all_documents("test_cache_key")

        # Verify result comes from cache
        self.assertEqual(result, test_docs)

        # Verify Firestore was not called
        mock_get_all.assert_not_called()

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.get_all_documents')
    async def test_get_all_documents_cache_miss(self, mock_get_all):
        """Test get_all_documents with cache miss."""
        # Configure mock to return data
        mock_docs = [
            {"id": "doc1", "name": "Doc 1", "description": "First doc"},
            {"id": "doc2", "name": "Doc 2", "description": "Second doc"}
        ]
        mock_get_all.return_value = mock_docs

        # Execute get_all_documents
        result = await self.service.get_all_documents("test_cache_key")

        # Verify Firestore was called
        mock_get_all.assert_called_once()

        # Verify result is correct
        self.assertEqual(result, mock_docs)

        # Verify data was cached
        cached_docs = self.collection_cache.get("test_cache_key")
        self.assertEqual(cached_docs, mock_docs)

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.update_document')
    async def test_update_document_invalidates_cache(self, mock_update):
        """Test that update_document invalidates cache."""
        # Prepare cached data
        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
        self.document_cache.set("doc123", test_data)

        # Configure mock to return updated data
        updated_data = {"id": "doc123", "name": "Updated Doc", "description": "This was updated"}
        mock_update.return_value = updated_data

        # Execute update_document
        await self.service.update_document("doc123", {"name": "Updated Doc"}, "user123")

        # Verify cache was invalidated
        self.assertIsNone(self.document_cache.get("doc123"))

        # Verify collection cache was also invalidated
        if self.collection_cache.get("all_documents"):
            self.fail("Collection cache was not invalidated")

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.create_document')
    async def test_create_document_invalidates_cache(self, mock_create):
        """Test that create_document invalidates cache."""
        # Prepare collection cache data
        test_docs = [{"id": "doc1", "name": "Doc 1"}]
        self.collection_cache.set("all_documents", test_docs)

        # Configure mock to return created data
        new_data = {"id": "doc2", "name": "New Doc", "description": "Newly created"}
        mock_create.return_value = new_data

        # Create model instance
        new_model = TestModel(id="doc2", name="New Doc", description="Newly created")

        # Execute create_document
        await self.service.create_document("doc2", new_model, "user123")

        # Verify collection cache was invalidated
        self.assertIsNone(self.collection_cache.get("all_documents"))

    @patch('ipulse_shared_core_ftredge.services.BaseFirestoreService.delete_document')
    async def test_delete_document_invalidates_cache(self, mock_delete):
        """Test that delete_document invalidates cache."""
        # Prepare cached data
        test_data = {"id": "doc123", "name": "Test Doc", "description": "This is a test"}
        self.document_cache.set("doc123", test_data)

        test_docs = [test_data]
        self.collection_cache.set("all_documents", test_docs)

        # Execute delete_document
        await self.service.delete_document("doc123")

        # Verify document cache was invalidated
        self.assertIsNone(self.document_cache.get("doc123"))

        # Verify collection cache was also invalidated
        self.assertIsNone(self.collection_cache.get("all_documents"))

    def test_invalidate_document_cache(self):
        """Test _invalidate_document_cache method."""
        # Prepare cached data
        test_data = {"id": "doc123", "name": "Test Doc"}
        self.document_cache.set("doc123", test_data)

        # Execute invalidation
        self.service._invalidate_document_cache("doc123")

        # Verify cache was invalidated
        self.assertIsNone(self.document_cache.get("doc123"))

    def test_invalidate_collection_cache(self):
        """Test _invalidate_collection_cache method."""
        # Prepare cached data
        test_docs = [{"id": "doc1", "name": "Doc 1"}, {"id": "doc2", "name": "Doc 2"}]
        self.collection_cache.set("all_documents", test_docs)

        # Execute invalidation
        self.service._invalidate_collection_cache()

        # Verify cache was invalidated
        self.assertIsNone(self.collection_cache.get("all_documents"))

    def test_invalidate_collection_cache_custom_key(self):
        """Test _invalidate_collection_cache method with custom key."""
        # Prepare cached data
        test_docs = [{"id": "doc1", "name": "Doc 1"}, {"id": "doc2", "name": "Doc 2"}]
        self.collection_cache.set("custom_key", test_docs)

        # Execute invalidation
        self.service._invalidate_collection_cache("custom_key")

        # Verify cache was invalidated
        self.assertIsNone(self.collection_cache.get("custom_key"))


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,146 @@
1
+ """Tests for the SharedCache implementation."""
2
+
3
+ import time
4
+ import unittest
5
+ import logging
6
+ from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
7
+
8
+ # Configure logging for tests
9
+ logging.basicConfig(level=logging.INFO)
10
+ logger = logging.getLogger(__name__)
11
+
12
+
13
class TestSharedCache(unittest.TestCase):
    """Unit tests covering the SharedCache behaviour."""

    def setUp(self):
        """Create a fresh, enabled cache with a short TTL for each test."""
        self.cache = SharedCache[str](
            name="TestCache",
            ttl=0.5,  # short TTL keeps the expiry test fast
            enabled=True,
            logger=logger
        )

    def test_cache_set_get(self):
        """A stored value is retrievable under its key."""
        self.cache.set("test_key", "test_value")
        self.assertEqual(self.cache.get("test_key"), "test_value")

    def test_cache_ttl_expiration(self):
        """Entries disappear once their TTL has elapsed."""
        self.cache.set("expiring_key", "expiring_value")

        # Still present immediately after insertion.
        self.assertEqual(self.cache.get("expiring_key"), "expiring_value")

        # Sleep slightly past the 0.5s TTL so the entry expires.
        time.sleep(0.6)
        self.assertIsNone(self.cache.get("expiring_key"))

    def test_cache_invalidate(self):
        """Invalidating one key leaves other keys untouched."""
        self.cache.set("key1", "value1")
        self.cache.set("key2", "value2")

        self.cache.invalidate("key1")

        # key1 is evicted; key2 survives.
        self.assertIsNone(self.cache.get("key1"))
        self.assertEqual(self.cache.get("key2"), "value2")

    def test_cache_invalidate_all(self):
        """invalidate_all removes every cached entry."""
        self.cache.set("key1", "value1")
        self.cache.set("key2", "value2")

        self.cache.invalidate_all()

        self.assertIsNone(self.cache.get("key1"))
        self.assertIsNone(self.cache.get("key2"))

    def test_cache_get_or_set(self):
        """get_or_set invokes the loader on a miss and serves the cached value after."""
        # Mutable counter to observe how many times the loader actually runs.
        counter = [0]

        def data_loader():
            counter[0] += 1
            return f"loaded_value_{counter[0]}"

        # Miss: loader runs. Hit: cached value returned, loader skipped.
        value1, was_cached1 = self.cache.get_or_set("loader_key", data_loader)
        value2, was_cached2 = self.cache.get_or_set("loader_key", data_loader)

        self.assertEqual(value1, "loaded_value_1")
        self.assertEqual(value2, "loaded_value_1")  # same value, served from cache
        self.assertFalse(was_cached1)  # first call was a miss
        self.assertTrue(was_cached2)   # second call was a hit
        self.assertEqual(counter[0], 1)  # loader executed exactly once

    def test_cache_disabled(self):
        """A disabled cache stores nothing and always returns None."""
        disabled_cache = SharedCache[str](
            name="DisabledCache",
            ttl=1.0,
            enabled=False,
            logger=logger
        )

        disabled_cache.set("disabled_key", "disabled_value")

        # Disabled caches never return stored data.
        self.assertIsNone(disabled_cache.get("disabled_key"))

    def test_cache_generic_typing(self):
        """The cache round-trips non-string payload types as well."""
        int_cache = SharedCache[int](name="IntCache", ttl=1.0, enabled=True)
        int_cache.set("int_key", 123)
        self.assertEqual(int_cache.get("int_key"), 123)

        dict_cache = SharedCache[dict](name="DictCache", ttl=1.0, enabled=True)
        dict_cache.set("dict_key", {"a": 1, "b": 2})
        self.assertEqual(dict_cache.get("dict_key"), {"a": 1, "b": 2})

    def test_cache_stats(self):
        """get_stats reports the cache's configuration and current contents."""
        self.cache.set("stats_key1", "stats_value1")
        self.cache.set("stats_key2", "stats_value2")

        stats = self.cache.get_stats()

        # Configuration fields mirror the setUp() constructor arguments.
        self.assertEqual(stats["name"], "TestCache")
        self.assertEqual(stats["enabled"], True)
        self.assertEqual(stats["ttl_seconds"], 0.5)
        # Content fields reflect the two entries inserted above.
        self.assertEqual(stats["item_count"], 2)
        self.assertIn("stats_key1", stats["keys"])
        self.assertIn("stats_key2", stats["keys"])
143
+
144
+
145
+ if __name__ == "__main__":
146
+ unittest.main()
@@ -1,6 +0,0 @@
1
- from .base_firestore_service import BaseFirestoreService
2
-
3
- from .base_service_exceptions import (BaseServiceException, ResourceNotFoundError, AuthorizationError,
4
- ValidationError ,ServiceError)
5
- from .servicemon import Servicemon
6
- from .fastapiservicemon import FastAPIServiceMon