ipulse-shared-core-ftredge 12.0.1__py3-none-any.whl → 13.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- ipulse_shared_core_ftredge/cache/__init__.py +4 -0
- ipulse_shared_core_ftredge/cache/shared_cache.py +249 -0
- ipulse_shared_core_ftredge/services/__init__.py +13 -5
- ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +171 -0
- ipulse_shared_core_ftredge/services/credit_service.py +1 -1
- {ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/METADATA +1 -1
- {ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/RECORD +10 -7
- {ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/WHEEL +0 -0
- {ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/licenses/LICENCE +0 -0
- {ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/top_level.txt +0 -0
ipulse_shared_core_ftredge/cache/shared_cache.py
@@ -0,0 +1,249 @@
+"""Module for shared caching functionality that can be used across microservices."""
+import os
+import time
+import logging
+import traceback
+import inspect
+import asyncio
+from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
+
+T = TypeVar('T')
+
+class SharedCache(Generic[T]):
+    """
+    Generic shared cache implementation that can be used across services.
+
+    Attributes:
+        name: The name of the cache for logging and identification.
+        ttl: Time-to-live in seconds for cached items.
+        enabled: Whether the cache is enabled.
+        logger: Logger for cache operations.
+        _cache: Dictionary holding cached values.
+        _timestamps: Dictionary holding timestamps for each cached item.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        ttl: float,
+        enabled: bool = True,
+        logger: Optional[logging.Logger] = None
+    ):
+        """Initialize the cache with name, TTL and enabled state."""
+        self.name = name
+        self.ttl = ttl
+        self.enabled = enabled
+        self.logger = logger or logging.getLogger(__name__)
+        self._cache: Dict[str, T] = {}
+        self._timestamps: Dict[str, float] = {}
+
+        self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
+
+    def get(self, key: str) -> Optional[T]:
+        """
+        Get a value from the cache if it exists and hasn't expired.
+
+        Args:
+            key: The cache key to retrieve.
+
+        Returns:
+            The cached value if found and valid, None otherwise.
+        """
+        if not self.enabled:
+            return None
+
+        try:
+            if key in self._cache:
+                timestamp = self._timestamps.get(key, 0)
+                if time.time() - timestamp < self.ttl:
+                    self.logger.debug(f"Cache hit for {key} in {self.name}")
+                    return self._cache[key]
+                else:
+                    # Expired item, remove it
+                    self.invalidate(key)
+                    self.logger.debug(f"Cache expired for {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+        return None
+
+    def set(self, key: str, value: T) -> None:
+        """
+        Set a value in the cache.
+
+        Args:
+            key: The cache key to set.
+            value: The value to cache.
+        """
+        if not self.enabled:
+            return
+
+        try:
+            self._cache[key] = value
+            self._timestamps[key] = time.time()
+            self.logger.debug(f"Cached item {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def invalidate(self, key: str) -> None:
+        """
+        Remove a specific key from the cache.
+
+        Args:
+            key: The cache key to invalidate.
+        """
+        try:
+            self._cache.pop(key, None)
+            self._timestamps.pop(key, None)
+            self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def invalidate_all(self) -> None:
+        """Clear all cached items."""
+        try:
+            cache_size = len(self._cache)
+            self._cache.clear()
+            self._timestamps.clear()
+            self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+        except Exception as e:
+            self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def get_or_set(
+        self,
+        key: str,
+        data_loader: Callable[[], T]
+    ) -> Tuple[T, bool]:
+        """
+        Get a value from cache or set it using the data_loader if missing or expired.
+
+        Args:
+            key: The cache key.
+            data_loader: Function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        try:
+            cached_data = self.get(key)
+            if cached_data is not None:
+                return cached_data, True
+
+            # Not in cache or expired, load the data
+            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")
+
+            # Check if the data_loader is a coroutine function
+            if inspect.iscoroutinefunction(data_loader):
+                self.logger.error(
+                    f"Error in get_or_set for {key} in {self.name}: "
+                    f"data_loader is a coroutine function which is not supported. "
+                    f"Use a regular function that returns a value, not a coroutine."
+                )
+                # Fall back to running the coroutine in the event loop if possible
+                try:
+                    loop = asyncio.get_event_loop()
+                    fresh_data = loop.run_until_complete(data_loader())
+                except Exception as coro_err:
+                    self.logger.error(f"Failed to execute coroutine data_loader: {str(coro_err)}")
+                    raise RuntimeError(f"Cannot use coroutine data_loader in cache: {str(coro_err)}")
+            else:
+                # Regular function, just call it
+                fresh_data = data_loader()
+
+            if fresh_data is not None:  # Only cache if we got valid data
+                self.set(key, fresh_data)
+
+            if fresh_data is None:
+                raise ValueError(f"Data loader returned None for key {key} in {self.name}")
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+            # Since this is a critical function, re-raise the exception
+            # after logging it, but add context about the cache
+            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+    async def async_get_or_set(
+        self,
+        key: str,
+        async_data_loader: Callable[[], Awaitable[T]]
+    ) -> Tuple[T, bool]:
+        """
+        Async version of get_or_set for use with async data loaders.
+
+        Args:
+            key: The cache key.
+            async_data_loader: Async function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        try:
+            cached_data = self.get(key)
+            if cached_data is not None:
+                return cached_data, True
+
+            # Not in cache or expired, load the data asynchronously
+            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")
+
+            # Execute the async data loader
+            fresh_data = await async_data_loader()
+
+            if fresh_data is not None:  # Only cache if we got valid data
+                self.set(key, fresh_data)
+
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get statistics about the current cache state."""
+        try:
+            return {
+                "name": self.name,
+                "enabled": self.enabled,
+                "ttl_seconds": self.ttl,
+                "item_count": len(self._cache),
+                "keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
+                "has_more_keys": len(self._cache.keys()) > 20,
+                "memory_usage_estimate_bytes": sum(
+                    len(str(k)) + self._estimate_size(v)
+                    for k, v in self._cache.items()
+                )
+            }
+        except Exception as e:
+            self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+            self.logger.error(traceback.format_exc())
+            return {
+                "name": self.name,
+                "enabled": self.enabled,
+                "error": str(e),
+                "ttl_seconds": self.ttl,
+                "item_count": len(self._cache) if self._cache else 0
+            }
+
+    def _estimate_size(self, obj: Any) -> int:
+        """Estimate the memory size of an object in bytes."""
+        try:
+            if obj is None:
+                return 0
+            if isinstance(obj, (str, bytes, bytearray)):
+                return len(obj)
+            if isinstance(obj, (int, float, bool)):
+                return 8
+            if isinstance(obj, dict):
+                return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
+            if isinstance(obj, (list, tuple, set)):
+                return sum(self._estimate_size(i) for i in obj)
+            # For other objects, use a rough approximation
+            return len(str(obj))
+        except Exception:
+            # If we can't estimate, return a reasonable default
+            return 100
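For orientation, a minimal usage sketch of the new SharedCache API follows. It is an illustration only: the cache name, keys, TTL and loader functions are hypothetical, not identifiers shipped in this release.

# Hypothetical usage sketch of SharedCache; names and values are illustrative.
import asyncio
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache

cache: SharedCache[dict] = SharedCache(name="user-profiles", ttl=60.0)

def load_profile() -> dict:
    # Stand-in for a real data source (DB call, API request, ...)
    return {"uid": "u123", "plan": "free"}

profile, was_cached = cache.get_or_set("u123", load_profile)  # miss: loads and caches
profile, was_cached = cache.get_or_set("u123", load_profile)  # hit until the 60 s TTL lapses

async def main() -> None:
    async def load_profile_async() -> dict:
        return {"uid": "u456", "plan": "pro"}
    # Async loaders should use async_get_or_set; get_or_set logs an error for coroutines
    data, was_cached = await cache.async_get_or_set("u456", load_profile_async)

asyncio.run(main())
print(cache.get_stats())  # name, enabled, ttl_seconds, item_count, keys, ...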
ipulse_shared_core_ftredge/services/__init__.py
@@ -1,6 +1,14 @@
-
+"""Service utilities for shared core."""
+# Import existing components
+from ipulse_shared_core_ftredge.services.base_service_exceptions import (
+    BaseServiceException, ServiceError, ValidationError, ResourceNotFoundError, AuthorizationError
+)
+from ipulse_shared_core_ftredge.services.servicemon import Servicemon
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService
 
-
-
-
-
+__all__ = [
+    'AuthorizationError', 'BaseServiceException', 'ServiceError', 'ValidationError',
+    'ResourceNotFoundError', 'BaseFirestoreService',
+    'CacheAwareFirestoreService'
+]
ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py
@@ -0,0 +1,171 @@
+"""Base service with built-in cache awareness for Firestore operations."""
+
+from typing import Dict, Any, List, Optional, TypeVar, Generic
+from google.cloud import firestore
+from pydantic import BaseModel
+import logging
+from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+from ipulse_shared_core_ftredge.services import BaseFirestoreService
+
+T = TypeVar('T', bound=BaseModel)
+
+class CacheAwareFirestoreService(BaseFirestoreService, Generic[T]):
+    """
+    Base service class that integrates caching with Firestore operations.
+    This allows services to inherit cache-aware CRUD methods without reimplementing them.
+    """
+
+    def __init__(
+        self,
+        db: firestore.Client,
+        collection_name: str,
+        resource_type: str,
+        logger: logging.Logger,
+        document_cache: Optional[SharedCache] = None,
+        collection_cache: Optional[SharedCache] = None,
+        timeout: float = 15.0
+    ):
+        """
+        Initialize the service with optional cache instances.
+
+        Args:
+            db: Firestore client
+            collection_name: Firestore collection name
+            resource_type: Resource type for error messages
+            logger: Logger instance
+            document_cache: Cache for individual documents (optional)
+            collection_cache: Cache for collection-level queries (optional)
+            timeout: Firestore operation timeout in seconds
+        """
+        super().__init__(
+            db=db,
+            collection_name=collection_name,
+            resource_type=resource_type,
+            logger=logger,
+            timeout=timeout
+        )
+        self.document_cache = document_cache
+        self.collection_cache = collection_cache
+
+        # Log cache configuration
+        if document_cache:
+            self.logger.info(f"Document cache enabled for {resource_type}: {document_cache.name}")
+        if collection_cache:
+            self.logger.info(f"Collection cache enabled for {resource_type}: {collection_cache.name}")
+
+    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+        """Create a document and invalidate relevant caches."""
+        result = await super().create_document(doc_id, data, creator_uid)
+
+        # Invalidate document cache if it exists
+        self._invalidate_document_cache(doc_id)
+
+        # Invalidate collection cache if it exists
+        self._invalidate_collection_cache()
+
+        return result
+
+    async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
+        """Update a document and invalidate relevant caches."""
+        result = await super().update_document(doc_id, update_data, updater_uid)
+
+        # Invalidate document cache if it exists
+        self._invalidate_document_cache(doc_id)
+
+        # Invalidate collection cache if it exists
+        self._invalidate_collection_cache()
+
+        return result
+
+    async def delete_document(self, doc_id: str, deleter_uid: Optional[str] = None) -> None:
+        """Delete a document and invalidate relevant caches."""
+        # Invalidate caches before deletion to handle potential failures
+        self._invalidate_document_cache(doc_id)
+        self._invalidate_collection_cache()
+
+        # Delete the document
+        await super().delete_document(doc_id)
+
+    async def get_document(self, doc_id: str) -> Dict[str, Any]:
+        """
+        Get a document by ID with caching if available.
+
+        Args:
+            doc_id: The document ID to fetch
+
+        Returns:
+            The document data
+        """
+        # Check document cache first if available
+        if self.document_cache:
+            cached_data = self.document_cache.get(doc_id)
+            if cached_data is not None:
+                self.logger.debug(f"Cache hit for document {doc_id}")
+                return cached_data
+
+        # Cache miss or no cache configured, fetch from Firestore
+        doc_data = await super().get_document(doc_id)
+
+        # Store in cache if available
+        if self.document_cache and doc_data:
+            # Make sure ID is included in the cached data
+            if 'id' not in doc_data:
+                doc_data['id'] = doc_id
+            self.document_cache.set(doc_id, doc_data)
+            self.logger.debug(f"Cached document {doc_id}")
+
+        return doc_data
+
+    async def get_all_documents(self, cache_key: str = "all_documents") -> List[Dict[str, Any]]:
+        """
+        Get all documents in the collection with caching.
+
+        Args:
+            cache_key: The key to use for caching the full collection
+
+        Returns:
+            List of all documents in the collection
+        """
+        # Check collection cache first if available
+        if self.collection_cache:
+            cached_data = self.collection_cache.get(cache_key)
+            if cached_data is not None:
+                self.logger.debug(f"Cache hit for collection query: {cache_key}")
+                return cached_data
+
+        # Cache miss or no cache configured, fetch from Firestore
+        query = self.db.collection(self.collection_name).stream(timeout=self.timeout)
+        documents = []
+
+        for doc in query:
+            doc_data = doc.to_dict()
+
+            # Make sure ID is included in the data
+            if 'id' not in doc_data:
+                doc_data['id'] = doc.id
+
+            # Also update the document cache if configured
+            if self.document_cache:
+                self.document_cache.set(doc.id, doc_data)
+
+            documents.append(doc_data)
+
+        # Store in collection cache if available
+        if self.collection_cache:
+            self.collection_cache.set(cache_key, documents)
+            self.logger.debug(f"Cached collection query result: {cache_key} with {len(documents)} documents")
+
+        return documents
+
+    def _invalidate_document_cache(self, doc_id: str) -> None:
+        """Invalidate the document cache for a specific document ID."""
+        if self.document_cache:
+            self.document_cache.invalidate(doc_id)
+            self.logger.debug(f"Invalidated document cache for {doc_id}")
+
+    def _invalidate_collection_cache(self, cache_key: str = "all_documents") -> None:
+        """Invalidate the collection cache."""
+        if self.collection_cache:
+            # For single key collection cache
+            self.collection_cache.invalidate(cache_key)
+            self.logger.debug(f"Invalidated collection cache: {cache_key}")
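As an illustration of how the new base class is meant to be consumed, here is a hedged sketch of a subclass wiring both caches. The service name, collection and TTL values are hypothetical; only the constructor keywords mirror the __init__ signature shown above.

# Hypothetical subclass of CacheAwareFirestoreService; names are illustrative.
import logging
from google.cloud import firestore
from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
from ipulse_shared_core_ftredge.services import CacheAwareFirestoreService

logger = logging.getLogger("portfolio_service")

class PortfolioService(CacheAwareFirestoreService):
    def __init__(self, db: firestore.Client):
        super().__init__(
            db=db,
            collection_name="portfolios",
            resource_type="portfolio",
            logger=logger,
            # Separate TTLs: 5 min for single documents, 1 min for full-collection reads
            document_cache=SharedCache(name="portfolio_docs", ttl=300.0, logger=logger),
            collection_cache=SharedCache(name="portfolio_collection", ttl=60.0, logger=logger),
        )

# get_document and get_all_documents now read through the caches, while
# create_document, update_document and delete_document invalidate them.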
ipulse_shared_core_ftredge/services/credit_service.py
@@ -135,7 +135,7 @@ class CreditService:
                 error=e,
                 resource_type="user_credits",
                 resource_id=user_uid,
-                additional_info={"credits_to_charge":
+                additional_info={"credits_to_charge": required_credits_for_resource}
             ) from e
 
     async def charge_credits(self, user_uid: str, credits_to_charge: Optional[float], operation_details: str) -> bool:
{ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 12.0.1
+Version: 13.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
{ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/RECORD
RENAMED
@@ -1,4 +1,6 @@
 ipulse_shared_core_ftredge/__init__.py,sha256=Bj1WgZq6EmiZeFC-3gYludUpoWgsUrRq1NME5nMN22Q,501
+ipulse_shared_core_ftredge/cache/__init__.py,sha256=i2fPojmZiBwAoY5ovnnnME9USl4bi8MRPYkAgEfACfI,136
+ipulse_shared_core_ftredge/cache/shared_cache.py,sha256=pDHJuMRU6zkqbykaK2ldpyVmUHLa0TAI4Xu3P9M-0B0,9454
 ipulse_shared_core_ftredge/dependencies/__init__.py,sha256=HGsR8HUguKTfjz_BorCILS4izX8CAjG-apE0kIPE0Yo,68
 ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py,sha256=EFWyhoVOI0tGYOWqN5St4JNIy4cMwpxeBhKdjOwEfbg,1888
 ipulse_shared_core_ftredge/dependencies/auth_protected_router.py,sha256=em5D5tE7OkgZmuCtYCKuUAnIZCgRJhCF8Ye5QmtGWlk,1807
@@ -13,17 +15,18 @@ ipulse_shared_core_ftredge/models/user_auth.py,sha256=YgCeK0uJ-JOkPavwzogl4wGC3R
 ipulse_shared_core_ftredge/models/user_profile.py,sha256=5cTTZa7pMkgKCsLgTPpvz_aPn-ZyQcJ3xSEtu3jq3HE,4138
 ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=3BqAAqnVKXPKhAcfV_aOERe8GyIkX0NU_LJcQa02aLw,1319
 ipulse_shared_core_ftredge/models/user_status.py,sha256=rAx8l5GrB8TN7RvZ1eIMskphRxdYqO1OZ8NnaIxUUW8,23660
-ipulse_shared_core_ftredge/services/__init__.py,sha256=
+ipulse_shared_core_ftredge/services/__init__.py,sha256=iwbBlviqOxVPmJC9tRsOyU6zzQlAn7Do0Gc3WKRi4Ao,697
 ipulse_shared_core_ftredge/services/base_firestore_service.py,sha256=n1lymQEFcu6zHkdscNNCNIzTIVmja8cBtNy2yi5vfTE,9817
 ipulse_shared_core_ftredge/services/base_service_exceptions.py,sha256=Bi0neeMY0YncWDeqUavu5JUslkjJ6QcDVRU32Ipjc08,4294
-ipulse_shared_core_ftredge/services/
+ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py,sha256=DLNS1BegJUPSHs41j5jkP3g6w2tGSDUIurIWjI__xf4,6486
+ipulse_shared_core_ftredge/services/credit_service.py,sha256=C07rOr58LsK4udznu64mQFUSBxY8AdfRaxw_9Pw_AOI,12038
 ipulse_shared_core_ftredge/services/fastapiservicemon.py,sha256=27clTZXH32mbju8o-HLO_8VrmugmpXwHLuX-OOoIAew,5308
 ipulse_shared_core_ftredge/services/servicemon.py,sha256=wWhsLwU1_07emaEyCNziZA1bDQVLxcfvQj0OseTLSTI,7969
 ipulse_shared_core_ftredge/utils/__init__.py,sha256=JnxUb8I2MRjJC7rBPXSrpwBIQDEOku5O9JsiTi3oun8,56
 ipulse_shared_core_ftredge/utils/custom_json_encoder.py,sha256=DblQLD0KOSNDyQ58wQRogBrShIXzPIZUw_oGOBATnJY,1366
 ipulse_shared_core_ftredge/utils/json_encoder.py,sha256=QkcaFneVv3-q-s__Dz4OiUWYnM6jgHDJrDMdPv09RCA,2093
-ipulse_shared_core_ftredge-
-ipulse_shared_core_ftredge-
-ipulse_shared_core_ftredge-
-ipulse_shared_core_ftredge-
-ipulse_shared_core_ftredge-
+ipulse_shared_core_ftredge-13.0.1.dist-info/licenses/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ipulse_shared_core_ftredge-13.0.1.dist-info/METADATA,sha256=OYQw5kTq7LHJinYen7EVx-NryTit726UoMoNm6PxtMA,803
+ipulse_shared_core_ftredge-13.0.1.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+ipulse_shared_core_ftredge-13.0.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ipulse_shared_core_ftredge-13.0.1.dist-info/RECORD,,
{ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/WHEEL
RENAMED
File without changes

{ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/licenses/LICENCE
RENAMED
File without changes

{ipulse_shared_core_ftredge-12.0.1.dist-info → ipulse_shared_core_ftredge-13.0.1.dist-info}/top_level.txt
RENAMED
File without changes