ipulse-shared-core-ftredge 11.1.1__tar.gz → 13.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (42)
  1. {ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-13.0.1}/PKG-INFO +2 -2
  2. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/setup.py +2 -2
  3. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/__init__.py +1 -1
  4. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/__init__.py +4 -0
  5. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/cache/shared_cache.py +249 -0
  6. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +123 -21
  7. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +55 -0
  8. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +4 -0
  9. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/__init__.py +14 -0
  10. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +95 -17
  11. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +171 -0
  12. ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge/services/credit_service.py +270 -0
  13. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +2 -2
  14. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +7 -1
  15. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +1 -1
  16. ipulse_shared_core_ftredge-13.0.1/tests/test_cache_aware_service.py +233 -0
  17. ipulse_shared_core_ftredge-13.0.1/tests/test_shared_cache.py +146 -0
  18. ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -41
  19. ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge/services/__init__.py +0 -6
  20. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/LICENCE +0 -0
  21. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/README.md +0 -0
  22. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/pyproject.toml +0 -0
  23. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/setup.cfg +0 -0
  24. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  25. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  26. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  27. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  28. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  29. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +0 -0
  30. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
  31. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
  32. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  33. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  34. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  35. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
  36. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
  37. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
  38. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
  39. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  40. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
  41. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  42. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-13.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 11.1.1
+Version: 13.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -13,7 +13,7 @@ Requires-Dist: pydantic[email]~=2.5
 Requires-Dist: python-dateutil~=2.8
 Requires-Dist: fastapi~=0.115.8
 Requires-Dist: pytest
-Requires-Dist: ipulse_shared_base_ftredge>=6.4.1
+Requires-Dist: ipulse_shared_base_ftredge==6.5.1
 Dynamic: author
 Dynamic: classifier
 Dynamic: home-page
setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages

 setup(
     name='ipulse_shared_core_ftredge',
-    version='11.1.1',
+    version='13.0.1',
     package_dir={'': 'src'},  # Specify the source directory
     packages=find_packages(where='src'),  # Look for packages in 'src'
     install_requires=[
@@ -12,7 +12,7 @@ setup(
         'python-dateutil~=2.8',
         'fastapi~=0.115.8',
         'pytest',
-        'ipulse_shared_base_ftredge>=6.4.1',
+        'ipulse_shared_base_ftredge==6.5.1',
     ],
     author='Russlan Ramdowar',
     description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',
src/ipulse_shared_core_ftredge/__init__.py
@@ -7,6 +7,6 @@ from .models import ( UserAuth, UserProfile,Subscription,


 from .services import (BaseFirestoreService,BaseServiceException, ResourceNotFoundError, AuthorizationError,
-                        ValidationError)
+                        ValidationError, ServiceError)

 from .utils import (EnsureJSONEncoderCompatibility)
src/ipulse_shared_core_ftredge/cache/__init__.py (new file)
@@ -0,0 +1,4 @@
+"""Cache utilities for shared core."""
+from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
+
+__all__ = ['SharedCache']
src/ipulse_shared_core_ftredge/cache/shared_cache.py (new file)
@@ -0,0 +1,249 @@
+"""Module for shared caching functionality that can be used across microservices."""
+import os
+import time
+import logging
+import traceback
+import inspect
+import asyncio
+from typing import Dict, Any, Optional, TypeVar, Generic, Callable, Tuple, List, Awaitable
+
+T = TypeVar('T')
+
+class SharedCache(Generic[T]):
+    """
+    Generic shared cache implementation that can be used across services.
+
+    Attributes:
+        name: The name of the cache for logging and identification.
+        ttl: Time-to-live in seconds for cached items.
+        enabled: Whether the cache is enabled.
+        logger: Logger for cache operations.
+        _cache: Dictionary holding cached values.
+        _timestamps: Dictionary holding timestamps for each cached item.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        ttl: float,
+        enabled: bool = True,
+        logger: Optional[logging.Logger] = None
+    ):
+        """Initialize the cache with name, TTL and enabled state."""
+        self.name = name
+        self.ttl = ttl
+        self.enabled = enabled
+        self.logger = logger or logging.getLogger(__name__)
+        self._cache: Dict[str, T] = {}
+        self._timestamps: Dict[str, float] = {}
+
+        self.logger.info(f"{name} cache initialized. Enabled: {enabled}, TTL: {ttl} seconds")
+
+    def get(self, key: str) -> Optional[T]:
+        """
+        Get a value from the cache if it exists and hasn't expired.
+
+        Args:
+            key: The cache key to retrieve.
+
+        Returns:
+            The cached value if found and valid, None otherwise.
+        """
+        if not self.enabled:
+            return None
+
+        try:
+            if key in self._cache:
+                timestamp = self._timestamps.get(key, 0)
+                if time.time() - timestamp < self.ttl:
+                    self.logger.debug(f"Cache hit for {key} in {self.name}")
+                    return self._cache[key]
+                else:
+                    # Expired item, remove it
+                    self.invalidate(key)
+                    self.logger.debug(f"Cache expired for {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error getting item from {self.name} cache with key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+        return None
+
+    def set(self, key: str, value: T) -> None:
+        """
+        Set a value in the cache.
+
+        Args:
+            key: The cache key to set.
+            value: The value to cache.
+        """
+        if not self.enabled:
+            return
+
+        try:
+            self._cache[key] = value
+            self._timestamps[key] = time.time()
+            self.logger.debug(f"Cached item {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error setting item in {self.name} cache with key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def invalidate(self, key: str) -> None:
+        """
+        Remove a specific key from the cache.
+
+        Args:
+            key: The cache key to invalidate.
+        """
+        try:
+            self._cache.pop(key, None)
+            self._timestamps.pop(key, None)
+            self.logger.debug(f"Invalidated cache for {key} in {self.name}")
+        except Exception as e:
+            self.logger.error(f"Error invalidating cache in {self.name} for key {key}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def invalidate_all(self) -> None:
+        """Clear all cached items."""
+        try:
+            cache_size = len(self._cache)
+            self._cache.clear()
+            self._timestamps.clear()
+            self.logger.info(f"Invalidated all {cache_size} entries in {self.name} cache")
+        except Exception as e:
+            self.logger.error(f"Error invalidating all cache entries in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+    def get_or_set(
+        self,
+        key: str,
+        data_loader: Callable[[], T]
+    ) -> Tuple[T, bool]:
+        """
+        Get a value from cache or set it using the data_loader if missing or expired.
+
+        Args:
+            key: The cache key.
+            data_loader: Function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        try:
+            cached_data = self.get(key)
+            if cached_data is not None:
+                return cached_data, True
+
+            # Not in cache or expired, load the data
+            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data...")
+
+            # Check if the data_loader is a coroutine function
+            if inspect.iscoroutinefunction(data_loader):
+                self.logger.error(
+                    f"Error in get_or_set for {key} in {self.name}: "
+                    f"data_loader is a coroutine function which is not supported. "
+                    f"Use a regular function that returns a value, not a coroutine."
+                )
+                # Fall back to running the coroutine in the event loop if possible
+                try:
+                    loop = asyncio.get_event_loop()
+                    fresh_data = loop.run_until_complete(data_loader())
+                except Exception as coro_err:
+                    self.logger.error(f"Failed to execute coroutine data_loader: {str(coro_err)}")
+                    raise RuntimeError(f"Cannot use coroutine data_loader in cache: {str(coro_err)}")
+            else:
+                # Regular function, just call it
+                fresh_data = data_loader()
+
+            if fresh_data is not None:  # Only cache if we got valid data
+                self.set(key, fresh_data)
+
+            if fresh_data is None:
+                raise ValueError(f"Data loader returned None for key {key} in {self.name}")
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+
+            # Since this is a critical function, re-raise the exception
+            # after logging it, but add context about the cache
+            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+    async def async_get_or_set(
+        self,
+        key: str,
+        async_data_loader: Callable[[], Awaitable[T]]
+    ) -> Tuple[T, bool]:
+        """
+        Async version of get_or_set for use with async data loaders.
+
+        Args:
+            key: The cache key.
+            async_data_loader: Async function to load data if not in cache.
+
+        Returns:
+            Tuple of (data, was_cached) where was_cached indicates if from cache.
+        """
+        try:
+            cached_data = self.get(key)
+            if cached_data is not None:
+                return cached_data, True
+
+            # Not in cache or expired, load the data asynchronously
+            self.logger.debug(f"Cache miss for {key} in {self.name}, loading data asynchronously...")
+
+            # Execute the async data loader
+            fresh_data = await async_data_loader()
+
+            if fresh_data is not None:  # Only cache if we got valid data
+                self.set(key, fresh_data)
+
+            return fresh_data, False
+        except Exception as e:
+            self.logger.error(f"Error in async_get_or_set for {key} in {self.name}: {str(e)}")
+            self.logger.error(traceback.format_exc())
+            raise RuntimeError(f"Cache error in {self.name} for key {key}: {str(e)}") from e
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get statistics about the current cache state."""
+        try:
+            return {
+                "name": self.name,
+                "enabled": self.enabled,
+                "ttl_seconds": self.ttl,
+                "item_count": len(self._cache),
+                "keys": list(self._cache.keys())[:20],  # Limit to first 20 keys
+                "has_more_keys": len(self._cache.keys()) > 20,
+                "memory_usage_estimate_bytes": sum(
+                    len(str(k)) + self._estimate_size(v)
+                    for k, v in self._cache.items()
+                )
+            }
+        except Exception as e:
+            self.logger.error(f"Error getting stats for {self.name} cache: {str(e)}")
+            self.logger.error(traceback.format_exc())
+            return {
+                "name": self.name,
+                "enabled": self.enabled,
+                "error": str(e),
+                "ttl_seconds": self.ttl,
+                "item_count": len(self._cache) if self._cache else 0
+            }
+
+    def _estimate_size(self, obj: Any) -> int:
+        """Estimate the memory size of an object in bytes."""
+        try:
+            if obj is None:
+                return 0
+            if isinstance(obj, (str, bytes, bytearray)):
+                return len(obj)
+            if isinstance(obj, (int, float, bool)):
+                return 8
+            if isinstance(obj, dict):
+                return sum(len(str(k)) + self._estimate_size(v) for k, v in obj.items())
+            if isinstance(obj, (list, tuple, set)):
+                return sum(self._estimate_size(i) for i in obj)
+            # For other objects, use a rough approximation
+            return len(str(obj))
+        except Exception:
+            # If we can't estimate, return a reasonable default
+            return 100
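For orientation, a minimal usage sketch of the new SharedCache (the cache name, key, and load_profile loader below are hypothetical, not part of the package):

import logging
from ipulse_shared_core_ftredge.cache import SharedCache

def load_profile() -> dict:
    # Hypothetical zero-argument loader; get_or_set calls it on a cache miss
    return {"uid": "user-123", "plan": "free"}

cache: SharedCache[dict] = SharedCache(name="user_profiles", ttl=60, logger=logging.getLogger("demo"))
profile, was_cached = cache.get_or_set("user-123", load_profile)  # miss: was_cached is False
profile, was_cached = cache.get_or_set("user-123", load_profile)  # within TTL: was_cached is True
cache.invalidate("user-123")

Async callers should prefer await cache.async_get_or_set(key, loader) over passing a coroutine function to get_or_set, which the class logs as an error before falling back to run_until_complete.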
src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py
@@ -1,5 +1,8 @@
 import os
 import logging
+import json
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
 from typing import Optional, Iterable, Dict, Any, List
 from datetime import datetime, timedelta, timezone
 import json
@@ -10,10 +13,10 @@ from ipulse_shared_core_ftredge.services import ServiceError, AuthorizationError
 from ipulse_shared_core_ftredge.models import UserStatus
 from ipulse_shared_core_ftredge.utils.json_encoder import convert_to_json_serializable

-# Constants
-USERS_STATUS_COLLECTION_NAME = UserStatus.get_collection_name()
-USERS_STATUS_DOC_REF = "userstatus_"
-CACHE_TTL = 60  # 60 seconds
+# Constants derived from UserStatus model
+USERS_STATUS_COLLECTION_NAME = UserStatus.COLLECTION_NAME
+USERS_STATUS_DOC_REF = f"{UserStatus.OBJ_REF}_"  # Use OBJ_REF and append underscore
+USERSTATUS_CACHE_TTL = 60  # 60 seconds

 class UserStatusCache:
     """Manages user status caching with dynamic invalidation"""
@@ -33,7 +36,7 @@ class UserStatusCache:
             status_data = self._cache[user_uid]
             # Force refresh for credit-consuming or sensitive operations
             # Check TTL for normal operations
-            if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=CACHE_TTL):
+            if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=USERSTATUS_CACHE_TTL):
                 return status_data
             self.invalidate(user_uid)
         return None
@@ -65,13 +68,59 @@ userstatus_cache = UserStatusCache()
 # Replace the logger dependency with a standard logger
 logger = logging.getLogger(__name__)

+# Create a custom FirestoreTimeoutError class that can be identified in middlewares
+class FirestoreTimeoutError(TimeoutError):
+    """Custom exception for Firestore timeout errors to make them more identifiable."""
+    pass
+
+
+# Define a function to get a Firestore document with a strict timeout
+async def get_with_strict_timeout(doc_ref, timeout_seconds: float):
+    """
+    Get a Firestore document with a strictly enforced timeout.
+
+    Args:
+        doc_ref: Firestore document reference
+        timeout_seconds: Maximum time to wait in seconds
+
+    Returns:
+        Document snapshot
+
+    Raises:
+        FirestoreTimeoutError: If the operation takes longer than timeout_seconds
+    """
+    loop = asyncio.get_running_loop()
+    with ThreadPoolExecutor() as executor:
+        try:
+            # Run the blocking Firestore get() operation in a thread and apply a strict timeout
+            logger.debug(f"Starting Firestore get with strict timeout of {timeout_seconds}s")
+            return await asyncio.wait_for(
+                loop.run_in_executor(executor, doc_ref.get),
+                timeout=timeout_seconds
+            )
+        except asyncio.TimeoutError:
+            error_message = f"User Status fetching for Authz timed out after {timeout_seconds} seconds, perhaps issue with Firestore Connectivity"
+            logger.error(error_message)
+            raise FirestoreTimeoutError(error_message)
+
+# Update get_userstatus to use our new strict timeout function
 async def get_userstatus(
     user_uid: str,
-    db: firestore.Client,  # Note: This expects the actual client, not a Depends
-    force_fresh: bool = False
+    db: firestore.Client,
+    force_fresh: bool = False,
+    timeout: float = 12.0  # Default timeout but allow override
 ) -> tuple[Dict[str, Any], bool]:
     """
-    Fetch user status with intelligent caching
+    Fetch user status with intelligent caching and configurable timeout
+
+    Args:
+        user_uid: User ID to fetch status for
+        db: Firestore client
+        force_fresh: Whether to bypass cache
+        timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Tuple of (user status data, whether cache was used)
     """
     cache_used = False
     if not force_fresh:
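A short caller-side sketch of the new strict-timeout helper (the collection path and document ID below are hypothetical; get_with_strict_timeout and FirestoreTimeoutError come from the hunk above):

import asyncio
from google.cloud import firestore

async def fetch_once() -> None:
    db = firestore.Client()  # assumes default credentials
    doc_ref = db.collection("papp_core_user_userstatuss").document("userstatus_some-uid")
    try:
        snapshot = await get_with_strict_timeout(doc_ref, timeout_seconds=5.0)
        print(snapshot.exists)
    except FirestoreTimeoutError as exc:
        # Subclasses TimeoutError, so middleware can match it specifically or generically
        print(f"Firestore did not respond in time: {exc}")

asyncio.run(fetch_once())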
@@ -85,8 +134,11 @@ async def get_userstatus(
         userstatus_id = USERS_STATUS_DOC_REF + user_uid
         user_ref = db.collection(USERS_STATUS_COLLECTION_NAME).document(userstatus_id)

-        # Get the document
-        snapshot = user_ref.get()
+        logger.debug(f"Fetching user status for {user_uid} with strict timeout {timeout}s")
+
+        # Use our strict timeout wrapper instead of the native timeout parameter
+        snapshot = await get_with_strict_timeout(user_ref, timeout)
+
         if not snapshot.exists:
             raise ResourceNotFoundError(
                 resource_type="authorization userstatus",
@@ -101,9 +153,23 @@ async def get_userstatus(
         userstatus_cache.set(user_uid, status_data)
         return status_data, cache_used

+    except TimeoutError as e:
+        logger.error(f"Timeout while fetching user status for {user_uid}: {str(e)}")
+        raise ServiceError(
+            operation="fetching user status for authz",
+            error=e,
+            resource_type="userstatus",
+            resource_id=user_uid,
+            additional_info={
+                "force_fresh": force_fresh,
+                "collection": USERS_STATUS_COLLECTION_NAME,
+                "timeout_seconds": timeout
+            }
+        )
     except ResourceNotFoundError:
         raise
     except Exception as e:
+        logger.error(f"Error fetching user status for {user_uid}: {str(e)}")
         raise ServiceError(
             operation=f"fetching user status",
             error=e,
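Note the chain this hunk completes: FirestoreTimeoutError subclasses TimeoutError, so a strict-timeout failure from get_with_strict_timeout lands in the new `except TimeoutError` branch and is re-raised as a ServiceError carrying the timeout in additional_info. A hedged caller-side sketch:

try:
    status_data, cache_used = await get_userstatus(user_uid, db, timeout=5.0)
except ServiceError as exc:
    # Timeouts and other infrastructure failures surface here;
    # additional_info includes "timeout_seconds" when a timeout caused it
    logger.warning("userstatus lookup failed: %s", exc)
    raise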
@@ -156,15 +222,27 @@ async def extract_request_fields(request: Request) -> Optional[List[str]]:
         logger.warning(f"Could not extract fields from request body: {str(e)}")
         return None  # Return None instead of raising an error

+# Main authorization function with configurable timeout
 async def authorizeAPIRequest(
     request: Request,
-    db: firestore.Client,  # Changed: Now expects actual client instance
+    db: firestore.Client,
     request_resource_fields: Optional[Iterable[str]] = None,
+    firestore_timeout: float = 15.0  # Allow specifying timeout
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
     Enhanced with credit check information.
+
+    Args:
+        request: The incoming request
+        db: Firestore client
+        request_resource_fields: Fields being accessed/modified in the request
+        firestore_timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Authorization result containing decision details
     """
+    opa_decision = None
     try:
         # Extract fields for both PATCH and POST if not provided
         if not request_resource_fields:
@@ -180,7 +258,12 @@

         # Determine if we need fresh status
         force_fresh = _should_force_fresh_status(request)
-        userstatus, cache_used = await get_userstatus(user_uid, db, force_fresh=force_fresh)
+        userstatus, cache_used = await get_userstatus(
+            user_uid,
+            db,
+            force_fresh=force_fresh,
+            timeout=firestore_timeout  # Pass the specified timeout
+        )

         # Prepare authorization input that matches OPA expectations
         # Extract required values from user status
@@ -214,7 +297,13 @@
         # Query OPA
         opa_url = f"{os.getenv('OPA_SERVER_URL', 'http://localhost:8181')}{os.getenv('OPA_DECISION_PATH', '/v1/data/http/authz/ingress/decision')}"
         logger.debug(f"Attempting to connect to OPA at: {opa_url}")
-        logger.debug(f"Authorization input: {authz_input}")
+
+        # Debug: Print raw JSON payload to identify any potential issues
+        try:
+            payload_json = json.dumps({"input": json_safe_authz_input})
+            logger.debug(f"OPA Request JSON payload: {payload_json}")
+        except Exception as json_err:
+            logger.error(f"Error serializing OPA request payload: {json_err}")

         async with httpx.AsyncClient() as client:
             try:
@@ -236,20 +325,35 @@
                 result = response.json()
                 logger.debug(f"Parsed OPA response: {result}")

-                if not result.get("result", {}).get("allow", False):
+                # Handle unusual OPA response formats
+                # Try to find "decision" field as an alternative
+                if "result" in result:
+                    opa_decision = result["result"]
+                else:
+                    # If we still don't have a result after all attempts, use default structure
+                    logger.warning(f"OPA response missing 'result' field, using default")
+                    raise HTTPException(
+                        status_code=500,
+                        detail="Authorization service error: OPA response format unexpected"
+                    )
+
+                # Extract key fields from result with better default handling
+                allow = opa_decision.get("allow", False)
+
+                # Handle authorization denial
+                if not allow:
                     logger.error(f"Authorization denied: {result}")
                     raise AuthorizationError(
                         action=f"{request.method} {request.url.path}",
                         additional_info={
                             "user_uid": user_uid,
                             "resource_fields": request_resource_fields,
-                            "opa_decision": result.get("result", {})
+                            "opa_decision": opa_decision,  # Include the full OPA decision result
+                            # Include the raw result if it's different from the processed decision
+                            "raw_opa_response": result if result != {"result": opa_decision} else None
                         }
                     )

-                # Extract credit check information from the OPA response
-                credit_check = result.get("result", {}).get("credit_check", {})
-
             except httpx.RequestError as e:
                 logger.error(f"Failed to connect to OPA: {str(e)}")
                 raise ServiceError(
@@ -267,9 +371,7 @@
             "used_cached_status": cache_used,
             "required_fresh_status": force_fresh,
             "status_retrieved_at": datetime.now(timezone.utc).isoformat(),
-            "credit_check": credit_check,
-            "allow_all_fields": result.get("result", {}).get("allow_all_fields", False),
-            "allowed_fields": result.get("result", {}).get("allowed_fields", [])
+            "opa_decision": opa_decision
         }

     except (AuthorizationError, ResourceNotFoundError):
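A hedged sketch of how a route might consume the revised authorizeAPIRequest (the endpoint, response shape, and get_firestore_client dependency are hypothetical; the opa_decision key matches the return value above):

from fastapi import APIRouter, Depends, Request
from google.cloud import firestore

router = APIRouter()

@router.get("/predictions")  # hypothetical endpoint
async def list_predictions(request: Request, db: firestore.Client = Depends(get_firestore_client)):
    authz = await authorizeAPIRequest(request, db, firestore_timeout=10.0)
    # Field-level results now live under authz["opa_decision"]
    # (e.g. "allow_all_fields", "allowed_fields") instead of top-level keys
    allowed_fields = authz["opa_decision"].get("allowed_fields", [])
    return {"allowed_fields": allowed_fields}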
src/ipulse_shared_core_ftredge/models/base_data_model.py (new file)
@@ -0,0 +1,55 @@
+from datetime import datetime, timezone
+from typing import Any
+from typing import ClassVar
+from pydantic import BaseModel, Field, ConfigDict, field_validator
+import dateutil.parser
+
+class BaseDataModel(BaseModel):
+    """Base model with common fields and configuration"""
+    model_config = ConfigDict(frozen=True, extra="forbid")
+
+    # Required class variables that must be defined in subclasses
+    VERSION: ClassVar[float]
+    DOMAIN: ClassVar[str]
+    OBJ_REF: ClassVar[str]
+
+    # Schema versioning
+    schema_version: float = Field(
+        ...,  # Make this required
+        description="Version of this Class == version of DB Schema",
+        frozen=True
+    )
+
+    # Audit fields
+    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    created_by: str = Field(..., frozen=True)
+    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    updated_by: str = Field(...)
+
+    @classmethod
+    def get_collection_name(cls) -> str:
+        """Generate standard collection name"""
+        return f"{cls.DOMAIN}_{cls.OBJ_REF}s"
+
+    @field_validator('created_at', 'updated_at', mode='before')
+    @classmethod
+    def parse_datetime(cls, v: Any) -> datetime:
+        if isinstance(v, datetime):  # If Firestore already gave a datetime object
+            return v  # Just use it, no parsing needed
+        if isinstance(v, str):  # If it's a string (e.g. from an API request, not Firestore direct)
+            try:
+                return dateutil.parser.isoparse(v)
+            except (TypeError, ValueError) as e:
+                raise ValueError(f"Invalid datetime string format: {v} - {e}")
+        # Firestore might send google.api_core.datetime_helpers.DatetimeWithNanoseconds
+        # which is a subclass of datetime.datetime, so isinstance(v, datetime) should catch it.
+        # If for some reason it's a different type not caught by isinstance(v, datetime)
+        # but has isoformat(), perhaps try that, but it's unlikely with current Firestore client.
+        # For example, if v is some custom timestamp object from an older library:
+        if hasattr(v, 'isoformat'):  # Fallback for unknown datetime-like objects
+            try:
+                return dateutil.parser.isoparse(v.isoformat())
+            except Exception as e:
+                raise ValueError(f"Could not parse datetime-like object: {v} - {e}")
+
+        raise ValueError(f"Unsupported type for datetime parsing: {type(v)} value: {v}")
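A minimal subclass sketch showing how the required ClassVars feed get_collection_name (the Note model and all its values are hypothetical):

from typing import ClassVar

class Note(BaseDataModel):  # hypothetical subclass
    VERSION: ClassVar[float] = 1.0
    DOMAIN: ClassVar[str] = "papp_core_demo"
    OBJ_REF: ClassVar[str] = "note"

    text: str

print(Note.get_collection_name())  # -> "papp_core_demo_notes"

note = Note(schema_version=Note.VERSION, created_by="svc-demo", updated_by="svc-demo", text="hello")

Because model_config sets frozen=True, instances are immutable after creation; updates go through model_copy(update=...) or a fresh construction.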
src/ipulse_shared_core_ftredge/models/user_status.py
@@ -51,6 +51,10 @@ class UserStatus(BaseDataModel):
     DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Subject.USER.name))
     OBJ_REF: ClassVar[str] = "userstatus"

+    # Centralized collection name and document ID prefix
+    COLLECTION_NAME: ClassVar[str] = "papp_core_user_userstatuss"
+
+
     # System-managed fields
     schema_version: float = Field(
         default=VERSION,
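The double "s" in "papp_core_user_userstatuss" is consistent with the base class rather than a typo in this diff: get_collection_name() appends "s" to OBJ_REF, so assuming DOMAIN resolves to "papp_core_user", the hardcoded constant reproduces the generated name:

# f"{DOMAIN}_{OBJ_REF}s" with DOMAIN = "papp_core_user", OBJ_REF = "userstatus"
assert "papp_core_user" + "_" + "userstatus" + "s" == "papp_core_user_userstatuss"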
src/ipulse_shared_core_ftredge/services/__init__.py (new file)
@@ -0,0 +1,14 @@
+"""Service utilities for shared core."""
+# Import existing components
+from ipulse_shared_core_ftredge.services.base_service_exceptions import (
+    BaseServiceException, ServiceError, ValidationError, ResourceNotFoundError, AuthorizationError
+)
+from ipulse_shared_core_ftredge.services.servicemon import Servicemon
+from ipulse_shared_core_ftredge.services.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.services.cache_aware_firestore_service import CacheAwareFirestoreService
+
+__all__ = [
+    'AuthorizationError', 'BaseServiceException', 'ServiceError', 'ValidationError',
+    'ResourceNotFoundError', 'BaseFirestoreService',
+    'CacheAwareFirestoreService'
+]
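With this module in place, ServiceError is importable from either level, matching the root __init__.py change earlier in the diff:

from ipulse_shared_core_ftredge import ServiceError             # re-exported at the package root
from ipulse_shared_core_ftredge.services import ServiceError    # directly from the services subpackage

Note that Servicemon is imported here but not listed in __all__, so star-imports will not pick it up.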