ipulse-shared-core-ftredge 11.1.1.tar.gz → 12.0.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (36)
  1. {ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-12.0.1}/PKG-INFO +2 -2
  2. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/setup.py +2 -2
  3. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/__init__.py +1 -1
  4. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +123 -21
  5. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +55 -0
  6. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +4 -0
  7. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +95 -17
  8. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/services/credit_service.py +270 -0
  9. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +2 -2
  10. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +1 -0
  11. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +1 -1
  12. ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -41
  13. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/LICENCE +0 -0
  14. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/README.md +0 -0
  15. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/pyproject.toml +0 -0
  16. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/setup.cfg +0 -0
  17. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  18. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  19. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  20. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  21. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  22. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +0 -0
  23. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
  24. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
  25. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  26. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  27. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  28. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/__init__.py +0 -0
  29. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
  30. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
  31. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
  32. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
  33. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  34. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
  35. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  36. {ipulse_shared_core_ftredge-11.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0

PKG-INFO (+2 -2)

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 11.1.1
+Version: 12.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -13,7 +13,7 @@ Requires-Dist: pydantic[email]~=2.5
 Requires-Dist: python-dateutil~=2.8
 Requires-Dist: fastapi~=0.115.8
 Requires-Dist: pytest
-Requires-Dist: ipulse_shared_base_ftredge>=6.4.1
+Requires-Dist: ipulse_shared_base_ftredge==6.5.1
 Dynamic: author
 Dynamic: classifier
 Dynamic: home-page

setup.py (+2 -2)

@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='ipulse_shared_core_ftredge',
-    version='11.1.1',
+    version='12.0.1',
     package_dir={'': 'src'}, # Specify the source directory
     packages=find_packages(where='src'), # Look for packages in 'src'
     install_requires=[
@@ -12,7 +12,7 @@ setup(
         'python-dateutil~=2.8',
         'fastapi~=0.115.8',
         'pytest',
-        'ipulse_shared_base_ftredge>=6.4.1',
+        'ipulse_shared_base_ftredge==6.5.1',
     ],
     author='Russlan Ramdowar',
     description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',

src/ipulse_shared_core_ftredge/__init__.py (+1 -1)

@@ -7,6 +7,6 @@ from .models import ( UserAuth, UserProfile,Subscription,
 
 
 from .services import (BaseFirestoreService,BaseServiceException, ResourceNotFoundError, AuthorizationError,
-                        ValidationError)
+                        ValidationError, ServiceError)
 
 from .utils import (EnsureJSONEncoderCompatibility)
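
ServiceError is now re-exported at the package root alongside the other service exceptions. A minimal sketch of what that enables for consumers (the safe_get helper is illustrative, not part of the package):

from ipulse_shared_core_ftredge import BaseFirestoreService, ResourceNotFoundError, ServiceError

async def safe_get(service: BaseFirestoreService, doc_id: str):
    # Illustrative helper: both exceptions can now be imported from the top-level package.
    try:
        return await service.get_document(doc_id)
    except ResourceNotFoundError:
        return None
    except ServiceError:
        # e.g. log and translate into an HTTP 5xx in the calling API
        raise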

src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py (+123 -21)

@@ -1,5 +1,8 @@
 import os
 import logging
+import json
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
 from typing import Optional, Iterable, Dict, Any, List
 from datetime import datetime, timedelta, timezone
 import json
@@ -10,10 +13,10 @@ from ipulse_shared_core_ftredge.services import ServiceError, AuthorizationError
 from ipulse_shared_core_ftredge.models import UserStatus
 from ipulse_shared_core_ftredge.utils.json_encoder import convert_to_json_serializable
 
-# Constants
-USERS_STATUS_COLLECTION_NAME = UserStatus.get_collection_name()
-USERS_STATUS_DOC_REF = "userstatus_"
-CACHE_TTL = 60 # 60 seconds
+# Constants derived from UserStatus model
+USERS_STATUS_COLLECTION_NAME = UserStatus.COLLECTION_NAME
+USERS_STATUS_DOC_REF = f"{UserStatus.OBJ_REF}_" # Use OBJ_REF and append underscore
+USERSTATUS_CACHE_TTL = 60 # 60 seconds
 
 class UserStatusCache:
     """Manages user status caching with dynamic invalidation"""
@@ -33,7 +36,7 @@ class UserStatusCache:
         status_data = self._cache[user_uid]
         # Force refresh for credit-consuming or sensitive operations
         # Check TTL for normal operations
-        if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=CACHE_TTL):
+        if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=USERSTATUS_CACHE_TTL):
             return status_data
         self.invalidate(user_uid)
         return None
@@ -65,13 +68,59 @@ userstatus_cache = UserStatusCache()
 # Replace the logger dependency with a standard logger
 logger = logging.getLogger(__name__)
 
+# Create a custom FirestoreTimeoutError class that can be identified in middlewares
+class FirestoreTimeoutError(TimeoutError):
+    """Custom exception for Firestore timeout errors to make them more identifiable."""
+    pass
+
+
+# Define a function to get a Firestore document with a strict timeout
+async def get_with_strict_timeout(doc_ref, timeout_seconds: float):
+    """
+    Get a Firestore document with a strictly enforced timeout.
+
+    Args:
+        doc_ref: Firestore document reference
+        timeout_seconds: Maximum time to wait in seconds
+
+    Returns:
+        Document snapshot
+
+    Raises:
+        FirestoreTimeoutError: If the operation takes longer than timeout_seconds
+    """
+    loop = asyncio.get_running_loop()
+    with ThreadPoolExecutor() as executor:
+        try:
+            # Run the blocking Firestore get() operation in a thread and apply a strict timeout
+            logger.debug(f"Starting Firestore get with strict timeout of {timeout_seconds}s")
+            return await asyncio.wait_for(
+                loop.run_in_executor(executor, doc_ref.get),
+                timeout=timeout_seconds
+            )
+        except asyncio.TimeoutError:
+            error_message = f"User Status fetching for Authz timed out after {timeout_seconds} seconds, perhaps issue with Firestore Connectivity"
+            logger.error(error_message)
+            raise FirestoreTimeoutError(error_message)
+
+# Update get_userstatus to use our new strict timeout function
 async def get_userstatus(
     user_uid: str,
-    db: firestore.Client, # Note: This expects the actual client, not a Depends
-    force_fresh: bool = False
+    db: firestore.Client,
+    force_fresh: bool = False,
+    timeout: float = 12.0 # Default timeout but allow override
 ) -> tuple[Dict[str, Any], bool]:
     """
-    Fetch user status with intelligent caching
+    Fetch user status with intelligent caching and configurable timeout
+
+    Args:
+        user_uid: User ID to fetch status for
+        db: Firestore client
+        force_fresh: Whether to bypass cache
+        timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Tuple of (user status data, whether cache was used)
     """
     cache_used = False
     if not force_fresh:
@@ -85,8 +134,11 @@ async def get_userstatus(
         userstatus_id = USERS_STATUS_DOC_REF + user_uid
         user_ref = db.collection(USERS_STATUS_COLLECTION_NAME).document(userstatus_id)
 
-        # Get the document
-        snapshot = user_ref.get()
+        logger.debug(f"Fetching user status for {user_uid} with strict timeout {timeout}s")
+
+        # Use our strict timeout wrapper instead of the native timeout parameter
+        snapshot = await get_with_strict_timeout(user_ref, timeout)
+
         if not snapshot.exists:
             raise ResourceNotFoundError(
                 resource_type="authorization userstatus",
@@ -101,9 +153,23 @@ async def get_userstatus(
         userstatus_cache.set(user_uid, status_data)
         return status_data, cache_used
 
+    except TimeoutError as e:
+        logger.error(f"Timeout while fetching user status for {user_uid}: {str(e)}")
+        raise ServiceError(
+            operation="fetching user status for authz",
+            error=e,
+            resource_type="userstatus",
+            resource_id=user_uid,
+            additional_info={
+                "force_fresh": force_fresh,
+                "collection": USERS_STATUS_COLLECTION_NAME,
+                "timeout_seconds": timeout
+            }
+        )
     except ResourceNotFoundError:
        raise
    except Exception as e:
+        logger.error(f"Error fetching user status for {user_uid}: {str(e)}")
        raise ServiceError(
            operation=f"fetching user status",
            error=e,
@@ -156,15 +222,27 @@ async def extract_request_fields(request: Request) -> Optional[List[str]]:
         logger.warning(f"Could not extract fields from request body: {str(e)}")
         return None # Return None instead of raising an error
 
+# Main authorization function with configurable timeout
 async def authorizeAPIRequest(
     request: Request,
-    db: firestore.Client, # Changed: Now expects actual client instance
+    db: firestore.Client,
     request_resource_fields: Optional[Iterable[str]] = None,
+    firestore_timeout: float = 15.0 # Allow specifying timeout
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
     Enhanced with credit check information.
+
+    Args:
+        request: The incoming request
+        db: Firestore client
+        request_resource_fields: Fields being accessed/modified in the request
+        firestore_timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Authorization result containing decision details
     """
+    opa_decision = None
     try:
         # Extract fields for both PATCH and POST if not provided
         if not request_resource_fields:
@@ -180,7 +258,12 @@ async def authorizeAPIRequest(
 
         # Determine if we need fresh status
         force_fresh = _should_force_fresh_status(request)
-        userstatus, cache_used = await get_userstatus(user_uid, db, force_fresh=force_fresh)
+        userstatus, cache_used = await get_userstatus(
+            user_uid,
+            db,
+            force_fresh=force_fresh,
+            timeout=firestore_timeout # Pass the specified timeout
+        )
 
         # Prepare authorization input that matches OPA expectations
         # Extract required values from user status
@@ -214,7 +297,13 @@ async def authorizeAPIRequest(
         # Query OPA
         opa_url = f"{os.getenv('OPA_SERVER_URL', 'http://localhost:8181')}{os.getenv('OPA_DECISION_PATH', '/v1/data/http/authz/ingress/decision')}"
         logger.debug(f"Attempting to connect to OPA at: {opa_url}")
-        logger.debug(f"Authorization input: {authz_input}")
+
+        # Debug: Print raw JSON payload to identify any potential issues
+        try:
+            payload_json = json.dumps({"input": json_safe_authz_input})
+            logger.debug(f"OPA Request JSON payload: {payload_json}")
+        except Exception as json_err:
+            logger.error(f"Error serializing OPA request payload: {json_err}")
 
         async with httpx.AsyncClient() as client:
             try:
@@ -236,20 +325,35 @@ async def authorizeAPIRequest(
                 result = response.json()
                 logger.debug(f"Parsed OPA response: {result}")
 
-                if not result.get("result", {}).get("allow", False):
+                # Handle unusual OPA response formats
+                # Try to find "decision" field as an alternative
+                if "result" in result:
+                    opa_decision = result["result"]
+                else:
+                    # If we still don't have a result after all attempts, use default structure
+                    logger.warning(f"OPA response missing 'result' field, using default")
+                    raise HTTPException(
+                        status_code=500,
+                        detail="Authorization service error: OPA response format unexpected"
+                    )
+
+                # Extract key fields from result with better default handling
+                allow = opa_decision.get("allow", False)
+
+                # Handle authorization denial
+                if not allow:
                     logger.error(f"Authorization denied: {result}")
                     raise AuthorizationError(
                         action=f"{request.method} {request.url.path}",
                         additional_info={
                             "user_uid": user_uid,
                             "resource_fields": request_resource_fields,
-                            "opa_decision": result.get("result", {})
+                            "opa_decision": opa_decision, # Include the full OPA decision result
+                            # Include the raw result if it's different from the processed decision
+                            "raw_opa_response": result if result != {"result": opa_decision} else None
                         }
                     )
 
-                # Extract credit check information from the OPA response
-                credit_check = result.get("result", {}).get("credit_check", {})
-
             except httpx.RequestError as e:
                 logger.error(f"Failed to connect to OPA: {str(e)}")
                 raise ServiceError(
@@ -267,9 +371,7 @@ async def authorizeAPIRequest(
             "used_cached_status": cache_used,
             "required_fresh_status": force_fresh,
             "status_retrieved_at": datetime.now(timezone.utc).isoformat(),
-            "credit_check": credit_check,
-            "allow_all_fields": result.get("result", {}).get("allow_all_fields", False),
-            "allowed_fields": result.get("result", {}).get("allowed_fields", [])
+            "opa_decision": opa_decision
         }
 
     except (AuthorizationError, ResourceNotFoundError):
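
Taken together, the authz changes let a consuming API pass a per-request Firestore timeout and receive the full OPA decision under the "opa_decision" key instead of pre-extracted credit_check/allowed_fields values. A minimal usage sketch under those assumptions (the route, the get_db dependency, and the response fields are illustrative, not part of the package):

from fastapi import APIRouter, Depends, Request
from google.cloud import firestore
from ipulse_shared_core_ftredge.dependencies.authz_for_apis import authorizeAPIRequest

router = APIRouter()

def get_db() -> firestore.Client:
    # Hypothetical dependency; real applications supply their own client wiring.
    return firestore.Client()

@router.get("/insights/{insight_id}")
async def read_insight(insight_id: str, request: Request, db: firestore.Client = Depends(get_db)):
    # Pass the actual client instance (not a Depends object) and tighten the Firestore timeout.
    authz = await authorizeAPIRequest(request=request, db=db, firestore_timeout=8.0)
    opa_decision = authz["opa_decision"]  # full OPA decision as returned in 12.0.1
    return {
        "insight_id": insight_id,
        "allowed_fields": opa_decision.get("allowed_fields", []),
        "credit_check": opa_decision.get("credit_check", {}),
    }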

ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/models/base_data_model.py (new file, +55)

@@ -0,0 +1,55 @@
+from datetime import datetime, timezone
+from typing import Any
+from typing import ClassVar
+from pydantic import BaseModel, Field, ConfigDict, field_validator
+import dateutil.parser
+
+class BaseDataModel(BaseModel):
+    """Base model with common fields and configuration"""
+    model_config = ConfigDict(frozen=True, extra="forbid")
+
+    # Required class variables that must be defined in subclasses
+    VERSION: ClassVar[float]
+    DOMAIN: ClassVar[str]
+    OBJ_REF: ClassVar[str]
+
+    # Schema versioning
+    schema_version: float = Field(
+        ..., # Make this required
+        description="Version of this Class == version of DB Schema",
+        frozen=True
+    )
+
+    # Audit fields
+    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    created_by: str = Field(..., frozen=True)
+    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    updated_by: str = Field(...)
+
+    @classmethod
+    def get_collection_name(cls) -> str:
+        """Generate standard collection name"""
+        return f"{cls.DOMAIN}_{cls.OBJ_REF}s"
+
+    @field_validator('created_at', 'updated_at', mode='before')
+    @classmethod
+    def parse_datetime(cls, v: Any) -> datetime:
+        if isinstance(v, datetime): # If Firestore already gave a datetime object
+            return v # Just use it, no parsing needed
+        if isinstance(v, str): # If it's a string (e.g. from an API request, not Firestore direct)
+            try:
+                return dateutil.parser.isoparse(v)
+            except (TypeError, ValueError) as e:
+                raise ValueError(f"Invalid datetime string format: {v} - {e}")
+        # Firestore might send google.api_core.datetime_helpers.DatetimeWithNanoseconds
+        # which is a subclass of datetime.datetime, so isinstance(v, datetime) should catch it.
+        # If for some reason it's a different type not caught by isinstance(v, datetime)
+        # but has isoformat(), perhaps try that, but it's unlikely with current Firestore client.
+        # For example, if v is some custom timestamp object from an older library:
+        if hasattr(v, 'isoformat'): # Fallback for unknown datetime-like objects
+            try:
+                return dateutil.parser.isoparse(v.isoformat())
+            except Exception as e:
+                raise ValueError(f"Could not parse datetime-like object: {v} - {e}")
+
+        raise ValueError(f"Unsupported type for datetime parsing: {type(v)} value: {v}")
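
The new BaseDataModel keeps the same fields and collection-name convention as before; the validator now explicitly handles datetime objects, ISO strings, and other datetime-like values (such as Firestore's DatetimeWithNanoseconds). A minimal sketch of a concrete subclass (the Note model and its DOMAIN value are invented for illustration):

from typing import ClassVar
from pydantic import Field
from ipulse_shared_core_ftredge.models.base_data_model import BaseDataModel

class Note(BaseDataModel):
    # Class variables required by BaseDataModel
    VERSION: ClassVar[float] = 1.0
    DOMAIN: ClassVar[str] = "papp_core_demo"  # hypothetical domain string
    OBJ_REF: ClassVar[str] = "note"

    schema_version: float = Field(default=VERSION, frozen=True)
    title: str

# DOMAIN and OBJ_REF drive the standard collection name ("<DOMAIN>_<OBJ_REF>s")
assert Note.get_collection_name() == "papp_core_demo_notes"

# created_at/updated_at accept datetime objects, ISO-8601 strings, or Firestore timestamps
note = Note(created_by="svc_demo", updated_by="svc_demo", title="hello")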

src/ipulse_shared_core_ftredge/models/user_status.py (+4 -0)

@@ -51,6 +51,10 @@ class UserStatus(BaseDataModel):
     DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Subject.USER.name))
     OBJ_REF: ClassVar[str] = "userstatus"
 
+    # Centralized collection name and document ID prefix
+    COLLECTION_NAME: ClassVar[str] = "papp_core_user_userstatuss"
+
+
     # System-managed fields
     schema_version: float = Field(
         default=VERSION,
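
With COLLECTION_NAME exposed as a class constant (the double "s" in "userstatuss" comes from appending "s" to OBJ_REF, as get_collection_name() does), downstream code in this release reads the constants directly instead of calling get_collection_name(). A small sketch mirroring how authz_for_apis.py and credit_service.py build their references:

from ipulse_shared_core_ftredge.models import UserStatus

USERS_STATUS_COLLECTION_NAME = UserStatus.COLLECTION_NAME  # "papp_core_user_userstatuss"
USERS_STATUS_DOC_PREFIX = f"{UserStatus.OBJ_REF}_"         # "userstatus_"

def userstatus_doc_id(user_uid: str) -> str:
    # Document IDs are the prefix plus the user's UID
    return f"{USERS_STATUS_DOC_PREFIX}{user_uid}"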

src/ipulse_shared_core_ftredge/services/base_firestore_service.py (+95 -17)

@@ -1,6 +1,7 @@
 """ Base class for Firestore services with common CRUD operations """
-from typing import Dict, Any, List, TypeVar, Generic
+from typing import Dict, Any, List, TypeVar, Generic, Optional
 import logging
+import time
 from datetime import datetime, timezone
 from pydantic import BaseModel
 from google.cloud import firestore
@@ -11,11 +12,20 @@ T = TypeVar('T', bound=BaseModel)
 class BaseFirestoreService(Generic[T]):
     """Base class for Firestore services with common CRUD operations"""
 
-    def __init__(self, db: firestore.Client, collection_name: str, resource_type: str, logger: logging.Logger):
+    def __init__(
+        self,
+        db: firestore.Client,
+        collection_name: str,
+        resource_type: str,
+        logger: logging.Logger,
+        timeout: float = 15.0 # Default to 15 seconds, but allow override
+    ):
         self.db = db
         self.collection_name = collection_name
         self.resource_type = resource_type
         self.logger = logger
+        self.timeout = timeout # Store the timeout as an instance attribute
+        self.logger.info(f"Initialized {self.resource_type} service with timeout={timeout}s")
 
     async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
         """Standard create method with audit fields"""
@@ -32,7 +42,8 @@ class BaseFirestoreService(Generic[T]):
         })
 
         doc_ref = self.db.collection(self.collection_name).document(doc_id)
-        doc_ref.set(doc_data)
+        # Apply timeout to the set operation
+        doc_ref.set(doc_data, timeout=self.timeout)
 
         self.logger.info(f"Created {self.resource_type}: {doc_id}")
         return doc_data
@@ -66,7 +77,8 @@ class BaseFirestoreService(Generic[T]):
             batch.set(doc_ref, doc_data)
             created_docs.append(doc_data)
 
-        batch.commit()
+        # Apply timeout to the commit operation
+        batch.commit(timeout=self.timeout)
         self.logger.info(f"Created {len(documents)} {self.resource_type}s in batch")
         return created_docs
 
@@ -81,24 +93,48 @@ class BaseFirestoreService(Generic[T]):
 
     async def get_document(self, doc_id: str) -> Dict[str, Any]:
         """Get a document by ID with standardized error handling"""
-        doc_ref = self.db.collection(self.collection_name).document(doc_id)
-        doc = doc_ref.get()
+        self.logger.debug(f"Getting {self.resource_type} document: {doc_id} with timeout={self.timeout}s")
+        start_time = time.time()
 
-        if not doc.exists:
-            raise ResourceNotFoundError(
+        try:
+            doc_ref = self.db.collection(self.collection_name).document(doc_id)
+
+            # Apply timeout to the get operation
+            doc = doc_ref.get(timeout=self.timeout)
+
+            elapsed = (time.time() - start_time) * 1000
+            self.logger.debug(f"Firestore get for {doc_id} completed in {elapsed:.2f}ms")
+
+            if not doc.exists:
+                self.logger.warning(f"Document {doc_id} not found in {self.collection_name}")
+                raise ResourceNotFoundError(
+                    resource_type=self.resource_type,
+                    resource_id=doc_id,
+                    additional_info={"collection": self.collection_name}
+                )
+
+            return doc.to_dict()
+
+        except ResourceNotFoundError:
+            raise
+        except Exception as e:
+            elapsed = (time.time() - start_time) * 1000
+            self.logger.error(f"Error getting document {doc_id} after {elapsed:.2f}ms: {str(e)}", exc_info=True)
+            raise ServiceError(
+                operation=f"retrieving {self.resource_type}",
+                error=e,
                 resource_type=self.resource_type,
                 resource_id=doc_id,
-                additional_info={"collection": self.collection_name}
-            )
-
-        return doc.to_dict()
+                additional_info={"collection": self.collection_name, "timeout": self.timeout}
+            ) from e
 
     async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
         """Standard update method with validation and audit fields"""
         try:
             doc_ref = self.db.collection(self.collection_name).document(doc_id)
 
-            if not doc_ref.get().exists:
+            # Apply timeout to the get operation
+            if not doc_ref.get(timeout=self.timeout).exists:
                 raise ResourceNotFoundError(
                     resource_type=self.resource_type,
                     resource_id=doc_id,
@@ -113,8 +149,10 @@ class BaseFirestoreService(Generic[T]):
                 'updated_by': updater_uid
             })
 
-            doc_ref.update(valid_fields)
-            return doc_ref.get().to_dict()
+            # Apply timeout to the update operation
+            doc_ref.update(valid_fields, timeout=self.timeout)
+            # Apply timeout to the get operation
+            return doc_ref.get(timeout=self.timeout).to_dict()
 
         except (ResourceNotFoundError, ValidationError):
             raise
@@ -131,13 +169,15 @@ class BaseFirestoreService(Generic[T]):
         """Standard delete method"""
         try:
             doc_ref = self.db.collection(self.collection_name).document(doc_id)
-            if not doc_ref.get().exists:
+            # Apply timeout to the get operation
+            if not doc_ref.get(timeout=self.timeout).exists:
                 raise ResourceNotFoundError(
                     resource_type=self.resource_type,
                     resource_id=doc_id
                 )
 
-            doc_ref.delete()
+            # Apply timeout to the delete operation
+            doc_ref.delete(timeout=self.timeout)
             self.logger.info(f"Deleted {self.resource_type}: {doc_id}")
 
         except ResourceNotFoundError:
@@ -151,6 +191,44 @@ class BaseFirestoreService(Generic[T]):
                 resource_id=doc_id
             ) from e
 
+    # Add query method with timeout
+    async def query_documents(
+        self,
+        filters: Optional[List[tuple]] = None,
+        limit: Optional[int] = None,
+        order_by: Optional[tuple] = None
+    ) -> List[Dict[str, Any]]:
+        """Query documents with filters, limit, and ordering"""
+        try:
+            # Start with the collection reference
+            query = self.db.collection(self.collection_name)
+
+            # Apply filters if provided
+            if filters:
+                for field, op, value in filters:
+                    query = query.where(field=field, op_string=op, value=value)
+
+            # Apply ordering if provided
+            if order_by:
+                field, direction = order_by
+                query = query.order_by(field, direction=direction)
+
+            # Apply limit if provided
+            if limit:
+                query = query.limit(limit)
+
+            # Execute query with timeout
+            docs = query.stream(timeout=self.timeout)
+            return [doc.to_dict() for doc in docs]
+
+        except Exception as e:
+            self.logger.error(f"Error querying {self.resource_type}: {e}", exc_info=True)
+            raise ServiceError(
+                operation=f"querying {self.resource_type}",
+                error=e,
+                resource_type=self.resource_type
+            ) from e
+
     def _validate_update_fields(self, update_data: Dict[str, Any]) -> Dict[str, Any]:
         """Centralized update fields validation"""
         if not isinstance(update_data, dict):
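
A minimal sketch of how a consuming service might pick up the new per-instance timeout and the query_documents helper (the Insight model, collection name, and filter values are hypothetical):

import logging
from google.cloud import firestore
from pydantic import BaseModel
from ipulse_shared_core_ftredge.services import BaseFirestoreService

class Insight(BaseModel):
    # Hypothetical document model
    title: str
    score: float

class InsightService(BaseFirestoreService[Insight]):
    def __init__(self, db: firestore.Client):
        super().__init__(
            db=db,
            collection_name="papp_core_insights",  # hypothetical collection
            resource_type="insight",
            logger=logging.getLogger(__name__),
            timeout=10.0,  # overrides the 15s default for every Firestore call in this service
        )

async def top_insights(service: InsightService):
    # filters are (field, op, value) tuples; order_by is a (field, direction) tuple
    return await service.query_documents(
        filters=[("score", ">=", 0.8)],
        order_by=("score", firestore.Query.DESCENDING),
        limit=5,
    )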

ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/services/credit_service.py (new file, +270)

@@ -0,0 +1,270 @@
+"""Service for managing credit operations in a generic way."""
+import logging
+from typing import Dict, Any, Optional, Tuple
+from datetime import datetime, timezone
+from google.cloud import firestore
+from ipulse_shared_core_ftredge.services import ServiceError, ResourceNotFoundError, ValidationError
+from ipulse_shared_core_ftredge.models.user_status import UserStatus
+
+# Default Firestore timeout if not provided by the consuming application
+DEFAULT_FIRESTORE_TIMEOUT = 15.0
+
+class CreditService:
+    """
+    Service class for credit operations.
+    Designed to be project-agnostic and directly uses UserStatus model constants.
+    """
+
+    def __init__(
+        self,
+        db: firestore.Client,
+        logger: Optional[logging.Logger] = None,
+        firestore_timeout: float = DEFAULT_FIRESTORE_TIMEOUT
+    ):
+        """
+        Initialize the credit service.
+
+        Args:
+            db: Firestore client.
+            logger: Optional logger instance. Defaults to a new logger for this module.
+            firestore_timeout: Timeout for Firestore operations in seconds.
+        """
+        self.db = db
+        # Use UserStatus constants directly
+        self.users_status_collection_name = UserStatus.COLLECTION_NAME
+        self.user_status_doc_prefix = f"{UserStatus.OBJ_REF}_" # Append underscore to OBJ_REF
+        self.logger = logger or logging.getLogger(__name__)
+        self.timeout = firestore_timeout
+
+        self.logger.info(
+            f"CreditService initialized using UserStatus constants. Collection: {self.users_status_collection_name}, "
+            f"Doc Prefix: {self.user_status_doc_prefix}, Timeout: {self.timeout}s"
+        )
+
+    async def verify_credits(
+        self,
+        user_uid: str,
+        required_credits_for_resource: float,
+        pre_fetched_user_credits: Optional[Dict[str, float]] = None
+    ) -> Tuple[bool, Dict[str, Any]]:
+        """
+        Verify if a user has enough credits for an operation.
+
+        Args:
+            user_uid: The user's UID.
+            required_credits_for_resource: The number of credits required for the operation.
+            pre_fetched_user_credits: Optional dict with pre-fetched credit info.
+                (keys: 'sbscrptn_based_insight_credits', 'extra_insight_credits')
+
+        Returns:
+            Tuple of (has_enough_credits, user_status_data) where user_status_data
+            will be a dict with keys 'sbscrptn_based_insight_credits' and 'extra_insight_credits'.
+
+        Raises:
+            ValidationError: If required_credits_for_resource is None (pricing not properly configured).
+        """
+        self.logger.info(
+            f"verify_credits called for user {user_uid}, "
+            f"required_credits={required_credits_for_resource}, "
+            f"pre_fetched_credits={pre_fetched_user_credits}"
+        )
+
+        if required_credits_for_resource is None:
+            self.logger.error(f"Credit cost is None for user {user_uid}, pricing not properly configured")
+            raise ValidationError(
+                resource_type="credit_cost",
+                detail="Credit cost is not configured for this resource",
+                resource_id=None, # Resource ID might not be known here, or could be passed
+                additional_info={"user_uid": user_uid}
+            )
+
+        if required_credits_for_resource <= 0:
+            self.logger.info(f"No credits required for user {user_uid}, bypassing credit verification")
+            return True, {"sbscrptn_based_insight_credits": 0, "extra_insight_credits": 0}
+
+        if pre_fetched_user_credits is not None:
+            self.logger.info(f"Using pre-fetched credit info for user {user_uid}")
+            subscription_credits = pre_fetched_user_credits.get("sbscrptn_based_insight_credits", 0)
+            extra_credits = pre_fetched_user_credits.get("extra_insight_credits", 0)
+            total_credits = subscription_credits + extra_credits
+
+            self.logger.info(
+                f"User {user_uid} has {total_credits} total pre-fetched credits "
+                f"(subscription: {subscription_credits}, extra: {extra_credits})"
+            )
+
+            userstatus_data_to_return = {
+                "sbscrptn_based_insight_credits": subscription_credits,
+                "extra_insight_credits": extra_credits
+            }
+
+            has_enough_credits = total_credits >= required_credits_for_resource
+            return has_enough_credits, userstatus_data_to_return
+
+        try:
+            self.logger.info(
+                f"Fetching user status from Firestore for user {user_uid} (collection: {self.users_status_collection_name})"
+            )
+            full_userstatus_doc = await self._get_userstatus(user_uid)
+
+            subscription_credits = full_userstatus_doc.get("sbscrptn_based_insight_credits", 0)
+            extra_credits = full_userstatus_doc.get("extra_insight_credits", 0)
+            total_credits = subscription_credits + extra_credits
+
+            self.logger.info(
+                f"User {user_uid} has {total_credits} total credits from Firestore "
+                f"(subscription: {subscription_credits}, extra: {extra_credits})"
+            )
+
+            has_enough_credits = total_credits >= required_credits_for_resource
+
+            userstatus_data_to_return = {
+                "sbscrptn_based_insight_credits": subscription_credits,
+                "extra_insight_credits": extra_credits
+            }
+
+            return has_enough_credits, userstatus_data_to_return
+
+        except ResourceNotFoundError:
+            self.logger.warning(f"User status not found for {user_uid} in {self.users_status_collection_name}. Assuming no credits.")
+            return False, {"sbscrptn_based_insight_credits": 0, "extra_insight_credits": 0}
+        except Exception as e:
+            self.logger.error(f"Error verifying credits for user {user_uid}: {str(e)}")
+            raise ServiceError(
+                operation="verifying credits",
+                error=e,
+                resource_type="user_credits",
+                resource_id=user_uid,
+                additional_info={"required_credits_for_resource": required_credits_for_resource}
+            ) from e
+
+    async def charge_credits(self, user_uid: str, credits_to_charge: Optional[float], operation_details: str) -> bool:
+        """
+        Charge a user's credits for an operation.
+
+        Args:
+            user_uid: The user's UID.
+            credits_to_charge: The number of credits to charge.
+            operation_details: Details about the operation (for logging).
+
+        Returns:
+            Whether the charging was successful.
+
+        Raises:
+            ValidationError: If credits_to_charge is None (pricing not properly configured).
+        """
+        if credits_to_charge is None:
+            self.logger.error(f"Credit cost is None for user {user_uid} (charge_credits), pricing not properly configured")
+            raise ValidationError(
+                resource_type="credit_cost",
+                detail="Credit cost is not configured for this resource (charge_credits)",
+                resource_id=None,
+                additional_info={"user_uid": user_uid}
+            )
+
+        if credits_to_charge == 0:
+            self.logger.info(f"No credits to charge for user {user_uid}, operation: {operation_details}")
+            return True
+
+        try:
+            userstatus_id = f"{self.user_status_doc_prefix}{user_uid}"
+            user_ref = self.db.collection(self.users_status_collection_name).document(userstatus_id)
+
+            transaction = self.db.transaction()
+
+            @firestore.transactional
+            def update_credits_transaction(transaction_obj, current_user_ref):
+                user_doc = current_user_ref.get(transaction=transaction_obj)
+                if not user_doc.exists:
+                    self.logger.warning(
+                        f"Cannot charge credits - user status not found for {user_uid} in {self.users_status_collection_name}"
+                    )
+                    return False
+
+                userstatus = user_doc.to_dict()
+
+                subscription_credits = userstatus.get("sbscrptn_based_insight_credits", 0)
+                extra_credits = userstatus.get("extra_insight_credits", 0)
+                total_credits = subscription_credits + extra_credits
+
+                if total_credits < credits_to_charge:
+                    self.logger.warning(
+                        f"Insufficient credits for user {user_uid} during transaction: "
+                        f"has {total_credits}, needs {credits_to_charge}"
+                    )
+                    return False
+
+                subscription_credits_to_charge = min(subscription_credits, credits_to_charge)
+                extra_credits_to_charge = credits_to_charge - subscription_credits_to_charge
+
+                update_data = {
+                    "updated_at": datetime.now(timezone.utc).isoformat(),
+                    "updated_by": "credit_service" # Consider making this configurable or more generic
+                }
+
+                if subscription_credits_to_charge > 0:
+                    update_data["sbscrptn_based_insight_credits"] = firestore.Increment(-subscription_credits_to_charge)
+                    update_data["sbscrptn_based_insight_credits_updtd_on"] = datetime.now(timezone.utc).isoformat()
+
+                if extra_credits_to_charge > 0:
+                    update_data["extra_insight_credits"] = firestore.Increment(-extra_credits_to_charge)
+                    update_data["extra_insight_credits_updtd_on"] = datetime.now(timezone.utc).isoformat()
+
+                transaction_obj.update(current_user_ref, update_data)
+                return True
+
+            success = update_credits_transaction(transaction, user_ref)
+
+            if success:
+                self.logger.info(
+                    f"Successfully charged {credits_to_charge} credits for user {user_uid}. "
+                    f"Operation: {operation_details}"
+                )
+            else:
+                self.logger.warning(
+                    f"Failed to charge {credits_to_charge} credits for user {user_uid} (transaction outcome). "
+                    f"Operation: {operation_details}"
+                )
+
+            return success
+
+        except Exception as e:
+            self.logger.error(f"Error charging credits for user {user_uid}: {str(e)}")
+            raise ServiceError(
+                operation="charging credits",
+                error=e,
+                resource_type="user_credits",
+                resource_id=user_uid,
+                additional_info={"credits_to_charge": credits_to_charge}
+            ) from e
+
+    async def _get_userstatus(self, user_uid: str) -> Dict[str, Any]:
+        """Get a user's status document."""
+        try:
+            userstatus_id = f"{self.user_status_doc_prefix}{user_uid}"
+            doc_ref = self.db.collection(self.users_status_collection_name).document(userstatus_id)
+
+            # Using the timeout value set during initialization
+            doc = await doc_ref.get(timeout=self.timeout)
+
+            if not doc.exists:
+                raise ResourceNotFoundError(
+                    resource_type="user_status", # Generic resource type
+                    resource_id=userstatus_id,
+                    additional_info={"collection": self.users_status_collection_name}
+                )
+
+            return doc.to_dict()
+
+        except ResourceNotFoundError:
+            raise
+        except Exception as e: # Catch generic Exception to handle potential timeout errors from Firestore client
+            self.logger.error(f"Error getting user status for {user_uid} from {self.users_status_collection_name}: {str(e)}")
+            raise ServiceError(
+                operation="getting user status",
+                error=e,
+                resource_type="user_status",
+                resource_id=user_uid,
+                additional_info={"collection": self.users_status_collection_name}
+            ) from e
+
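
A minimal sketch of the intended verify-then-charge flow around a credit-consuming operation (the credit cost and operation label are illustrative; HTTP error handling is left to the calling API):

import logging
from google.cloud import firestore
from ipulse_shared_core_ftredge.services.credit_service import CreditService

db = firestore.Client()
credit_service = CreditService(db=db, logger=logging.getLogger("credits"), firestore_timeout=10.0)

async def run_paid_operation(user_uid: str) -> bool:
    required_credits = 2.0  # illustrative cost; normally taken from the resource's pricing config
    has_enough, credits_snapshot = await credit_service.verify_credits(user_uid, required_credits)
    if not has_enough:
        return False  # e.g. translate into an HTTP 402/403 upstream

    # ... perform the credit-consuming work here ...

    # Deducts subscription credits first, then extra credits, inside a Firestore transaction.
    return await credit_service.charge_credits(user_uid, required_credits, operation_details="demo insight generation")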

src/ipulse_shared_core_ftredge.egg-info/PKG-INFO (+2 -2)

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 11.1.1
+Version: 12.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -13,7 +13,7 @@ Requires-Dist: pydantic[email]~=2.5
 Requires-Dist: python-dateutil~=2.8
 Requires-Dist: fastapi~=0.115.8
 Requires-Dist: pytest
-Requires-Dist: ipulse_shared_base_ftredge>=6.4.1
+Requires-Dist: ipulse_shared_base_ftredge==6.5.1
 Dynamic: author
 Dynamic: classifier
 Dynamic: home-page

src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt (+1 -0)

@@ -25,6 +25,7 @@ src/ipulse_shared_core_ftredge/models/user_status.py
 src/ipulse_shared_core_ftredge/services/__init__.py
 src/ipulse_shared_core_ftredge/services/base_firestore_service.py
 src/ipulse_shared_core_ftredge/services/base_service_exceptions.py
+src/ipulse_shared_core_ftredge/services/credit_service.py
 src/ipulse_shared_core_ftredge/services/fastapiservicemon.py
 src/ipulse_shared_core_ftredge/services/servicemon.py
 src/ipulse_shared_core_ftredge/utils/__init__.py

src/ipulse_shared_core_ftredge.egg-info/requires.txt (+1 -1)

@@ -2,4 +2,4 @@ pydantic[email]~=2.5
 python-dateutil~=2.8
 fastapi~=0.115.8
 pytest
-ipulse_shared_base_ftredge>=6.4.1
+ipulse_shared_base_ftredge==6.5.1

ipulse_shared_core_ftredge-11.1.1/src/ipulse_shared_core_ftredge/models/base_data_model.py (removed, -41)

@@ -1,41 +0,0 @@
-from datetime import datetime, timezone
-from typing import ClassVar
-from pydantic import BaseModel, Field, ConfigDict, field_validator
-import dateutil.parser
-
-class BaseDataModel(BaseModel):
-    """Base model with common fields and configuration"""
-    model_config = ConfigDict(frozen=True, extra="forbid")
-
-    # Required class variables that must be defined in subclasses
-    VERSION: ClassVar[float]
-    DOMAIN: ClassVar[str]
-    OBJ_REF: ClassVar[str]
-
-    # Schema versioning
-    schema_version: float = Field(
-        ..., # Make this required
-        description="Version of this Class == version of DB Schema",
-        frozen=True
-    )
-
-    # Audit fields
-    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
-    created_by: str = Field(..., frozen=True)
-    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
-    updated_by: str = Field(...)
-
-    @classmethod
-    def get_collection_name(cls) -> str:
-        """Generate standard collection name"""
-        return f"{cls.DOMAIN}_{cls.OBJ_REF}s"
-
-    @field_validator('created_at', 'updated_at', mode='before')
-    @classmethod
-    def parse_datetime(cls, v: any) -> datetime:
-        if isinstance(v, datetime):
-            return v
-        try:
-            return dateutil.parser.isoparse(v)
-        except (TypeError, ValueError) as e:
-            raise ValueError(f"Invalid datetime format: {e}")