ipulse-shared-core-ftredge 10.1.1.tar.gz → 12.0.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (38)
  1. {ipulse_shared_core_ftredge-10.1.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-12.0.1}/PKG-INFO +2 -2
  2. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/setup.py +2 -2
  3. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/__init__.py +2 -2
  4. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +171 -47
  5. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/base_api_response.py +8 -5
  6. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +55 -0
  7. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_status.py +4 -0
  8. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +95 -17
  9. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/services/credit_service.py +270 -0
  10. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/utils/__init__.py +1 -0
  11. ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge/utils/json_encoder.py +62 -0
  12. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +2 -2
  13. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +1 -0
  14. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +1 -1
  15. ipulse_shared_core_ftredge-10.1.1/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -41
  16. ipulse_shared_core_ftredge-10.1.1/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -1
  17. ipulse_shared_core_ftredge-10.1.1/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -13
  18. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/LICENCE +0 -0
  19. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/README.md +0 -0
  20. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/pyproject.toml +0 -0
  21. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/setup.cfg +0 -0
  22. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  23. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  24. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  25. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  26. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  27. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/organization_profile.py +0 -0
  28. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/subscription.py +0 -0
  29. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  30. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  31. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  32. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/__init__.py +0 -0
  33. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/base_service_exceptions.py +0 -0
  34. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -0
  35. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/services/servicemon.py +0 -0
  36. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  37. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  38. {ipulse_shared_core_ftredge-10.1.1 → ipulse_shared_core_ftredge-12.0.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 10.1.1
+Version: 12.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -13,7 +13,7 @@ Requires-Dist: pydantic[email]~=2.5
 Requires-Dist: python-dateutil~=2.8
 Requires-Dist: fastapi~=0.115.8
 Requires-Dist: pytest
-Requires-Dist: ipulse_shared_base_ftredge>=6.4.1
+Requires-Dist: ipulse_shared_base_ftredge==6.5.1
 Dynamic: author
 Dynamic: classifier
 Dynamic: home-page
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages

 setup(
     name='ipulse_shared_core_ftredge',
-    version='10.1.1',
+    version='12.0.1',
     package_dir={'': 'src'},  # Specify the source directory
     packages=find_packages(where='src'),  # Look for packages in 'src'
     install_requires=[
@@ -12,7 +12,7 @@ setup(
         'python-dateutil~=2.8',
         'fastapi~=0.115.8',
         'pytest',
-        'ipulse_shared_base_ftredge>=6.4.1',
+        'ipulse_shared_base_ftredge==6.5.1',
     ],
     author='Russlan Ramdowar',
     description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',
@@ -7,6 +7,6 @@ from .models import ( UserAuth, UserProfile,Subscription,


 from .services import (BaseFirestoreService,BaseServiceException, ResourceNotFoundError, AuthorizationError,
-                       ValidationError)
+                       ValidationError, ServiceError)

-from .utils import (CustomJSONEncoder)
+from .utils import (EnsureJSONEncoderCompatibility)
@@ -1,17 +1,23 @@
 import os
 import logging
+import json
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
 from typing import Optional, Iterable, Dict, Any, List
 from datetime import datetime, timedelta, timezone
+import json
 import httpx
 from fastapi import HTTPException, Request
 from google.cloud import firestore
 from ipulse_shared_core_ftredge.services import ServiceError, AuthorizationError, ResourceNotFoundError
 from ipulse_shared_core_ftredge.models import UserStatus
+from ipulse_shared_core_ftredge.utils.json_encoder import convert_to_json_serializable
+
+# Constants derived from UserStatus model
+USERS_STATUS_COLLECTION_NAME = UserStatus.COLLECTION_NAME
+USERS_STATUS_DOC_REF = f"{UserStatus.OBJ_REF}_"  # Use OBJ_REF and append underscore
+USERSTATUS_CACHE_TTL = 60  # 60 seconds

-# Constants
-USERS_STATUS_COLLECTION_NAME = UserStatus.get_collection_name()
-USERS_STATUS_DOC_REF = "userstatus_"
-CACHE_TTL = 60  # 60 seconds
 class UserStatusCache:
     """Manages user status caching with dynamic invalidation"""
     def __init__(self):
@@ -30,7 +36,7 @@ class UserStatusCache:
         status_data = self._cache[user_uid]
         # Force refresh for credit-consuming or sensitive operations
         # Check TTL for normal operations
-        if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=CACHE_TTL):
+        if datetime.now() - self._timestamps[user_uid] < timedelta(seconds=USERSTATUS_CACHE_TTL):
             return status_data
         self.invalidate(user_uid)
         return None
@@ -62,13 +68,59 @@ userstatus_cache = UserStatusCache()
 # Replace the logger dependency with a standard logger
 logger = logging.getLogger(__name__)

+# Create a custom FirestoreTimeoutError class that can be identified in middlewares
+class FirestoreTimeoutError(TimeoutError):
+    """Custom exception for Firestore timeout errors to make them more identifiable."""
+    pass
+
+
+# Define a function to get a Firestore document with a strict timeout
+async def get_with_strict_timeout(doc_ref, timeout_seconds: float):
+    """
+    Get a Firestore document with a strictly enforced timeout.
+
+    Args:
+        doc_ref: Firestore document reference
+        timeout_seconds: Maximum time to wait in seconds
+
+    Returns:
+        Document snapshot
+
+    Raises:
+        FirestoreTimeoutError: If the operation takes longer than timeout_seconds
+    """
+    loop = asyncio.get_running_loop()
+    with ThreadPoolExecutor() as executor:
+        try:
+            # Run the blocking Firestore get() operation in a thread and apply a strict timeout
+            logger.debug(f"Starting Firestore get with strict timeout of {timeout_seconds}s")
+            return await asyncio.wait_for(
+                loop.run_in_executor(executor, doc_ref.get),
+                timeout=timeout_seconds
+            )
+        except asyncio.TimeoutError:
+            error_message = f"User Status fetching for Authz timed out after {timeout_seconds} seconds, perhaps issue with Firestore Connectivity"
+            logger.error(error_message)
+            raise FirestoreTimeoutError(error_message)
+
+# Update get_userstatus to use our new strict timeout function
 async def get_userstatus(
     user_uid: str,
-    db: firestore.Client,  # Note: This expects the actual client, not a Depends
-    force_fresh: bool = False
+    db: firestore.Client,
+    force_fresh: bool = False,
+    timeout: float = 12.0  # Default timeout but allow override
 ) -> tuple[Dict[str, Any], bool]:
     """
-    Fetch user status with intelligent caching
+    Fetch user status with intelligent caching and configurable timeout
+
+    Args:
+        user_uid: User ID to fetch status for
+        db: Firestore client
+        force_fresh: Whether to bypass cache
+        timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Tuple of (user status data, whether cache was used)
     """
     cache_used = False
     if not force_fresh:
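
The get_with_strict_timeout helper added above follows the standard asyncio recipe for bounding a blocking SDK call: run it in a worker thread via run_in_executor and cap the wait with asyncio.wait_for. A minimal, self-contained sketch of the same pattern, with a hypothetical blocking_fetch standing in for doc_ref.get:

    import asyncio
    import time
    from concurrent.futures import ThreadPoolExecutor

    def blocking_fetch():
        # Stand-in for a blocking SDK call such as doc_ref.get()
        time.sleep(2)
        return {"status": "ok"}

    async def fetch_with_strict_timeout(timeout_seconds: float):
        loop = asyncio.get_running_loop()
        with ThreadPoolExecutor() as executor:
            # wait_for stops awaiting after the deadline; note that the
            # executor's context-manager exit still joins the worker thread,
            # so a truly hung call can delay the caller beyond the deadline
            return await asyncio.wait_for(
                loop.run_in_executor(executor, blocking_fetch),
                timeout=timeout_seconds,
            )

    # asyncio.run(fetch_with_strict_timeout(0.5))  # raises asyncio.TimeoutError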
@@ -82,8 +134,11 @@ async def get_userstatus(
         userstatus_id = USERS_STATUS_DOC_REF + user_uid
         user_ref = db.collection(USERS_STATUS_COLLECTION_NAME).document(userstatus_id)

-        # Get the document
-        snapshot = user_ref.get()
+        logger.debug(f"Fetching user status for {user_uid} with strict timeout {timeout}s")
+
+        # Use our strict timeout wrapper instead of the native timeout parameter
+        snapshot = await get_with_strict_timeout(user_ref, timeout)
+
         if not snapshot.exists:
             raise ResourceNotFoundError(
                 resource_type="authorization userstatus",
@@ -98,9 +153,23 @@ async def get_userstatus(
         userstatus_cache.set(user_uid, status_data)
         return status_data, cache_used

+    except TimeoutError as e:
+        logger.error(f"Timeout while fetching user status for {user_uid}: {str(e)}")
+        raise ServiceError(
+            operation="fetching user status for authz",
+            error=e,
+            resource_type="userstatus",
+            resource_id=user_uid,
+            additional_info={
+                "force_fresh": force_fresh,
+                "collection": USERS_STATUS_COLLECTION_NAME,
+                "timeout_seconds": timeout
+            }
+        )
     except ResourceNotFoundError:
         raise
     except Exception as e:
+        logger.error(f"Error fetching user status for {user_uid}: {str(e)}")
         raise ServiceError(
             operation=f"fetching user status",
             error=e,
@@ -153,15 +222,27 @@ async def extract_request_fields(request: Request) -> Optional[List[str]]:
         logger.warning(f"Could not extract fields from request body: {str(e)}")
         return None  # Return None instead of raising an error

+# Main authorization function with configurable timeout
 async def authorizeAPIRequest(
     request: Request,
-    db: firestore.Client,  # Changed: Now expects actual client instance
+    db: firestore.Client,
     request_resource_fields: Optional[Iterable[str]] = None,
+    firestore_timeout: float = 15.0  # Allow specifying timeout
 ) -> Dict[str, Any]:
     """
     Authorize API request based on user status and OPA policies.
     Enhanced with credit check information.
+
+    Args:
+        request: The incoming request
+        db: Firestore client
+        request_resource_fields: Fields being accessed/modified in the request
+        firestore_timeout: Timeout for Firestore operations in seconds
+
+    Returns:
+        Authorization result containing decision details
     """
+    opa_decision = None
     try:
         # Extract fields for both PATCH and POST if not provided
         if not request_resource_fields:
@@ -175,41 +256,104 @@ async def authorizeAPIRequest(
                 additional_info={"path": str(request.url)}
             )

-
         # Determine if we need fresh status
         force_fresh = _should_force_fresh_status(request)
-        userstatus, cache_used = await get_userstatus(user_uid, db, force_fresh=force_fresh)
-
-        # Prepare authorization input
-        auth_input = {
+        userstatus, cache_used = await get_userstatus(
+            user_uid,
+            db,
+            force_fresh=force_fresh,
+            timeout=firestore_timeout  # Pass the specified timeout
+        )
+
+        # Prepare authorization input that matches OPA expectations
+        # Extract required values from user status
+        primary_usertype = userstatus.get("primary_usertype")
+        secondary_usertypes = userstatus.get("secondary_usertypes", [])
+
+        # Extract IAM domain permissions
+        iam_domain_permissions = userstatus.get("iam_domain_permissions", {})
+
+        # Format the authz_input to match what the OPA policies expect
+        authz_input = {
             "api_url": request.url.path,
             "requestor": {
                 "uid": user_uid,
-                "usertypes": request.state.user.get("usertypes"),
-                "email_verified": request.state.user.get("email_verified"),
-                "iam_groups": userstatus.get("iam_groups"),
-                "subscriptions": userstatus.get("subscriptions"),
-                "sbscrptn_based_insight_credits": userstatus.get("sbscrptn_based_insight_credits"),
-                "extra_insight_credits": userstatus.get("extra_insight_credits")
+                "primary_usertype": primary_usertype,
+                "secondary_usertypes": secondary_usertypes,
+                "usertypes": [primary_usertype] + secondary_usertypes if primary_usertype else secondary_usertypes,
+                "email_verified": request.state.user.get("email_verified", False),
+                "iam_domain_permissions": iam_domain_permissions,
+                "sbscrptn_based_insight_credits": userstatus.get("sbscrptn_based_insight_credits", 0),
+                "extra_insight_credits": userstatus.get("extra_insight_credits", 0)
             },
             "method": request.method.lower(),
             "request_resource_fields": request_resource_fields
         }

-        ####!!!!!!!!!! OPA call
+        # Convert any non-serializable objects to JSON serializable format
+        # using the unified utility from utils
+        json_safe_authz_input = convert_to_json_serializable(authz_input)
+
         # Query OPA
         opa_url = f"{os.getenv('OPA_SERVER_URL', 'http://localhost:8181')}{os.getenv('OPA_DECISION_PATH', '/v1/data/http/authz/ingress/decision')}"
         logger.debug(f"Attempting to connect to OPA at: {opa_url}")
-        logger.debug(f"Authorization input: {auth_input}")
+
+        # Debug: log the raw JSON payload to identify any serialization issues
+        try:
+            payload_json = json.dumps({"input": json_safe_authz_input})
+            logger.debug(f"OPA Request JSON payload: {payload_json}")
+        except Exception as json_err:
+            logger.error(f"Error serializing OPA request payload: {json_err}")
+
         async with httpx.AsyncClient() as client:
             try:
                 response = await client.post(
                     opa_url,
-                    json={"input": auth_input},
+                    json={"input": json_safe_authz_input},
                     timeout=5.0  # 5 seconds timeout
                 )
                 logger.debug(f"OPA Response Status: {response.status_code}")
                 logger.debug(f"OPA Response Body: {response.text}")
+
+                if response.status_code != 200:
+                    logger.error(f"OPA authorization failed: {response.text}")
+                    raise HTTPException(
+                        status_code=500,
+                        detail="Authorization service error"
+                    )
+
+                result = response.json()
+                logger.debug(f"Parsed OPA response: {result}")
+
+                # The decision is expected under the "result" key;
+                # anything else is treated as a malformed OPA response
+                if "result" in result:
+                    opa_decision = result["result"]
+                else:
+                    logger.warning("OPA response missing 'result' field")
+                    raise HTTPException(
+                        status_code=500,
+                        detail="Authorization service error: OPA response format unexpected"
+                    )
+
+                # Extract key fields from the decision with safe defaults
+                allow = opa_decision.get("allow", False)
+
+                # Handle authorization denial
+                if not allow:
+                    logger.error(f"Authorization denied: {result}")
+                    raise AuthorizationError(
+                        action=f"{request.method} {request.url.path}",
+                        additional_info={
+                            "user_uid": user_uid,
+                            "resource_fields": request_resource_fields,
+                            "opa_decision": opa_decision,  # Include the full OPA decision result
+                            # Include the raw result if it differs from the processed decision
+                            "raw_opa_response": result if result != {"result": opa_decision} else None
+                        }
+                    )
+
             except httpx.RequestError as e:
                 logger.error(f"Failed to connect to OPA: {str(e)}")
                 raise ServiceError(
@@ -221,39 +365,19 @@ async def authorizeAPIRequest(
                         "connection_error": str(e)
                     }
                 ) from e
-        if response.status_code != 200:
-            logger.error(f"OPA authorization failed: {response.text}")
-            raise HTTPException(
-                status_code=500,
-                detail="Authorization service error"
-            )
-
-        result = response.json()
-        if not result.get("result", {}).get("allow", False):
-            raise AuthorizationError(
-                action=f"{request.method} {request.url.path}",
-                additional_info={
-                    "user_uid": user_uid,
-                    "resource_fields": request_resource_fields
-                }
-            )
-
-        # Extract credit check information from the OPA response
-        credit_check = {}
-        if "credit_check" in result.get("result", {}):
-            credit_check = result["result"]["credit_check"]

         # More descriptive metadata about the data freshness
         return {
             "used_cached_status": cache_used,
             "required_fresh_status": force_fresh,
             "status_retrieved_at": datetime.now(timezone.utc).isoformat(),
-            "credit_check": credit_check
+            "opa_decision": opa_decision
         }

     except (AuthorizationError, ResourceNotFoundError):
         raise
     except Exception as e:
+        logger.exception(f"Exception in authorizeAPIRequest: {e}")
         raise ServiceError(
             operation="API authorization",
             error=e,
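
With these changes, authorizeAPIRequest resolves user status (cached or fresh, under a strict Firestore timeout), posts a JSON-safe input document to OPA, and either raises AuthorizationError or returns the OPA decision alongside cache-freshness metadata. A hedged sketch of how a consuming FastAPI service might call it; the route and get_db provider are illustrative assumptions, not part of this package:

    from fastapi import APIRouter, Depends, Request
    from google.cloud import firestore
    from ipulse_shared_core_ftredge.dependencies.authz_for_apis import authorizeAPIRequest

    router = APIRouter()

    def get_db() -> firestore.Client:
        # Hypothetical provider; real services would reuse a shared client
        return firestore.Client()

    @router.get("/insights/{insight_id}")  # hypothetical route
    async def read_insight(insight_id: str, request: Request,
                           db: firestore.Client = Depends(get_db)):
        # Raises AuthorizationError / ServiceError on failure;
        # returns decision metadata on success
        authz = await authorizeAPIRequest(request, db, firestore_timeout=10.0)
        return {"insight_id": insight_id, "opa_decision": authz["opa_decision"]}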
@@ -3,7 +3,7 @@ import datetime as dt
 import json
 from pydantic import BaseModel, ConfigDict
 from fastapi.responses import JSONResponse
-from ipulse_shared_core_ftredge.utils import CustomJSONEncoder
+from ipulse_shared_core_ftredge.utils.json_encoder import EnsureJSONEncoderCompatibility, convert_to_json_serializable


 T = TypeVar('T')
@@ -27,7 +27,7 @@ class PaginatedAPIResponse(BaseAPIResponse, Generic[T]):

 class CustomJSONResponse(JSONResponse):
     def render(self, content) -> bytes:
-        # Handle Pydantic models to exclude computed fields
+        # First preprocess content with our utility function
         if isinstance(content, dict) and "data" in content and hasattr(content["data"], "model_dump"):
             # If content["data"] is a Pydantic model, use model_dump with exclude_unset=True
             # and exclude_computed=True to prevent serialization of computed fields
@@ -37,12 +37,15 @@ class CustomJSONResponse(JSONResponse):
                 exclude_computed=True
             )

-        # Use the CustomJSONEncoder for serialization
+        # Now convert all problematic types to JSON serializable values
+        json_safe_content = convert_to_json_serializable(content)
+
+        # Use EnsureJSONEncoderCompatibility for additional safety
         return json.dumps(
-            content,
+            json_safe_content,
             ensure_ascii=False,
             allow_nan=False,
             indent=None,
             separators=(",", ":"),
-            default=CustomJSONEncoder().default
+            cls=EnsureJSONEncoderCompatibility
        ).encode("utf-8")
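
CustomJSONResponse therefore normalizes the payload twice: convert_to_json_serializable rewrites nested datetimes, Enums, and Pydantic models up front, and EnsureJSONEncoderCompatibility acts as a backstop inside json.dumps. A small sketch of the kind of payload this is designed to survive (the enum and field names are illustrative):

    from datetime import datetime, timezone
    from enum import Enum

    class Tier(Enum):  # illustrative enum
        FREE = "free"

    payload = {
        "success": True,
        "data": {"tier": Tier.FREE, "created_at": datetime.now(timezone.utc)},
    }
    # CustomJSONResponse(content=payload) now renders without raising
    # "Object of type datetime is not JSON serializable"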
@@ -0,0 +1,55 @@
+from datetime import datetime, timezone
+from typing import Any
+from typing import ClassVar
+from pydantic import BaseModel, Field, ConfigDict, field_validator
+import dateutil.parser
+
+class BaseDataModel(BaseModel):
+    """Base model with common fields and configuration"""
+    model_config = ConfigDict(frozen=True, extra="forbid")
+
+    # Required class variables that must be defined in subclasses
+    VERSION: ClassVar[float]
+    DOMAIN: ClassVar[str]
+    OBJ_REF: ClassVar[str]
+
+    # Schema versioning
+    schema_version: float = Field(
+        ...,  # Make this required
+        description="Version of this Class == version of DB Schema",
+        frozen=True
+    )
+
+    # Audit fields
+    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    created_by: str = Field(..., frozen=True)
+    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
+    updated_by: str = Field(...)
+
+    @classmethod
+    def get_collection_name(cls) -> str:
+        """Generate standard collection name"""
+        return f"{cls.DOMAIN}_{cls.OBJ_REF}s"
+
+    @field_validator('created_at', 'updated_at', mode='before')
+    @classmethod
+    def parse_datetime(cls, v: Any) -> datetime:
+        if isinstance(v, datetime):  # If Firestore already gave a datetime object
+            return v  # Just use it, no parsing needed
+        if isinstance(v, str):  # If it's a string (e.g. from an API request, not Firestore direct)
+            try:
+                return dateutil.parser.isoparse(v)
+            except (TypeError, ValueError) as e:
+                raise ValueError(f"Invalid datetime string format: {v} - {e}")
+        # Firestore might send google.api_core.datetime_helpers.DatetimeWithNanoseconds,
+        # which is a subclass of datetime.datetime, so isinstance(v, datetime) should catch it.
+        # If for some reason it's a different type not caught by isinstance(v, datetime)
+        # but has isoformat(), perhaps try that, but it's unlikely with the current Firestore client.
+        # For example, if v is some custom timestamp object from an older library:
+        if hasattr(v, 'isoformat'):  # Fallback for unknown datetime-like objects
+            try:
+                return dateutil.parser.isoparse(v.isoformat())
+            except Exception as e:
+                raise ValueError(f"Could not parse datetime-like object: {v} - {e}")
+
+        raise ValueError(f"Unsupported type for datetime parsing: {type(v)} value: {v}")
@@ -51,6 +51,10 @@ class UserStatus(BaseDataModel):
     DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Subject.USER.name))
     OBJ_REF: ClassVar[str] = "userstatus"

+    # Centralized collection name and document ID prefix
+    COLLECTION_NAME: ClassVar[str] = "papp_core_user_userstatuss"
+
+
     # System-managed fields
     schema_version: float = Field(
         default=VERSION,
@@ -1,6 +1,7 @@
 """ Base class for Firestore services with common CRUD operations """
-from typing import Dict, Any, List, TypeVar, Generic
+from typing import Dict, Any, List, TypeVar, Generic, Optional
 import logging
+import time
 from datetime import datetime, timezone
 from pydantic import BaseModel
 from google.cloud import firestore
@@ -11,11 +12,20 @@ T = TypeVar('T', bound=BaseModel)
 class BaseFirestoreService(Generic[T]):
     """Base class for Firestore services with common CRUD operations"""

-    def __init__(self, db: firestore.Client, collection_name: str, resource_type: str, logger: logging.Logger):
+    def __init__(
+        self,
+        db: firestore.Client,
+        collection_name: str,
+        resource_type: str,
+        logger: logging.Logger,
+        timeout: float = 15.0  # Default to 15 seconds, but allow override
+    ):
         self.db = db
         self.collection_name = collection_name
         self.resource_type = resource_type
         self.logger = logger
+        self.timeout = timeout  # Store the timeout as an instance attribute
+        self.logger.info(f"Initialized {self.resource_type} service with timeout={timeout}s")

     async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
         """Standard create method with audit fields"""
@@ -32,7 +42,8 @@ class BaseFirestoreService(Generic[T]):
         })

         doc_ref = self.db.collection(self.collection_name).document(doc_id)
-        doc_ref.set(doc_data)
+        # Apply timeout to the set operation
+        doc_ref.set(doc_data, timeout=self.timeout)

         self.logger.info(f"Created {self.resource_type}: {doc_id}")
         return doc_data
@@ -66,7 +77,8 @@ class BaseFirestoreService(Generic[T]):
             batch.set(doc_ref, doc_data)
             created_docs.append(doc_data)

-        batch.commit()
+        # Apply timeout to the commit operation
+        batch.commit(timeout=self.timeout)
         self.logger.info(f"Created {len(documents)} {self.resource_type}s in batch")
         return created_docs
@@ -81,24 +93,48 @@ class BaseFirestoreService(Generic[T]):

     async def get_document(self, doc_id: str) -> Dict[str, Any]:
         """Get a document by ID with standardized error handling"""
-        doc_ref = self.db.collection(self.collection_name).document(doc_id)
-        doc = doc_ref.get()
+        self.logger.debug(f"Getting {self.resource_type} document: {doc_id} with timeout={self.timeout}s")
+        start_time = time.time()

-        if not doc.exists:
-            raise ResourceNotFoundError(
+        try:
+            doc_ref = self.db.collection(self.collection_name).document(doc_id)
+
+            # Apply timeout to the get operation
+            doc = doc_ref.get(timeout=self.timeout)
+
+            elapsed = (time.time() - start_time) * 1000
+            self.logger.debug(f"Firestore get for {doc_id} completed in {elapsed:.2f}ms")
+
+            if not doc.exists:
+                self.logger.warning(f"Document {doc_id} not found in {self.collection_name}")
+                raise ResourceNotFoundError(
+                    resource_type=self.resource_type,
+                    resource_id=doc_id,
+                    additional_info={"collection": self.collection_name}
+                )
+
+            return doc.to_dict()
+
+        except ResourceNotFoundError:
+            raise
+        except Exception as e:
+            elapsed = (time.time() - start_time) * 1000
+            self.logger.error(f"Error getting document {doc_id} after {elapsed:.2f}ms: {str(e)}", exc_info=True)
+            raise ServiceError(
+                operation=f"retrieving {self.resource_type}",
+                error=e,
                 resource_type=self.resource_type,
                 resource_id=doc_id,
-                additional_info={"collection": self.collection_name}
-            )
-
-        return doc.to_dict()
+                additional_info={"collection": self.collection_name, "timeout": self.timeout}
+            ) from e

     async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str) -> Dict[str, Any]:
         """Standard update method with validation and audit fields"""
         try:
             doc_ref = self.db.collection(self.collection_name).document(doc_id)

-            if not doc_ref.get().exists:
+            # Apply timeout to the get operation
+            if not doc_ref.get(timeout=self.timeout).exists:
                 raise ResourceNotFoundError(
                     resource_type=self.resource_type,
                     resource_id=doc_id,
@@ -113,8 +149,10 @@ class BaseFirestoreService(Generic[T]):
                 'updated_by': updater_uid
             })

-            doc_ref.update(valid_fields)
-            return doc_ref.get().to_dict()
+            # Apply timeout to the update operation
+            doc_ref.update(valid_fields, timeout=self.timeout)
+            # Apply timeout to the get operation
+            return doc_ref.get(timeout=self.timeout).to_dict()

         except (ResourceNotFoundError, ValidationError):
             raise
@@ -131,13 +169,15 @@ class BaseFirestoreService(Generic[T]):
         """Standard delete method"""
         try:
             doc_ref = self.db.collection(self.collection_name).document(doc_id)
-            if not doc_ref.get().exists:
+            # Apply timeout to the get operation
+            if not doc_ref.get(timeout=self.timeout).exists:
                 raise ResourceNotFoundError(
                     resource_type=self.resource_type,
                     resource_id=doc_id
                 )

-            doc_ref.delete()
+            # Apply timeout to the delete operation
+            doc_ref.delete(timeout=self.timeout)
             self.logger.info(f"Deleted {self.resource_type}: {doc_id}")

         except ResourceNotFoundError:
@@ -151,6 +191,44 @@ class BaseFirestoreService(Generic[T]):
                 resource_id=doc_id
             ) from e

+    # Add query method with timeout
+    async def query_documents(
+        self,
+        filters: Optional[List[tuple]] = None,
+        limit: Optional[int] = None,
+        order_by: Optional[tuple] = None
+    ) -> List[Dict[str, Any]]:
+        """Query documents with filters, limit, and ordering"""
+        try:
+            # Start with the collection reference
+            query = self.db.collection(self.collection_name)
+
+            # Apply filters if provided, each as a (field, operator, value) tuple
+            if filters:
+                for field, op, value in filters:
+                    query = query.where(field, op, value)
+
+            # Apply ordering if provided
+            if order_by:
+                field, direction = order_by
+                query = query.order_by(field, direction=direction)
+
+            # Apply limit if provided
+            if limit:
+                query = query.limit(limit)
+
+            # Execute query with timeout
+            docs = query.stream(timeout=self.timeout)
+            return [doc.to_dict() for doc in docs]
+
+        except Exception as e:
+            self.logger.error(f"Error querying {self.resource_type}: {e}", exc_info=True)
+            raise ServiceError(
+                operation=f"querying {self.resource_type}",
+                error=e,
+                resource_type=self.resource_type
+            ) from e
+
     def _validate_update_fields(self, update_data: Dict[str, Any]) -> Dict[str, Any]:
         """Centralized update fields validation"""
         if not isinstance(update_data, dict):
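
The new query_documents helper takes filters as (field, operator, value) tuples and order_by as a (field, direction) pair, with the instance timeout applied to the streamed query. A hedged usage sketch; the collection and field names are illustrative:

    import asyncio
    import logging
    from google.cloud import firestore

    async def main():
        db = firestore.Client()
        service = BaseFirestoreService(
            db=db,
            collection_name="papp_core_user_userstatuss",
            resource_type="userstatus",
            logger=logging.getLogger("userstatus"),
            timeout=10.0,  # overrides the 15 s default
        )
        # Up to 20 statuses with positive extra credits, newest first
        return await service.query_documents(
            filters=[("extra_insight_credits", ">", 0)],
            order_by=("updated_at", firestore.Query.DESCENDING),
            limit=20,
        )

    # docs = asyncio.run(main())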
@@ -0,0 +1,270 @@
+"""Service for managing credit operations in a generic way."""
+import logging
+from typing import Dict, Any, Optional, Tuple
+from datetime import datetime, timezone
+from google.cloud import firestore
+from ipulse_shared_core_ftredge.services import ServiceError, ResourceNotFoundError, ValidationError
+from ipulse_shared_core_ftredge.models.user_status import UserStatus
+
+# Default Firestore timeout if not provided by the consuming application
+DEFAULT_FIRESTORE_TIMEOUT = 15.0
+
+class CreditService:
+    """
+    Service class for credit operations.
+    Designed to be project-agnostic and directly uses UserStatus model constants.
+    """
+
+    def __init__(
+        self,
+        db: firestore.Client,
+        logger: Optional[logging.Logger] = None,
+        firestore_timeout: float = DEFAULT_FIRESTORE_TIMEOUT
+    ):
+        """
+        Initialize the credit service.
+
+        Args:
+            db: Firestore client.
+            logger: Optional logger instance. Defaults to a new logger for this module.
+            firestore_timeout: Timeout for Firestore operations in seconds.
+        """
+        self.db = db
+        # Use UserStatus constants directly
+        self.users_status_collection_name = UserStatus.COLLECTION_NAME
+        self.user_status_doc_prefix = f"{UserStatus.OBJ_REF}_"  # Append underscore to OBJ_REF
+        self.logger = logger or logging.getLogger(__name__)
+        self.timeout = firestore_timeout
+
+        self.logger.info(
+            f"CreditService initialized using UserStatus constants. Collection: {self.users_status_collection_name}, "
+            f"Doc Prefix: {self.user_status_doc_prefix}, Timeout: {self.timeout}s"
+        )
+
+    async def verify_credits(
+        self,
+        user_uid: str,
+        required_credits_for_resource: float,
+        pre_fetched_user_credits: Optional[Dict[str, float]] = None
+    ) -> Tuple[bool, Dict[str, Any]]:
+        """
+        Verify if a user has enough credits for an operation.
+
+        Args:
+            user_uid: The user's UID.
+            required_credits_for_resource: The number of credits required for the operation.
+            pre_fetched_user_credits: Optional dict with pre-fetched credit info.
+                (keys: 'sbscrptn_based_insight_credits', 'extra_insight_credits')
+
+        Returns:
+            Tuple of (has_enough_credits, user_status_data) where user_status_data
+            will be a dict with keys 'sbscrptn_based_insight_credits' and 'extra_insight_credits'.
+
+        Raises:
+            ValidationError: If required_credits_for_resource is None (pricing not properly configured).
+        """
+        self.logger.info(
+            f"verify_credits called for user {user_uid}, "
+            f"required_credits={required_credits_for_resource}, "
+            f"pre_fetched_credits={pre_fetched_user_credits}"
+        )
+
+        if required_credits_for_resource is None:
+            self.logger.error(f"Credit cost is None for user {user_uid}, pricing not properly configured")
+            raise ValidationError(
+                resource_type="credit_cost",
+                detail="Credit cost is not configured for this resource",
+                resource_id=None,  # Resource ID might not be known here, or could be passed
+                additional_info={"user_uid": user_uid}
+            )
+
+        if required_credits_for_resource <= 0:
+            self.logger.info(f"No credits required for user {user_uid}, bypassing credit verification")
+            return True, {"sbscrptn_based_insight_credits": 0, "extra_insight_credits": 0}
+
+        if pre_fetched_user_credits is not None:
+            self.logger.info(f"Using pre-fetched credit info for user {user_uid}")
+            subscription_credits = pre_fetched_user_credits.get("sbscrptn_based_insight_credits", 0)
+            extra_credits = pre_fetched_user_credits.get("extra_insight_credits", 0)
+            total_credits = subscription_credits + extra_credits
+
+            self.logger.info(
+                f"User {user_uid} has {total_credits} total pre-fetched credits "
+                f"(subscription: {subscription_credits}, extra: {extra_credits})"
+            )
+
+            userstatus_data_to_return = {
+                "sbscrptn_based_insight_credits": subscription_credits,
+                "extra_insight_credits": extra_credits
+            }
+
+            has_enough_credits = total_credits >= required_credits_for_resource
+            return has_enough_credits, userstatus_data_to_return
+
+        try:
+            self.logger.info(
+                f"Fetching user status from Firestore for user {user_uid} (collection: {self.users_status_collection_name})"
+            )
+            full_userstatus_doc = await self._get_userstatus(user_uid)
+
+            subscription_credits = full_userstatus_doc.get("sbscrptn_based_insight_credits", 0)
+            extra_credits = full_userstatus_doc.get("extra_insight_credits", 0)
+            total_credits = subscription_credits + extra_credits
+
+            self.logger.info(
+                f"User {user_uid} has {total_credits} total credits from Firestore "
+                f"(subscription: {subscription_credits}, extra: {extra_credits})"
+            )
+
+            has_enough_credits = total_credits >= required_credits_for_resource
+
+            userstatus_data_to_return = {
+                "sbscrptn_based_insight_credits": subscription_credits,
+                "extra_insight_credits": extra_credits
+            }
+
+            return has_enough_credits, userstatus_data_to_return
+
+        except ResourceNotFoundError:
+            self.logger.warning(f"User status not found for {user_uid} in {self.users_status_collection_name}. Assuming no credits.")
+            return False, {"sbscrptn_based_insight_credits": 0, "extra_insight_credits": 0}
+        except Exception as e:
+            self.logger.error(f"Error verifying credits for user {user_uid}: {str(e)}")
+            raise ServiceError(
+                operation="verifying credits",
+                error=e,
+                resource_type="user_credits",
+                resource_id=user_uid,
+                additional_info={"required_credits_for_resource": required_credits_for_resource}
+            ) from e
+
+    async def charge_credits(self, user_uid: str, credits_to_charge: Optional[float], operation_details: str) -> bool:
+        """
+        Charge a user's credits for an operation.
+
+        Args:
+            user_uid: The user's UID.
+            credits_to_charge: The number of credits to charge.
+            operation_details: Details about the operation (for logging).
+
+        Returns:
+            Whether the charging was successful.
+
+        Raises:
+            ValidationError: If credits_to_charge is None (pricing not properly configured).
+        """
+        if credits_to_charge is None:
+            self.logger.error(f"Credit cost is None for user {user_uid} (charge_credits), pricing not properly configured")
+            raise ValidationError(
+                resource_type="credit_cost",
+                detail="Credit cost is not configured for this resource (charge_credits)",
+                resource_id=None,
+                additional_info={"user_uid": user_uid}
+            )
+
+        if credits_to_charge == 0:
+            self.logger.info(f"No credits to charge for user {user_uid}, operation: {operation_details}")
+            return True
+
+        try:
+            userstatus_id = f"{self.user_status_doc_prefix}{user_uid}"
+            user_ref = self.db.collection(self.users_status_collection_name).document(userstatus_id)
+
+            transaction = self.db.transaction()
+
+            @firestore.transactional
+            def update_credits_transaction(transaction_obj, current_user_ref):
+                user_doc = current_user_ref.get(transaction=transaction_obj)
+                if not user_doc.exists:
+                    self.logger.warning(
+                        f"Cannot charge credits - user status not found for {user_uid} in {self.users_status_collection_name}"
+                    )
+                    return False
+
+                userstatus = user_doc.to_dict()
+
+                subscription_credits = userstatus.get("sbscrptn_based_insight_credits", 0)
+                extra_credits = userstatus.get("extra_insight_credits", 0)
+                total_credits = subscription_credits + extra_credits
+
+                if total_credits < credits_to_charge:
+                    self.logger.warning(
+                        f"Insufficient credits for user {user_uid} during transaction: "
+                        f"has {total_credits}, needs {credits_to_charge}"
+                    )
+                    return False
+
+                subscription_credits_to_charge = min(subscription_credits, credits_to_charge)
+                extra_credits_to_charge = credits_to_charge - subscription_credits_to_charge
+
+                update_data = {
+                    "updated_at": datetime.now(timezone.utc).isoformat(),
+                    "updated_by": "credit_service"  # Consider making this configurable or more generic
+                }
+
+                if subscription_credits_to_charge > 0:
+                    update_data["sbscrptn_based_insight_credits"] = firestore.Increment(-subscription_credits_to_charge)
+                    update_data["sbscrptn_based_insight_credits_updtd_on"] = datetime.now(timezone.utc).isoformat()
+
+                if extra_credits_to_charge > 0:
+                    update_data["extra_insight_credits"] = firestore.Increment(-extra_credits_to_charge)
+                    update_data["extra_insight_credits_updtd_on"] = datetime.now(timezone.utc).isoformat()
+
+                transaction_obj.update(current_user_ref, update_data)
+                return True
+
+            success = update_credits_transaction(transaction, user_ref)
+
+            if success:
+                self.logger.info(
+                    f"Successfully charged {credits_to_charge} credits for user {user_uid}. "
+                    f"Operation: {operation_details}"
+                )
+            else:
+                self.logger.warning(
+                    f"Failed to charge {credits_to_charge} credits for user {user_uid} (transaction outcome). "
+                    f"Operation: {operation_details}"
+                )
+
+            return success
+
+        except Exception as e:
+            self.logger.error(f"Error charging credits for user {user_uid}: {str(e)}")
+            raise ServiceError(
+                operation="charging credits",
+                error=e,
+                resource_type="user_credits",
+                resource_id=user_uid,
+                additional_info={"credits_to_charge": credits_to_charge}
+            ) from e
+
+    async def _get_userstatus(self, user_uid: str) -> Dict[str, Any]:
+        """Get a user's status document."""
+        try:
+            userstatus_id = f"{self.user_status_doc_prefix}{user_uid}"
+            doc_ref = self.db.collection(self.users_status_collection_name).document(userstatus_id)
+
+            # Using the timeout value set during initialization (sync client get)
+            doc = doc_ref.get(timeout=self.timeout)
+
+            if not doc.exists:
+                raise ResourceNotFoundError(
+                    resource_type="user_status",  # Generic resource type
+                    resource_id=userstatus_id,
+                    additional_info={"collection": self.users_status_collection_name}
+                )
+
+            return doc.to_dict()
+
+        except ResourceNotFoundError:
+            raise
+        except Exception as e:  # Catch generic Exception to handle potential timeout errors from Firestore client
+            self.logger.error(f"Error getting user status for {user_uid} from {self.users_status_collection_name}: {str(e)}")
+            raise ServiceError(
+                operation="getting user status",
+                error=e,
+                resource_type="user_status",
+                resource_id=user_uid,
+                additional_info={"collection": self.users_status_collection_name}
+            ) from e
+
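
A typical flow is to verify first and charge only after the billable work succeeds; the transactional deduction re-checks the balance, so the small window between the two calls cannot drive credits negative. A hedged sketch, assuming an initialized Firestore client:

    import logging
    from google.cloud import firestore

    async def consume_insight(user_uid: str) -> bool:
        db = firestore.Client()
        credit_service = CreditService(db, logging.getLogger("credits"), firestore_timeout=10.0)

        ok, balances = await credit_service.verify_credits(
            user_uid, required_credits_for_resource=2.0
        )
        if not ok:
            # balances carries sbscrptn_based_insight_credits / extra_insight_credits
            return False

        # ... produce the billable insight here ...

        # Deducts subscription credits first, then extra credits, in a transaction
        return await credit_service.charge_credits(
            user_uid, 2.0, operation_details="insight generation"
        )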
@@ -0,0 +1 @@
+from .json_encoder import EnsureJSONEncoderCompatibility
@@ -0,0 +1,62 @@
+import json
+from datetime import datetime
+from enum import Enum
+from google.cloud.firestore_v1._helpers import DatetimeWithNanoseconds
+from google.api_core import datetime_helpers
+
+class EnsureJSONEncoderCompatibility(json.JSONEncoder):
+    """Custom JSON encoder that handles Firestore datetime types and other non-serializable objects."""
+    def default(self, obj):
+        # Handle datetime types
+        if isinstance(obj, (datetime, DatetimeWithNanoseconds, datetime_helpers.DatetimeWithNanoseconds)):
+            return obj.isoformat()
+        # Handle enum types
+        elif isinstance(obj, Enum):
+            return obj.value
+        # Handle pydantic models
+        elif hasattr(obj, 'model_dump'):
+            return obj.model_dump()
+        # Default behavior for other types
+        return super().default(obj)
+
+def convert_to_json_serializable(obj):
+    """
+    Recursively convert objects to JSON serializable format.
+    Handles datetime objects, Enums, and nested structures.
+
+    Args:
+        obj: Any Python object that might contain non-serializable types
+
+    Returns:
+        The object with all non-serializable types converted to serializable ones
+    """
+    # Handle None
+    if obj is None:
+        return None
+
+    # Handle datetime objects (including Firestore's DatetimeWithNanoseconds)
+    if hasattr(obj, 'isoformat'):
+        return obj.isoformat()
+
+    # Handle Enum values
+    elif isinstance(obj, Enum):
+        return obj.value
+
+    # Handle dictionaries
+    elif isinstance(obj, dict):
+        return {key: convert_to_json_serializable(value) for key, value in obj.items()}
+
+    # Handle lists and tuples
+    elif isinstance(obj, (list, tuple)):
+        return [convert_to_json_serializable(item) for item in obj]
+
+    # Handle sets
+    elif isinstance(obj, set):
+        return [convert_to_json_serializable(item) for item in obj]
+
+    # Handle Pydantic models and other objects with model_dump method
+    elif hasattr(obj, 'model_dump'):
+        return convert_to_json_serializable(obj.model_dump())
+
+    # Return primitive types as-is
+    return obj
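
convert_to_json_serializable walks the structure eagerly, so by the time json.dumps runs there is normally nothing left for the encoder's default() hook; the encoder class is the backstop for anything missed. A quick sketch:

    import json
    from datetime import datetime, timezone
    from enum import Enum
    from ipulse_shared_core_ftredge.utils.json_encoder import (
        EnsureJSONEncoderCompatibility, convert_to_json_serializable
    )

    class Color(Enum):  # illustrative enum
        RED = "red"

    nested = {"when": datetime.now(timezone.utc), "tags": {Color.RED}}
    safe = convert_to_json_serializable(nested)
    # e.g. {'when': '2025-01-01T00:00:00+00:00', 'tags': ['red']}
    print(json.dumps(safe, cls=EnsureJSONEncoderCompatibility))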
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipulse_shared_core_ftredge
-Version: 10.1.1
+Version: 12.0.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -13,7 +13,7 @@ Requires-Dist: pydantic[email]~=2.5
 Requires-Dist: python-dateutil~=2.8
 Requires-Dist: fastapi~=0.115.8
 Requires-Dist: pytest
-Requires-Dist: ipulse_shared_base_ftredge>=6.4.1
+Requires-Dist: ipulse_shared_base_ftredge==6.5.1
 Dynamic: author
 Dynamic: classifier
 Dynamic: home-page
@@ -25,6 +25,7 @@ src/ipulse_shared_core_ftredge/models/user_status.py
 src/ipulse_shared_core_ftredge/services/__init__.py
 src/ipulse_shared_core_ftredge/services/base_firestore_service.py
 src/ipulse_shared_core_ftredge/services/base_service_exceptions.py
+src/ipulse_shared_core_ftredge/services/credit_service.py
 src/ipulse_shared_core_ftredge/services/fastapiservicemon.py
 src/ipulse_shared_core_ftredge/services/servicemon.py
 src/ipulse_shared_core_ftredge/utils/__init__.py
@@ -2,4 +2,4 @@ pydantic[email]~=2.5
 python-dateutil~=2.8
 fastapi~=0.115.8
 pytest
-ipulse_shared_base_ftredge>=6.4.1
+ipulse_shared_base_ftredge==6.5.1
@@ -1,41 +0,0 @@
-from datetime import datetime, timezone
-from typing import ClassVar
-from pydantic import BaseModel, Field, ConfigDict, field_validator
-import dateutil.parser
-
-class BaseDataModel(BaseModel):
-    """Base model with common fields and configuration"""
-    model_config = ConfigDict(frozen=True, extra="forbid")
-
-    # Required class variables that must be defined in subclasses
-    VERSION: ClassVar[float]
-    DOMAIN: ClassVar[str]
-    OBJ_REF: ClassVar[str]
-
-    # Schema versioning
-    schema_version: float = Field(
-        ...,  # Make this required
-        description="Version of this Class == version of DB Schema",
-        frozen=True
-    )
-
-    # Audit fields
-    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
-    created_by: str = Field(..., frozen=True)
-    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc), frozen=True)
-    updated_by: str = Field(...)
-
-    @classmethod
-    def get_collection_name(cls) -> str:
-        """Generate standard collection name"""
-        return f"{cls.DOMAIN}_{cls.OBJ_REF}s"
-
-    @field_validator('created_at', 'updated_at', mode='before')
-    @classmethod
-    def parse_datetime(cls, v: any) -> datetime:
-        if isinstance(v, datetime):
-            return v
-        try:
-            return dateutil.parser.isoparse(v)
-        except (TypeError, ValueError) as e:
-            raise ValueError(f"Invalid datetime format: {e}")
@@ -1 +0,0 @@
-from .json_encoder import CustomJSONEncoder
@@ -1,13 +0,0 @@
-import json
-from datetime import datetime
-from google.cloud.firestore_v1._helpers import DatetimeWithNanoseconds
-from google.api_core import datetime_helpers
-
-class CustomJSONEncoder(json.JSONEncoder):
-    """Custom JSON encoder that handles Firestore datetime types."""
-    def default(self, obj):
-        if isinstance(obj, (datetime, DatetimeWithNanoseconds)):
-            return obj.isoformat()
-        if isinstance(obj, datetime_helpers.DatetimeWithNanoseconds):
-            return obj.isoformat()
-        return super().default(obj)