ipulse-shared-core-ftredge 3.2.3__tar.gz → 5.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.

Files changed (31) hide show
  1. {ipulse_shared_core_ftredge-3.2.3/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-5.1.1}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/setup.py +1 -1
  3. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/__init__.py +9 -0
  4. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/dependencies/__init__.py +1 -0
  5. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/dependencies/auth_router.py +58 -0
  6. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/dependencies/authorization_api.py +260 -0
  7. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/dependencies/database.py +19 -0
  8. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/dependencies/token_validation.py +59 -0
  9. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/__init__.py +2 -0
  10. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/models/api_response.py +37 -0
  11. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/models/subscription.py +34 -0
  12. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +7 -11
  13. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/user_status.py +14 -35
  14. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/services/__init__.py +4 -0
  15. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +75 -0
  16. ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge/services/exceptions.py +133 -0
  17. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  18. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +10 -1
  19. ipulse_shared_core_ftredge-3.2.3/src/ipulse_shared_core_ftredge/__init__.py +0 -4
  20. ipulse_shared_core_ftredge-3.2.3/tests/test_utils_gcp.py +0 -189
  21. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/LICENCE +0 -0
  22. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/README.md +0 -0
  23. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/pyproject.toml +0 -0
  24. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/setup.cfg +0 -0
  25. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
  26. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
  27. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  28. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  29. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  30. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  31. {ipulse_shared_core_ftredge-3.2.3 → ipulse_shared_core_ftredge-5.1.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 3.2.3
3
+ Version: 5.1.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
 
4
4
  setup(
5
5
  name='ipulse_shared_core_ftredge',
6
- version='3.2.3',
6
+ version='5.1.1',
7
7
  package_dir={'': 'src'}, # Specify the source directory
8
8
  packages=find_packages(where='src'), # Look for packages in 'src'
9
9
  install_requires=[
@@ -0,0 +1,9 @@
1
+ # pylint: disable=missing-module-docstring
2
+ from .models import ( UserAuth, UserProfile,Subscription,
3
+ UserStatus, UserProfileUpdate,
4
+ Organisation, StandardResponse )
5
+
6
+
7
+
8
+ from .services import (BaseFirestoreService,BaseServiceException, ResourceNotFoundError, AuthorizationError,
9
+ ValidationError)
@@ -0,0 +1 @@
1
+ from .token_validation import verify_firebase_token
@@ -0,0 +1,58 @@
1
+ from typing import Callable, Optional, List
2
+ from fastapi import APIRouter, Depends
3
+ from .token_validation import verify_firebase_token
4
+
5
def create_protected_router(
    *,
    prefix: str = "",
    tags: Optional[List[str]] = None,
    public_paths: Optional[List[str]] = None
) -> APIRouter:
    """
    Create an APIRouter whose routes require Firebase authentication by default.

    Routes whose path appears in ``public_paths`` are registered without the
    auth dependency; every other route gets ``Depends(verify_firebase_token)``
    appended (unless the caller already supplied it).

    Args:
        prefix: Router prefix.
        tags: OpenAPI tags.
        public_paths: Paths that stay public (no auth required). Paths are
            compared as registered on the router, i.e. without the prefix.

    Example:
        router = create_protected_router(
            prefix="/api/v1",
            tags=["users"],
            public_paths=["/health", "/docs"]
        )
    """
    public_paths = public_paths or []
    router = APIRouter(prefix=prefix, tags=tags)

    # Keep a reference to the original registration method; we wrap it below.
    original_add_api_route = router.add_api_route

    def add_api_route_with_auth(
        path: str,
        endpoint: Callable,
        *args,
        dependencies: Optional[List] = None,
        **kwargs
    ):
        # Protect every path that is not explicitly public.
        if path not in public_paths:
            # Copy the caller's list so we never mutate their argument in place.
            dependencies = list(dependencies) if dependencies else []
            # Avoid registering the auth dependency twice if the caller
            # already included it explicitly.
            if not any(getattr(dep.dependency, '__name__', None) == 'verify_firebase_token'
                       for dep in dependencies):
                dependencies.append(Depends(verify_firebase_token))

        return original_add_api_route(
            path,
            endpoint,
            *args,
            dependencies=dependencies,
            **kwargs
        )

    # Replace the route registration method with the auth-aware wrapper.
    router.add_api_route = add_api_route_with_auth  # type: ignore

    return router
@@ -0,0 +1,260 @@
1
+ import os
2
+ import logging
3
+ from typing import Optional, Iterable, Dict, Any, List
4
+ from datetime import datetime, timedelta, timezone
5
+ import httpx
6
+ from fastapi import HTTPException, Request
7
+ from google.cloud import firestore
8
+ from ipulse_shared_core_ftredge.services.exceptions import ServiceError, AuthorizationError, ResourceNotFoundError
9
+
10
# Constants
USERS_STATUS_COLLECTION_NAME = "user-statuses"
USERS_STATUS_DOC_REF = "userusrsttus_"
CACHE_TTL = 60  # seconds a cached status entry stays valid

class UserStatusCache:
    """In-memory user-status cache with TTL-based invalidation."""

    def __init__(self):
        # Cached payloads and the time each entry was stored, keyed by user id.
        self._cache: Dict[str, Dict[str, Any]] = {}
        self._timestamps: Dict[str, datetime] = {}

    def get(self, user_uid: str) -> Optional[Dict[str, Any]]:
        """
        Return the cached status for *user_uid*, or None on a miss.

        An entry older than CACHE_TTL seconds counts as a miss and is
        evicted on the way out.
        """
        if user_uid not in self._cache:
            return None
        entry = self._cache[user_uid]
        age = datetime.now() - self._timestamps[user_uid]
        if age < timedelta(seconds=CACHE_TTL):
            return entry
        # Entry expired: drop it and report a miss.
        self.invalidate(user_uid)
        return None

    def set(self, user_uid: str, data: Dict[str, Any]) -> None:
        """Store *data* for *user_uid* and stamp it with the current time."""
        self._cache[user_uid] = data
        self._timestamps[user_uid] = datetime.now()

    def invalidate(self, user_uid: str) -> None:
        """Remove *user_uid* from the cache (no-op when absent)."""
        self._cache.pop(user_uid, None)
        self._timestamps.pop(user_uid, None)
57
+
58
+ # Global cache instance
59
+ user_status_cache = UserStatusCache()
60
+
61
+ # Replace the logger dependency with a standard logger
62
+ logger = logging.getLogger(__name__)
63
+
64
async def get_user_status(
    user_uid: str,
    db: firestore.Client,  # Note: This expects the actual client, not a Depends
    force_fresh: bool = False
) -> tuple[Dict[str, Any], bool]:
    """
    Fetch a user's status document, using the module-level cache when allowed.

    Args:
        user_uid: Firebase UID of the user.
        db: Firestore client instance.
        force_fresh: When True, bypass the cache and skip re-caching the result.

    Returns:
        Tuple of (status data, True when the value came from the cache).

    Raises:
        ResourceNotFoundError: When no status document exists for the user.
        ServiceError: On any other Firestore/runtime failure.
    """
    cache_used = False
    if not force_fresh:
        cached_status = user_status_cache.get(user_uid)
        if cached_status:
            return cached_status, True

    try:
        # Status documents are keyed by a fixed prefix plus the user's UID.
        user_status_id = USERS_STATUS_DOC_REF + user_uid
        user_ref = db.collection(USERS_STATUS_COLLECTION_NAME).document(user_status_id)

        snapshot = user_ref.get()
        if not snapshot.exists:
            raise ResourceNotFoundError(
                resource_type="user_status",
                resource_id=user_status_id,
                additional_info={"user_uid": user_uid}
            )

        status_data = snapshot.to_dict()

        # Only cache if not forced fresh
        if not force_fresh:
            user_status_cache.set(user_uid, status_data)
        return status_data, cache_used

    except ResourceNotFoundError:
        raise
    except Exception as e:
        raise ServiceError(
            operation="fetching user status",  # plain string: was an f-string with no placeholders
            error=e,
            resource_type="user_status",
            resource_id=user_uid,
            additional_info={
                "force_fresh": force_fresh,
                "collection": USERS_STATUS_COLLECTION_NAME
            }
        ) from e
113
+
114
+ def _validate_resource_fields(fields: Dict[str, Any]) -> List[str]:
115
+ """
116
+ Filter out invalid fields similar to BaseFirestoreService validation.
117
+ Returns only fields that have actual values to update.
118
+ """
119
+ valid_fields = {
120
+ k: v for k, v in fields.items()
121
+ if v is not None and not (isinstance(v, (list, dict, set)) and len(v) == 0)
122
+ }
123
+ return list(valid_fields.keys())
124
+
125
async def extract_request_fields(request: Request) -> Optional[List[str]]:
    """
    Extract the field names touched by a PATCH or POST request body.

    For PATCH, fields are filtered through _validate_resource_fields so only
    fields with real values count; for POST every supplied field is returned.
    Returns None for other methods, non-dict bodies, or on any parse error.
    """
    try:
        body = await request.json()
        if isinstance(body, dict):
            if request.method == "PATCH":
                return _validate_resource_fields(body)
            elif request.method == "POST":
                # For POST, we want to include all fields being set
                return list(body.keys())
        elif hasattr(body, 'model_dump'):
            # NOTE(review): request.json() returns parsed JSON (dict/list/...),
            # so a pydantic model should never reach this branch — confirm
            # before relying on it.
            data = body.model_dump(exclude_unset=True)
            if request.method == "PATCH":
                return _validate_resource_fields(data)
            elif request.method == "POST":
                return list(data.keys())

        return None

    except Exception as e:
        # Best-effort extraction: parse problems are logged, not raised.
        logger.error(f"Error extracting fields from request body: {str(e)}")
        return None
149
+
150
async def authorizeAPIRequest(
    request: Request,
    db: firestore.Client,  # Changed: Now expects actual client instance
    request_resource_fields: Optional[Iterable[str]] = None,
) -> Dict[str, Any]:
    """
    Authorize an API request based on user status and OPA policies.

    Note: This expects an actual Firestore client instance, not a dependency.

    Args:
        request: Incoming request; request.state.user must hold the decoded
            Firebase token.
        db: Firestore client used to load the requestor's status document.
        request_resource_fields: Field names affected by the request body;
            extracted automatically for PATCH/POST when omitted.

    Returns:
        Metadata describing the freshness of the status data that was used.

    Raises:
        AuthorizationError: Unknown user, or OPA denied the request.
        ResourceNotFoundError: The user's status document is missing.
        HTTPException: The OPA service returned a non-200 response.
        ServiceError: Any other unexpected failure (chained to its cause).
    """
    try:
        # Extract fields for both PATCH and POST if not provided
        if not request_resource_fields:
            request_resource_fields = await extract_request_fields(request)

        # Extract request context
        user_uid = request.state.user.get('uid')
        if not user_uid:
            raise AuthorizationError(
                action="access API",
                additional_info={"path": str(request.url)}
            )

        # Credit-sensitive operations must not run against stale cached status.
        force_fresh = _should_force_fresh_status(request)
        user_status, cache_used = await get_user_status(user_uid, db, force_fresh=force_fresh)

        # Prepare authorization input
        auth_input = {
            "api_url": request.url.path,
            "requestor": {
                "uid": user_uid,
                "usertypes": request.state.user.get("usertypes"),
                "email_verified": request.state.user.get("email_verified"),
                "iam_groups": user_status.get("iam_groups"),
                "active_sbscrptn_plan": user_status.get("active_sbscrptn_plan"),
                "active_sbscrptn_status": user_status.get("sbscrptn_status"),
                "sbscrptn_insight_credits": user_status.get("sbscrptn_insight_credits"),
            },
            "method": request.method.lower(),
            "request_resource_fields": request_resource_fields
        }

        # Query OPA
        opa_url = f"{os.getenv('OPA_SERVER_URL', 'http://localhost:8181')}{os.getenv('OPA_DECISION_PATH', '/v1/data/http/authz/ingress/decision')}"
        async with httpx.AsyncClient() as client:
            response = await client.post(
                opa_url,
                json={"input": auth_input},
                timeout=5.0  # 5 seconds timeout
            )

            if response.status_code != 200:
                logger.error(f"OPA authorization failed: {response.text}")
                raise HTTPException(
                    status_code=500,
                    detail="Authorization service error"
                )

            result = response.json()
            if not result.get("result", {}).get("allow", False):
                raise AuthorizationError(
                    action=f"{request.method} {request.url.path}",
                    additional_info={
                        "user_uid": user_uid,
                        "resource_fields": request_resource_fields
                    }
                )

        # More descriptive metadata about the data freshness
        return {
            "used_cached_status": cache_used,
            "required_fresh_status": force_fresh,
            "status_retrieved_at": datetime.now(timezone.utc).isoformat()
        }

    except (AuthorizationError, ResourceNotFoundError):
        raise
    except HTTPException:
        # Let the explicit OPA-service HTTPException propagate as-is instead
        # of being re-wrapped into a ServiceError by the handler below.
        raise
    except Exception as e:
        raise ServiceError(
            operation="API authorization",
            error=e,
            resource_type="authorization",
            additional_info={
                "path": str(request.url),
                "method": request.method,
                "user_uid": request.state.user.get('uid'),
                "resource_fields": request_resource_fields
            }
        ) from e
240
+
241
def _should_force_fresh_status(request: Request) -> bool:
    """
    Decide whether the user's status must be re-read instead of served
    from cache, based on the request path and HTTP method.
    """
    # Path fragments that indicate credit-sensitive operations.
    credit_sensitive_patterns = (
        'prediction',
        'user-statuses',
    )
    # Mutating methods always require fresh status.
    sensitive_methods = {'post', 'patch', 'put', 'delete'}

    if request.method.lower() in sensitive_methods:
        return True
    path = request.url.path.lower()
    return any(pattern in path for pattern in credit_sensitive_patterns)
@@ -0,0 +1,19 @@
1
from typing import Annotated
from fastapi import Depends
from google.cloud import firestore
from functools import lru_cache
import logging

logger = logging.getLogger(__name__)

@lru_cache()
def get_db() -> firestore.Client:
    """
    Dependency function to inject the Firestore client.

    Each service implementing this should override this function with their
    own Firebase initialization (e.g. via app.dependency_overrides).

    Note: lru_cache makes this a process-wide singleton — the client is
    created on the first call and the same instance is returned afterwards.
    """
    logger.info("Base get_db dependency called - this should be overridden by the implementing service")
    return firestore.Client()

# Base type for dependency injection that services will implement.
# Endpoints can simply declare a parameter as `db: FirestoreDB`.
FirestoreDB = Annotated[firestore.Client, Depends(get_db)]
@@ -0,0 +1,59 @@
1
+ from typing import Optional, Annotated
2
+ from fastapi import Request, HTTPException, Depends, Header
3
+ from firebase_admin import auth
4
+
5
class AuthenticatedUser:
    """
    Lightweight holder for the identity attributes of an authenticated user.
    """

    def __init__(self, uid: str, email: str, email_verified: bool, usertypes: list[str]):
        # Attributes mirror the decoded Firebase token claims we rely on.
        self.uid = uid
        self.email = email
        self.email_verified = email_verified
        self.usertypes = usertypes
14
+
15
async def verify_firebase_token(
    request: Request,
    x_forwarded_authorization: Optional[str] = Header(None),
    authorization: Optional[str] = Header(None)
) -> AuthenticatedUser:
    """
    Validate the Firebase ID token carried by the request headers.

    Checks x-forwarded-authorization first (set by proxies/API gateways),
    then the plain authorization header. On success the decoded token is
    stored on request.state.user and an AuthenticatedUser is returned.

    Raises:
        HTTPException: 401 when the token is missing or fails verification.
    """
    # Get token from either x-forwarded-authorization or authorization header
    token = x_forwarded_authorization or authorization

    if not token:
        raise HTTPException(
            status_code=401,
            detail="Authorization token is missing"
        )

    try:
        # Strip only a leading 'Bearer ' scheme; str.replace would also
        # mangle any later occurrence of the substring inside the token.
        token = token.removeprefix("Bearer ")
        # Verify signature/expiry with the Firebase Admin SDK.
        decoded_token = auth.verify_id_token(token)

        # Create AuthenticatedUser instance
        user = AuthenticatedUser(
            uid=decoded_token.get('uid'),
            email=decoded_token.get('email'),
            email_verified=decoded_token.get('email_verified', False),
            usertypes=decoded_token.get('usertypes', [])
        )

        # Store user in request state for use in other parts of the application
        request.state.user = decoded_token

        return user

    except Exception as e:
        raise HTTPException(
            status_code=401,
            detail=f"Invalid token: {str(e)}"
        ) from e
56
+
57
+ # Type alias for dependency injection
58
+ AuthUser = Annotated[AuthenticatedUser, Depends(verify_firebase_token)]
59
+
@@ -1,8 +1,10 @@
1
1
  from .user_profile import UserProfile
2
+ from .subscription import Subscription
2
3
  from .user_status import UserStatus
3
4
  from .user_profile_update import UserProfileUpdate
4
5
  from .user_auth import UserAuth
5
6
  from .organisation import Organisation
7
+ from .api_response import StandardResponse
6
8
 
7
9
 
8
10
 
@@ -0,0 +1,37 @@
1
from typing import Generic, TypeVar, Optional, Any, Dict, List
from pydantic import BaseModel, ConfigDict, Field
import datetime as dt
from fastapi.responses import JSONResponse
from ipulse_shared_core_ftredge.utils.json_encoder import CustomJSONEncoder
import json


T = TypeVar('T')

class StandardResponse(BaseModel, Generic[T]):
    """Uniform envelope for API responses."""
    model_config = ConfigDict(arbitrary_types_allowed=True)
    success: bool
    data: Optional[T] = None
    message: Optional[str] = None
    error: Optional[str] = None

    # default_factory ensures the timestamp is computed per response instance;
    # the old class-level dict default froze a single value at import time.
    metadata: Dict[str, Any] = Field(
        default_factory=lambda: {
            "timestamp": dt.datetime.now(dt.timezone.utc).isoformat()
        }
    )

class PaginatedResponse(StandardResponse, Generic[T]):
    """StandardResponse plus pagination bookkeeping."""
    total_count: int
    page: int
    page_size: int
    items: List[T]

class CustomJSONResponse(JSONResponse):
    """JSONResponse that serializes project-specific types via CustomJSONEncoder."""
    def render(self, content) -> bytes:
        return json.dumps(
            content,
            ensure_ascii=False,
            allow_nan=False,
            indent=None,
            separators=(",", ":"),
            default=CustomJSONEncoder().default
        ).encode("utf-8")
@@ -0,0 +1,34 @@
1
from datetime import datetime, timezone
from dateutil.relativedelta import relativedelta
from typing import Set, Optional, Dict, List, ClassVar
from pydantic import BaseModel, Field, ConfigDict
from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings
# ORIGINAL AUTHOR ="Russlan Ramdowar;russlan@ftredge.com"
# CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)


DEFAULT_SUBSCRIPTION_PLAN="subscription_free"
DEFAULT_SUBSCRIPTION_STATUS="active"

############################################ !!!!! ALWAYS UPDATE SCHEMA VERSION , IF SCHEMA IS BEING MODIFIED !!! ############################################
class Subscription(BaseModel):
    """
    Represents a single subscription cycle.
    """
    plan_name: str = Field(
        default=DEFAULT_SUBSCRIPTION_PLAN,
        description="Subscription Plan Name"
    )

    # default_factory (not default) so each instance gets its creation time;
    # a plain default would be evaluated once at import and shared forever.
    cycle_start_date: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        description="Subscription Cycle Start Date"
    )
    # The previous code passed a lambda as `default`, making the callable
    # itself the field default; default_factory is the correct mechanism.
    cycle_end_date: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc) + relativedelta(years=1),
        description="Subscription Cycle End Date"
    )
    status: str = Field(
        default=DEFAULT_SUBSCRIPTION_STATUS,
        description="Subscription Status (active, inactive, etc.)"
    )
@@ -1,7 +1,11 @@
1
1
  from datetime import datetime, date
2
2
  from typing import Set, Optional, ClassVar
3
3
  from pydantic import BaseModel, EmailStr, Field, ConfigDict
4
+ from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings
4
5
 
6
+ # # Revision history (as model metadata)
7
+ # CLASS_ORIGIN_AUTHOR: ClassVar[str] = "Russlan Ramdowar;russlan@ftredge.com"
8
+ # CLASS_ORGIN_DATE: ClassVar[datetime] = datetime(2024, 1, 16, 20, 5)
5
9
  class UserProfile(BaseModel):
6
10
  """
7
11
  User Profile model representing user information and metadata.
@@ -11,12 +15,11 @@ class UserProfile(BaseModel):
11
15
 
12
16
  # Metadata as class variables
13
17
  VERSION: ClassVar[float] = 3.01
14
- DOMAIN: ClassVar[str] = "user"
15
- OBJ_REF: ClassVar[str] = "usprfl"
18
+ DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name))
16
19
 
17
20
  # System-managed fields (read-only)
18
21
  schema_version: float = Field(
19
- default=3.01,
22
+ default=VERSION,
20
23
  description="Version of this Class == version of DB Schema",
21
24
  frozen=True
22
25
  )
@@ -66,11 +69,4 @@ class UserProfile(BaseModel):
66
69
  default=None,
67
70
  pattern=r"^\+?[1-9]\d{1,14}$", # Added 'r' prefix for raw string
68
71
  description="E.164 format phone number"
69
- )
70
-
71
- # Revision history (as model metadata)
72
- CLASS_ORIGIN_AUTHOR: ClassVar[str] = "Russlan Ramdowar;russlan@ftredge.com"
73
- CLASS_ORGIN_DATE: ClassVar[datetime] = datetime(2024, 1, 16, 20, 5)
74
- CLASS_REVISION_AUTHOR: ClassVar[str] = "Russlan Ramdowar;russlan@ftredge.com"
75
- CLASS_REVISION_DATE: ClassVar[datetime] = datetime(2024, 2, 13, 20, 15)
76
- LAST_MODIFICATION: ClassVar[str] = "Updated to Pydantic v2 with improved validation"
72
+ )
@@ -1,22 +1,13 @@
1
- from datetime import datetime
1
+ from datetime import datetime, timezone
2
2
  from dateutil.relativedelta import relativedelta
3
3
  from typing import Set, Optional, Dict, List, ClassVar
4
4
  from pydantic import BaseModel, Field, ConfigDict
5
-
5
+ from .subscription import Subscription
6
+ from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings
6
7
  # ORIGINAL AUTHOR ="Russlan Ramdowar;russlan@ftredge.com"
7
- CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
8
-
9
- SCHEMA_VERSION = 2.3
10
- CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
11
- CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
12
- LAST_MODIFICATION="Changed default IAM_GROUPS"
13
-
14
- DOMAIN="user"
15
- OBJ_REF = "usrsttus"
8
+ # CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
16
9
 
17
10
  DEFAULT_IAM_GROUPS={"pulseroot":["full_open_read"]}
18
- DEFAULT_SUBSCRIPTION_PLAN="subscription_free"
19
- DEFAULT_SUBSCRIPTION_STATUS="active"
20
11
  DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=10
21
12
  DEFAULT_EXTRA_INSIGHT_CREDITS=0
22
13
 
@@ -29,7 +20,7 @@ class UserStatus(BaseModel):
29
20
 
30
21
  # Class constants
31
22
  VERSION: ClassVar[float] = 2.3
32
- DOMAIN: ClassVar[str] = "user"
23
+ DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name))
33
24
  OBJ_REF: ClassVar[str] = "usrsttus"
34
25
 
35
26
  # Default values as class variables
@@ -41,7 +32,7 @@ class UserStatus(BaseModel):
41
32
 
42
33
  # System-managed fields
43
34
  schema_version: float = Field(
44
- default=2.3,
35
+ default=VERSION,
45
36
  description="Version of this Class == version of DB Schema"
46
37
  )
47
38
 
@@ -50,32 +41,20 @@ class UserStatus(BaseModel):
50
41
  default_factory=lambda: UserStatus.DEFAULT_IAM_GROUPS,
51
42
  description="User's Groups, with a default one for all authenticated Pulse users"
52
43
  )
53
- sbscrptn_plan: str = Field(
54
- default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_PLAN,
55
- description="Subscription Plan"
56
- )
57
- sbscrptn_status: str = Field(
58
- default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_STATUS,
59
- description="Subscription Status"
60
- )
61
44
 
62
- # Subscription dates
63
- sbscrptn_start_date: datetime = Field(
64
- default_factory=datetime.utcnow,
65
- description="Subscription Start Date"
66
- )
67
- sbscrptn_end_date: datetime = Field(
68
- default_factory=lambda: datetime.utcnow() + relativedelta(years=1),
69
- description="Subscription End Date"
45
+ # Subscription Management
46
+ subscriptions: Dict[str, Subscription] = Field(
47
+ default_factory=dict,
48
+ description="Dictionary of user's active and past subscriptions, keyed by plan name"
70
49
  )
71
-
50
+
72
51
  # Credits management
73
- sbscrptn_insight_credits: int = Field(
52
+ sbscrptn_allowance_insight_credits: int = Field(
74
53
  default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS,
75
54
  description="Subscription-based insight credits"
76
55
  )
77
- sbscrptn_ins_crdts_updtd_since_datetime: datetime = Field(
78
- default_factory=datetime.utcnow,
56
+ sbscrptn_allowance_insight_credits_updtd_on: datetime = Field(
57
+ default_factory=datetime.now,
79
58
  description="Last update timestamp for subscription credits"
80
59
  )
81
60
  extra_insight_credits: int = Field(
@@ -0,0 +1,4 @@
1
+ from .base_firestore_service import BaseFirestoreService
2
+
3
+ from .exceptions import (BaseServiceException, ResourceNotFoundError, AuthorizationError,
4
+ ValidationError )
@@ -0,0 +1,75 @@
1
+ from typing import Dict, Any, Optional
2
+ from datetime import datetime,timezone
3
+ from fastapi import HTTPException
4
+ from google.cloud import firestore
5
+ from .exceptions import ResourceNotFoundError, ValidationError
6
+
7
class BaseFirestoreService:
    """
    Thin Firestore CRUD helper with uniform validation and error handling.

    Subclasses bind a collection name and a human-readable resource type so
    that raised errors carry consistent context.
    """

    def __init__(self, db: firestore.Client, collection_name: str, resource_type: str):
        self.db = db
        self.collection_name = collection_name
        self.resource_type = resource_type

    def _validate_update_fields(self, update_data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Centralized update fields validation.

        Drops None values and empty list/dict/set values; raises
        ValidationError when nothing usable remains.
        """
        if not isinstance(update_data, dict):
            # Accept pydantic models transparently.
            update_data = update_data.model_dump(exclude_unset=True)

        valid_fields = {
            k: v for k, v in update_data.items()
            if v is not None and not (isinstance(v, (list, dict, set)) and len(v) == 0)
        }

        if not valid_fields:
            raise ValidationError(
                resource_type=self.resource_type,
                detail="No valid fields to update",
                resource_id=None
            )

        return valid_fields

    async def get_document(self, doc_id: str) -> Dict[str, Any]:
        """Get a document by ID; raise ResourceNotFoundError when missing."""
        doc_ref = self.db.collection(self.collection_name).document(doc_id)
        doc = doc_ref.get()

        if not doc.exists:
            raise ResourceNotFoundError(
                resource_type=self.resource_type,
                resource_id=doc_id,
                additional_info={"collection": self.collection_name}
            )

        return doc.to_dict()

    async def update_document(self, doc_id: str, update_data: Dict[str, Any], user_uid: Optional[str] = None) -> Dict[str, Any]:
        """
        Validate and apply a partial update, stamping audit fields.

        Returns the updated document. Raises ResourceNotFoundError when the
        document does not exist and ValidationError when no usable fields
        were supplied.
        """
        try:
            doc_ref = self.db.collection(self.collection_name).document(doc_id)

            if not doc_ref.get().exists:
                raise ResourceNotFoundError(
                    resource_type=self.resource_type,
                    resource_id=doc_id,
                    additional_info={"collection": self.collection_name}
                )

            valid_fields = self._validate_update_fields(update_data)

            # Audit trail: record when and by whom the update happened.
            valid_fields.update({
                'updt_date': datetime.now(timezone.utc).isoformat(),
                'updt_by_user': user_uid if user_uid else None
            })

            doc_ref.update(valid_fields)
            return doc_ref.get().to_dict()

        except (ResourceNotFoundError, ValidationError):
            raise
        except Exception as e:
            # Chain the original cause for debuggability.
            raise HTTPException(
                status_code=500,
                detail=f"Failed to update {self.resource_type}: {str(e)}"
            ) from e
@@ -0,0 +1,133 @@
1
+ from fastapi import HTTPException
2
+ from typing import Optional, Any, Dict
3
+ import traceback
4
+ import logging
5
+
6
class BaseServiceException(HTTPException):
    """
    Root exception for service errors: an HTTPException enriched with
    resource context, the optional original error, and a captured traceback.
    """
    def __init__(
        self,
        status_code: int,
        detail: str,
        resource_type: str,
        resource_id: Optional[str] = None,
        additional_info: Optional[Dict[str, Any]] = None,
        original_error: Optional[Exception] = None
    ):
        self.resource_type = resource_type
        self.resource_id = resource_id
        self.additional_info = additional_info or {}
        self.original_error = original_error

        # Get full traceback if there's an original error
        if original_error:
            self.traceback = ''.join(traceback.format_exception(
                type(original_error),
                original_error,
                original_error.__traceback__
            ))
        else:
            # No cause available: capture the current call stack instead.
            self.traceback = ''.join(traceback.format_stack())

        # Build detailed message
        detail_msg = f"{detail}"
        if resource_type:
            detail_msg += f" [Resource Type: {resource_type}]"
        if resource_id:
            detail_msg += f" [ID: {resource_id}]"

        super().__init__(status_code=status_code, detail=detail_msg)

    def log_error(self, logger: logging.Logger):
        """Log error with full context"""
        # Structured context passed via `extra` for log processors.
        error_context = {
            "status_code": self.status_code,
            "resource_type": self.resource_type,
            "resource_id": self.resource_id,
            "detail": self.detail,
            **self.additional_info
        }

        log_message = f"""
        Service Error Occurred:
        Status Code: {self.status_code}
        Resource Type: {self.resource_type}
        Resource ID: {self.resource_id}
        Detail: {self.detail}
        Additional Info: {self.additional_info}
        {'Original Error: ' + str(self.original_error) if self.original_error else ''}
        Traceback:
        {self.traceback}
        """

        logger.error(log_message, extra=error_context)
63
+
64
+
65
class ServiceError(BaseServiceException):
    """Generic 500-level service failure wrapping an underlying exception."""

    def __init__(
        self,
        operation: str,
        error: Exception,
        resource_type: str,
        resource_id: Optional[str] = None,
        additional_info: Optional[Dict[str, Any]] = None
    ):
        # Compose the user-facing detail from the failed operation and cause.
        message = f"Error during {operation}: {str(error)}"
        super().__init__(
            status_code=500,
            detail=message,
            resource_type=resource_type,
            resource_id=resource_id,
            additional_info=additional_info,
            original_error=error
        )
83
+
84
+
85
class ResourceNotFoundError(BaseServiceException):
    """404 error raised when a requested resource does not exist."""

    def __init__(
        self,
        resource_type: str,
        resource_id: str,
        additional_info: Optional[Dict[str, Any]] = None
    ):
        super().__init__(
            status_code=404,
            detail="Resource not found",
            resource_type=resource_type,
            resource_id=resource_id,
            additional_info=additional_info
        )
99
+
100
class AuthorizationError(BaseServiceException):
    """403 error raised when the requestor may not perform an action."""

    def __init__(
        self,
        action: str,
        resource_type: str = "authorization",
        resource_id: Optional[str] = None,
        additional_info: Optional[Dict[str, Any]] = None,
        original_error: Optional[Exception] = None
    ):
        # Detail names the denied action, e.g. "Not authorized to access API".
        message = f"Not authorized to {action}"
        super().__init__(
            status_code=403,
            detail=message,
            resource_type=resource_type,
            resource_id=resource_id,
            additional_info=additional_info,
            original_error=original_error
        )
117
+
118
class ValidationError(BaseServiceException):
    """422 error raised when supplied data fails validation."""

    def __init__(
        self,
        resource_type: str,
        detail: str,
        resource_id: Optional[str] = None,
        additional_info: Optional[Dict[str, Any]] = None
    ):
        super().__init__(
            status_code=422,
            detail=detail,
            resource_type=resource_type,
            resource_id=resource_id,
            additional_info=additional_info
        )
133
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 3.2.3
3
+ Version: 5.1.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -8,11 +8,20 @@ src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
8
8
  src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
9
9
  src/ipulse_shared_core_ftredge.egg-info/requires.txt
10
10
  src/ipulse_shared_core_ftredge.egg-info/top_level.txt
11
+ src/ipulse_shared_core_ftredge/dependencies/__init__.py
12
+ src/ipulse_shared_core_ftredge/dependencies/auth_router.py
13
+ src/ipulse_shared_core_ftredge/dependencies/authorization_api.py
14
+ src/ipulse_shared_core_ftredge/dependencies/database.py
15
+ src/ipulse_shared_core_ftredge/dependencies/token_validation.py
11
16
  src/ipulse_shared_core_ftredge/models/__init__.py
17
+ src/ipulse_shared_core_ftredge/models/api_response.py
12
18
  src/ipulse_shared_core_ftredge/models/organisation.py
13
19
  src/ipulse_shared_core_ftredge/models/resource_catalog_item.py
20
+ src/ipulse_shared_core_ftredge/models/subscription.py
14
21
  src/ipulse_shared_core_ftredge/models/user_auth.py
15
22
  src/ipulse_shared_core_ftredge/models/user_profile.py
16
23
  src/ipulse_shared_core_ftredge/models/user_profile_update.py
17
24
  src/ipulse_shared_core_ftredge/models/user_status.py
18
- tests/test_utils_gcp.py
25
+ src/ipulse_shared_core_ftredge/services/__init__.py
26
+ src/ipulse_shared_core_ftredge/services/base_firestore_service.py
27
+ src/ipulse_shared_core_ftredge/services/exceptions.py
@@ -1,4 +0,0 @@
1
- # pylint: disable=missing-module-docstring
2
- from .models import ( UserAuth, UserProfile,
3
- UserStatus, UserProfileUpdate,
4
- Organisation)
@@ -1,189 +0,0 @@
1
- # pylint: disable=missing-module-docstring
2
- # pylint: disable=import-error
3
- # pylint: disable=missing-module-docstring
4
- # pylint: disable=line-too-long
5
- # pylint: disable=missing-function-docstring
6
-
7
- import os
8
- import json
9
- from unittest.mock import MagicMock, patch
10
- import pytest
11
- from ipulse_shared_core_ftredge.utils_cloud_gcp import write_json_to_gcs
12
-
13
-
14
- # Mocking Google Cloud Storage components for testing using pytest-mock
15
-
16
- @pytest.fixture
17
- def mock_blob(mocker):
18
- mock_blob_class = mocker.patch('google.cloud.storage.Blob', autospec=True)
19
- mock_blob = mock_blob_class.return_value
20
- mock_blob.exists.return_value = False
21
- return mock_blob
22
-
23
-
24
- @pytest.fixture
25
- def mock_bucket(mocker, mock_blob):
26
- mock_bucket_class = mocker.patch('google.cloud.storage.Bucket', autospec=True)
27
- mock_bucket = mock_bucket_class.return_value
28
- mock_bucket.list_blobs.return_value = []
29
- mock_bucket.blob.return_value = mock_blob # this avoids creating a new blob for each test, which will confuse the test results
30
- return mock_bucket
31
-
32
- @pytest.fixture
33
- def mock_storage_client(mocker, mock_bucket):
34
- mock_client_class = mocker.patch('google.cloud.storage.Client', autospec=True)
35
- mock_client = mock_client_class.return_value
36
- mock_client.bucket.return_value = mock_bucket
37
- return mock_client
38
-
39
-
40
-
41
- # --- Test Cases ---
42
-
43
- def test_successful_gcs_upload(mock_storage_client):
44
- test_data = {"key": "value"}
45
- test_bucket_name = "test_bucket"
46
- test_file_name = "test_file.json"
47
-
48
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name)
49
-
50
- assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"
51
- assert result['local_path'] is None
52
- assert result['gcs_file_already_exists'] is False
53
- assert result['gcs_file_overwritten'] is False
54
- assert result['gcs_file_saved_with_increment'] is False
55
-
56
-
57
- def test_invalid_data_type(mock_storage_client):
58
- with pytest.raises(ValueError) as exc_info:
59
- write_json_to_gcs(mock_storage_client, 12345, "test_bucket", "test_file.json")
60
- assert str(exc_info.value) == "Unsupported data type. Data must be a list, dict, or str."
61
-
62
-
63
- def test_overwrite_if_exists(mock_storage_client, mock_blob):
64
- mock_blob.exists.return_value = True # Simulate existing file
65
- test_data = {"key": "value"}
66
- test_bucket_name = "test_bucket"
67
- test_file_name = "test_file.json"
68
-
69
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, overwrite_if_exists=True)
70
-
71
- assert result['gcs_file_overwritten'] is True
72
-
73
-
74
- def test_overwrite_with_substring(mock_storage_client, mock_bucket):
75
- mock_bucket.list_blobs.return_value = [MagicMock(name='test_prefix_file1.json'), MagicMock(name='test_prefix_file2.json')]
76
- test_data = {"key": "value"}
77
- test_bucket_name = "test_bucket"
78
- test_file_name = "test_file.json"
79
- test_prefix = 'test_prefix'
80
-
81
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
82
- test_file_name, overwrite_if_exists=True,
83
- file_exists_if_starts_with_prefix=test_prefix)
84
- assert result['gcs_file_overwritten'] is True
85
- assert result['gcs_file_exists_checked_on_name'] == test_prefix
86
-
87
-
88
- def test_increment_if_exists(mock_storage_client, mock_blob):
89
- mock_blob.exists.side_effect = [True, True, False] # Simulate existing files
90
- test_data = {"key": "value"}
91
- test_bucket_name = "test_bucket"
92
- test_file_name = "test_file.json"
93
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, increment_if_exists=True)
94
- assert result['gcs_path'] == f"gs://{test_bucket_name}/test_file_v2.json"
95
- assert result['gcs_file_saved_with_increment'] is True
96
-
97
-
98
- def test_overwrite_and_increment_raise_value_error(mock_storage_client):
99
- test_data = {"key": "value"}
100
- test_bucket_name = "test_bucket"
101
- test_file_name = "test_file.json"
102
- with pytest.raises(ValueError) as exc_info:
103
- write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
104
- test_file_name, overwrite_if_exists=True,
105
- increment_if_exists=True)
106
- assert str(exc_info.value) == "Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
107
-
108
-
109
- @patch('os.path.exists', return_value=False) # Assume local file exists for simplicity
110
- @patch('builtins.open', new_callable=MagicMock)
111
- def test_local_save_after_gcs_failure(mock_open, mock_exists, mock_storage_client, mock_blob):
112
- mock_blob.upload_from_string.side_effect = Exception("GCS Upload Failed")
113
- test_data = {"key": "value"}
114
- test_bucket_name = "test_bucket"
115
- test_file_name = "test_file.json"
116
-
117
- # Expecting an exception because GCS upload fails
118
- with pytest.raises(Exception) as exc_info:
119
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, save_locally=True)
120
-
121
- assert "GCS Upload Failed" in str(exc_info.value)
122
- mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
123
-
124
-
125
- @patch('builtins.open', new_callable=MagicMock)
126
- def test_local_save_with_custom_path(mock_open, mock_storage_client):
127
- local_path = "/my/custom/path"
128
- test_data = {"key": "value"}
129
- test_bucket_name = "test_bucket"
130
- test_file_name = "test_file.json"
131
-
132
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
133
- test_file_name, local_path=local_path)
134
-
135
- assert result['local_path'] == os.path.join(local_path, test_file_name)
136
- mock_open.assert_called_once()
137
-
138
- @patch('os.path.exists', side_effect=[True, True, False])
139
- @patch('builtins.open', new_callable=MagicMock)
140
- def test_local_save_with_increment(mock_open, mock_exists, mock_storage_client, mock_blob):
141
- test_data = {"key": "value"}
142
- test_bucket_name = "test_bucket"
143
- test_file_name = "test_file.json"
144
-
145
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
146
- test_file_name, save_locally=True, increment_if_exists=True)
147
-
148
- assert f"/tmp/test_file_v1.json" == result['local_path']
149
- mock_open.assert_called_once()
150
-
151
-
152
- @patch('builtins.open', new_callable=MagicMock)
153
- def test_local_save_overwrite(mock_open, mock_storage_client):
154
- test_data = {"key": "value"}
155
- test_bucket_name = "test_bucket"
156
- test_file_name = "test_file.json"
157
-
158
- # Execute function
159
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
160
- test_file_name, save_locally=True, overwrite_if_exists=True)
161
-
162
- # Check results
163
- assert result['local_path'] == os.path.join("/tmp", test_file_name)
164
- mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
165
- file_handle = mock_open()
166
-
167
-
168
- @patch('os.path.exists', return_value=True)
169
- @patch('builtins.open', new_callable=MagicMock)
170
- def test_local_save_skip(mock_open, mock_exists, mock_storage_client):
171
- test_data = {"key": "value"}
172
- test_bucket_name = "test_bucket"
173
- test_file_name = "test_file.json"
174
-
175
- result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
176
- test_file_name, save_locally=True, overwrite_if_exists=False)
177
-
178
- assert result['local_path'] == os.path.join("/tmp", test_file_name)
179
- mock_open.assert_not_called()
180
-
181
-
182
- def test_string_data_handling(mock_storage_client, mock_blob):
183
- test_string_data = "This is a test string."
184
- test_bucket_name = "test_bucket"
185
- test_file_name = "test_file.json"
186
-
187
- result = write_json_to_gcs(mock_storage_client, test_string_data, test_bucket_name, test_file_name)
188
-
189
- assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"