ipulse-shared-core-ftredge 4.1.1__tar.gz → 5.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.
- {ipulse_shared_core_ftredge-4.1.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-5.2.1}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/setup.py +1 -1
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/__init__.py +9 -0
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/__init__.py +1 -0
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/auth_router.py +58 -0
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/authorization_api.py +260 -0
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/database.py +19 -0
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/token_validation.py +59 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/__init__.py +2 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/api_response.py +15 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/organisation.py +33 -8
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/models/subscription.py +47 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +26 -14
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +9 -3
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/user_status.py +31 -46
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/services/__init__.py +4 -0
- ipulse_shared_core_ftredge-4.1.1/src/ipulse_shared_core_ftredge/services/base_service.py → ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/services/base_firestore_service.py +1 -1
- ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/services/exceptions.py +133 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +8 -3
- ipulse_shared_core_ftredge-4.1.1/src/ipulse_shared_core_ftredge/__init__.py +0 -9
- ipulse_shared_core_ftredge-4.1.1/src/ipulse_shared_core_ftredge/exceptions.py +0 -71
- ipulse_shared_core_ftredge-4.1.1/src/ipulse_shared_core_ftredge/services/__init__.py +0 -1
- ipulse_shared_core_ftredge-4.1.1/tests/test_utils_gcp.py +0 -189
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/README.md +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/pyproject.toml +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/setup.cfg +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
- {ipulse_shared_core_ftredge-4.1.1 → ipulse_shared_core_ftredge-5.2.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.2
|
|
2
2
|
Name: ipulse_shared_core_ftredge
|
|
3
|
-
Version:
|
|
3
|
+
Version: 5.2.1
|
|
4
4
|
Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
|
|
5
5
|
Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
|
|
6
6
|
Author: Russlan Ramdowar
|
|
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
|
|
|
3
3
|
|
|
4
4
|
setup(
|
|
5
5
|
name='ipulse_shared_core_ftredge',
|
|
6
|
-
version='
|
|
6
|
+
version='5.2.1',
|
|
7
7
|
package_dir={'': 'src'}, # Specify the source directory
|
|
8
8
|
packages=find_packages(where='src'), # Look for packages in 'src'
|
|
9
9
|
install_requires=[
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
# pylint: disable=missing-module-docstring
|
|
2
|
+
from .models import ( UserAuth, UserProfile,Subscription,
|
|
3
|
+
UserStatus, UserProfileUpdate,
|
|
4
|
+
Organisation, StandardResponse )
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
from .services import (BaseFirestoreService,BaseServiceException, ResourceNotFoundError, AuthorizationError,
|
|
9
|
+
ValidationError)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .token_validation import verify_firebase_token
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
from typing import Callable, Optional, List
|
|
2
|
+
from fastapi import APIRouter, Depends
|
|
3
|
+
from .token_validation import verify_firebase_token
|
|
4
|
+
|
|
5
|
+
def create_protected_router(
    *,
    prefix: str = "",
    tags: Optional[List[str]] = None,
    public_paths: Optional[List[str]] = None
) -> APIRouter:
    """
    Creates an APIRouter with authentication enabled by default.

    Args:
        prefix: Router prefix
        tags: OpenAPI tags
        public_paths: List of paths that should be public (no auth required)

    Returns:
        An APIRouter whose route registration automatically attaches the
        ``verify_firebase_token`` dependency to every non-public path.

    Example:
        router = create_protected_router(
            prefix="/api/v1",
            tags=["users"],
            public_paths=["/health", "/docs"]
        )
    """
    public_paths = public_paths or []
    router = APIRouter(prefix=prefix, tags=tags)

    # Store the original route registration method
    original_add_api_route = router.add_api_route

    def add_api_route_with_auth(
        path: str,
        endpoint: Callable,
        *args,
        dependencies: Optional[List] = None,  # was `List = None`: mis-annotated
        **kwargs
    ):
        # If path is not in public_paths, add authentication dependency
        if path not in public_paths:
            # BUG FIX: copy instead of mutating the caller's list in place —
            # appending to a list the caller still holds is a surprising
            # side effect.
            dependencies = list(dependencies) if dependencies else []
            # Only add the auth dependency if it is not already present.
            if not any(getattr(dep.dependency, '__name__', None) == 'verify_firebase_token'
                       for dep in dependencies):
                dependencies.append(Depends(verify_firebase_token))

        return original_add_api_route(
            path,
            endpoint,
            *args,
            dependencies=dependencies,
            **kwargs
        )

    # Replace the route registration method with our custom one
    router.add_api_route = add_api_route_with_auth  # type: ignore

    return router
|
ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/authorization_api.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Optional, Iterable, Dict, Any, List
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
import httpx
|
|
6
|
+
from fastapi import HTTPException, Request
|
|
7
|
+
from google.cloud import firestore
|
|
8
|
+
from ipulse_shared_core_ftredge.services.exceptions import ServiceError, AuthorizationError, ResourceNotFoundError
|
|
9
|
+
|
|
10
|
+
# Constants
USERS_STATUS_COLLECTION_NAME = "user-statuses"
USERS_STATUS_DOC_REF = "userusrsttus_"
CACHE_TTL = 60  # 60 seconds

class UserStatusCache:
    """In-process cache of user status documents with TTL-based expiry."""

    def __init__(self):
        # Cached payload per user, and the instant each entry was stored.
        self._cache: Dict[str, Dict[str, Any]] = {}
        self._timestamps: Dict[str, datetime] = {}

    def get(self, user_uid: str) -> Optional[Dict[str, Any]]:
        """
        Return the cached status for *user_uid*, or None when absent or stale.

        A stale entry is evicted before returning None so the next lookup
        starts from a clean slate.
        """
        entry = self._cache.get(user_uid)
        if entry is None:
            return None
        age = datetime.now() - self._timestamps[user_uid]
        if age < timedelta(seconds=CACHE_TTL):
            return entry
        # Entry outlived its TTL — drop it and report a miss.
        self.invalidate(user_uid)
        return None

    def set(self, user_uid: str, data: Dict[str, Any]) -> None:
        """Store *data* for *user_uid* and stamp the insertion time."""
        self._cache[user_uid] = data
        self._timestamps[user_uid] = datetime.now()

    def invalidate(self, user_uid: str) -> None:
        """Forget any cached status for *user_uid* (no-op when absent)."""
        self._cache.pop(user_uid, None)
        self._timestamps.pop(user_uid, None)

# Global cache instance
user_status_cache = UserStatusCache()
|
|
60
|
+
|
|
61
|
+
# Replace the logger dependency with a standard logger
|
|
62
|
+
logger = logging.getLogger(__name__)
|
|
63
|
+
|
|
64
|
+
async def get_user_status(
    user_uid: str,
    db: firestore.Client,  # Note: This expects the actual client, not a Depends
    force_fresh: bool = False
) -> tuple[Dict[str, Any], bool]:
    """
    Fetch a user's status document, preferring the in-process cache.

    Args:
        user_uid: Firebase user UID.
        db: Firestore client instance (not a FastAPI dependency).
        force_fresh: When True, bypass the cache entirely — neither read
            from it nor repopulate it.

    Returns:
        Tuple of (status document dict, True if served from the cache).

    Raises:
        ResourceNotFoundError: If no status document exists for the user.
        ServiceError: For any other failure while reading Firestore.
    """
    cache_used = False
    if not force_fresh:
        cached_status = user_status_cache.get(user_uid)
        if cached_status:
            cache_used = True
            return cached_status, cache_used

    try:
        # Status document IDs are the object-ref prefix + the user UID.
        user_status_id = USERS_STATUS_DOC_REF + user_uid
        user_ref = db.collection(USERS_STATUS_COLLECTION_NAME).document(user_status_id)

        # Get the document
        snapshot = user_ref.get()
        if not snapshot.exists:
            raise ResourceNotFoundError(
                resource_type="user_status",
                resource_id=user_status_id,
                additional_info={"user_uid": user_uid}
            )

        status_data = snapshot.to_dict()

        # Only cache if not forced fresh
        if not force_fresh:
            user_status_cache.set(user_uid, status_data)
        return status_data, cache_used

    except ResourceNotFoundError:
        raise
    except Exception as e:
        raise ServiceError(
            operation="fetching user status",  # fixed: was an f-string with no placeholders (F541)
            error=e,
            resource_type="user_status",
            resource_id=user_uid,
            additional_info={
                "force_fresh": force_fresh,
                "collection": USERS_STATUS_COLLECTION_NAME
            }
        ) from e
|
|
113
|
+
|
|
114
|
+
def _validate_resource_fields(fields: Dict[str, Any]) -> List[str]:
|
|
115
|
+
"""
|
|
116
|
+
Filter out invalid fields similar to BaseFirestoreService validation.
|
|
117
|
+
Returns only fields that have actual values to update.
|
|
118
|
+
"""
|
|
119
|
+
valid_fields = {
|
|
120
|
+
k: v for k, v in fields.items()
|
|
121
|
+
if v is not None and not (isinstance(v, (list, dict, set)) and len(v) == 0)
|
|
122
|
+
}
|
|
123
|
+
return list(valid_fields.keys())
|
|
124
|
+
|
|
125
|
+
async def extract_request_fields(request: Request) -> Optional[List[str]]:
    """
    Extract the field names from a request body for PATCH and POST methods.

    PATCH bodies are filtered through ``_validate_resource_fields``; POST
    bodies contribute every key. Returns None for other methods or when the
    body cannot be parsed.
    """
    try:
        body = await request.json()

        # Normalise the body to a plain dict of field -> value.
        if isinstance(body, dict):
            payload = body
        elif hasattr(body, 'model_dump'):
            payload = body.model_dump(exclude_unset=True)
        else:
            return None

        if request.method == "PATCH":
            return _validate_resource_fields(payload)
        if request.method == "POST":
            # For POST, we want to include all fields being set.
            return list(payload.keys())
        return None

    except Exception as e:
        logger.error(f"Error extracting fields from request body: {str(e)}")
        return None
|
|
149
|
+
|
|
150
|
+
async def authorizeAPIRequest(
    request: Request,
    db: firestore.Client,  # Changed: Now expects actual client instance
    request_resource_fields: Optional[Iterable[str]] = None,
) -> Dict[str, Any]:
    """
    Authorize API request based on user status and OPA policies.
    Note: This expects an actual Firestore client instance, not a dependency.

    Flow: derive the touched resource fields, load the caller's status
    document (cached or fresh), then POST the combined context to an Open
    Policy Agent server for an allow/deny decision.

    Returns:
        Metadata about the status-data freshness used for the decision.

    Raises:
        AuthorizationError: If no user is attached to the request or OPA denies.
        ResourceNotFoundError: Propagated when the user status doc is missing.
        HTTPException: 500 when the OPA server itself errors.
        ServiceError: Wraps any other unexpected failure.
    """
    try:
        # Extract fields for both PATCH and POST if not provided
        if not request_resource_fields:
            request_resource_fields = await extract_request_fields(request)

        # Extract request context — request.state.user is set by the
        # verify_firebase_token dependency upstream.
        user_uid = request.state.user.get('uid')
        if not user_uid:
            raise AuthorizationError(
                action="access API",
                additional_info={"path": str(request.url)}
            )


        # Determine if we need fresh status (mutating methods and
        # credit-sensitive paths bypass the cache)
        force_fresh = _should_force_fresh_status(request)
        user_status, cache_used = await get_user_status(user_uid, db, force_fresh=force_fresh)

        # Prepare authorization input for the OPA policy
        auth_input = {
            "api_url": request.url.path,
            "requestor": {
                "uid": user_uid,
                "usertypes": request.state.user.get("usertypes"),
                "email_verified": request.state.user.get("email_verified"),
                "iam_groups": user_status.get("iam_groups"),
                "active_sbscrptn_plan": user_status.get("active_sbscrptn_plan"),
                "active_sbscrptn_status": user_status.get("sbscrptn_status"),
                "sbscrptn_insight_credits": user_status.get("sbscrptn_insight_credits"),
            },
            "method": request.method.lower(),
            "request_resource_fields": request_resource_fields
        }

        # Query OPA — server location and decision path come from env vars.
        opa_url = f"{os.getenv('OPA_SERVER_URL', 'http://localhost:8181')}{os.getenv('OPA_DECISION_PATH', '/v1/data/http/authz/ingress/decision')}"
        async with httpx.AsyncClient() as client:
            response = await client.post(
                opa_url,
                json={"input": auth_input},
                timeout=5.0  # 5 seconds timeout
            )

            if response.status_code != 200:
                logger.error(f"OPA authorization failed: {response.text}")
                raise HTTPException(
                    status_code=500,
                    detail="Authorization service error"
                )

            result = response.json()
            # Deny unless the policy explicitly returns allow=True.
            if not result.get("result", {}).get("allow", False):
                raise AuthorizationError(
                    action=f"{request.method} {request.url.path}",
                    additional_info={
                        "user_uid": user_uid,
                        "resource_fields": request_resource_fields
                    }
                )

        # More descriptive metadata about the data freshness
        return {
            "used_cached_status": cache_used,
            "required_fresh_status": force_fresh,
            "status_retrieved_at": datetime.now(timezone.utc).isoformat()
        }

    except (AuthorizationError, ResourceNotFoundError):
        raise
    except Exception as e:
        # NOTE(review): this wrap also swallows the 500 HTTPException raised
        # above into a ServiceError — confirm that is the intended behavior.
        raise ServiceError(
            operation="API authorization",
            error=e,
            resource_type="authorization",
            additional_info={
                "path": str(request.url),
                "method": request.method,
                "user_uid": request.state.user.get('uid'),
                "resource_fields": request_resource_fields
            }
        )
|
|
240
|
+
|
|
241
|
+
def _should_force_fresh_status(request: Request) -> bool:
|
|
242
|
+
"""
|
|
243
|
+
Determine if we should force a fresh status check based on the request path patterns
|
|
244
|
+
and HTTP methods
|
|
245
|
+
"""
|
|
246
|
+
# Path patterns that indicate credit-sensitive operations
|
|
247
|
+
credit_sensitive_patterns = [
|
|
248
|
+
'prediction',
|
|
249
|
+
'user-statuses',
|
|
250
|
+
]
|
|
251
|
+
# Methods that require fresh status
|
|
252
|
+
sensitive_methods = {'post', 'patch', 'put', 'delete'}
|
|
253
|
+
|
|
254
|
+
path = request.url.path.lower()
|
|
255
|
+
method = request.method.lower()
|
|
256
|
+
|
|
257
|
+
return (
|
|
258
|
+
any(pattern in path for pattern in credit_sensitive_patterns) or
|
|
259
|
+
method in sensitive_methods
|
|
260
|
+
)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from typing import Annotated
|
|
2
|
+
from fastapi import Depends
|
|
3
|
+
from google.cloud import firestore
|
|
4
|
+
from functools import lru_cache
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
@lru_cache()
def get_db() -> firestore.Client:
    """
    Dependency function to inject the Firestore client.
    Each service implementing this should override this function with their own Firebase initialization.

    Note: lru_cache makes the returned client a process-wide singleton; the
    default construction relies on ambient Google application credentials.
    """
    # Log that the library fallback is in use rather than a service override.
    logger.info("Base get_db dependency called - this should be overridden by the implementing service")
    return firestore.Client()
|
|
17
|
+
|
|
18
|
+
# Base type for dependency injection that services will implement
|
|
19
|
+
FirestoreDB = Annotated[firestore.Client, Depends(get_db)]
|
ipulse_shared_core_ftredge-5.2.1/src/ipulse_shared_core_ftredge/dependencies/token_validation.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from typing import Optional, Annotated
|
|
2
|
+
from fastapi import Request, HTTPException, Depends, Header
|
|
3
|
+
from firebase_admin import auth
|
|
4
|
+
|
|
5
|
+
class AuthenticatedUser:
    """
    Decoded Firebase identity carried by a request after token verification.

    Attributes:
        uid: Firebase user UID.
        email: Account email address.
        email_verified: Whether Firebase has verified the email.
        usertypes: Custom-claim list of user type labels.
    """

    def __init__(self, uid: str, email: str, email_verified: bool, usertypes: list[str]):
        # All fields are propagated verbatim from the verified ID token.
        self.uid = uid
        self.email = email
        self.email_verified = email_verified
        self.usertypes = usertypes
|
|
14
|
+
|
|
15
|
+
async def verify_firebase_token(
    request: Request,
    x_forwarded_authorization: Optional[str] = Header(None),
    authorization: Optional[str] = Header(None)
) -> AuthenticatedUser:
    """
    Validate a Firebase ID token and return the authenticated user.

    The token is read from the ``x-forwarded-authorization`` header (set by
    gateways/proxies) or, failing that, the ``authorization`` header. The
    decoded token is also stashed on ``request.state.user`` for downstream
    handlers.

    Raises:
        HTTPException: 401 when the token is missing or fails verification.
    """
    # Get token from either x-forwarded-authorization or authorization header
    token = x_forwarded_authorization or authorization

    if not token:
        raise HTTPException(
            status_code=401,
            detail="Authorization token is missing"
        )

    try:
        # Strip only a leading 'Bearer ' scheme. The previous
        # token.replace("Bearer ", "") removed every occurrence of the
        # substring anywhere in the token, not just the prefix.
        token = token.removeprefix("Bearer ")
        # Verify the token
        decoded_token = auth.verify_id_token(token)

        # Create AuthenticatedUser instance
        user = AuthenticatedUser(
            uid=decoded_token.get('uid'),
            email=decoded_token.get('email'),
            email_verified=decoded_token.get('email_verified', False),
            usertypes=decoded_token.get('usertypes', [])
        )

        # Store user in request state for use in other parts of the application
        request.state.user = decoded_token

        return user

    except Exception as e:
        # Chain the cause so the original verification failure is preserved.
        raise HTTPException(
            status_code=401,
            detail=f"Invalid token: {str(e)}"
        ) from e
|
|
56
|
+
|
|
57
|
+
# Type alias for dependency injection
|
|
58
|
+
AuthUser = Annotated[AuthenticatedUser, Depends(verify_firebase_token)]
|
|
59
|
+
|
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
from .user_profile import UserProfile
|
|
2
|
+
from .subscription import Subscription
|
|
2
3
|
from .user_status import UserStatus
|
|
3
4
|
from .user_profile_update import UserProfileUpdate
|
|
4
5
|
from .user_auth import UserAuth
|
|
5
6
|
from .organisation import Organisation
|
|
7
|
+
from .api_response import StandardResponse
|
|
6
8
|
|
|
7
9
|
|
|
8
10
|
|
|
@@ -1,6 +1,10 @@
|
|
|
1
1
|
from typing import Generic, TypeVar, Optional, Any, Dict, List
|
|
2
2
|
from pydantic import BaseModel, ConfigDict
|
|
3
3
|
import datetime as dt
|
|
4
|
+
from fastapi.responses import JSONResponse
|
|
5
|
+
from ipulse_shared_core_ftredge.utils.json_encoder import CustomJSONEncoder
|
|
6
|
+
import json
|
|
7
|
+
|
|
4
8
|
|
|
5
9
|
T = TypeVar('T')
|
|
6
10
|
|
|
@@ -20,3 +24,14 @@ class PaginatedResponse(StandardResponse, Generic[T]):
|
|
|
20
24
|
page: int
|
|
21
25
|
page_size: int
|
|
22
26
|
items: List[T]
|
|
27
|
+
|
|
28
|
+
class CustomJSONResponse(JSONResponse):
    """JSONResponse that serializes non-standard types via CustomJSONEncoder."""

    def render(self, content) -> bytes:
        # Compact separators, UTF-8 passthrough, and strict NaN handling;
        # CustomJSONEncoder supplies the fallback for project types.
        serialized = json.dumps(
            content,
            ensure_ascii=False,
            allow_nan=False,
            indent=None,
            separators=(",", ":"),
            default=CustomJSONEncoder().default
        )
        return serialized.encode("utf-8")
|
|
@@ -13,7 +13,11 @@ import uuid
|
|
|
13
13
|
import dateutil.parser
|
|
14
14
|
from ipulse_shared_base_ftredge import (
|
|
15
15
|
OrganizationRelation,
|
|
16
|
-
OrganizationIndustry
|
|
16
|
+
OrganizationIndustry,
|
|
17
|
+
Layer,
|
|
18
|
+
Module,
|
|
19
|
+
list_as_lower_strings,
|
|
20
|
+
Sector
|
|
17
21
|
)
|
|
18
22
|
|
|
19
23
|
class Organisation(BaseModel):
|
|
@@ -24,15 +28,27 @@ class Organisation(BaseModel):
|
|
|
24
28
|
model_config = ConfigDict(frozen=True, extra="forbid")
|
|
25
29
|
|
|
26
30
|
# Class constants
|
|
27
|
-
VERSION: ClassVar[float] = 1
|
|
28
|
-
MODULE: ClassVar[str] = "
|
|
29
|
-
|
|
31
|
+
VERSION: ClassVar[float] = 4.1
|
|
32
|
+
MODULE: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Sector.USERCORE.name))
|
|
33
|
+
OBJ_REF: ClassVar[str] = "orgprofile"
|
|
30
34
|
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
+
schema_version: float = Field(
|
|
36
|
+
default=VERSION,
|
|
37
|
+
description="Version of this Class == version of DB Schema",
|
|
38
|
+
frozen=True
|
|
35
39
|
)
|
|
40
|
+
|
|
41
|
+
org_uid: str = Field(
|
|
42
|
+
default_factory=lambda: uuid.uuid4().hex,
|
|
43
|
+
description="Unique identifier for the organisation",
|
|
44
|
+
frozen=True
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
id: str = Field(
|
|
48
|
+
default=None,
|
|
49
|
+
description="Organisation ID, format: {OBJ_REF}_{org_uid}"
|
|
50
|
+
)
|
|
51
|
+
|
|
36
52
|
name: str = Field(..., min_length=1, max_length=100)
|
|
37
53
|
relations: Set[OrganizationRelation] = Field(..., description="Organisation relations/types")
|
|
38
54
|
|
|
@@ -48,6 +64,15 @@ class Organisation(BaseModel):
|
|
|
48
64
|
website: Optional[str] = Field(None, max_length=200)
|
|
49
65
|
org_admin_user_uids: Optional[Set[str]] = None
|
|
50
66
|
|
|
67
|
+
    @field_validator('id', mode='before')
    @classmethod
    def generate_id(cls, v: Optional[str], info) -> str:
        """Derive the document id as ``{OBJ_REF}_{org_uid}`` before validation.

        Requires ``org_uid`` to already be present in the validated data
        (field order matters for pydantic's before-validators).
        """
        values = info.data
        org_uid = values.get('org_uid')
        if not org_uid:
            raise ValueError("org_uid must be set before generating id")
        # NOTE(review): any caller-supplied `v` is ignored and overwritten
        # with the derived id — confirm this override is intentional.
        return f"{cls.OBJ_REF}_{org_uid}"
|
|
75
|
+
|
|
51
76
|
@field_validator('relations')
|
|
52
77
|
@classmethod
|
|
53
78
|
def validate_relations(cls, v: Set[OrganizationRelation]) -> Set[OrganizationRelation]:
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
from datetime import datetime, timezone
|
|
2
|
+
from dateutil.relativedelta import relativedelta
|
|
3
|
+
from typing import Set, Optional, Dict, List, ClassVar
|
|
4
|
+
from pydantic import BaseModel, Field, ConfigDict
|
|
5
|
+
from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings, Sector, SubscriptionPlan
|
|
6
|
+
# ORIGINAL AUTHOR ="Russlan Ramdowar;russlan@ftredge.com"
|
|
7
|
+
# CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
DEFAULT_SUBSCRIPTION_PLAN = SubscriptionPlan.FREE
|
|
11
|
+
DEFAULT_SUBSCRIPTION_STATUS = "active"
|
|
12
|
+
|
|
13
|
+
############################################ !!!!! ALWAYS UPDATE SCHEMA VERSION , IF SCHEMA IS BEING MODIFIED !!! ############################################
|
|
14
|
+
class Subscription(BaseModel):
    """
    Represents a single subscription cycle.

    Immutable (frozen) pydantic model; defaults describe a free, active,
    one-year cycle starting now.
    """

    model_config = ConfigDict(frozen=True, extra="forbid")

    VERSION: ClassVar[float] = 1.1
    DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Sector.LOOKUP.name))

    # System-managed fields (read-only)
    schema_version: float = Field(
        default=VERSION,
        description="Version of this Class == version of DB Schema",
        frozen=True
    )

    plan_name: SubscriptionPlan = Field(
        default=DEFAULT_SUBSCRIPTION_PLAN,
        description="Subscription Plan Name"
    )

    # BUG FIX: was `default=datetime.now(timezone.utc)`, which is evaluated
    # once at import time and then shared by every instance. default_factory
    # evaluates per instance.
    cycle_start_date: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        description="Subscription Cycle Start Date"
    )
    # BUG FIX: was `default=lambda: ...`, which stores the lambda object
    # itself as the field value instead of calling it; default_factory
    # invokes the callable.
    cycle_end_date: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc) + relativedelta(years=1),
        description="Subscription Cycle End Date"
    )
    status: str = Field(
        default=DEFAULT_SUBSCRIPTION_STATUS,
        description="Subscription Status (active, inactive, etc.)"
    )
|
|
@@ -1,7 +1,11 @@
|
|
|
1
1
|
from datetime import datetime, date
|
|
2
2
|
from typing import Set, Optional, ClassVar
|
|
3
3
|
from pydantic import BaseModel, EmailStr, Field, ConfigDict
|
|
4
|
+
from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings, Sector
|
|
4
5
|
|
|
6
|
+
# # Revision history (as model metadata)
|
|
7
|
+
# CLASS_ORIGIN_AUTHOR: ClassVar[str] = "Russlan Ramdowar;russlan@ftredge.com"
|
|
8
|
+
# CLASS_ORGIN_DATE: ClassVar[datetime] = datetime(2024, 1, 16, 20, 5)
|
|
5
9
|
class UserProfile(BaseModel):
|
|
6
10
|
"""
|
|
7
11
|
User Profile model representing user information and metadata.
|
|
@@ -10,16 +14,27 @@ class UserProfile(BaseModel):
|
|
|
10
14
|
model_config = ConfigDict(frozen=True, extra="forbid")
|
|
11
15
|
|
|
12
16
|
# Metadata as class variables
|
|
13
|
-
VERSION: ClassVar[float] =
|
|
14
|
-
DOMAIN: ClassVar[str] = "
|
|
15
|
-
OBJ_REF: ClassVar[str] = "usprfl"
|
|
17
|
+
VERSION: ClassVar[float] = 4.1
|
|
18
|
+
DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Sector.USERCORE.name))
|
|
16
19
|
|
|
17
20
|
# System-managed fields (read-only)
|
|
18
21
|
schema_version: float = Field(
|
|
19
|
-
default=
|
|
22
|
+
default=VERSION,
|
|
20
23
|
description="Version of this Class == version of DB Schema",
|
|
21
24
|
frozen=True
|
|
22
25
|
)
|
|
26
|
+
|
|
27
|
+
id : str = Field(
|
|
28
|
+
...,
|
|
29
|
+
description="User ID, propagated from Firebase Auth"
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
user_uid: str = Field(
|
|
33
|
+
...,
|
|
34
|
+
description="User UID, propagated from Firebase Auth"
|
|
35
|
+
)
|
|
36
|
+
|
|
37
|
+
|
|
23
38
|
email: EmailStr = Field(
|
|
24
39
|
...,
|
|
25
40
|
description="Propagated from Firebase Auth",
|
|
@@ -27,8 +42,7 @@ class UserProfile(BaseModel):
|
|
|
27
42
|
)
|
|
28
43
|
organizations_uids: Set[str] = Field(
|
|
29
44
|
default_factory=set,
|
|
30
|
-
description="Depends on Subscription Plan, Regularly Updated"
|
|
31
|
-
frozen=True
|
|
45
|
+
description="Depends on Subscription Plan, Regularly Updated"
|
|
32
46
|
)
|
|
33
47
|
|
|
34
48
|
# Timestamps and audit fields (read-only)
|
|
@@ -40,8 +54,7 @@ class UserProfile(BaseModel):
|
|
|
40
54
|
# System identification (read-only)
|
|
41
55
|
provider_id: str = Field(frozen=True)
|
|
42
56
|
aliases: Optional[Set[str]] = Field(
|
|
43
|
-
default=None
|
|
44
|
-
frozen=True
|
|
57
|
+
default=None
|
|
45
58
|
)
|
|
46
59
|
|
|
47
60
|
# User-editable fields
|
|
@@ -68,9 +81,8 @@ class UserProfile(BaseModel):
|
|
|
68
81
|
description="E.164 format phone number"
|
|
69
82
|
)
|
|
70
83
|
|
|
71
|
-
#
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
LAST_MODIFICATION: ClassVar[str] = "Updated to Pydantic v2 with improved validation"
|
|
84
|
+
# Audit fields
|
|
85
|
+
creat_date: datetime = Field(default_factory=datetime.now)
|
|
86
|
+
creat_by_user: str = Field(frozen=True)
|
|
87
|
+
updt_date: datetime = Field(default_factory=datetime.now)
|
|
88
|
+
updt_by_user: str = Field(frozen=True)
|
|
@@ -2,6 +2,10 @@ from typing import Optional, Set, ClassVar
|
|
|
2
2
|
from pydantic import BaseModel, Field, EmailStr, ConfigDict
|
|
3
3
|
from datetime import date, datetime
|
|
4
4
|
|
|
5
|
+
# CLASS_ORGIN_DATE: ClassVar[datetime] = datetime(2024, 3, 15, 20, 15)
|
|
6
|
+
# CLASS_REVISION_DATE: ClassVar[datetime] = datetime(2024, 3, 15, 20, 15)
|
|
7
|
+
|
|
8
|
+
|
|
5
9
|
class UserProfileUpdate(BaseModel):
|
|
6
10
|
"""
|
|
7
11
|
User Profile Update model for partial updates of user information.
|
|
@@ -12,11 +16,9 @@ class UserProfileUpdate(BaseModel):
|
|
|
12
16
|
# Metadata as class variables
|
|
13
17
|
VERSION: ClassVar[float] = 2.01
|
|
14
18
|
CLASS_ORIGIN_AUTHOR: ClassVar[str] = "Russlan Ramdowar;russlan@ftredge.com"
|
|
15
|
-
|
|
16
|
-
CLASS_REVISION_DATE: ClassVar[datetime] = datetime(2024, 3, 15, 20, 15)
|
|
19
|
+
|
|
17
20
|
|
|
18
21
|
# System fields
|
|
19
|
-
schema_version: Optional[float] = Field(None, description="Version of this Class == version of DB Schema")
|
|
20
22
|
email: Optional[EmailStr] = Field(None, description="Propagated from Firebase Auth")
|
|
21
23
|
organizations_uids: Optional[Set[str]] = Field(None, description="Organization memberships")
|
|
22
24
|
|
|
@@ -37,6 +39,10 @@ class UserProfileUpdate(BaseModel):
|
|
|
37
39
|
last_name: Optional[str] = Field(None, max_length=100)
|
|
38
40
|
mobile: Optional[str] = Field(None, pattern=r"^\+?[1-9]\d{1,14}$")
|
|
39
41
|
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
|
|
40
46
|
    def model_dump(self, **kwargs):
        """Dump the update model, excluding None fields by default.

        Keeps partial updates sparse so unset fields are not written over
        existing document values; callers may pass exclude_none=False to
        override.
        """
        kwargs.setdefault('exclude_none', True)
        return super().model_dump(**kwargs)
|
|
@@ -1,24 +1,10 @@
|
|
|
1
1
|
from datetime import datetime
|
|
2
|
-
from dateutil.relativedelta import relativedelta
|
|
3
2
|
from typing import Set, Optional, Dict, List, ClassVar
|
|
4
3
|
from pydantic import BaseModel, Field, ConfigDict
|
|
5
|
-
|
|
4
|
+
from .subscription import Subscription
|
|
5
|
+
from ipulse_shared_base_ftredge import Layer, Module, list_as_lower_strings, Sector
|
|
6
6
|
# ORIGINAL AUTHOR ="Russlan Ramdowar;russlan@ftredge.com"
|
|
7
|
-
CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
|
|
8
|
-
|
|
9
|
-
SCHEMA_VERSION = 2.3
|
|
10
|
-
CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
|
|
11
|
-
CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
|
|
12
|
-
LAST_MODIFICATION="Changed default IAM_GROUPS"
|
|
13
|
-
|
|
14
|
-
DOMAIN="user"
|
|
15
|
-
OBJ_REF = "usrsttus"
|
|
16
|
-
|
|
17
|
-
DEFAULT_IAM_GROUPS={"pulseroot":["full_open_read"]}
|
|
18
|
-
DEFAULT_SUBSCRIPTION_PLAN="subscription_free"
|
|
19
|
-
DEFAULT_SUBSCRIPTION_STATUS="active"
|
|
20
|
-
DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=10
|
|
21
|
-
DEFAULT_EXTRA_INSIGHT_CREDITS=0
|
|
7
|
+
# CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
|
|
22
8
|
|
|
23
9
|
############################################ !!!!! ALWAYS UPDATE SCHEMA VERSION , IF SCHEMA IS BEING MODIFIED !!! ############################################
|
|
24
10
|
class UserStatus(BaseModel):
|
|
@@ -28,9 +14,9 @@ class UserStatus(BaseModel):
|
|
|
28
14
|
model_config = ConfigDict(frozen=True, extra="forbid")
|
|
29
15
|
|
|
30
16
|
# Class constants
|
|
31
|
-
VERSION: ClassVar[float] =
|
|
32
|
-
DOMAIN: ClassVar[str] = "
|
|
33
|
-
OBJ_REF: ClassVar[str] = "
|
|
17
|
+
VERSION: ClassVar[float] = 4.1
|
|
18
|
+
DOMAIN: ClassVar[str] = "_".join(list_as_lower_strings(Layer.PULSE_APP, Module.CORE.name, Sector.USERCORE.name))
|
|
19
|
+
OBJ_REF: ClassVar[str] = "userstatus"
|
|
34
20
|
|
|
35
21
|
# Default values as class variables
|
|
36
22
|
DEFAULT_IAM_GROUPS: ClassVar[Dict[str, List[str]]] = {"pulseroot": ["full_open_read"]}
|
|
@@ -39,43 +25,42 @@ class UserStatus(BaseModel):
|
|
|
39
25
|
DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS: ClassVar[int] = 10
|
|
40
26
|
DEFAULT_EXTRA_INSIGHT_CREDITS: ClassVar[int] = 0
|
|
41
27
|
|
|
28
|
+
|
|
42
29
|
# System-managed fields
|
|
43
30
|
schema_version: float = Field(
|
|
44
|
-
default=
|
|
31
|
+
default=VERSION,
|
|
45
32
|
description="Version of this Class == version of DB Schema"
|
|
46
33
|
)
|
|
47
34
|
|
|
35
|
+
id : str = Field(
|
|
36
|
+
...,
|
|
37
|
+
description="User ID, propagated from Firebase Auth"
|
|
38
|
+
)
|
|
39
|
+
|
|
40
|
+
user_uid: str = Field(
|
|
41
|
+
...,
|
|
42
|
+
description="User UID, propagated from Firebase Auth"
|
|
43
|
+
)
|
|
44
|
+
|
|
48
45
|
# IAM and subscription fields
|
|
49
46
|
iam_groups: Dict[str, List[str]] = Field(
|
|
50
47
|
default_factory=lambda: UserStatus.DEFAULT_IAM_GROUPS,
|
|
51
48
|
description="User's Groups, with a default one for all authenticated Pulse users"
|
|
52
49
|
)
|
|
53
|
-
sbscrptn_plan: str = Field(
|
|
54
|
-
default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_PLAN,
|
|
55
|
-
description="Subscription Plan"
|
|
56
|
-
)
|
|
57
|
-
sbscrptn_status: str = Field(
|
|
58
|
-
default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_STATUS,
|
|
59
|
-
description="Subscription Status"
|
|
60
|
-
)
|
|
61
50
|
|
|
62
|
-
# Subscription
|
|
63
|
-
|
|
64
|
-
default_factory=
|
|
65
|
-
description="
|
|
51
|
+
# Subscription Management
|
|
52
|
+
subscriptions: Dict[str, Subscription] = Field(
|
|
53
|
+
default_factory=dict,
|
|
54
|
+
description="Dictionary of user's active and past subscriptions, keyed by plan name"
|
|
66
55
|
)
|
|
67
|
-
|
|
68
|
-
default_factory=lambda: datetime.utcnow() + relativedelta(years=1),
|
|
69
|
-
description="Subscription End Date"
|
|
70
|
-
)
|
|
71
|
-
|
|
56
|
+
|
|
72
57
|
# Credits management
|
|
73
|
-
|
|
58
|
+
sbscrptn_allowance_insight_credits: int = Field(
|
|
74
59
|
default_factory=lambda: UserStatus.DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS,
|
|
75
60
|
description="Subscription-based insight credits"
|
|
76
61
|
)
|
|
77
|
-
|
|
78
|
-
default_factory=datetime.
|
|
62
|
+
sbscrptn_allowance_insight_credits_updtd_on: datetime = Field(
|
|
63
|
+
default_factory=datetime.now,
|
|
79
64
|
description="Last update timestamp for subscription credits"
|
|
80
65
|
)
|
|
81
66
|
extra_insight_credits: int = Field(
|
|
@@ -86,8 +71,8 @@ class UserStatus(BaseModel):
|
|
|
86
71
|
# Optional fields
|
|
87
72
|
payment_refs_uids: Optional[Set[str]] = None
|
|
88
73
|
|
|
89
|
-
|
|
90
|
-
creat_date: datetime
|
|
91
|
-
creat_by_user: str
|
|
92
|
-
updt_date: datetime
|
|
93
|
-
updt_by_user: str
|
|
74
|
+
# Audit fields
|
|
75
|
+
creat_date: datetime = Field(default_factory=datetime.now)
|
|
76
|
+
creat_by_user: str = Field(frozen=True)
|
|
77
|
+
updt_date: datetime = Field(default_factory=datetime.now)
|
|
78
|
+
updt_by_user: str = Field(frozen=True)
|
|
@@ -2,7 +2,7 @@ from typing import Dict, Any, Optional
|
|
|
2
2
|
from datetime import datetime,timezone
|
|
3
3
|
from fastapi import HTTPException
|
|
4
4
|
from google.cloud import firestore
|
|
5
|
-
from
|
|
5
|
+
from .exceptions import ResourceNotFoundError, ValidationError
|
|
6
6
|
|
|
7
7
|
class BaseFirestoreService:
|
|
8
8
|
def __init__(self, db: firestore.Client, collection_name: str, resource_type: str):
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
from fastapi import HTTPException
|
|
2
|
+
from typing import Optional, Any, Dict
|
|
3
|
+
import traceback
|
|
4
|
+
import logging
|
|
5
|
+
|
|
6
|
+
class BaseServiceException(HTTPException):
|
|
7
|
+
def __init__(
|
|
8
|
+
self,
|
|
9
|
+
status_code: int,
|
|
10
|
+
detail: str,
|
|
11
|
+
resource_type: str,
|
|
12
|
+
resource_id: Optional[str] = None,
|
|
13
|
+
additional_info: Optional[Dict[str, Any]] = None,
|
|
14
|
+
original_error: Optional[Exception] = None
|
|
15
|
+
):
|
|
16
|
+
self.resource_type = resource_type
|
|
17
|
+
self.resource_id = resource_id
|
|
18
|
+
self.additional_info = additional_info or {}
|
|
19
|
+
self.original_error = original_error
|
|
20
|
+
|
|
21
|
+
# Get full traceback if there's an original error
|
|
22
|
+
if original_error:
|
|
23
|
+
self.traceback = ''.join(traceback.format_exception(
|
|
24
|
+
type(original_error),
|
|
25
|
+
original_error,
|
|
26
|
+
original_error.__traceback__
|
|
27
|
+
))
|
|
28
|
+
else:
|
|
29
|
+
self.traceback = ''.join(traceback.format_stack())
|
|
30
|
+
|
|
31
|
+
# Build detailed message
|
|
32
|
+
detail_msg = f"{detail}"
|
|
33
|
+
if resource_type:
|
|
34
|
+
detail_msg += f" [Resource Type: {resource_type}]"
|
|
35
|
+
if resource_id:
|
|
36
|
+
detail_msg += f" [ID: {resource_id}]"
|
|
37
|
+
|
|
38
|
+
super().__init__(status_code=status_code, detail=detail_msg)
|
|
39
|
+
|
|
40
|
+
def log_error(self, logger: logging.Logger):
|
|
41
|
+
"""Log error with full context"""
|
|
42
|
+
error_context = {
|
|
43
|
+
"status_code": self.status_code,
|
|
44
|
+
"resource_type": self.resource_type,
|
|
45
|
+
"resource_id": self.resource_id,
|
|
46
|
+
"detail": self.detail,
|
|
47
|
+
**self.additional_info
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
log_message = f"""
|
|
51
|
+
Service Error Occurred:
|
|
52
|
+
Status Code: {self.status_code}
|
|
53
|
+
Resource Type: {self.resource_type}
|
|
54
|
+
Resource ID: {self.resource_id}
|
|
55
|
+
Detail: {self.detail}
|
|
56
|
+
Additional Info: {self.additional_info}
|
|
57
|
+
{'Original Error: ' + str(self.original_error) if self.original_error else ''}
|
|
58
|
+
Traceback:
|
|
59
|
+
{self.traceback}
|
|
60
|
+
"""
|
|
61
|
+
|
|
62
|
+
logger.error(log_message, extra=error_context)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class ServiceError(BaseServiceException):
|
|
66
|
+
"""Generic service error with enhanced logging"""
|
|
67
|
+
def __init__(
|
|
68
|
+
self,
|
|
69
|
+
operation: str,
|
|
70
|
+
error: Exception,
|
|
71
|
+
resource_type: str,
|
|
72
|
+
resource_id: Optional[str] = None,
|
|
73
|
+
additional_info: Optional[Dict[str, Any]] = None
|
|
74
|
+
):
|
|
75
|
+
super().__init__(
|
|
76
|
+
status_code=500,
|
|
77
|
+
detail=f"Error during {operation}: {str(error)}",
|
|
78
|
+
resource_type=resource_type,
|
|
79
|
+
resource_id=resource_id,
|
|
80
|
+
additional_info=additional_info,
|
|
81
|
+
original_error=error
|
|
82
|
+
)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class ResourceNotFoundError(BaseServiceException):
|
|
86
|
+
def __init__(
|
|
87
|
+
self,
|
|
88
|
+
resource_type: str,
|
|
89
|
+
resource_id: str,
|
|
90
|
+
additional_info: Optional[Dict[str, Any]] = None
|
|
91
|
+
):
|
|
92
|
+
super().__init__(
|
|
93
|
+
status_code=404,
|
|
94
|
+
detail="Resource not found",
|
|
95
|
+
resource_type=resource_type,
|
|
96
|
+
resource_id=resource_id,
|
|
97
|
+
additional_info=additional_info
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
class AuthorizationError(BaseServiceException):
|
|
101
|
+
def __init__(
|
|
102
|
+
self,
|
|
103
|
+
action: str,
|
|
104
|
+
resource_type: str = "authorization",
|
|
105
|
+
resource_id: Optional[str] = None,
|
|
106
|
+
additional_info: Optional[Dict[str, Any]] = None,
|
|
107
|
+
original_error: Optional[Exception] = None
|
|
108
|
+
):
|
|
109
|
+
super().__init__(
|
|
110
|
+
status_code=403,
|
|
111
|
+
detail=f"Not authorized to {action}",
|
|
112
|
+
resource_type=resource_type,
|
|
113
|
+
resource_id=resource_id,
|
|
114
|
+
additional_info=additional_info,
|
|
115
|
+
original_error=original_error
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
class ValidationError(BaseServiceException):
|
|
119
|
+
def __init__(
|
|
120
|
+
self,
|
|
121
|
+
resource_type: str,
|
|
122
|
+
detail: str,
|
|
123
|
+
resource_id: Optional[str] = None,
|
|
124
|
+
additional_info: Optional[Dict[str, Any]] = None
|
|
125
|
+
):
|
|
126
|
+
super().__init__(
|
|
127
|
+
status_code=422,
|
|
128
|
+
detail=detail,
|
|
129
|
+
resource_type=resource_type,
|
|
130
|
+
resource_id=resource_id,
|
|
131
|
+
additional_info=additional_info
|
|
132
|
+
)
|
|
133
|
+
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.2
|
|
2
2
|
Name: ipulse_shared_core_ftredge
|
|
3
|
-
Version:
|
|
3
|
+
Version: 5.2.1
|
|
4
4
|
Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
|
|
5
5
|
Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
|
|
6
6
|
Author: Russlan Ramdowar
|
|
@@ -3,20 +3,25 @@ README.md
|
|
|
3
3
|
pyproject.toml
|
|
4
4
|
setup.py
|
|
5
5
|
src/ipulse_shared_core_ftredge/__init__.py
|
|
6
|
-
src/ipulse_shared_core_ftredge/exceptions.py
|
|
7
6
|
src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
|
|
8
7
|
src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
|
|
9
8
|
src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
|
|
10
9
|
src/ipulse_shared_core_ftredge.egg-info/requires.txt
|
|
11
10
|
src/ipulse_shared_core_ftredge.egg-info/top_level.txt
|
|
11
|
+
src/ipulse_shared_core_ftredge/dependencies/__init__.py
|
|
12
|
+
src/ipulse_shared_core_ftredge/dependencies/auth_router.py
|
|
13
|
+
src/ipulse_shared_core_ftredge/dependencies/authorization_api.py
|
|
14
|
+
src/ipulse_shared_core_ftredge/dependencies/database.py
|
|
15
|
+
src/ipulse_shared_core_ftredge/dependencies/token_validation.py
|
|
12
16
|
src/ipulse_shared_core_ftredge/models/__init__.py
|
|
13
17
|
src/ipulse_shared_core_ftredge/models/api_response.py
|
|
14
18
|
src/ipulse_shared_core_ftredge/models/organisation.py
|
|
15
19
|
src/ipulse_shared_core_ftredge/models/resource_catalog_item.py
|
|
20
|
+
src/ipulse_shared_core_ftredge/models/subscription.py
|
|
16
21
|
src/ipulse_shared_core_ftredge/models/user_auth.py
|
|
17
22
|
src/ipulse_shared_core_ftredge/models/user_profile.py
|
|
18
23
|
src/ipulse_shared_core_ftredge/models/user_profile_update.py
|
|
19
24
|
src/ipulse_shared_core_ftredge/models/user_status.py
|
|
20
25
|
src/ipulse_shared_core_ftredge/services/__init__.py
|
|
21
|
-
src/ipulse_shared_core_ftredge/services/
|
|
22
|
-
|
|
26
|
+
src/ipulse_shared_core_ftredge/services/base_firestore_service.py
|
|
27
|
+
src/ipulse_shared_core_ftredge/services/exceptions.py
|
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
# pylint: disable=missing-module-docstring
|
|
2
|
-
from .models import ( UserAuth, UserProfile,
|
|
3
|
-
UserStatus, UserProfileUpdate,
|
|
4
|
-
Organisation)
|
|
5
|
-
|
|
6
|
-
from .exceptions import (BaseServiceException, ResourceNotFoundError, AuthorizationError,
|
|
7
|
-
ValidationError )
|
|
8
|
-
|
|
9
|
-
from .services import (BaseFirestoreService)
|
|
@@ -1,71 +0,0 @@
|
|
|
1
|
-
from fastapi import HTTPException
|
|
2
|
-
from typing import Optional, Any, Dict
|
|
3
|
-
|
|
4
|
-
class BaseServiceException(HTTPException):
|
|
5
|
-
def __init__(
|
|
6
|
-
self,
|
|
7
|
-
status_code: int,
|
|
8
|
-
detail: str,
|
|
9
|
-
resource_type: str,
|
|
10
|
-
resource_id: Optional[str] = None,
|
|
11
|
-
additional_info: Optional[Dict[str, Any]] = None
|
|
12
|
-
):
|
|
13
|
-
self.resource_type = resource_type
|
|
14
|
-
self.resource_id = resource_id
|
|
15
|
-
self.additional_info = additional_info or {}
|
|
16
|
-
|
|
17
|
-
# Build detailed message
|
|
18
|
-
detail_msg = f"{detail}"
|
|
19
|
-
if resource_type:
|
|
20
|
-
detail_msg += f" [Resource Type: {resource_type}]"
|
|
21
|
-
if resource_id:
|
|
22
|
-
detail_msg += f" [ID: {resource_id}]"
|
|
23
|
-
|
|
24
|
-
super().__init__(status_code=status_code, detail=detail_msg)
|
|
25
|
-
|
|
26
|
-
class ResourceNotFoundError(BaseServiceException):
|
|
27
|
-
def __init__(
|
|
28
|
-
self,
|
|
29
|
-
resource_type: str,
|
|
30
|
-
resource_id: str,
|
|
31
|
-
additional_info: Optional[Dict[str, Any]] = None
|
|
32
|
-
):
|
|
33
|
-
super().__init__(
|
|
34
|
-
status_code=404,
|
|
35
|
-
detail="Resource not found",
|
|
36
|
-
resource_type=resource_type,
|
|
37
|
-
resource_id=resource_id,
|
|
38
|
-
additional_info=additional_info
|
|
39
|
-
)
|
|
40
|
-
|
|
41
|
-
class AuthorizationError(BaseServiceException):
|
|
42
|
-
def __init__(
|
|
43
|
-
self,
|
|
44
|
-
resource_type: str,
|
|
45
|
-
action: str,
|
|
46
|
-
resource_id: Optional[str] = None,
|
|
47
|
-
additional_info: Optional[Dict[str, Any]] = None
|
|
48
|
-
):
|
|
49
|
-
super().__init__(
|
|
50
|
-
status_code=403,
|
|
51
|
-
detail=f"Not authorized to {action}",
|
|
52
|
-
resource_type=resource_type,
|
|
53
|
-
resource_id=resource_id,
|
|
54
|
-
additional_info=additional_info
|
|
55
|
-
)
|
|
56
|
-
|
|
57
|
-
class ValidationError(BaseServiceException):
|
|
58
|
-
def __init__(
|
|
59
|
-
self,
|
|
60
|
-
resource_type: str,
|
|
61
|
-
detail: str,
|
|
62
|
-
resource_id: Optional[str] = None,
|
|
63
|
-
additional_info: Optional[Dict[str, Any]] = None
|
|
64
|
-
):
|
|
65
|
-
super().__init__(
|
|
66
|
-
status_code=422,
|
|
67
|
-
detail=detail,
|
|
68
|
-
resource_type=resource_type,
|
|
69
|
-
resource_id=resource_id,
|
|
70
|
-
additional_info=additional_info
|
|
71
|
-
)
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
from .base_service import BaseFirestoreService
|
|
@@ -1,189 +0,0 @@
|
|
|
1
|
-
# pylint: disable=missing-module-docstring
|
|
2
|
-
# pylint: disable=import-error
|
|
3
|
-
# pylint: disable=missing-module-docstring
|
|
4
|
-
# pylint: disable=line-too-long
|
|
5
|
-
# pylint: disable=missing-function-docstring
|
|
6
|
-
|
|
7
|
-
import os
|
|
8
|
-
import json
|
|
9
|
-
from unittest.mock import MagicMock, patch
|
|
10
|
-
import pytest
|
|
11
|
-
from ipulse_shared_core_ftredge.utils_cloud_gcp import write_json_to_gcs
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
# Mocking Google Cloud Storage components for testing using pytest-mock
|
|
15
|
-
|
|
16
|
-
@pytest.fixture
|
|
17
|
-
def mock_blob(mocker):
|
|
18
|
-
mock_blob_class = mocker.patch('google.cloud.storage.Blob', autospec=True)
|
|
19
|
-
mock_blob = mock_blob_class.return_value
|
|
20
|
-
mock_blob.exists.return_value = False
|
|
21
|
-
return mock_blob
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
@pytest.fixture
|
|
25
|
-
def mock_bucket(mocker, mock_blob):
|
|
26
|
-
mock_bucket_class = mocker.patch('google.cloud.storage.Bucket', autospec=True)
|
|
27
|
-
mock_bucket = mock_bucket_class.return_value
|
|
28
|
-
mock_bucket.list_blobs.return_value = []
|
|
29
|
-
mock_bucket.blob.return_value = mock_blob # this avoids creating a new blob for each test, which will confuse the test results
|
|
30
|
-
return mock_bucket
|
|
31
|
-
|
|
32
|
-
@pytest.fixture
|
|
33
|
-
def mock_storage_client(mocker, mock_bucket):
|
|
34
|
-
mock_client_class = mocker.patch('google.cloud.storage.Client', autospec=True)
|
|
35
|
-
mock_client = mock_client_class.return_value
|
|
36
|
-
mock_client.bucket.return_value = mock_bucket
|
|
37
|
-
return mock_client
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
# --- Test Cases ---
|
|
42
|
-
|
|
43
|
-
def test_successful_gcs_upload(mock_storage_client):
|
|
44
|
-
test_data = {"key": "value"}
|
|
45
|
-
test_bucket_name = "test_bucket"
|
|
46
|
-
test_file_name = "test_file.json"
|
|
47
|
-
|
|
48
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name)
|
|
49
|
-
|
|
50
|
-
assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"
|
|
51
|
-
assert result['local_path'] is None
|
|
52
|
-
assert result['gcs_file_already_exists'] is False
|
|
53
|
-
assert result['gcs_file_overwritten'] is False
|
|
54
|
-
assert result['gcs_file_saved_with_increment'] is False
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
def test_invalid_data_type(mock_storage_client):
|
|
58
|
-
with pytest.raises(ValueError) as exc_info:
|
|
59
|
-
write_json_to_gcs(mock_storage_client, 12345, "test_bucket", "test_file.json")
|
|
60
|
-
assert str(exc_info.value) == "Unsupported data type. Data must be a list, dict, or str."
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
def test_overwrite_if_exists(mock_storage_client, mock_blob):
|
|
64
|
-
mock_blob.exists.return_value = True # Simulate existing file
|
|
65
|
-
test_data = {"key": "value"}
|
|
66
|
-
test_bucket_name = "test_bucket"
|
|
67
|
-
test_file_name = "test_file.json"
|
|
68
|
-
|
|
69
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, overwrite_if_exists=True)
|
|
70
|
-
|
|
71
|
-
assert result['gcs_file_overwritten'] is True
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
def test_overwrite_with_substring(mock_storage_client, mock_bucket):
|
|
75
|
-
mock_bucket.list_blobs.return_value = [MagicMock(name='test_prefix_file1.json'), MagicMock(name='test_prefix_file2.json')]
|
|
76
|
-
test_data = {"key": "value"}
|
|
77
|
-
test_bucket_name = "test_bucket"
|
|
78
|
-
test_file_name = "test_file.json"
|
|
79
|
-
test_prefix = 'test_prefix'
|
|
80
|
-
|
|
81
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
82
|
-
test_file_name, overwrite_if_exists=True,
|
|
83
|
-
file_exists_if_starts_with_prefix=test_prefix)
|
|
84
|
-
assert result['gcs_file_overwritten'] is True
|
|
85
|
-
assert result['gcs_file_exists_checked_on_name'] == test_prefix
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
def test_increment_if_exists(mock_storage_client, mock_blob):
|
|
89
|
-
mock_blob.exists.side_effect = [True, True, False] # Simulate existing files
|
|
90
|
-
test_data = {"key": "value"}
|
|
91
|
-
test_bucket_name = "test_bucket"
|
|
92
|
-
test_file_name = "test_file.json"
|
|
93
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, increment_if_exists=True)
|
|
94
|
-
assert result['gcs_path'] == f"gs://{test_bucket_name}/test_file_v2.json"
|
|
95
|
-
assert result['gcs_file_saved_with_increment'] is True
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
def test_overwrite_and_increment_raise_value_error(mock_storage_client):
|
|
99
|
-
test_data = {"key": "value"}
|
|
100
|
-
test_bucket_name = "test_bucket"
|
|
101
|
-
test_file_name = "test_file.json"
|
|
102
|
-
with pytest.raises(ValueError) as exc_info:
|
|
103
|
-
write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
104
|
-
test_file_name, overwrite_if_exists=True,
|
|
105
|
-
increment_if_exists=True)
|
|
106
|
-
assert str(exc_info.value) == "Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
@patch('os.path.exists', return_value=False) # Assume local file exists for simplicity
|
|
110
|
-
@patch('builtins.open', new_callable=MagicMock)
|
|
111
|
-
def test_local_save_after_gcs_failure(mock_open, mock_exists, mock_storage_client, mock_blob):
|
|
112
|
-
mock_blob.upload_from_string.side_effect = Exception("GCS Upload Failed")
|
|
113
|
-
test_data = {"key": "value"}
|
|
114
|
-
test_bucket_name = "test_bucket"
|
|
115
|
-
test_file_name = "test_file.json"
|
|
116
|
-
|
|
117
|
-
# Expecting an exception because GCS upload fails
|
|
118
|
-
with pytest.raises(Exception) as exc_info:
|
|
119
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, save_locally=True)
|
|
120
|
-
|
|
121
|
-
assert "GCS Upload Failed" in str(exc_info.value)
|
|
122
|
-
mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
@patch('builtins.open', new_callable=MagicMock)
|
|
126
|
-
def test_local_save_with_custom_path(mock_open, mock_storage_client):
|
|
127
|
-
local_path = "/my/custom/path"
|
|
128
|
-
test_data = {"key": "value"}
|
|
129
|
-
test_bucket_name = "test_bucket"
|
|
130
|
-
test_file_name = "test_file.json"
|
|
131
|
-
|
|
132
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
133
|
-
test_file_name, local_path=local_path)
|
|
134
|
-
|
|
135
|
-
assert result['local_path'] == os.path.join(local_path, test_file_name)
|
|
136
|
-
mock_open.assert_called_once()
|
|
137
|
-
|
|
138
|
-
@patch('os.path.exists', side_effect=[True, True, False])
|
|
139
|
-
@patch('builtins.open', new_callable=MagicMock)
|
|
140
|
-
def test_local_save_with_increment(mock_open, mock_exists, mock_storage_client, mock_blob):
|
|
141
|
-
test_data = {"key": "value"}
|
|
142
|
-
test_bucket_name = "test_bucket"
|
|
143
|
-
test_file_name = "test_file.json"
|
|
144
|
-
|
|
145
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
146
|
-
test_file_name, save_locally=True, increment_if_exists=True)
|
|
147
|
-
|
|
148
|
-
assert f"/tmp/test_file_v1.json" == result['local_path']
|
|
149
|
-
mock_open.assert_called_once()
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
@patch('builtins.open', new_callable=MagicMock)
|
|
153
|
-
def test_local_save_overwrite(mock_open, mock_storage_client):
|
|
154
|
-
test_data = {"key": "value"}
|
|
155
|
-
test_bucket_name = "test_bucket"
|
|
156
|
-
test_file_name = "test_file.json"
|
|
157
|
-
|
|
158
|
-
# Execute function
|
|
159
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
160
|
-
test_file_name, save_locally=True, overwrite_if_exists=True)
|
|
161
|
-
|
|
162
|
-
# Check results
|
|
163
|
-
assert result['local_path'] == os.path.join("/tmp", test_file_name)
|
|
164
|
-
mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
|
|
165
|
-
file_handle = mock_open()
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
@patch('os.path.exists', return_value=True)
|
|
169
|
-
@patch('builtins.open', new_callable=MagicMock)
|
|
170
|
-
def test_local_save_skip(mock_open, mock_exists, mock_storage_client):
|
|
171
|
-
test_data = {"key": "value"}
|
|
172
|
-
test_bucket_name = "test_bucket"
|
|
173
|
-
test_file_name = "test_file.json"
|
|
174
|
-
|
|
175
|
-
result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
|
|
176
|
-
test_file_name, save_locally=True, overwrite_if_exists=False)
|
|
177
|
-
|
|
178
|
-
assert result['local_path'] == os.path.join("/tmp", test_file_name)
|
|
179
|
-
mock_open.assert_not_called()
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
def test_string_data_handling(mock_storage_client, mock_blob):
|
|
183
|
-
test_string_data = "This is a test string."
|
|
184
|
-
test_bucket_name = "test_bucket"
|
|
185
|
-
test_file_name = "test_file.json"
|
|
186
|
-
|
|
187
|
-
result = write_json_to_gcs(mock_storage_client, test_string_data, test_bucket_name, test_file_name)
|
|
188
|
-
|
|
189
|
-
assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|