trovesuite 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trovesuite/__init__.py +16 -0
- trovesuite/auth/__init__.py +16 -0
- trovesuite/auth/auth_base.py +4 -0
- trovesuite/auth/auth_controller.py +10 -0
- trovesuite/auth/auth_read_dto.py +18 -0
- trovesuite/auth/auth_service.py +334 -0
- trovesuite/auth/auth_write_dto.py +10 -0
- trovesuite/configs/__init__.py +16 -0
- trovesuite/configs/database.py +221 -0
- trovesuite/configs/logging.py +261 -0
- trovesuite/configs/settings.py +153 -0
- trovesuite/entities/__init__.py +11 -0
- trovesuite/entities/health.py +84 -0
- trovesuite/entities/sh_response.py +61 -0
- trovesuite/utils/__init__.py +11 -0
- trovesuite/utils/helper.py +36 -0
- trovesuite-1.0.0.dist-info/METADATA +572 -0
- trovesuite-1.0.0.dist-info/RECORD +21 -0
- trovesuite-1.0.0.dist-info/WHEEL +5 -0
- trovesuite-1.0.0.dist-info/licenses/LICENSE +21 -0
- trovesuite-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import logging.config
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Dict, Any
|
|
8
|
+
import json
|
|
9
|
+
from .settings import db_settings
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ColoredFormatter(logging.Formatter):
    """Custom formatter with colors for different log levels"""

    # ANSI escape codes per level name
    COLORS = {
        'DEBUG': '\033[36m',     # Cyan
        'INFO': '\033[32m',      # Green
        'WARNING': '\033[33m',   # Yellow
        'ERROR': '\033[31m',     # Red
        'CRITICAL': '\033[35m',  # Magenta
        'RESET': '\033[0m'       # Reset
    }

    def format(self, record):
        """Format the record with a color-wrapped level name.

        BUG FIX: the original implementation permanently overwrote
        ``record.levelname`` with the colorized string. A single LogRecord
        instance is shared by every handler attached to the logger, so the
        plain-text rotating file handlers then received ANSI escape codes.
        The level name is restored after formatting so other formatters see
        the unmodified record.
        """
        original_levelname = record.levelname
        log_color = self.COLORS.get(original_levelname, self.COLORS['RESET'])
        reset_color = self.COLORS['RESET']

        # Temporarily colorize the level name for this formatter only.
        record.levelname = f"{log_color}{original_levelname}{reset_color}"
        try:
            return super().format(record)
        finally:
            # Undo the mutation so downstream handlers get plain text.
            record.levelname = original_levelname
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class JSONFormatter(logging.Formatter):
    """Custom JSON formatter for structured logging"""

    def format(self, record):
        """Serialize the record's core attributes as one JSON object per line."""
        payload = {
            'timestamp': datetime.fromtimestamp(record.created).isoformat(),
            'level': record.levelname,
            'logger': record.name,
            'message': record.getMessage(),
            'module': record.module,
            'function': record.funcName,
            'line': record.lineno,
            'thread': record.thread,
            'process': record.process,
        }

        # Include traceback text when the record carries exception info.
        if record.exc_info:
            payload['exception'] = self.formatException(record.exc_info)

        # Merge caller-supplied structured fields, when present.
        if hasattr(record, 'extra_fields'):
            payload.update(record.extra_fields)

        return json.dumps(payload, ensure_ascii=False)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class LoggingConfig:
    """Centralized logging configuration for the application.

    Builds a ``logging.config.dictConfig``-compatible dictionary with a
    console handler plus rotating file handlers for info, error, and
    (for the 'app' logger only) JSON debug output.
    """

    def __init__(self):
        self.settings = db_settings
        self.log_level = self._get_log_level()
        self.log_dir = self._setup_log_directory()

    def _get_log_level(self) -> str:
        """Get log level from environment or settings.

        DEBUG mode always forces the DEBUG level, overriding LOG_LEVEL.
        """
        env_level = os.getenv("LOG_LEVEL", "INFO").upper()
        if db_settings.DEBUG:
            return "DEBUG"
        return env_level

    def _setup_log_directory(self) -> Path:
        """Create (if needed) and return the log directory path.

        CONSISTENCY FIX: the original hard-coded ``Path("logs")`` and ignored
        the LOG_DIR setting defined in Settings. The setting's default is
        still "logs", so default behavior is unchanged; ``parents=True``
        additionally tolerates a nested LOG_DIR value.
        """
        log_dir = Path(self.settings.LOG_DIR)
        log_dir.mkdir(parents=True, exist_ok=True)
        return log_dir

    def get_logging_config(self) -> Dict[str, Any]:
        """Return complete logging configuration dictionary."""
        return {
            'version': 1,
            'disable_existing_loggers': False,
            'formatters': {
                'detailed': {
                    'format': '%(asctime)s | %(levelname)-8s | %(name)s | %(module)s.%(funcName)s:%(lineno)d | %(message)s',
                    'datefmt': '%Y-%m-%d %H:%M:%S'
                },
                'colored': {
                    '()': ColoredFormatter,
                    'format': '%(asctime)s | %(levelname)-8s | %(name)s | %(module)s.%(funcName)s:%(lineno)d | %(message)s',
                    'datefmt': '%Y-%m-%d %H:%M:%S'
                },
                'json': {
                    '()': JSONFormatter
                },
                'simple': {
                    'format': '%(levelname)-8s | %(name)s | %(message)s'
                }
            },
            'handlers': {
                # Colors only when stdout is an interactive terminal.
                'console': {
                    'class': 'logging.StreamHandler',
                    'level': self.log_level,
                    'formatter': 'colored' if sys.stdout.isatty() else 'detailed',
                    'stream': 'ext://sys.stdout'
                },
                'file_info': {
                    'class': 'logging.handlers.RotatingFileHandler',
                    'level': 'INFO',
                    'formatter': 'detailed',
                    'filename': str(self.log_dir / 'app.log'),
                    'maxBytes': 10485760,  # 10MB
                    'backupCount': 5,
                    'encoding': 'utf8'
                },
                'file_error': {
                    'class': 'logging.handlers.RotatingFileHandler',
                    'level': 'ERROR',
                    'formatter': 'detailed',
                    'filename': str(self.log_dir / 'error.log'),
                    'maxBytes': 10485760,  # 10MB
                    'backupCount': 5,
                    'encoding': 'utf8'
                },
                # JSON debug stream; attached only to the 'app' logger below.
                'file_debug': {
                    'class': 'logging.handlers.RotatingFileHandler',
                    'level': 'DEBUG',
                    'formatter': 'json',
                    'filename': str(self.log_dir / 'debug.log'),
                    'maxBytes': 10485760,  # 10MB
                    'backupCount': 3,
                    'encoding': 'utf8'
                }
            },
            'loggers': {
                '': {  # Root logger
                    'level': self.log_level,
                    'handlers': ['console', 'file_info', 'file_error'],
                    'propagate': False
                },
                'app': {
                    'level': self.log_level,
                    'handlers': ['console', 'file_info', 'file_error', 'file_debug'],
                    'propagate': False
                },
                'uvicorn': {
                    'level': 'INFO',
                    'handlers': ['console', 'file_info'],
                    'propagate': False
                },
                'uvicorn.access': {
                    'level': 'INFO',
                    'handlers': ['console', 'file_info'],
                    'propagate': False
                },
                'fastapi': {
                    'level': 'INFO',
                    'handlers': ['console', 'file_info'],
                    'propagate': False
                },
                # Keep chatty DB libraries at WARNING.
                'sqlalchemy': {
                    'level': 'WARNING',
                    'handlers': ['console', 'file_info'],
                    'propagate': False
                },
                'psycopg2': {
                    'level': 'WARNING',
                    'handlers': ['console', 'file_info'],
                    'propagate': False
                }
            }
        }
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def setup_logging() -> logging.Logger:
    """Initialize and configure logging for the application"""
    config = LoggingConfig()

    # Apply the dictConfig-based configuration built by LoggingConfig.
    logging.config.dictConfig(config.get_logging_config())

    # Main application logger
    logger = logging.getLogger('app')

    # Record the effective configuration at startup.
    for message in (
        f"Logging initialized with level: {config.log_level}",
        f"Log directory: {config.log_dir}",
        f"Environment: {config.settings.ENVIRONMENT}",
        f"Debug mode: {config.settings.DEBUG}",
    ):
        logger.info(message)

    return logger
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def get_logger(name: str = None) -> logging.Logger:
    """Return the 'app' logger, or a child logger 'app.<name>'."""
    logger_name = 'app' if name is None else f'app.{name}'
    return logging.getLogger(logger_name)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
class LoggerMixin:
    """Mixin class to add logging capabilities to any class"""

    @property
    def logger(self) -> logging.Logger:
        """Logger namespaced under 'app' with this class's name."""
        return logging.getLogger(f'app.{type(self).__name__}')
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def log_function_call(func):
    """Decorator to log function calls with parameters and execution time.

    BUG FIX: the original wrapper did not use ``functools.wraps``, so the
    decorated function lost its ``__name__``, ``__doc__`` and signature
    metadata (breaking introspection and FastAPI-style tooling).
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = get_logger(func.__module__)

        # Log function entry
        logger.debug(f"Calling {func.__name__} with args={args}, kwargs={kwargs}")

        start_time = datetime.now()
        try:
            result = func(*args, **kwargs)
            execution_time = (datetime.now() - start_time).total_seconds()
            logger.debug(f"Function {func.__name__} completed successfully in {execution_time:.4f}s")
            return result
        except Exception as e:
            execution_time = (datetime.now() - start_time).total_seconds()
            logger.error(f"Function {func.__name__} failed after {execution_time:.4f}s: {str(e)}", exc_info=True)
            raise

    return wrapper
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def log_async_function_call(func):
    """Decorator to log async function calls with parameters and execution time.

    BUG FIX: the original wrapper did not use ``functools.wraps``, so the
    decorated coroutine function lost its ``__name__``, ``__doc__`` and
    signature metadata.
    """
    import functools

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        logger = get_logger(func.__module__)

        # Log function entry
        logger.debug(f"Calling async {func.__name__} with args={args}, kwargs={kwargs}")

        start_time = datetime.now()
        try:
            result = await func(*args, **kwargs)
            execution_time = (datetime.now() - start_time).total_seconds()
            logger.debug(f"Async function {func.__name__} completed successfully in {execution_time:.4f}s")
            return result
        except Exception as e:
            execution_time = (datetime.now() - start_time).total_seconds()
            logger.error(f"Async function {func.__name__} failed after {execution_time:.4f}s: {str(e)}", exc_info=True)
            raise

    return wrapper
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
# Initialize logging when this module is imported.
# NOTE(review): import-time side effect — this configures the root/'app'
# loggers and creates the log directory as soon as the module is imported,
# which may surprise library consumers; confirm this is intentional.
main_logger = setup_logging()
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import warnings
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
class Settings:
    """Settings configuration for TroveSuite Auth Service.

    All values are read from environment variables at class-definition
    (import) time, with development-friendly defaults.
    """

    # =============================================================================
    # DATABASE CONFIGURATION
    # =============================================================================
    # Full connection URL; the placeholder default doubles as a sentinel
    # checked by the database_url property below.
    DATABASE_URL: str = os.getenv(
        "DATABASE_URL",
        "postgresql://username:password@localhost:5432/database_name"
    )

    # Alternative database configuration (used when DATABASE_URL is left at
    # its placeholder default)
    DB_USER: Optional[str] = os.getenv("DB_USER")
    DB_HOST: Optional[str] = os.getenv("DB_HOST")
    DB_NAME: Optional[str] = os.getenv("DB_NAME")
    DB_PORT: int = int(os.getenv("DB_PORT", "5432"))
    DB_PASSWORD: Optional[str] = os.getenv("DB_PASSWORD")
    ENVIRONMENT: str = os.getenv("ENVIRONMENT", "development")

    # =============================================================================
    # APPLICATION SETTINGS
    # =============================================================================
    APP_NAME: str = os.getenv("APP_NAME", "TroveSuite Auth Service")
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"

    # =============================================================================
    # SECURITY SETTINGS
    # =============================================================================
    ALGORITHM: str = os.getenv("ALGORITHM", "HS256")
    SECRET_KEY: str = os.getenv("SECRET_KEY", "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7")
    ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "60"))

    # =============================================================================
    # LOGGING SETTINGS
    # =============================================================================
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
    LOG_FORMAT: str = os.getenv("LOG_FORMAT", "detailed")  # detailed, json, simple
    # BUG FIX: the original compared against "false", so the default value
    # ("False") made LOG_TO_FILE True and any truthy env value made it False —
    # the exact inverse of the intent. Compare against "true" like DEBUG above.
    LOG_TO_FILE: bool = os.getenv("LOG_TO_FILE", "False").lower() == "true"
    LOG_MAX_SIZE: int = int(os.getenv("LOG_MAX_SIZE", "10485760"))  # 10MB
    LOG_BACKUP_COUNT: int = int(os.getenv("LOG_BACKUP_COUNT", "5"))
    LOG_DIR: str = os.getenv("LOG_DIR", "logs")

    # =============================================================================
    # DATABASE TABLE NAMES
    # =============================================================================
    # Main schema tables
    MAIN_TENANTS_TABLE: str = os.getenv("MAIN_TENANTS_TABLE", "tenants")
    ROLE_PERMISSIONS_TABLE: str = os.getenv("ROLE_PERMISSIONS_TABLE", "role_permissions")

    # Tenant-specific tables (used in queries with tenant schema)
    TENANT_LOGIN_SETTINGS_TABLE: str = os.getenv("TENANT_LOGIN_SETTINGS_TABLE", "login_settings")
    USER_GROUPS_TABLE: str = os.getenv("USER_GROUPS_TABLE", "user_groups")
    ASSIGN_ROLES_TABLE: str = os.getenv("ASSIGN_ROLES_TABLE", "assign_roles")

    # =============================================================================
    # AZURE CONFIGURATION (Optional - for queue functionality)
    # =============================================================================
    STORAGE_ACCOUNT_NAME: str = os.getenv("STORAGE_ACCOUNT_NAME", "")
    USER_ASSIGNED_MANAGED_IDENTITY: str = os.getenv("USER_ASSIGNED_MANAGED_IDENTITY", "")

    @property
    def database_url(self) -> str:
        """Get the database URL, either from DATABASE_URL or constructed from
        individual components.

        Raises:
            ValueError: if DATABASE_URL is the placeholder and any of
                DB_USER/DB_HOST/DB_NAME/DB_PASSWORD is missing.
        """
        if self.DATABASE_URL != "postgresql://username:password@localhost:5432/database_name":
            return self.DATABASE_URL

        # Validate individual components
        if not all([self.DB_USER, self.DB_HOST, self.DB_NAME, self.DB_PASSWORD]):
            missing = []
            if not self.DB_USER:
                missing.append("DB_USER")
            if not self.DB_HOST:
                missing.append("DB_HOST")
            if not self.DB_NAME:
                missing.append("DB_NAME")
            if not self.DB_PASSWORD:
                missing.append("DB_PASSWORD")

            raise ValueError(
                f"Database configuration incomplete. Missing environment variables: {', '.join(missing)}. "
                f"Please set these variables or provide a complete DATABASE_URL."
            )

        return f"postgresql://{self.DB_USER}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"

    def validate_configuration(self) -> None:
        """Validate the current configuration and warn about potential issues.

        Emits UserWarning for each problem found; never raises.
        """
        warnings_list = []

        # Check for default secret key
        if self.SECRET_KEY == "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7":
            warnings_list.append(
                "SECRET_KEY is using the default value. This is insecure for production. "
                "Please set a strong, unique SECRET_KEY environment variable."
            )

        # Check for development environment in production-like settings
        if self.ENVIRONMENT == "development" and self.DEBUG is False:
            warnings_list.append(
                "ENVIRONMENT is set to 'development' but DEBUG is False. "
                "Consider setting ENVIRONMENT to 'production' for production deployments."
            )

        # Check database configuration (property raises if incomplete)
        try:
            self.database_url
        except ValueError as e:
            warnings_list.append(f"Database configuration issue: {str(e)}")

        # Check for missing Azure configuration if needed
        if self.ENVIRONMENT == "production" and not self.STORAGE_ACCOUNT_NAME:
            warnings_list.append(
                "STORAGE_ACCOUNT_NAME is not set. Azure queue functionality may not work properly."
            )

        # Emit warnings
        for warning in warnings_list:
            warnings.warn(warning, UserWarning)

    def get_configuration_summary(self) -> dict:
        """Get a summary of the current configuration (excluding sensitive data).

        NOTE(review): the key casing is inconsistent (some UPPER_CASE, some
        lower_case); keys are kept byte-identical because callers may rely
        on them — confirm before normalizing.
        """
        return {
            "app_name": self.APP_NAME,
            "environment": self.ENVIRONMENT,
            "debug": self.DEBUG,
            "database_host": self.DB_HOST,
            "database_port": self.DB_PORT,
            "database_name": self.DB_NAME,
            "database_user": self.DB_USER,
            "log_level": self.LOG_LEVEL,
            "log_format": self.LOG_FORMAT,
            "log_to_file": self.LOG_TO_FILE,
            "algorithm": self.ALGORITHM,
            "access_token_expire_minutes": self.ACCESS_TOKEN_EXPIRE_MINUTES,
            "MAIN_TENANTS_TABLE": self.MAIN_TENANTS_TABLE,
            "role_permissions_table": self.ROLE_PERMISSIONS_TABLE,
            "TENANT_LOGIN_SETTINGS_TABLE": self.TENANT_LOGIN_SETTINGS_TABLE,
            "user_groups_table": self.USER_GROUPS_TABLE,
            "assign_roles_table": self.ASSIGN_ROLES_TABLE,
        }
|
|
145
|
+
|
|
146
|
+
# Global settings instance
db_settings = Settings()

# Validate configuration on import; validation problems are reported as
# warnings so a misconfiguration never prevents the module from importing.
try:
    db_settings.validate_configuration()
except Exception as e:
    # BUG FIX: warnings.warn() does not support logging-style lazy
    # %-formatting — its second positional argument is the warning CATEGORY,
    # so the original call passed str(e) as the category and raised
    # TypeError. Format the message eagerly instead.
    warnings.warn(f"Configuration validation failed: {e}", UserWarning)
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from fastapi import APIRouter
|
|
2
|
+
from src.entities.shared.shared_response import Respons
|
|
3
|
+
from src.configs.settings import db_settings
|
|
4
|
+
from src.configs.database import DatabaseManager
|
|
5
|
+
from src.configs.logging import get_logger
|
|
6
|
+
|
|
7
|
+
health_check_router = APIRouter(tags=["Health Path"])
logger = get_logger("health")

@health_check_router.get("/health", response_model=Respons[dict])
async def health():
    """Health check endpoint.

    Reports application metadata plus database connectivity; returns 200
    with an overall status of "healthy" or "degraded", or 500 on failure.
    """
    try:
        # Basic application health
        app_health = {
            "status": "healthy",
            "app_name": db_settings.APP_NAME,
            # BUG FIX: Settings defines no APP_VERSION attribute, so the
            # original direct access raised AttributeError and every health
            # check reported 500. Fall back to "unknown" when absent.
            "version": getattr(db_settings, "APP_VERSION", "unknown"),
            "environment": db_settings.ENVIRONMENT,
            "debug": db_settings.DEBUG
        }

        # Database health check
        db_health = DatabaseManager.health_check()

        # Overall health status: degrade if the database is unhealthy
        overall_status = "healthy" if db_health["status"] == "healthy" else "degraded"

        health_data = {
            "overall_status": overall_status,
            "application": app_health,
            "database": db_health
        }

        logger.info(f"Health check completed with status: {overall_status}")

        # NOTE(review): Respons declares a field named `detail`, but this
        # passes `details=` — confirm which spelling the model expects.
        return Respons[dict](
            details=f"Health check successful - Status: {overall_status}",
            data=[health_data],
            success=True,
            status_code=200
        )

    except Exception as e:
        logger.error(f"Health check failed: {str(e)}")
        return Respons[dict](
            details="Health check failed",
            error=str(e),
            data=[],
            success=False,
            status_code=500
        )
|
|
53
|
+
|
|
54
|
+
@health_check_router.get("/health/db", response_model=Respons[dict])
async def database_health():
    """Database-specific health check endpoint."""
    try:
        db_health = DatabaseManager.health_check()

        # Healthy database -> 200; unhealthy -> 503 with the reported error.
        if db_health["status"] == "healthy":
            return Respons[dict](
                details="Database health check successful",
                data=[db_health],
                success=True,
                status_code=200
            )
        return Respons[dict](
            details="Database health check failed",
            error=db_health.get("error", "Unknown database error"),
            data=[db_health],
            success=False,
            status_code=503
        )

    except Exception as e:
        # Unexpected failure while running the check itself -> 500.
        logger.error(f"Database health check failed: {str(e)}")
        return Respons[dict](
            details="Database health check failed",
            error=str(e),
            data=[],
            success=False,
            status_code=500
        )
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from pydantic import BaseModel
|
|
2
|
+
from typing import Optional, List, Any, TypeVar, Generic
|
|
3
|
+
|
|
4
|
+
class PaginationMeta(BaseModel):
    """Pagination metadata"""
    page: int       # page index (presumably 1-based — confirm against callers)
    size: int       # number of items per page
    total: int      # total item count across all pages
    has_next: bool  # True when at least one more page follows
|
|
10
|
+
|
|
11
|
+
# Generic payload type carried in Respons.data
T = TypeVar('T')

class Respons(BaseModel, Generic[T]):
    """Generic API response envelope used across the service.

    NOTE(review): the name "Respons" (sic) is part of the public API.
    Callers elsewhere construct this with a `details=` keyword although the
    field here is `detail` — confirm which spelling is intended.
    """
    detail: Optional[str] = None    # human-readable outcome description
    error: Optional[str] = None     # error message when success is False
    data: Optional[List[T]] = None  # payload items
    status_code: int = 200          # HTTP-style status code
    success: bool = True            # overall success flag
    pagination: Optional[PaginationMeta] = None  # optional paging info
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ResponseException(Exception):
    """Custom exception that carries a response model"""

    def __init__(self, message: str, response: Respons):
        # Initialize the base Exception with the message, then attach the
        # structured response for handlers to serialize.
        super().__init__(message)
        self.message = message
        self.response = response
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def create_error_response(
    error_message: str,
    status_code: int = 500,
    details: str = "An error occurred",
    data: Optional[List[Any]] = None
) -> dict:
    """Helper function to create error response dictionary"""
    payload = {
        "detail": details,
        "error": error_message,
        "data": data or [],
        "status_code": status_code,
        "success": False,
    }
    return payload
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def raise_with_response(
    message: str,
    status_code: int = 500,
    details: str = "An error occurred",
    data: Optional[List[Any]] = None
) -> None:
    """Helper function to raise ResponseException with error response"""
    payload = Respons(
        detail=details,
        error=message,
        data=data or [],
        status_code=status_code,
        success=False
    )
    raise ResponseException(message, payload)
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import List
|
|
3
|
+
from typing import TypeVar
|
|
4
|
+
|
|
5
|
+
logger = logging.getLogger(__name__)

# Generic DTO element type returned by Helper.map_to_dto
T = TypeVar("T")


class Helper:
    """Small static utility helpers shared across the package."""

    @staticmethod
    def map_to_dto(data: list, dto_class: type) -> List[T]:
        """Helper method to convert database results to DTO objects.

        FIX: the original annotated ``dto_class`` as ``T`` (an instance of
        the type variable); a class object is what is actually passed and
        called, so ``type`` is the honest annotation. The manual
        ``hasattr(row, 'items')`` branch is also unnecessary: ``dict(row)``
        handles both mappings (e.g. psycopg2 RealDictRow, plain dict) and
        iterables of key/value pairs.

        Args:
            data: List of database query results (dictionaries)
            dto_class: The DTO class to instantiate

        Returns:
            List of DTO instances (empty list for empty/None input)

        Raises:
            Exception: re-raises any error from row conversion or DTO
                construction after logging it.
        """
        if not data:
            return []

        try:
            return [dto_class(**dict(row)) for row in data]
        except Exception as e:
            logger.error(f"Error mapping data to DTO: {str(e)}")
            raise
|