trovesuite 1.0.5__py3-none-any.whl → 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trovesuite/__init__.py +7 -4
- trovesuite/auth/auth_controller.py +5 -5
- trovesuite/configs/database.py +104 -22
- trovesuite/entities/health.py +4 -4
- trovesuite/notification/notification_controller.py +5 -5
- trovesuite/notification/notification_read_dto.py +1 -1
- trovesuite/notification/notification_service.py +3 -3
- trovesuite/notification/notification_write_dto.py +1 -1
- trovesuite/storage/__init__.py +42 -0
- trovesuite/storage/storage_base.py +63 -0
- trovesuite/storage/storage_controller.py +198 -0
- trovesuite/storage/storage_read_dto.py +74 -0
- trovesuite/storage/storage_service.py +529 -0
- trovesuite/storage/storage_write_dto.py +70 -0
- {trovesuite-1.0.5.dist-info → trovesuite-1.0.7.dist-info}/METADATA +57 -3
- trovesuite-1.0.7.dist-info/RECORD +33 -0
- trovesuite-1.0.5.dist-info/RECORD +0 -27
- {trovesuite-1.0.5.dist-info → trovesuite-1.0.7.dist-info}/WHEEL +0 -0
- {trovesuite-1.0.5.dist-info → trovesuite-1.0.7.dist-info}/licenses/LICENSE +0 -0
- {trovesuite-1.0.5.dist-info → trovesuite-1.0.7.dist-info}/top_level.txt +0 -0
trovesuite/__init__.py
CHANGED
@@ -1,18 +1,21 @@
 """
 TroveSuite Package
 
-A comprehensive authentication, authorization, and
-Provides JWT token validation, user authorization, permission checking,
+A comprehensive authentication, authorization, notification, and storage service for ERP systems.
+Provides JWT token validation, user authorization, permission checking, notification capabilities,
+and Azure Storage blob management.
 """
 
 from .auth import AuthService
 from .notification import NotificationService
+from .storage import StorageService
 
-__version__ = "1.0.5"
+__version__ = "1.0.7"
 __author__ = "Bright Debrah Owusu"
 __email__ = "owusu.debrah@deladetech.com"
 
 __all__ = [
     "AuthService",
-    "NotificationService"
+    "NotificationService",
+    "StorageService"
 ]
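With this release the storage service is exported next to the existing auth and notification services, so consumers import all three from the top-level package. A minimal sketch of the import surface, based only on the names in __all__ above (service constructors and call signatures are not shown in this diff):

    from trovesuite import AuthService, NotificationService, StorageService
    import trovesuite

    # Package metadata bumped in this release
    print(trovesuite.__version__)  # "1.0.7"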
trovesuite/auth/auth_controller.py
CHANGED
@@ -1,10 +1,10 @@
 from fastapi import APIRouter
-from
-from
-from
-from
+from .auth_write_dto import AuthControllerWriteDto
+from .auth_read_dto import AuthControllerReadDto
+from .auth_service import AuthService
+from ..entities.sh_response import Respons
 
-auth_router = APIRouter()
+auth_router = APIRouter(tags=["Auth"])
 
 @auth_router.post("/auth", response_model=Respons[AuthControllerReadDto])
 async def authorize(data: AuthControllerWriteDto):
trovesuite/configs/database.py
CHANGED
@@ -17,20 +17,25 @@ _connection_pool: Optional[psycopg2.pool.ThreadedConnectionPool] = None
 
 class DatabaseConfig:
     """Database configuration and connection management"""
-
+
     def __init__(self):
         self.settings = db_settings
         self.database_url = self.settings.database_url
         self.pool_size = 5
         self.max_overflow = 10
-
+
     def get_connection_params(self) -> dict:
         """Get database connection parameters"""
         if self.settings.DATABASE_URL:
             # Use full DATABASE_URL if available
             return {
                 "dsn": self.settings.DATABASE_URL,
-                "cursor_factory": RealDictCursor
+                "cursor_factory": RealDictCursor,
+                "keepalives": 1,
+                "keepalives_idle": 30,
+                "keepalives_interval": 10,
+                "keepalives_count": 5,
+                "connect_timeout": 10
             }
 
         # fallback to individual DB_* variables
@@ -41,9 +46,14 @@ class DatabaseConfig:
             "user": self.settings.DB_USER,
             "password": self.settings.DB_PASSWORD,
             "cursor_factory": RealDictCursor,
-            "application_name": f"{self.settings.APP_NAME}_{self.settings.ENVIRONMENT}"
+            "application_name": f"{self.settings.APP_NAME}_{self.settings.ENVIRONMENT}",
+            "keepalives": 1,
+            "keepalives_idle": 30,
+            "keepalives_interval": 10,
+            "keepalives_count": 5,
+            "connect_timeout": 10
         }
-
+
     def create_connection_pool(self) -> psycopg2.pool.ThreadedConnectionPool:
         """Create a connection pool for psycopg2"""
         try:
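The added keepalive and timeout entries are standard libpq connection options that psycopg2 forwards to the database connection; they let the client detect a silently dropped TCP connection (for example after an idle cloud gateway timeout) instead of blocking on the next query. A rough sketch of how such a parameter dict is consumed, with illustrative values rather than the package's real settings:

    import psycopg2
    from psycopg2.extras import RealDictCursor

    # Illustrative values only; the package builds this dict from its settings object.
    params = {
        "dsn": "postgresql://user:password@db.example.com:5432/appdb",
        "cursor_factory": RealDictCursor,
        "keepalives": 1,            # enable TCP keepalive probes
        "keepalives_idle": 30,      # seconds of idle time before the first probe
        "keepalives_interval": 10,  # seconds between probes
        "keepalives_count": 5,      # failed probes before the connection is declared dead
        "connect_timeout": 10,      # give up on the initial connect after 10 seconds
    }

    conn = psycopg2.connect(**params)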
@@ -57,7 +67,7 @@ class DatabaseConfig:
         except Exception as e:
             logger.error(f"Failed to create database connection pool: {str(e)}")
             raise
-
+
     def test_connection(self) -> bool:
         """Test database connection"""
         try:
@@ -81,17 +91,17 @@ db_config = DatabaseConfig()
 def initialize_database():
     """Initialize database connections and pool"""
     global _connection_pool
-
+
     try:
         # Test connection first
         if not db_config.test_connection():
             raise Exception("Database connection test failed")
-
+
         # Create connection pool
         _connection_pool = db_config.create_connection_pool()
-
+
         logger.info("Database initialization completed successfully")
-
+
     except Exception as e:
         logger.error(f"Database initialization failed: {str(e)}")
         raise
@@ -114,6 +124,17 @@ def get_connection_pool() -> psycopg2.pool.ThreadedConnectionPool:
     return _connection_pool
 
 
+def _validate_connection(conn) -> bool:
+    """Validate if a connection is still alive"""
+    try:
+        # Test if connection is alive with a simple query
+        with conn.cursor() as cursor:
+            cursor.execute("SELECT 1")
+        return True
+    except (psycopg2.OperationalError, psycopg2.InterfaceError):
+        return False
+
+
 @contextmanager
 def get_db_connection():
     """Get a database connection from the pool (context manager)"""
@@ -121,52 +142,79 @@ def get_db_connection():
     conn = None
     try:
         conn = pool.getconn()
+
+        # Validate connection before using it
+        if not _validate_connection(conn):
+            logger.warning("Stale connection detected, getting new connection")
+            pool.putconn(conn, close=True)
+            conn = pool.getconn()
+
         logger.debug("Database connection acquired from pool")
         yield conn
     except Exception as e:
         logger.error(f"Database connection error: {str(e)}")
         if conn:
-
+            try:
+                # Only rollback if connection is still open
+                if not conn.closed:
+                    conn.rollback()
+            except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                logger.warning(f"Could not rollback closed connection: {str(rollback_error)}")
         raise
     finally:
         if conn:
-
-
+            try:
+                # If connection is broken, close it instead of returning to pool
+                if conn.closed:
+                    pool.putconn(conn, close=True)
+                else:
+                    pool.putconn(conn)
+                logger.debug("Database connection returned to pool")
+            except Exception as put_error:
+                logger.error(f"Error returning connection to pool: {str(put_error)}")
 
 
 @contextmanager
 def get_db_cursor():
     """Get a database cursor (context manager)"""
     with get_db_connection() as conn:
-        cursor = conn.cursor()
+        cursor = conn.cursor(cursor_factory=RealDictCursor)
         try:
             yield cursor
-            conn.
+            if not conn.closed:
+                conn.commit()
         except Exception as e:
-            conn.
+            if not conn.closed:
+                try:
+                    conn.rollback()
+                except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                    logger.warning(f"Could not rollback transaction on closed connection: {str(rollback_error)}")
             logger.error(f"Database cursor error: {str(e)}")
             raise
         finally:
-
+            try:
+                cursor.close()
+            except Exception as close_error:
+                logger.warning(f"Error closing cursor: {str(close_error)}")
 
 
 class DatabaseManager:
     """Database manager for common operations"""
-
+
     @staticmethod
     def execute_query(query: str, params: tuple = None) -> list:
         """Execute a SELECT query and return results"""
         with get_db_cursor() as cursor:
             cursor.execute(query, params)
             return cursor.fetchall()
-
+
     @staticmethod
     def execute_update(query: str, params: tuple = None) -> int:
         """Execute an INSERT/UPDATE/DELETE query and return affected rows"""
         with get_db_cursor() as cursor:
             cursor.execute(query, params)
             return cursor.rowcount
-
+
     @staticmethod
     def execute_scalar(query: str, params: tuple = None):
         """Execute a query and return a single value"""
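Together these changes mean the pool hands out validated connections and the cursor helper only commits or rolls back while the connection is still open. A short usage sketch of the helpers, assuming the trovesuite.configs.database module path from this diff and an initialized pool; the table name is invented:

    from trovesuite.configs.database import (
        initialize_database,
        get_db_cursor,
        DatabaseManager,
    )

    initialize_database()  # tests connectivity, then creates the ThreadedConnectionPool

    # Cursor helper: commit on success, rollback on error, connection returned to the pool.
    with get_db_cursor() as cursor:
        cursor.execute("SELECT 1 AS ok")
        print(cursor.fetchone())  # RealDictCursor -> {'ok': 1}

    # Convenience wrapper built on the same helper (hypothetical table name).
    rows = DatabaseManager.execute_query("SELECT * FROM users WHERE active = %s", (True,))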
@@ -182,7 +230,41 @@ class DatabaseManager:
             # Handle tuple result
             return result[0] if len(result) > 0 else None
         return None
-
+
+    @staticmethod
+    @contextmanager
+    def transaction():
+        """
+        Context manager for database transactions.
+        Wraps multiple operations in a single transaction.
+
+        Usage:
+            with DatabaseManager.transaction() as cursor:
+                cursor.execute("INSERT INTO table1 ...")
+                cursor.execute("INSERT INTO table2 ...")
+                # Auto-commits on success, auto-rollbacks on exception
+        """
+        with get_db_connection() as conn:
+            cursor = conn.cursor(cursor_factory=RealDictCursor)
+            try:
+                yield cursor
+                if not conn.closed:
+                    conn.commit()
+                    logger.debug("Transaction committed successfully")
+            except Exception as e:
+                if not conn.closed:
+                    try:
+                        conn.rollback()
+                        logger.warning(f"Transaction rolled back due to error: {str(e)}")
+                    except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                        logger.error(f"Could not rollback transaction: {str(rollback_error)}")
+                raise
+            finally:
+                try:
+                    cursor.close()
+                except Exception as close_error:
+                    logger.warning(f"Error closing transaction cursor: {str(close_error)}")
+
     @staticmethod
     def health_check() -> dict:
         """Perform database health check"""
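The new transaction() helper yields a single cursor whose statements commit or roll back as a unit, which matters when two writes must not land independently. A hedged sketch with parameterized queries; the table and column names are invented:

    from trovesuite.configs.database import DatabaseManager

    # Both inserts share one transaction: either both rows land or neither does.
    with DatabaseManager.transaction() as cursor:
        cursor.execute(
            "INSERT INTO orders (id, total) VALUES (%s, %s)",
            ("ord-001", 125.50),
        )
        cursor.execute(
            "INSERT INTO order_events (order_id, event) VALUES (%s, %s)",
            ("ord-001", "created"),
        )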
@@ -190,7 +272,7 @@ class DatabaseManager:
         with get_db_cursor() as cursor:
             cursor.execute("SELECT version(), current_database(), current_user")
             result = cursor.fetchone()
-
+
         if result:
             # Handle RealDictRow (dictionary-like) result
             if hasattr(result, 'get'):
trovesuite/entities/health.py
CHANGED
@@ -1,8 +1,8 @@
 from fastapi import APIRouter
-from
-from
-from
-from
+from .sh_response import Respons
+from ..configs.settings import db_settings
+from ..configs.database import DatabaseManager
+from ..configs.logging import get_logger
 
 health_check_router = APIRouter(tags=["Health Path"])
 logger = get_logger("health")
trovesuite/notification/notification_controller.py
CHANGED
@@ -1,16 +1,16 @@
-from
+from .notification_write_dto import (
     NotificationEmailControllerWriteDto,
     NotificationSMSControllerWriteDto
 )
-from
+from .notification_read_dto import (
     NotificationEmailControllerReadDto,
     NotificationSMSControllerReadDto
 )
-from
-from
+from .notification_service import NotificationService
+from ..entities.sh_response import Respons
 from fastapi import APIRouter
 
-notification_router = APIRouter()
+notification_router = APIRouter(tags=["Notification"])
 
 @notification_router.post("/send_email", response_model=Respons[NotificationEmailControllerReadDto])
 async def send_email(data: NotificationEmailControllerWriteDto):
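Each controller now exposes a tagged APIRouter, so an application only has to mount the routers on a FastAPI app. A minimal wiring sketch; the import paths mirror the file paths in this diff, but whether the package also re-exports the routers elsewhere is not shown:

    from fastapi import FastAPI

    # Assumed import paths, following the file layout shown in this diff.
    from trovesuite.auth.auth_controller import auth_router
    from trovesuite.notification.notification_controller import notification_router
    from trovesuite.entities.health import health_check_router

    app = FastAPI(title="TroveSuite-backed API")
    app.include_router(auth_router)           # POST /auth
    app.include_router(notification_router)   # POST /send_email, ...
    app.include_router(health_check_router)   # health endpoints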
trovesuite/notification/notification_service.py
CHANGED
@@ -1,12 +1,12 @@
 import smtplib
 from email.mime.text import MIMEText
 from email.mime.multipart import MIMEMultipart
-from
-from
+from ..entities.sh_response import Respons
+from .notification_read_dto import (
     NotificationEmailServiceReadDto,
     NotificationSMSServiceReadDto
 )
-from
+from .notification_write_dto import (
     NotificationEmailServiceWriteDto,
     NotificationSMSServiceWriteDto
 )
trovesuite/storage/__init__.py
ADDED
@@ -0,0 +1,42 @@
+"""
+TroveSuite Storage Service
+
+Provides Azure Storage blob management capabilities for TroveSuite applications.
+Includes container creation, file upload/download/update/delete, and presigned URL generation.
+"""
+
+from .storage_service import StorageService
+from .storage_write_dto import (
+    StorageContainerCreateServiceWriteDto,
+    StorageFileUploadServiceWriteDto,
+    StorageFileUpdateServiceWriteDto,
+    StorageFileDeleteServiceWriteDto,
+    StorageFileDownloadServiceWriteDto,
+    StorageFileUrlServiceWriteDto
+)
+from .storage_read_dto import (
+    StorageContainerCreateServiceReadDto,
+    StorageFileUploadServiceReadDto,
+    StorageFileUpdateServiceReadDto,
+    StorageFileDeleteServiceReadDto,
+    StorageFileDownloadServiceReadDto,
+    StorageFileUrlServiceReadDto
+)
+
+__all__ = [
+    "StorageService",
+    # Write DTOs
+    "StorageContainerCreateServiceWriteDto",
+    "StorageFileUploadServiceWriteDto",
+    "StorageFileUpdateServiceWriteDto",
+    "StorageFileDeleteServiceWriteDto",
+    "StorageFileDownloadServiceWriteDto",
+    "StorageFileUrlServiceWriteDto",
+    # Read DTOs
+    "StorageContainerCreateServiceReadDto",
+    "StorageFileUploadServiceReadDto",
+    "StorageFileUpdateServiceReadDto",
+    "StorageFileDeleteServiceReadDto",
+    "StorageFileDownloadServiceReadDto",
+    "StorageFileUrlServiceReadDto",
+]
trovesuite/storage/storage_base.py
ADDED
@@ -0,0 +1,63 @@
+from typing import Optional
+from pydantic import BaseModel
+
+
+class StorageConnectionBase(BaseModel):
+    """Base model for Azure Storage connection using Managed Identity"""
+    storage_account_url: str  # e.g., https://<account-name>.blob.core.windows.net
+    container_name: str
+    managed_identity_client_id: Optional[str] = None  # Optional: For user-assigned managed identity
+
+
+class StorageFileUploadBase(BaseModel):
+    """Base model for file upload operations"""
+    storage_account_url: str
+    container_name: str
+    file_content: bytes
+    blob_name: str
+    directory_path: Optional[str] = None
+    content_type: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileUpdateBase(BaseModel):
+    """Base model for file update operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    file_content: bytes
+    content_type: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileDeleteBase(BaseModel):
+    """Base model for file delete operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileDownloadBase(BaseModel):
+    """Base model for file download operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileUrlBase(BaseModel):
+    """Base model for getting presigned URL"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    expiry_hours: Optional[int] = 1
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageContainerCreateBase(BaseModel):
+    """Base model for creating a container"""
+    storage_account_url: str
+    container_name: str
+    public_access: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
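These pydantic base models carry the storage target (account URL and container) alongside the payload, so each operation is self-describing. A brief construction sketch with placeholder values; the concrete Controller/Service DTO subclasses may add fields not visible in this diff:

    from trovesuite.storage.storage_base import StorageFileUploadBase

    # Placeholder values for illustration.
    upload = StorageFileUploadBase(
        storage_account_url="https://myaccount.blob.core.windows.net",
        container_name="my-container",
        file_content=b"%PDF-1.7 ...",       # raw bytes of the file
        blob_name="invoice.pdf",
        directory_path="uploads/2024",       # optional virtual folder
        content_type="application/pdf",
        managed_identity_client_id=None,     # only needed for user-assigned identities
    )
    print(upload.blob_name, len(upload.file_content))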
trovesuite/storage/storage_controller.py
ADDED
@@ -0,0 +1,198 @@
+from typing import List
+from fastapi import APIRouter, File, UploadFile, Form
+from fastapi.responses import StreamingResponse
+from io import BytesIO
+from .storage_write_dto import (
+    StorageContainerCreateControllerWriteDto,
+    StorageFileUploadControllerWriteDto,
+    StorageFileUpdateControllerWriteDto,
+    StorageFileDeleteControllerWriteDto,
+    StorageFileDownloadControllerWriteDto,
+    StorageFileUrlControllerWriteDto
+)
+from .storage_read_dto import (
+    StorageContainerCreateControllerReadDto,
+    StorageFileUploadControllerReadDto,
+    StorageFileUpdateControllerReadDto,
+    StorageFileDeleteControllerReadDto,
+    StorageFileDownloadControllerReadDto,
+    StorageFileUrlControllerReadDto
+)
+from .storage_service import StorageService
+from ..entities.sh_response import Respons
+
+storage_router = APIRouter(tags=["File Storage"])
+
+
+@storage_router.post("/create-container", response_model=Respons[StorageContainerCreateControllerReadDto])
+async def create_container(data: StorageContainerCreateControllerWriteDto):
+    """
+    Create a new Azure Storage container.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "public_access": null,
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.create_container(data=data)
+
+
+@storage_router.post("/upload", response_model=Respons[StorageFileUploadControllerReadDto])
+async def upload_file(
+    storage_account_url: str = Form(...),
+    container_name: str = Form(...),
+    blob_name: str = Form(...),
+    file: UploadFile = File(...),
+    directory_path: str = Form(None),
+    managed_identity_client_id: str = Form(None)
+):
+    """
+    Upload a file to Azure Storage.
+
+    Use form-data with the following fields:
+    - storage_account_url: Your Azure storage URL
+    - container_name: Container name
+    - blob_name: Name for the blob
+    - file: The file to upload
+    - directory_path: Optional directory path (e.g., "uploads/2024")
+    - managed_identity_client_id: Optional client ID for user-assigned managed identity
+    """
+    content = await file.read()
+
+    upload_data = StorageFileUploadControllerWriteDto(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        file_content=content,
+        blob_name=blob_name,
+        directory_path=directory_path,
+        content_type=file.content_type,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+    return StorageService.upload_file(data=upload_data)
+
+
+@storage_router.put("/update", response_model=Respons[StorageFileUpdateControllerReadDto])
+async def update_file(
+    storage_account_url: str = Form(...),
+    container_name: str = Form(...),
+    blob_name: str = Form(...),
+    file: UploadFile = File(...),
+    managed_identity_client_id: str = Form(None)
+):
+    """
+    Update an existing file in Azure Storage.
+
+    Use form-data with the following fields:
+    - storage_account_url: Your Azure storage URL
+    - container_name: Container name
+    - blob_name: Full blob name including path (e.g., "uploads/2024/file.pdf")
+    - file: The new file content
+    - managed_identity_client_id: Optional client ID for user-assigned managed identity
+    """
+    content = await file.read()
+
+    update_data = StorageFileUpdateControllerWriteDto(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        blob_name=blob_name,
+        file_content=content,
+        content_type=file.content_type,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+    return StorageService.update_file(data=update_data)
+
+
+@storage_router.delete("/delete", response_model=Respons[StorageFileDeleteControllerReadDto])
+async def delete_file(data: StorageFileDeleteControllerWriteDto):
+    """
+    Delete a file from Azure Storage.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.delete_file(data=data)
+
+
+@storage_router.delete("/delete-multiple", response_model=Respons[StorageFileDeleteControllerReadDto])
+async def delete_multiple_files(
+    storage_account_url: str,
+    container_name: str,
+    blob_names: List[str],
+    managed_identity_client_id: str = None
+):
+    """
+    Delete multiple files from Azure Storage.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_names": ["file1.pdf", "file2.pdf", "folder/file3.jpg"],
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.delete_multiple_files(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        blob_names=blob_names,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+
+@storage_router.post("/download")
+async def download_file(data: StorageFileDownloadControllerWriteDto):
+    """
+    Download a file from Azure Storage.
+
+    Returns the file as a streaming response.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    result = StorageService.download_file(data=data)
+
+    if not result.success:
+        return result
+
+    file_data = result.data[0]
+
+    # Return as streaming response
+    return StreamingResponse(
+        BytesIO(file_data.content),
+        media_type=file_data.content_type or "application/octet-stream",
+        headers={
+            "Content-Disposition": f"attachment; filename={data.blob_name.split('/')[-1]}"
+        }
+    )
+
+
+@storage_router.post("/get-url", response_model=Respons[StorageFileUrlControllerReadDto])
+async def get_file_url(data: StorageFileUrlControllerWriteDto):
+    """
+    Generate a presigned URL for a file.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "expiry_hours": 2,
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.get_file_url(data=data)