trovesuite-1.0.6-py3-none-any.whl → trovesuite-1.0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trovesuite/__init__.py +7 -4
- trovesuite/auth/auth_controller.py +1 -1
- trovesuite/configs/database.py +104 -22
- trovesuite/notification/notification_controller.py +1 -1
- trovesuite/storage/__init__.py +42 -0
- trovesuite/storage/storage_base.py +63 -0
- trovesuite/storage/storage_controller.py +198 -0
- trovesuite/storage/storage_read_dto.py +74 -0
- trovesuite/storage/storage_service.py +529 -0
- trovesuite/storage/storage_write_dto.py +70 -0
- {trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/METADATA +5 -3
- {trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/RECORD +15 -9
- {trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/WHEEL +0 -0
- {trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/licenses/LICENSE +0 -0
- {trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/top_level.txt +0 -0
trovesuite/__init__.py
CHANGED
@@ -1,18 +1,21 @@
 """
 TroveSuite Package
 
-A comprehensive authentication, authorization, and
-Provides JWT token validation, user authorization, permission checking,
+A comprehensive authentication, authorization, notification, and storage service for ERP systems.
+Provides JWT token validation, user authorization, permission checking, notification capabilities,
+and Azure Storage blob management.
 """
 
 from .auth import AuthService
 from .notification import NotificationService
+from .storage import StorageService
 
-__version__ = "1.0.6"
+__version__ = "1.0.7"
 __author__ = "Bright Debrah Owusu"
 __email__ = "owusu.debrah@deladetech.com"
 
 __all__ = [
     "AuthService",
-    "NotificationService"
+    "NotificationService",
+    "StorageService"
 ]
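The new top-level export means storage is imported the same way as the existing services; a minimal sketch, assuming trovesuite 1.0.7 is installed in the current environment:

    from trovesuite import AuthService, NotificationService, StorageService, __version__
    assert __version__ == "1.0.7"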

trovesuite/auth/auth_controller.py
CHANGED

@@ -4,7 +4,7 @@ from .auth_read_dto import AuthControllerReadDto
 from .auth_service import AuthService
 from ..entities.sh_response import Respons
 
-auth_router = APIRouter()
+auth_router = APIRouter(tags=["Auth"])
 
 @auth_router.post("/auth", response_model=Respons[AuthControllerReadDto])
 async def authorize(data: AuthControllerWriteDto):
trovesuite/configs/database.py
CHANGED

@@ -17,20 +17,25 @@ _connection_pool: Optional[psycopg2.pool.ThreadedConnectionPool] = None
 
 class DatabaseConfig:
     """Database configuration and connection management"""
-
+
     def __init__(self):
         self.settings = db_settings
         self.database_url = self.settings.database_url
         self.pool_size = 5
         self.max_overflow = 10
-
+
     def get_connection_params(self) -> dict:
         """Get database connection parameters"""
         if self.settings.DATABASE_URL:
             # Use full DATABASE_URL if available
             return {
                 "dsn": self.settings.DATABASE_URL,
-                "cursor_factory": RealDictCursor
+                "cursor_factory": RealDictCursor,
+                "keepalives": 1,
+                "keepalives_idle": 30,
+                "keepalives_interval": 10,
+                "keepalives_count": 5,
+                "connect_timeout": 10
             }
 
         # fallback to individual DB_* variables

@@ -41,9 +46,14 @@ class DatabaseConfig:
             "user": self.settings.DB_USER,
             "password": self.settings.DB_PASSWORD,
             "cursor_factory": RealDictCursor,
-            "application_name": f"{self.settings.APP_NAME}_{self.settings.ENVIRONMENT}"
+            "application_name": f"{self.settings.APP_NAME}_{self.settings.ENVIRONMENT}",
+            "keepalives": 1,
+            "keepalives_idle": 30,
+            "keepalives_interval": 10,
+            "keepalives_count": 5,
+            "connect_timeout": 10
         }
-
+
     def create_connection_pool(self) -> psycopg2.pool.ThreadedConnectionPool:
         """Create a connection pool for psycopg2"""
         try:
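The new keepalive and timeout entries are standard libpq/psycopg2 connection keyword arguments, so they apply to any psycopg2.connect call built from get_connection_params(); a minimal sketch with an illustrative DSN (the DSN value is not from this package):

    import psycopg2
    from psycopg2.extras import RealDictCursor

    params = {
        "dsn": "postgresql://user:secret@db.example.com:5432/appdb",  # illustrative
        "cursor_factory": RealDictCursor,
        "keepalives": 1,             # enable TCP keepalives on the connection
        "keepalives_idle": 30,       # seconds of inactivity before the first probe
        "keepalives_interval": 10,   # seconds between probes
        "keepalives_count": 5,       # failed probes tolerated before the connection is dropped
        "connect_timeout": 10,       # seconds allowed for establishing the connection
    }
    conn = psycopg2.connect(**params)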

@@ -57,7 +67,7 @@ class DatabaseConfig:
         except Exception as e:
             logger.error(f"Failed to create database connection pool: {str(e)}")
             raise
-
+
     def test_connection(self) -> bool:
         """Test database connection"""
         try:

@@ -81,17 +91,17 @@ db_config = DatabaseConfig()
 def initialize_database():
     """Initialize database connections and pool"""
     global _connection_pool
-
+
     try:
         # Test connection first
         if not db_config.test_connection():
             raise Exception("Database connection test failed")
-
+
         # Create connection pool
         _connection_pool = db_config.create_connection_pool()
-
+
         logger.info("Database initialization completed successfully")
-
+
     except Exception as e:
         logger.error(f"Database initialization failed: {str(e)}")
         raise

@@ -114,6 +124,17 @@ def get_connection_pool() -> psycopg2.pool.ThreadedConnectionPool:
     return _connection_pool
 
 
+def _validate_connection(conn) -> bool:
+    """Validate if a connection is still alive"""
+    try:
+        # Test if connection is alive with a simple query
+        with conn.cursor() as cursor:
+            cursor.execute("SELECT 1")
+        return True
+    except (psycopg2.OperationalError, psycopg2.InterfaceError):
+        return False
+
+
 @contextmanager
 def get_db_connection():
     """Get a database connection from the pool (context manager)"""
@@ -121,52 +142,79 @@
     conn = None
     try:
         conn = pool.getconn()
+
+        # Validate connection before using it
+        if not _validate_connection(conn):
+            logger.warning("Stale connection detected, getting new connection")
+            pool.putconn(conn, close=True)
+            conn = pool.getconn()
+
         logger.debug("Database connection acquired from pool")
         yield conn
     except Exception as e:
         logger.error(f"Database connection error: {str(e)}")
         if conn:
-
+            try:
+                # Only rollback if connection is still open
+                if not conn.closed:
+                    conn.rollback()
+            except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                logger.warning(f"Could not rollback closed connection: {str(rollback_error)}")
         raise
     finally:
         if conn:
-
-
+            try:
+                # If connection is broken, close it instead of returning to pool
+                if conn.closed:
+                    pool.putconn(conn, close=True)
+                else:
+                    pool.putconn(conn)
+                    logger.debug("Database connection returned to pool")
+            except Exception as put_error:
+                logger.error(f"Error returning connection to pool: {str(put_error)}")
 
 
 @contextmanager
 def get_db_cursor():
     """Get a database cursor (context manager)"""
     with get_db_connection() as conn:
-        cursor = conn.cursor()
+        cursor = conn.cursor(cursor_factory=RealDictCursor)
         try:
             yield cursor
-            conn.commit()
+            if not conn.closed:
+                conn.commit()
         except Exception as e:
-            conn.rollback()
+            if not conn.closed:
+                try:
+                    conn.rollback()
+                except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                    logger.warning(f"Could not rollback transaction on closed connection: {str(rollback_error)}")
             logger.error(f"Database cursor error: {str(e)}")
             raise
         finally:
-
+            try:
+                cursor.close()
+            except Exception as close_error:
+                logger.warning(f"Error closing cursor: {str(close_error)}")
 
 
 class DatabaseManager:
     """Database manager for common operations"""
-
+
     @staticmethod
     def execute_query(query: str, params: tuple = None) -> list:
         """Execute a SELECT query and return results"""
         with get_db_cursor() as cursor:
             cursor.execute(query, params)
             return cursor.fetchall()
-
+
     @staticmethod
     def execute_update(query: str, params: tuple = None) -> int:
         """Execute an INSERT/UPDATE/DELETE query and return affected rows"""
         with get_db_cursor() as cursor:
             cursor.execute(query, params)
             return cursor.rowcount
-
+
     @staticmethod
     def execute_scalar(query: str, params: tuple = None):
         """Execute a query and return a single value"""
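Taken together, the reworked context managers retry stale connections and guard commit, rollback, and close against already-closed connections; a minimal usage sketch, assuming database settings are configured and the pool has been initialized (the table name is illustrative):

    from trovesuite.configs.database import initialize_database, get_db_cursor

    initialize_database()
    with get_db_cursor() as cursor:          # commits on success, rolls back on error
        cursor.execute("SELECT id, name FROM tenants WHERE active = %s", (True,))
        rows = cursor.fetchall()             # RealDictCursor yields dict-like rows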
@@ -182,7 +230,41 @@ class DatabaseManager:
             # Handle tuple result
             return result[0] if len(result) > 0 else None
         return None
-
+
+    @staticmethod
+    @contextmanager
+    def transaction():
+        """
+        Context manager for database transactions.
+        Wraps multiple operations in a single transaction.
+
+        Usage:
+            with DatabaseManager.transaction() as cursor:
+                cursor.execute("INSERT INTO table1 ...")
+                cursor.execute("INSERT INTO table2 ...")
+                # Auto-commits on success, auto-rollbacks on exception
+        """
+        with get_db_connection() as conn:
+            cursor = conn.cursor(cursor_factory=RealDictCursor)
+            try:
+                yield cursor
+                if not conn.closed:
+                    conn.commit()
+                    logger.debug("Transaction committed successfully")
+            except Exception as e:
+                if not conn.closed:
+                    try:
+                        conn.rollback()
+                        logger.warning(f"Transaction rolled back due to error: {str(e)}")
+                    except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
+                        logger.error(f"Could not rollback transaction: {str(rollback_error)}")
+                raise
+            finally:
+                try:
+                    cursor.close()
+                except Exception as close_error:
+                    logger.warning(f"Error closing transaction cursor: {str(close_error)}")
+
     @staticmethod
     def health_check() -> dict:
         """Perform database health check"""
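The new transaction() helper complements the existing one-shot helpers in the same class; a minimal sketch of how the DatabaseManager methods defined in this module might be combined (table and column names are illustrative):

    from trovesuite.configs.database import DatabaseManager

    rows = DatabaseManager.execute_query("SELECT id FROM users WHERE active = %s", (True,))
    count = DatabaseManager.execute_scalar("SELECT COUNT(*) FROM users")

    with DatabaseManager.transaction() as cursor:   # both statements commit or roll back together
        cursor.execute("UPDATE users SET active = %s WHERE id = %s", (False, 42))
        cursor.execute("INSERT INTO audit_log (action) VALUES (%s)", ("deactivate",))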
@@ -190,7 +272,7 @@ class DatabaseManager:
             with get_db_cursor() as cursor:
                 cursor.execute("SELECT version(), current_database(), current_user")
                 result = cursor.fetchone()
-
+
                 if result:
                     # Handle RealDictRow (dictionary-like) result
                     if hasattr(result, 'get'):

trovesuite/notification/notification_controller.py
CHANGED

@@ -10,7 +10,7 @@ from .notification_service import NotificationService
 from ..entities.sh_response import Respons
 from fastapi import APIRouter
 
-notification_router = APIRouter()
+notification_router = APIRouter(tags=["Notification"])
 
 @notification_router.post("/send_email", response_model=Respons[NotificationEmailControllerReadDto])
 async def send_email(data: NotificationEmailControllerWriteDto):

trovesuite/storage/__init__.py
ADDED

@@ -0,0 +1,42 @@
+"""
+TroveSuite Storage Service
+
+Provides Azure Storage blob management capabilities for TroveSuite applications.
+Includes container creation, file upload/download/update/delete, and presigned URL generation.
+"""
+
+from .storage_service import StorageService
+from .storage_write_dto import (
+    StorageContainerCreateServiceWriteDto,
+    StorageFileUploadServiceWriteDto,
+    StorageFileUpdateServiceWriteDto,
+    StorageFileDeleteServiceWriteDto,
+    StorageFileDownloadServiceWriteDto,
+    StorageFileUrlServiceWriteDto
+)
+from .storage_read_dto import (
+    StorageContainerCreateServiceReadDto,
+    StorageFileUploadServiceReadDto,
+    StorageFileUpdateServiceReadDto,
+    StorageFileDeleteServiceReadDto,
+    StorageFileDownloadServiceReadDto,
+    StorageFileUrlServiceReadDto
+)
+
+__all__ = [
+    "StorageService",
+    # Write DTOs
+    "StorageContainerCreateServiceWriteDto",
+    "StorageFileUploadServiceWriteDto",
+    "StorageFileUpdateServiceWriteDto",
+    "StorageFileDeleteServiceWriteDto",
+    "StorageFileDownloadServiceWriteDto",
+    "StorageFileUrlServiceWriteDto",
+    # Read DTOs
+    "StorageContainerCreateServiceReadDto",
+    "StorageFileUploadServiceReadDto",
+    "StorageFileUpdateServiceReadDto",
+    "StorageFileDeleteServiceReadDto",
+    "StorageFileDownloadServiceReadDto",
+    "StorageFileUrlServiceReadDto",
+]

trovesuite/storage/storage_base.py
ADDED

@@ -0,0 +1,63 @@
+from typing import Optional
+from pydantic import BaseModel
+
+
+class StorageConnectionBase(BaseModel):
+    """Base model for Azure Storage connection using Managed Identity"""
+    storage_account_url: str  # e.g., https://<account-name>.blob.core.windows.net
+    container_name: str
+    managed_identity_client_id: Optional[str] = None  # Optional: For user-assigned managed identity
+
+
+class StorageFileUploadBase(BaseModel):
+    """Base model for file upload operations"""
+    storage_account_url: str
+    container_name: str
+    file_content: bytes
+    blob_name: str
+    directory_path: Optional[str] = None
+    content_type: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileUpdateBase(BaseModel):
+    """Base model for file update operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    file_content: bytes
+    content_type: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileDeleteBase(BaseModel):
+    """Base model for file delete operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileDownloadBase(BaseModel):
+    """Base model for file download operations"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageFileUrlBase(BaseModel):
+    """Base model for getting presigned URL"""
+    storage_account_url: str
+    container_name: str
+    blob_name: str
+    expiry_hours: Optional[int] = 1
+    managed_identity_client_id: Optional[str] = None
+
+
+class StorageContainerCreateBase(BaseModel):
+    """Base model for creating a container"""
+    storage_account_url: str
+    container_name: str
+    public_access: Optional[str] = None
+    managed_identity_client_id: Optional[str] = None
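Because these base classes are plain pydantic models, field validation happens at construction time; a minimal sketch instantiating one of them directly (the account URL, container, and blob values are illustrative):

    from trovesuite.storage.storage_base import StorageFileUrlBase

    req = StorageFileUrlBase(
        storage_account_url="https://myaccount.blob.core.windows.net",
        container_name="my-container",
        blob_name="uploads/2024/file.pdf",
        expiry_hours=2,                      # defaults to 1 when omitted
    )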

trovesuite/storage/storage_controller.py
ADDED

@@ -0,0 +1,198 @@
+from typing import List
+from fastapi import APIRouter, File, UploadFile, Form
+from fastapi.responses import StreamingResponse
+from io import BytesIO
+from .storage_write_dto import (
+    StorageContainerCreateControllerWriteDto,
+    StorageFileUploadControllerWriteDto,
+    StorageFileUpdateControllerWriteDto,
+    StorageFileDeleteControllerWriteDto,
+    StorageFileDownloadControllerWriteDto,
+    StorageFileUrlControllerWriteDto
+)
+from .storage_read_dto import (
+    StorageContainerCreateControllerReadDto,
+    StorageFileUploadControllerReadDto,
+    StorageFileUpdateControllerReadDto,
+    StorageFileDeleteControllerReadDto,
+    StorageFileDownloadControllerReadDto,
+    StorageFileUrlControllerReadDto
+)
+from .storage_service import StorageService
+from ..entities.sh_response import Respons
+
+storage_router = APIRouter(tags=["File Storage"])
+
+
+@storage_router.post("/create-container", response_model=Respons[StorageContainerCreateControllerReadDto])
+async def create_container(data: StorageContainerCreateControllerWriteDto):
+    """
+    Create a new Azure Storage container.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "public_access": null,
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.create_container(data=data)
+
+
+@storage_router.post("/upload", response_model=Respons[StorageFileUploadControllerReadDto])
+async def upload_file(
+    storage_account_url: str = Form(...),
+    container_name: str = Form(...),
+    blob_name: str = Form(...),
+    file: UploadFile = File(...),
+    directory_path: str = Form(None),
+    managed_identity_client_id: str = Form(None)
+):
+    """
+    Upload a file to Azure Storage.
+
+    Use form-data with the following fields:
+    - storage_account_url: Your Azure storage URL
+    - container_name: Container name
+    - blob_name: Name for the blob
+    - file: The file to upload
+    - directory_path: Optional directory path (e.g., "uploads/2024")
+    - managed_identity_client_id: Optional client ID for user-assigned managed identity
+    """
+    content = await file.read()
+
+    upload_data = StorageFileUploadControllerWriteDto(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        file_content=content,
+        blob_name=blob_name,
+        directory_path=directory_path,
+        content_type=file.content_type,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+    return StorageService.upload_file(data=upload_data)
+
+
+@storage_router.put("/update", response_model=Respons[StorageFileUpdateControllerReadDto])
+async def update_file(
+    storage_account_url: str = Form(...),
+    container_name: str = Form(...),
+    blob_name: str = Form(...),
+    file: UploadFile = File(...),
+    managed_identity_client_id: str = Form(None)
+):
+    """
+    Update an existing file in Azure Storage.
+
+    Use form-data with the following fields:
+    - storage_account_url: Your Azure storage URL
+    - container_name: Container name
+    - blob_name: Full blob name including path (e.g., "uploads/2024/file.pdf")
+    - file: The new file content
+    - managed_identity_client_id: Optional client ID for user-assigned managed identity
+    """
+    content = await file.read()
+
+    update_data = StorageFileUpdateControllerWriteDto(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        blob_name=blob_name,
+        file_content=content,
+        content_type=file.content_type,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+    return StorageService.update_file(data=update_data)
+
+
+@storage_router.delete("/delete", response_model=Respons[StorageFileDeleteControllerReadDto])
+async def delete_file(data: StorageFileDeleteControllerWriteDto):
+    """
+    Delete a file from Azure Storage.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.delete_file(data=data)
+
+
+@storage_router.delete("/delete-multiple", response_model=Respons[StorageFileDeleteControllerReadDto])
+async def delete_multiple_files(
+    storage_account_url: str,
+    container_name: str,
+    blob_names: List[str],
+    managed_identity_client_id: str = None
+):
+    """
+    Delete multiple files from Azure Storage.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_names": ["file1.pdf", "file2.pdf", "folder/file3.jpg"],
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.delete_multiple_files(
+        storage_account_url=storage_account_url,
+        container_name=container_name,
+        blob_names=blob_names,
+        managed_identity_client_id=managed_identity_client_id
+    )
+
+
+@storage_router.post("/download")
+async def download_file(data: StorageFileDownloadControllerWriteDto):
+    """
+    Download a file from Azure Storage.
+
+    Returns the file as a streaming response.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    result = StorageService.download_file(data=data)
+
+    if not result.success:
+        return result
+
+    file_data = result.data[0]
+
+    # Return as streaming response
+    return StreamingResponse(
+        BytesIO(file_data.content),
+        media_type=file_data.content_type or "application/octet-stream",
+        headers={
+            "Content-Disposition": f"attachment; filename={data.blob_name.split('/')[-1]}"
+        }
+    )
+
+
+@storage_router.post("/get-url", response_model=Respons[StorageFileUrlControllerReadDto])
+async def get_file_url(data: StorageFileUrlControllerWriteDto):
+    """
+    Generate a presigned URL for a file.
+
+    Example request body:
+    {
+        "storage_account_url": "https://myaccount.blob.core.windows.net",
+        "container_name": "my-container",
+        "blob_name": "uploads/2024/file.pdf",
+        "expiry_hours": 2,
+        "managed_identity_client_id": "your-client-id" // optional
+    }
+    """
+    return StorageService.get_file_url(data=data)
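The router above only defines the endpoints; a host application still has to mount it. A minimal sketch, assuming a FastAPI app and an illustrative "/storage" prefix (the prefix is not part of this package):

    from fastapi import FastAPI
    from trovesuite.storage.storage_controller import storage_router

    app = FastAPI()
    app.include_router(storage_router, prefix="/storage")   # e.g. POST /storage/upload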

trovesuite/storage/storage_read_dto.py
ADDED

@@ -0,0 +1,74 @@
+from pydantic import BaseModel
+from typing import Optional
+
+
+# Container Creation
+
+class StorageContainerCreateControllerReadDto(BaseModel):
+    container_name: str
+    container_url: Optional[str] = None
+
+
+class StorageContainerCreateServiceReadDto(StorageContainerCreateControllerReadDto):
+    pass
+
+
+# File Upload
+
+class StorageFileUploadControllerReadDto(BaseModel):
+    blob_name: str
+    blob_url: str
+    content_type: Optional[str] = None
+    size: Optional[int] = None
+
+
+class StorageFileUploadServiceReadDto(StorageFileUploadControllerReadDto):
+    pass
+
+
+# File Update
+
+class StorageFileUpdateControllerReadDto(BaseModel):
+    blob_name: str
+    blob_url: str
+    updated_at: Optional[str] = None
+
+
+class StorageFileUpdateServiceReadDto(StorageFileUpdateControllerReadDto):
+    pass
+
+
+# File Delete
+
+class StorageFileDeleteControllerReadDto(BaseModel):
+    blob_name: str
+    deleted: bool
+
+
+class StorageFileDeleteServiceReadDto(StorageFileDeleteControllerReadDto):
+    pass
+
+
+# File Download
+
+class StorageFileDownloadControllerReadDto(BaseModel):
+    blob_name: str
+    content: bytes
+    content_type: Optional[str] = None
+    size: Optional[int] = None
+
+
+class StorageFileDownloadServiceReadDto(StorageFileDownloadControllerReadDto):
+    pass
+
+
+# File URL
+
+class StorageFileUrlControllerReadDto(BaseModel):
+    blob_name: str
+    presigned_url: str
+    expires_in_hours: int
+
+
+class StorageFileUrlServiceReadDto(StorageFileUrlControllerReadDto):
+    pass

trovesuite/storage/storage_service.py
ADDED

@@ -0,0 +1,529 @@
+from datetime import datetime, timedelta
+from typing import List
+from azure.storage.blob import BlobServiceClient, BlobSasPermissions, generate_blob_sas
+from azure.identity import DefaultAzureCredential, ManagedIdentityCredential
+from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError
+from ..entities.sh_response import Respons
+from .storage_read_dto import (
+    StorageContainerCreateServiceReadDto,
+    StorageFileUploadServiceReadDto,
+    StorageFileUpdateServiceReadDto,
+    StorageFileDeleteServiceReadDto,
+    StorageFileDownloadServiceReadDto,
+    StorageFileUrlServiceReadDto
+)
+from .storage_write_dto import (
+    StorageContainerCreateServiceWriteDto,
+    StorageFileUploadServiceWriteDto,
+    StorageFileUpdateServiceWriteDto,
+    StorageFileDeleteServiceWriteDto,
+    StorageFileDownloadServiceWriteDto,
+    StorageFileUrlServiceWriteDto
+)
+
+
+class StorageService:
+    """
+    Azure Storage service for managing blob storage operations using Managed Identity authentication.
+    Supports both system-assigned and user-assigned managed identities.
+    """
+
+    @staticmethod
+    def _get_credential(managed_identity_client_id: str = None):
+        """
+        Get Azure credential for authentication.
+        - Tries Managed Identity (for Azure environments)
+        - Falls back to DefaultAzureCredential (for local dev)
+        """
+        try:
+            if managed_identity_client_id:
+                return ManagedIdentityCredential(client_id=managed_identity_client_id)
+            else:
+                return DefaultAzureCredential(exclude_managed_identity_credential=True)
+
+        except Exception:
+
+            # Always fallback to DefaultAzureCredential
+            return DefaultAzureCredential(exclude_managed_identity_credential=True)
+
+
+    @staticmethod
+    def _get_blob_service_client(storage_account_url: str, managed_identity_client_id: str = None) -> BlobServiceClient:
+        """
+        Create a BlobServiceClient using Managed Identity.
+
+        Args:
+            storage_account_url: Azure Storage account URL (e.g., https://myaccount.blob.core.windows.net)
+            managed_identity_client_id: Optional client ID for user-assigned managed identity
+
+        Returns:
+            BlobServiceClient instance
+        """
+        credential = StorageService._get_credential(managed_identity_client_id)
+        return BlobServiceClient(account_url=storage_account_url, credential=credential)
+
+    @staticmethod
+    def create_container(data: StorageContainerCreateServiceWriteDto) -> Respons[StorageContainerCreateServiceReadDto]:
+        """
+        Create a new Azure Storage container.
+
+        Args:
+            data: Container creation parameters including storage account URL and container name
+
+        Returns:
+            Response object containing container details or error information
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            container_client = blob_service_client.create_container(
+                name=data.container_name,
+                public_access=data.public_access
+            )
+
+            container_url = container_client.url
+
+            result = StorageContainerCreateServiceReadDto(
+                container_name=data.container_name,
+                container_url=container_url
+            )
+
+            return Respons[StorageContainerCreateServiceReadDto](
+                detail=f"Container '{data.container_name}' created successfully",
+                error=None,
+                data=[result],
+                status_code=201,
+                success=True,
+            )
+
+        except ResourceExistsError:
+            return Respons[StorageContainerCreateServiceReadDto](
+                detail=f"Container '{data.container_name}' already exists",
+                error="Resource already exists",
+                data=[],
+                status_code=409,
+                success=False,
+            )
+        except Exception as e:
+            return Respons[StorageContainerCreateServiceReadDto](
+                detail="Failed to create container",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def upload_file(data: StorageFileUploadServiceWriteDto) -> Respons[StorageFileUploadServiceReadDto]:
+        """
+        Upload a file to Azure Storage blob container.
+
+        Args:
+            data: File upload parameters including storage account URL, container name,
+                  file content, blob name, and optional directory path
+
+        Returns:
+            Response object containing uploaded file details or error information
+        """
+        try:
+            # Construct the full blob name with directory path if provided
+            blob_name = data.blob_name
+            if data.directory_path:
+                # Ensure directory path ends with / and doesn't start with /
+                dir_path = data.directory_path.strip('/')
+                blob_name = f"{dir_path}/{blob_name}"
+
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            blob_client = blob_service_client.get_blob_client(
+                container=data.container_name,
+                blob=blob_name
+            )
+
+            # Upload the file
+            content_settings = None
+            if data.content_type:
+                from azure.storage.blob import ContentSettings
+                content_settings = ContentSettings(content_type=data.content_type)
+
+            blob_client.upload_blob(
+                data.file_content,
+                overwrite=False,
+                content_settings=content_settings
+            )
+
+            # Get blob properties
+            properties = blob_client.get_blob_properties()
+
+            result = StorageFileUploadServiceReadDto(
+                blob_name=blob_name,
+                blob_url=blob_client.url,
+                content_type=properties.content_settings.content_type if properties.content_settings else None,
+                size=properties.size
+            )
+
+            return Respons[StorageFileUploadServiceReadDto](
+                detail=f"File '{blob_name}' uploaded successfully",
+                error=None,
+                data=[result],
+                status_code=201,
+                success=True,
+            )
+
+        except ResourceExistsError:
+            return Respons[StorageFileUploadServiceReadDto](
+                detail=f"File '{blob_name}' already exists",
+                error="Resource already exists. Use update_file to modify existing files.",
+                data=[],
+                status_code=409,
+                success=False,
+            )
+        except Exception as e:
+            return Respons[StorageFileUploadServiceReadDto](
+                detail="Failed to upload file",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def update_file(data: StorageFileUpdateServiceWriteDto) -> Respons[StorageFileUpdateServiceReadDto]:
+        """
+        Update an existing file in Azure Storage blob container.
+
+        Args:
+            data: File update parameters including storage account URL, container name,
+                  blob name, and new file content
+
+        Returns:
+            Response object containing updated file details or error information
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            blob_client = blob_service_client.get_blob_client(
+                container=data.container_name,
+                blob=data.blob_name
+            )
+
+            # Upload with overwrite=True to update
+            content_settings = None
+            if data.content_type:
+                from azure.storage.blob import ContentSettings
+                content_settings = ContentSettings(content_type=data.content_type)
+
+            blob_client.upload_blob(
+                data.file_content,
+                overwrite=True,
+                content_settings=content_settings
+            )
+
+            # Get updated properties
+            properties = blob_client.get_blob_properties()
+
+            result = StorageFileUpdateServiceReadDto(
+                blob_name=data.blob_name,
+                blob_url=blob_client.url,
+                updated_at=properties.last_modified.isoformat() if properties.last_modified else None
+            )
+
+            return Respons[StorageFileUpdateServiceReadDto](
+                detail=f"File '{data.blob_name}' updated successfully",
+                error=None,
+                data=[result],
+                status_code=200,
+                success=True,
+            )
+
+        except ResourceNotFoundError:
+            return Respons[StorageFileUpdateServiceReadDto](
+                detail=f"File '{data.blob_name}' not found",
+                error="Resource not found. Use upload_file to create new files.",
+                data=[],
+                status_code=404,
+                success=False,
+            )
+        except Exception as e:
+            return Respons[StorageFileUpdateServiceReadDto](
+                detail="Failed to update file",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def delete_file(data: StorageFileDeleteServiceWriteDto) -> Respons[StorageFileDeleteServiceReadDto]:
+        """
+        Delete a file from Azure Storage blob container.
+
+        Args:
+            data: File delete parameters including storage account URL, container name,
+                  and blob name
+
+        Returns:
+            Response object containing deletion status or error information
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            blob_client = blob_service_client.get_blob_client(
+                container=data.container_name,
+                blob=data.blob_name
+            )
+
+            # Delete the blob
+            blob_client.delete_blob()
+
+            result = StorageFileDeleteServiceReadDto(
+                blob_name=data.blob_name,
+                deleted=True
+            )
+
+            return Respons[StorageFileDeleteServiceReadDto](
+                detail=f"File '{data.blob_name}' deleted successfully",
+                error=None,
+                data=[result],
+                status_code=200,
+                success=True,
+            )
+
+        except ResourceNotFoundError:
+            return Respons[StorageFileDeleteServiceReadDto](
+                detail=f"File '{data.blob_name}' not found",
+                error="Resource not found",
+                data=[],
+                status_code=404,
+                success=False,
+            )
+        except Exception as e:
+            return Respons[StorageFileDeleteServiceReadDto](
+                detail="Failed to delete file",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def delete_multiple_files(
+        storage_account_url: str,
+        container_name: str,
+        blob_names: List[str],
+        managed_identity_client_id: str = None
+    ) -> Respons[StorageFileDeleteServiceReadDto]:
+        """
+        Delete multiple files from Azure Storage blob container.
+
+        Args:
+            storage_account_url: Azure Storage account URL
+            container_name: Name of the container
+            blob_names: List of blob names to delete
+            managed_identity_client_id: Optional client ID for user-assigned managed identity
+
+        Returns:
+            Response object containing deletion status for all files
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                storage_account_url,
+                managed_identity_client_id
+            )
+            container_client = blob_service_client.get_container_client(container_name)
+
+            results = []
+            errors = []
+
+            for blob_name in blob_names:
+                try:
+                    blob_client = container_client.get_blob_client(blob_name)
+                    blob_client.delete_blob()
+                    results.append(StorageFileDeleteServiceReadDto(
+                        blob_name=blob_name,
+                        deleted=True
+                    ))
+                except ResourceNotFoundError:
+                    errors.append(f"File '{blob_name}' not found")
+                except Exception as e:
+                    errors.append(f"Failed to delete '{blob_name}': {str(e)}")
+
+            if errors and not results:
+                return Respons[StorageFileDeleteServiceReadDto](
+                    detail="Failed to delete any files",
+                    error="; ".join(errors),
+                    data=[],
+                    status_code=500,
+                    success=False,
+                )
+            elif errors:
+                return Respons[StorageFileDeleteServiceReadDto](
+                    detail=f"Deleted {len(results)} file(s) with {len(errors)} error(s)",
+                    error="; ".join(errors),
+                    data=results,
+                    status_code=207,  # Multi-Status
+                    success=True,
+                )
+            else:
+                return Respons[StorageFileDeleteServiceReadDto](
+                    detail=f"Successfully deleted {len(results)} file(s)",
+                    error=None,
+                    data=results,
+                    status_code=200,
+                    success=True,
+                )
+
+        except Exception as e:
+            return Respons[StorageFileDeleteServiceReadDto](
+                detail="Failed to delete files",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def download_file(data: StorageFileDownloadServiceWriteDto) -> Respons[StorageFileDownloadServiceReadDto]:
+        """
+        Download a file from Azure Storage blob container.
+
+        Args:
+            data: File download parameters including storage account URL, container name,
+                  and blob name
+
+        Returns:
+            Response object containing file content and metadata or error information
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            blob_client = blob_service_client.get_blob_client(
+                container=data.container_name,
+                blob=data.blob_name
+            )
+
+            # Download the blob
+            download_stream = blob_client.download_blob()
+            file_content = download_stream.readall()
+
+            # Get blob properties
+            properties = blob_client.get_blob_properties()
+
+            result = StorageFileDownloadServiceReadDto(
+                blob_name=data.blob_name,
+                content=file_content,
+                content_type=properties.content_settings.content_type if properties.content_settings else None,
+                size=properties.size
+            )
+
+            return Respons[StorageFileDownloadServiceReadDto](
+                detail=f"File '{data.blob_name}' downloaded successfully",
+                error=None,
+                data=[result],
+                status_code=200,
+                success=True,
+            )
+
+        except ResourceNotFoundError:
+            return Respons[StorageFileDownloadServiceReadDto](
+                detail=f"File '{data.blob_name}' not found",
+                error="Resource not found",
+                data=[],
+                status_code=404,
+                success=False,
+            )
+        except Exception as e:
+            return Respons[StorageFileDownloadServiceReadDto](
+                detail="Failed to download file",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
+
+    @staticmethod
+    def get_file_url(data: StorageFileUrlServiceWriteDto) -> Respons[StorageFileUrlServiceReadDto]:
+        """
+        Generate a presigned URL (SAS token) for a file in Azure Storage blob container.
+        Note: This requires the storage account to have a shared key available.
+
+        Args:
+            data: URL generation parameters including storage account URL, container name,
+                  blob name, and expiry time in hours
+
+        Returns:
+            Response object containing presigned URL or error information
+        """
+        try:
+            blob_service_client = StorageService._get_blob_service_client(
+                data.storage_account_url,
+                data.managed_identity_client_id
+            )
+            blob_client = blob_service_client.get_blob_client(
+                container=data.container_name,
+                blob=data.blob_name
+            )
+
+            # Check if blob exists
+            if not blob_client.exists():
+                return Respons[StorageFileUrlServiceReadDto](
+                    detail=f"File '{data.blob_name}' not found",
+                    error="Resource not found",
+                    data=[],
+                    status_code=404,
+                    success=False,
+                )
+
+            # Generate user delegation key for SAS token (works with Managed Identity)
+            # This requires the managed identity to have "Storage Blob Delegator" role
+            delegation_key = blob_service_client.get_user_delegation_key(
+                key_start_time=datetime.utcnow(),
+                key_expiry_time=datetime.utcnow() + timedelta(hours=data.expiry_hours or 1)
+            )
+
+            # Generate SAS token using user delegation key
+            from azure.storage.blob import generate_blob_sas, BlobSasPermissions
+            sas_token = generate_blob_sas(
+                account_name=blob_service_client.account_name,
+                container_name=data.container_name,
+                blob_name=data.blob_name,
+                user_delegation_key=delegation_key,
+                permission=BlobSasPermissions(read=True),
+                expiry=datetime.utcnow() + timedelta(hours=data.expiry_hours or 1)
+            )
+
+            # Construct the full URL with SAS token
+            presigned_url = f"{blob_client.url}?{sas_token}"
+
+            result = StorageFileUrlServiceReadDto(
+                blob_name=data.blob_name,
+                presigned_url=presigned_url,
+                expires_in_hours=data.expiry_hours or 1
+            )
+
+            return Respons[StorageFileUrlServiceReadDto](
+                detail=f"Presigned URL generated for '{data.blob_name}'",
+                error=None,
+                data=[result],
+                status_code=200,
+                success=True,
+            )
+
+        except Exception as e:
+            return Respons[StorageFileUrlServiceReadDto](
+                detail="Failed to generate presigned URL",
+                error=str(e),
+                data=[],
+                status_code=500,
+                success=False,
+            )
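Because every method is a static method returning a Respons envelope, the service can also be called without the FastAPI layer; a minimal sketch, assuming the caller runs with Azure credentials that can reach the illustrative account and container below:

    from trovesuite.storage import StorageService, StorageFileUploadServiceWriteDto

    dto = StorageFileUploadServiceWriteDto(
        storage_account_url="https://myaccount.blob.core.windows.net",
        container_name="my-container",
        blob_name="report.pdf",
        directory_path="uploads/2024",
        file_content=b"%PDF-1.7 ...",        # illustrative bytes
        content_type="application/pdf",
    )
    resp = StorageService.upload_file(data=dto)
    if resp.success:
        print(resp.data[0].blob_url)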

trovesuite/storage/storage_write_dto.py
ADDED

@@ -0,0 +1,70 @@
+from pydantic import BaseModel
+from .storage_base import (
+    StorageConnectionBase,
+    StorageFileUploadBase,
+    StorageFileUpdateBase,
+    StorageFileDeleteBase,
+    StorageFileDownloadBase,
+    StorageFileUrlBase,
+    StorageContainerCreateBase
+)
+
+
+# Container Creation
+
+class StorageContainerCreateControllerWriteDto(StorageContainerCreateBase):
+    pass
+
+
+class StorageContainerCreateServiceWriteDto(StorageContainerCreateControllerWriteDto):
+    pass
+
+
+# File Upload
+
+class StorageFileUploadControllerWriteDto(StorageFileUploadBase):
+    pass
+
+
+class StorageFileUploadServiceWriteDto(StorageFileUploadControllerWriteDto):
+    pass
+
+
+# File Update
+
+class StorageFileUpdateControllerWriteDto(StorageFileUpdateBase):
+    pass
+
+
+class StorageFileUpdateServiceWriteDto(StorageFileUpdateControllerWriteDto):
+    pass
+
+
+# File Delete
+
+class StorageFileDeleteControllerWriteDto(StorageFileDeleteBase):
+    pass
+
+
+class StorageFileDeleteServiceWriteDto(StorageFileDeleteControllerWriteDto):
+    pass
+
+
+# File Download
+
+class StorageFileDownloadControllerWriteDto(StorageFileDownloadBase):
+    pass
+
+
+class StorageFileDownloadServiceWriteDto(StorageFileDownloadControllerWriteDto):
+    pass
+
+
+# File URL
+
+class StorageFileUrlControllerWriteDto(StorageFileUrlBase):
+    pass
+
+
+class StorageFileUrlServiceWriteDto(StorageFileUrlControllerWriteDto):
+    pass

{trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/METADATA
CHANGED

@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: trovesuite
-Version: 1.0.6
-Summary: TroveSuite services package providing authentication, authorization, notifications, and other enterprise services for TroveSuite applications
+Version: 1.0.7
+Summary: TroveSuite services package providing authentication, authorization, notifications, Azure Storage, and other enterprise services for TroveSuite applications
 Home-page: https://dev.azure.com/brightgclt/trovesuite/_git/packages
 Author: Bright Debrah Owusu
 Author-email: Bright Debrah Owusu <owusu.debrah@deladetech.com>

@@ -11,7 +11,7 @@ Project-URL: Homepage, https://dev.azure.com/brightgclt/trovesuite/_git/packages
 Project-URL: Repository, https://dev.azure.com/brightgclt/trovesuite/_git/packages
 Project-URL: Documentation, https://dev.azure.com/brightgclt/trovesuite/_git/packages
 Project-URL: Bug Tracker, https://dev.azure.com/brightgclt/trovesuite/_workitems/create
-Keywords: authentication,authorization,notifications,jwt,trovesuite,fastapi,security,tenant,permissions,enterprise,services
+Keywords: authentication,authorization,notifications,jwt,trovesuite,fastapi,security,tenant,permissions,enterprise,services,azure,storage,blob,cloud-storage
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License

@@ -36,6 +36,8 @@ Requires-Dist: passlib[bcrypt]>=1.7.4
 Requires-Dist: passlib[argon2]<2.0.0,>=1.7.4
 Requires-Dist: uvicorn<0.39.0,>=0.38.0
 Requires-Dist: pyjwt<3.0.0,>=2.10.1
+Requires-Dist: azure-storage-blob>=12.19.0
+Requires-Dist: azure-identity>=1.15.0
 Provides-Extra: dev
 Requires-Dist: pytest>=8.4.2; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21.1; extra == "dev"
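A quick way to confirm that the new dependencies landed alongside the release is to query installed distribution metadata; a minimal sketch, assuming the 1.0.7 wheel has been installed into the current environment:

    from importlib.metadata import version

    print(version("trovesuite"))           # expected: 1.0.7
    print(version("azure-storage-blob"))   # >= 12.19.0 per the new requirement
    print(version("azure-identity"))       # >= 1.15.0 per the new requirement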

{trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/RECORD
CHANGED

@@ -1,12 +1,12 @@
-trovesuite/__init__.py,sha256=
+trovesuite/__init__.py,sha256=QIY7iN7TVyJKjCoB6CBA4pSqm_mL41frcFgFS4maO8k,555
 trovesuite/auth/__init__.py,sha256=OjZllVvjul1glDazJ-d5TrNjgHFigFlQQi1G99DYshk,239
 trovesuite/auth/auth_base.py,sha256=rZHQVLeJRBQ8GClgF5UwG-er4_HXVX5-nt8o6_Z29uY,75
-trovesuite/auth/auth_controller.py,sha256=
+trovesuite/auth/auth_controller.py,sha256=PAgaVlf5TYEfkSfK4vGGsvO84i8zEmeVVXyUF2YBppI,420
 trovesuite/auth/auth_read_dto.py,sha256=pQT1ouRVZMAiJn4wAG7NQOKQKTquTMWUe-dYcpLTmEo,533
 trovesuite/auth/auth_service.py,sha256=if2RFI6F1DpbNEuCTSpPbhHVBdYQEg4hVkoxTCnvC4k,14298
 trovesuite/auth/auth_write_dto.py,sha256=rdwI7w6-9QZGv1H0PAGrjkLBCzaMHjgPIXeLb9RmNec,234
 trovesuite/configs/__init__.py,sha256=h1mSZOaZ3kUy1ZMO_m9O9KklsxywM0RfMVZLh9h9WvQ,328
-trovesuite/configs/database.py,sha256=
+trovesuite/configs/database.py,sha256=gyQwn5phToZ95fvEvuGJ6F0IT-1C-TQYJfEw26XanXc,11732
 trovesuite/configs/logging.py,sha256=mGjR2d4urVNry9l5_aXycMMtcY2RAFIpEL35hw33KZg,9308
 trovesuite/configs/settings.py,sha256=yUbkiFi4QdO9JZG1RRFbP4tYurT47HmN-ohgYcs2SHM,2561
 trovesuite/entities/__init__.py,sha256=Dbl_03Bueyh2vOP2hykd40MmNMrl5nNHSRGP-kqwwNo,160

@@ -14,14 +14,20 @@ trovesuite/entities/health.py,sha256=KaW7yxTQdymIPlnkJJkDqEebBXkD0a7A66i5GgNZLoE
 trovesuite/entities/sh_response.py,sha256=1_sw3PpVaDxWsNiBU0W9YLHZgTFxEj4JJBLBfSY63Ho,1579
 trovesuite/notification/__init__.py,sha256=mjglzmlk29SREP6LfvBYGmCSc-K1SKKAEx_OJdJ2Vrs,394
 trovesuite/notification/notification_base.py,sha256=6Xo0Gnnpg3RgN1_SRkAcH-K4l7DwNDZvn1gRm3oMWyk,320
-trovesuite/notification/notification_controller.py,sha256=
+trovesuite/notification/notification_controller.py,sha256=VVy1xj4GL8_Wj7wMMTwozXXX8i_2WSJqC_NyNse35TI,847
 trovesuite/notification/notification_read_dto.py,sha256=K0DFLyAArvtqOGS7q1VzmqO28StTMbcNCgpmhzhad8E,423
 trovesuite/notification/notification_service.py,sha256=sf_lJWcrmRIUeEabdEYLaSdRz2H1D3RBEwKMOA1h2pY,2433
 trovesuite/notification/notification_write_dto.py,sha256=PGpww3PomrmJgXTgpwGhXasv_fC8mgUQvTloaTqaSTA,452
+trovesuite/storage/__init__.py,sha256=lO9E2QvzNFOfUocfITnIBuSR6-eg1zVYLrEVIEq3SXc,1334
+trovesuite/storage/storage_base.py,sha256=nOdnjlwP4xiGdbpdeh5rLDAzkWHZd9M_7lznvNlqsV4,1899
+trovesuite/storage/storage_controller.py,sha256=Yzki0L-jmSPbiw8spFm6Z84Bq-WZfih_0GsfbNRHssM,6569
+trovesuite/storage/storage_read_dto.py,sha256=o7EVJdwrwVZAaeyGU9O01WMECGVaytkvLRwruA256hQ,1471
+trovesuite/storage/storage_service.py,sha256=V7LIePIV6b_iuhm-9x8r4zwpZHgeRPL1YIe5IBnxhco,19768
+trovesuite/storage/storage_write_dto.py,sha256=vl1iCZ93bpFmpvkCrn587QtMtOA_TPDseXSoTuj9RTQ,1355
 trovesuite/utils/__init__.py,sha256=3UPKTz9cluTgAM-ldNsJxsnoPTZiqacXlAmzUEHy6q8,143
 trovesuite/utils/helper.py,sha256=lvZ1mvaqY84dkIPB5Ov0uwYDOWBziAS8twobEJZh2Ik,1002
-trovesuite-1.0.
-trovesuite-1.0.
-trovesuite-1.0.
-trovesuite-1.0.
-trovesuite-1.0.
+trovesuite-1.0.7.dist-info/licenses/LICENSE,sha256=EJT35ct-Q794JYPdAQy3XNczQGKkU1HzToLeK1YVw2s,1070
+trovesuite-1.0.7.dist-info/METADATA,sha256=z8DYMEuYE9WxlXpfvCp2BG5gRGtMOpS9bXGOsDjadEY,21736
+trovesuite-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+trovesuite-1.0.7.dist-info/top_level.txt,sha256=GzKhG_-MTaxeHrIgkGkBH_nof2vroGFBrjeHKWUIwNc,11
+trovesuite-1.0.7.dist-info/RECORD,,

{trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/WHEEL
File without changes

{trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/licenses/LICENSE
File without changes

{trovesuite-1.0.6.dist-info → trovesuite-1.0.7.dist-info}/top_level.txt
File without changes