svc-infra 0.1.640__py3-none-any.whl → 0.1.664__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of svc-infra might be problematic. Click here for more details.
- svc_infra/api/fastapi/apf_payments/setup.py +0 -2
- svc_infra/api/fastapi/auth/add.py +0 -4
- svc_infra/api/fastapi/auth/routers/oauth_router.py +19 -4
- svc_infra/api/fastapi/cache/add.py +9 -5
- svc_infra/api/fastapi/db/nosql/mongo/add.py +33 -27
- svc_infra/api/fastapi/db/sql/add.py +8 -5
- svc_infra/api/fastapi/db/sql/crud_router.py +4 -4
- svc_infra/api/fastapi/docs/scoped.py +41 -6
- svc_infra/api/fastapi/setup.py +10 -12
- svc_infra/api/fastapi/versioned.py +101 -0
- svc_infra/db/sql/templates/models_schemas/auth/models.py.tmpl +7 -56
- svc_infra/db/sql/templates/setup/env_async.py.tmpl +25 -11
- svc_infra/db/sql/templates/setup/env_sync.py.tmpl +20 -5
- svc_infra/docs/acceptance-matrix.md +17 -0
- svc_infra/docs/adr/0012-generic-file-storage.md +498 -0
- svc_infra/docs/api.md +127 -0
- svc_infra/docs/storage.md +982 -0
- svc_infra/docs/versioned-integrations.md +146 -0
- svc_infra/security/models.py +27 -7
- svc_infra/security/oauth_models.py +59 -0
- svc_infra/storage/__init__.py +93 -0
- svc_infra/storage/add.py +250 -0
- svc_infra/storage/backends/__init__.py +11 -0
- svc_infra/storage/backends/local.py +331 -0
- svc_infra/storage/backends/memory.py +214 -0
- svc_infra/storage/backends/s3.py +329 -0
- svc_infra/storage/base.py +239 -0
- svc_infra/storage/easy.py +182 -0
- svc_infra/storage/settings.py +192 -0
- {svc_infra-0.1.640.dist-info → svc_infra-0.1.664.dist-info}/METADATA +8 -3
- {svc_infra-0.1.640.dist-info → svc_infra-0.1.664.dist-info}/RECORD +33 -19
- {svc_infra-0.1.640.dist-info → svc_infra-0.1.664.dist-info}/WHEEL +0 -0
- {svc_infra-0.1.640.dist-info → svc_infra-0.1.664.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,329 @@
|
|
|
1
|
+
"""
|
|
2
|
+
S3-compatible storage backend.
|
|
3
|
+
|
|
4
|
+
Works with AWS S3, DigitalOcean Spaces, Wasabi, Backblaze B2, Minio, and
|
|
5
|
+
any S3-compatible object storage service.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Optional
|
|
9
|
+
|
|
10
|
+
try:
|
|
11
|
+
import aioboto3
|
|
12
|
+
from botocore.exceptions import ClientError, NoCredentialsError
|
|
13
|
+
except ImportError:
|
|
14
|
+
aioboto3 = None # type: ignore
|
|
15
|
+
ClientError = Exception # type: ignore
|
|
16
|
+
NoCredentialsError = Exception # type: ignore
|
|
17
|
+
|
|
18
|
+
from ..base import FileNotFoundError, InvalidKeyError, PermissionDeniedError, StorageError
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class S3Backend:
    """
    S3-compatible storage backend.

    Supports AWS S3, DigitalOcean Spaces, Wasabi, Backblaze B2, Minio,
    and any S3-compatible object storage.

    Args:
        bucket: S3 bucket name
        region: AWS region (default: "us-east-1")
        endpoint: Custom endpoint URL for S3-compatible services
        access_key: AWS access key (uses AWS_ACCESS_KEY_ID env var if not provided)
        secret_key: AWS secret key (uses AWS_SECRET_ACCESS_KEY env var if not provided)

    Example:
        >>> # AWS S3
        >>> backend = S3Backend(bucket="my-uploads", region="us-west-2")
        >>>
        >>> # DigitalOcean Spaces
        >>> backend = S3Backend(
        ...     bucket="my-uploads",
        ...     region="nyc3",
        ...     endpoint="https://nyc3.digitaloceanspaces.com",
        ...     access_key="...",
        ...     secret_key="...",
        ... )
        >>>
        >>> # Wasabi
        >>> backend = S3Backend(
        ...     bucket="my-uploads",
        ...     region="us-east-1",
        ...     endpoint="https://s3.wasabisys.com",
        ... )

    Raises:
        ImportError: If aioboto3 is not installed
    """

    def __init__(
        self,
        bucket: str,
        region: str = "us-east-1",
        endpoint: Optional[str] = None,
        access_key: Optional[str] = None,
        secret_key: Optional[str] = None,
    ):
        if aioboto3 is None:
            raise ImportError(
                "aioboto3 is required for S3Backend. " "Install it with: pip install aioboto3"
            )

        self.bucket = bucket
        self.region = region
        self.endpoint = endpoint
        self.access_key = access_key
        self.secret_key = secret_key

        # Keyword arguments applied to every client this backend creates.
        self._session_config: dict = {"region_name": region}
        if endpoint:
            # A custom endpoint targets S3-compatible services (Spaces, Minio, ...).
            self._session_config["endpoint_url"] = endpoint

        # Explicit credentials. When omitted, boto3 falls back to its normal
        # credential chain (env vars, shared config file, instance profile, ...).
        self._client_config: dict = {}
        if access_key and secret_key:
            self._client_config = {
                "aws_access_key_id": access_key,
                "aws_secret_access_key": secret_key,
            }

    def _client(self):
        """Return an async context manager yielding a configured S3 client.

        A fresh session is created per call, which keeps the backend safe to
        share across tasks/event loops at the cost of a little setup overhead.
        """
        return aioboto3.Session().client("s3", **self._session_config, **self._client_config)

    def _validate_key(self, key: str) -> None:
        """Validate storage key format.

        Raises:
            InvalidKeyError: If the key is empty, absolute, contains a path
                traversal component, or exceeds S3's 1024-character key limit.
        """
        if not key:
            raise InvalidKeyError("Key cannot be empty")

        if key.startswith("/"):
            raise InvalidKeyError("Key cannot start with /")

        if ".." in key:
            raise InvalidKeyError("Key cannot contain .. (path traversal)")

        if len(key) > 1024:
            raise InvalidKeyError("Key cannot exceed 1024 characters")

    async def put(
        self,
        key: str,
        data: bytes,
        content_type: str,
        metadata: Optional[dict] = None,
    ) -> str:
        """Store file in S3 and return a presigned URL (1 hour expiration).

        Args:
            key: Storage key (path) for the file
            data: File content as bytes
            content_type: MIME type (e.g., "image/jpeg")
            metadata: Optional metadata dict attached to the S3 object

        Raises:
            InvalidKeyError: If key format is invalid
            PermissionDeniedError: If credentials are missing or access denied
            StorageError: For other upload failures
        """
        self._validate_key(key)

        # S3 object metadata must be flat string-to-string pairs.
        s3_metadata = {str(k): str(v) for k, v in (metadata or {}).items()}

        try:
            async with self._client() as s3:
                await s3.put_object(
                    Bucket=self.bucket,
                    Key=key,
                    Body=data,
                    ContentType=content_type,
                    Metadata=s3_metadata,
                )

        except NoCredentialsError as e:
            raise PermissionDeniedError(f"S3 credentials not found: {e}") from e
        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            if error_code == "AccessDenied":
                raise PermissionDeniedError(f"S3 access denied: {e}") from e
            elif error_code == "NoSuchBucket":
                raise StorageError(f"S3 bucket does not exist: {self.bucket}") from e
            else:
                raise StorageError(f"S3 upload failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to upload to S3: {e}") from e

        # Return presigned URL (1 hour expiration). Note this performs an
        # extra existence check inside get_url().
        return await self.get_url(key, expires_in=3600)

    async def get(self, key: str) -> bytes:
        """Retrieve file content from S3.

        Raises:
            FileNotFoundError: If the key does not exist
            PermissionDeniedError: If access is denied
            StorageError: For other download failures
        """
        self._validate_key(key)

        try:
            async with self._client() as s3:
                response = await s3.get_object(Bucket=self.bucket, Key=key)
                async with response["Body"] as stream:
                    return await stream.read()

        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            if error_code == "NoSuchKey":
                raise FileNotFoundError(f"File not found: {key}") from e
            elif error_code == "AccessDenied":
                raise PermissionDeniedError(f"S3 access denied: {e}") from e
            else:
                raise StorageError(f"S3 download failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to download from S3: {e}") from e

    async def delete(self, key: str) -> bool:
        """Delete file from S3.

        Returns:
            True if the file existed and was deleted, False if it did not exist.

        Raises:
            PermissionDeniedError: If access is denied
            StorageError: For other delete failures
        """
        self._validate_key(key)

        # S3 delete_object succeeds even for missing keys, so check first to
        # honor the "False when absent" contract.
        if not await self.exists(key):
            return False

        try:
            async with self._client() as s3:
                await s3.delete_object(Bucket=self.bucket, Key=key)
                return True

        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            if error_code == "AccessDenied":
                raise PermissionDeniedError(f"S3 access denied: {e}") from e
            else:
                raise StorageError(f"S3 delete failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to delete from S3: {e}") from e

    async def exists(self, key: str) -> bool:
        """Check if file exists in S3 via a HEAD request.

        Raises:
            StorageError: If the HEAD request fails for any reason other
                than the object being absent.
        """
        self._validate_key(key)

        try:
            async with self._client() as s3:
                await s3.head_object(Bucket=self.bucket, Key=key)
                return True

        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            # head_object reports a missing object as "404" (sometimes "NoSuchKey").
            if error_code in ("NoSuchKey", "404"):
                return False
            else:
                raise StorageError(f"S3 head_object failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to check S3 file existence: {e}") from e

    async def get_url(
        self,
        key: str,
        expires_in: int = 3600,
        download: bool = False,
    ) -> str:
        """
        Generate presigned URL for file access.

        Args:
            key: Storage key
            expires_in: URL expiration in seconds (default: 1 hour)
            download: If True, force download instead of inline display

        Returns:
            Presigned S3 URL

        Raises:
            FileNotFoundError: If the key does not exist
            StorageError: If URL generation fails

        Example:
            >>> url = await backend.get_url("documents/invoice.pdf", expires_in=300)
        """
        self._validate_key(key)

        # Presigning is purely local, so verify existence explicitly to avoid
        # handing out URLs that will 404.
        if not await self.exists(key):
            raise FileNotFoundError(f"File not found: {key}")

        try:
            async with self._client() as s3:
                params = {"Bucket": self.bucket, "Key": key}

                if download:
                    # Use the key's basename so browsers save with a sensible
                    # name. (Bug fix: the computed filename was previously never
                    # interpolated, emitting a literal placeholder instead.)
                    filename = key.split("/")[-1]
                    params["ResponseContentDisposition"] = f'attachment; filename="{filename}"'

                url = await s3.generate_presigned_url(
                    "get_object",
                    Params=params,
                    ExpiresIn=expires_in,
                )
                return url

        # A single handler suffices: ClientError and any other failure were
        # wrapped with the identical message.
        except Exception as e:
            raise StorageError(f"Failed to generate presigned URL: {e}") from e

    async def list_keys(
        self,
        prefix: str = "",
        limit: int = 100,
    ) -> list[str]:
        """List stored keys with optional prefix filter.

        Args:
            prefix: Only return keys starting with this prefix
            limit: Maximum number of keys to return (single page; no pagination)

        Raises:
            StorageError: If the listing request fails
        """
        try:
            async with self._client() as s3:
                params = {"Bucket": self.bucket, "MaxKeys": limit}
                if prefix:
                    params["Prefix"] = prefix

                response = await s3.list_objects_v2(**params)

                # "Contents" is absent from the response when the bucket/prefix
                # is empty.
                return [obj["Key"] for obj in response.get("Contents", [])]

        except ClientError as e:
            raise StorageError(f"S3 list failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to list S3 keys: {e}") from e

    async def get_metadata(self, key: str) -> dict:
        """Get file metadata from S3.

        Returns:
            Dict with "size" (bytes), "content_type", "created_at"
            (ISO 8601 last-modified time), plus any custom metadata
            attached via put().

        Raises:
            FileNotFoundError: If the key does not exist
            StorageError: For other metadata failures
        """
        self._validate_key(key)

        try:
            async with self._client() as s3:
                response = await s3.head_object(Bucket=self.bucket, Key=key)

                metadata = {
                    "size": response["ContentLength"],
                    "content_type": response.get("ContentType", "application/octet-stream"),
                    "created_at": response["LastModified"].isoformat(),
                }

                # Merge user-supplied metadata from the original upload.
                if "Metadata" in response:
                    metadata.update(response["Metadata"])

                return metadata

        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            if error_code == "NoSuchKey":
                raise FileNotFoundError(f"File not found: {key}") from e
            else:
                raise StorageError(f"S3 head_object failed: {e}") from e
        except Exception as e:
            raise StorageError(f"Failed to get S3 metadata: {e}") from e
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
__all__ = ["S3Backend"]
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Base storage abstractions and exceptions.
|
|
3
|
+
|
|
4
|
+
Defines the StorageBackend protocol that all storage implementations must follow.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import Optional, Protocol
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class StorageError(Exception):
    """Common base class for every error raised by the storage layer."""
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class FileNotFoundError(StorageError):  # noqa: A001 — deliberately mirrors the builtin name
    """Signals that the requested file does not exist in storage."""
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class PermissionDeniedError(StorageError):
    """Signals that the caller lacks permission for the attempted operation."""
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class QuotaExceededError(StorageError):
    """Signals that a write would push usage past the storage quota."""
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class InvalidKeyError(StorageError):
    """Signals that a storage key is malformed (empty, absolute, traversal, ...)."""
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class StorageBackend(Protocol):
    """
    Structural interface for storage backends.

    Any class providing the seven async methods below is accepted wherever
    a ``StorageBackend`` is expected — no explicit inheritance required.

    Example:
        >>> class MyBackend:
        ...     async def put(self, key, data, content_type, metadata=None):
        ...         # Custom implementation
        ...         return "https://example.com/files/key"
        >>>
        >>> # MyBackend now structurally satisfies StorageBackend
    """

    async def put(
        self,
        key: str,
        data: bytes,
        content_type: str,
        metadata: Optional[dict] = None,
    ) -> str:
        """
        Store file content and return a URL for accessing it.

        Args:
            key: Storage key (path) for the file.
            data: Raw file content.
            content_type: MIME type, e.g. "image/jpeg" or "application/pdf".
            metadata: Optional extra metadata (user_id, tenant_id, etc.).

        Returns:
            Public or signed URL to access the file.

        Raises:
            InvalidKeyError: If the key format is invalid.
            PermissionDeniedError: If write access is denied.
            QuotaExceededError: If the storage quota is exceeded.
            StorageError: For any other storage failure.

        Example:
            >>> url = await storage.put(
            ...     key="avatars/user_123/profile.jpg",
            ...     data=image_bytes,
            ...     content_type="image/jpeg",
            ...     metadata={"user_id": "user_123"},
            ... )
        """
        ...

    async def get(self, key: str) -> bytes:
        """
        Return the content stored under *key*.

        Raises:
            FileNotFoundError: If no file exists at *key*.
            PermissionDeniedError: If read access is denied.
            StorageError: For any other storage failure.

        Example:
            >>> data = await storage.get("avatars/user_123/profile.jpg")
        """
        ...

    async def delete(self, key: str) -> bool:
        """
        Remove the file stored under *key*.

        Returns:
            True when a file was removed; False when *key* did not exist.

        Raises:
            PermissionDeniedError: If delete access is denied.
            StorageError: For any other storage failure.

        Example:
            >>> deleted = await storage.delete("avatars/user_123/profile.jpg")
        """
        ...

    async def exists(self, key: str) -> bool:
        """
        Return True when a file is stored under *key*, otherwise False.

        Example:
            >>> if await storage.exists("avatars/user_123/profile.jpg"):
            ...     print("File exists")
        """
        ...

    async def get_url(
        self,
        key: str,
        expires_in: int = 3600,
        download: bool = False,
    ) -> str:
        """
        Produce a signed or public URL for the file at *key*.

        Args:
            key: Storage key (path) for the file.
            expires_in: Seconds until the URL expires (default: one hour).
            download: When True, the URL forces a download instead of
                inline display.

        Returns:
            Signed or public URL.

        Raises:
            FileNotFoundError: If no file exists at *key*.
            StorageError: For any other storage failure.

        Example:
            >>> # 1-hour signed URL for viewing
            >>> url = await storage.get_url("documents/invoice.pdf")
            >>> # 5-minute forced-download URL
            >>> url = await storage.get_url(
            ...     "documents/invoice.pdf", expires_in=300, download=True
            ... )
        """
        ...

    async def list_keys(
        self,
        prefix: str = "",
        limit: int = 100,
    ) -> list[str]:
        """
        Return up to *limit* stored keys, optionally filtered by *prefix*.

        Args:
            prefix: Key prefix to match, e.g. "avatars/".
            limit: Maximum number of keys returned (default: 100).

        Example:
            >>> keys = await storage.list_keys(prefix="avatars/user_123/")
            >>> all_keys = await storage.list_keys()
        """
        ...

    async def get_metadata(self, key: str) -> dict:
        """
        Return metadata for the file at *key*.

        The dict contains at least:
            - size: file size in bytes
            - content_type: MIME type
            - created_at: creation timestamp (ISO 8601)
        plus any custom metadata supplied to put().

        Raises:
            FileNotFoundError: If no file exists at *key*.
            StorageError: For any other storage failure.

        Example:
            >>> meta = await storage.get_metadata("avatars/user_123/profile.jpg")
            >>> print(f"Size: {meta['size']} bytes")
        """
        ...
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
# Explicit public API: the backend protocol plus the full exception hierarchy.
__all__ = [
    "StorageBackend",
    "StorageError",
    "FileNotFoundError",
    "PermissionDeniedError",
    "QuotaExceededError",
    "InvalidKeyError",
]
|