fastapi-async-storages 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,86 @@
1
+ Metadata-Version: 2.3
2
+ Name: fastapi-async-storages
3
+ Version: 0.1.0
4
+ Summary: A powerful, extensible, and async-ready cloud object storage backend for FastAPI.
5
+ Author: stabldev
6
+ Author-email: stabldev <thestabldev@gmail.com>
7
+ Requires-Dist: aioboto3>=15.4.0 ; extra == 's3'
8
+ Requires-Python: >=3.12
9
+ Provides-Extra: s3
10
+ Description-Content-Type: text/markdown
11
+
12
+ # fastapi-async-storages
13
+
14
+ A powerful, extensible, and async-ready cloud object storage backend for FastAPI.
15
+
16
+ > Drop-in, plug-and-play cloud storage for your FastAPI apps; with full async support.\
17
+ > Inspired by [fastapi-storages](https://github.com/aminalaee/fastapi-storages), built on modern async patterns using [aioboto3](https://github.com/terricain/aioboto3).
18
+
19
+ ## Features
20
+
21
+ * Fully asynchronous storage interface designed for FastAPI applications
22
+ * Async S3 backend powered by [aioboto3](https://github.com/terricain/aioboto3)
23
+ * [SQLAlchemy](https://sqlalchemy.org/) and [SQLModel](https://sqlmodel.tiangolo.com/) integration
24
+ * Typed and extensible design
25
+ * Supports FastAPI dependency injection
26
+
27
+ ## Installation
28
+
29
+ ```bash
30
+ uv add fastapi-async-storages
31
+ # for s3 support:
32
+ uv add "fastapi-async-storages[s3]"
33
+ ```
34
+
35
+ ## Documentation
36
+
37
+ Full documentation is available at:\
38
+ https://fastapi-async-storages.readthedocs.io
39
+
40
+ ## Example: FastAPI
41
+
42
+ ```py
43
+ from fastapi import FastAPI, UploadFile
44
+ from sqlalchemy import Column, Integer
45
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
46
+ from sqlalchemy.orm import sessionmaker, declarative_base
47
+ from async_storages import S3Storage
48
+ from async_storages.integrations.sqlalchemy import FileType
49
+
50
+ Base = declarative_base()
51
+
52
+ app = FastAPI()
53
+ storage = S3Storage(...)
54
+ engine = create_async_engine("sqlite+aiosqlite:///test.db", echo=True)
55
+
56
+ # create AsyncSession factory
57
+ AsyncSessionLocal = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
58
+
59
+ class Example(Base):
60
+ __tablename__ = "example"
61
+
62
+ id = Column(Integer, primary_key=True)
63
+ file = Column(FileType(storage=storage))
64
+
65
+ # create tables inside an async context
66
+ @app.on_event("startup")
67
+ async def startup():
68
+ async with engine.begin() as conn:
69
+ await conn.run_sync(Base.metadata.create_all)
70
+
71
+ @app.post("/upload/")
72
+ async def create_upload_file(file: UploadFile):
73
+ file_name = f"uploads/{file.filename}"
74
+ # upload before commit due to the sqlalchemy binding being sync
75
+ await storage.upload(file.file, file_name)
76
+
77
+ example = Example(file=file_name)
78
+ async with AsyncSessionLocal() as session:
79
+ session.add(example)
80
+ await session.commit()
81
+ return {"filename": file_name}
82
+ ```
83
+
84
+ ## License
85
+
86
+ [MIT](LICENSE) © 2025 ^\_^ [`@stabldev`](https://github.com/stabldev)
@@ -0,0 +1,75 @@
1
+ # fastapi-async-storages
2
+
3
+ A powerful, extensible, and async-ready cloud object storage backend for FastAPI.
4
+
5
+ > Drop-in, plug-and-play cloud storage for your FastAPI apps; with full async support.\
6
+ > Inspired by [fastapi-storages](https://github.com/aminalaee/fastapi-storages), built on modern async patterns using [aioboto3](https://github.com/terricain/aioboto3).
7
+
8
+ ## Features
9
+
10
+ * Fully asynchronous storage interface designed for FastAPI applications
11
+ * Async S3 backend powered by [aioboto3](https://github.com/terricain/aioboto3)
12
+ * [SQLAlchemy](https://sqlalchemy.org/) and [SQLModel](https://sqlmodel.tiangolo.com/) integration
13
+ * Typed and extensible design
14
+ * Supports FastAPI dependency injection
15
+
16
+ ## Installation
17
+
18
+ ```bash
19
+ uv add fastapi-async-storages
20
+ # for s3 support:
21
+ uv add "fastapi-async-storages[s3]"
22
+ ```
23
+
24
+ ## Documentation
25
+
26
+ Full documentation is available at:\
27
+ https://fastapi-async-storages.readthedocs.io
28
+
29
+ ## Example: FastAPI
30
+
31
+ ```py
32
+ from fastapi import FastAPI, UploadFile
33
+ from sqlalchemy import Column, Integer
34
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
35
+ from sqlalchemy.orm import sessionmaker, declarative_base
36
+ from async_storages import S3Storage
37
+ from async_storages.integrations.sqlalchemy import FileType
38
+
39
+ Base = declarative_base()
40
+
41
+ app = FastAPI()
42
+ storage = S3Storage(...)
43
+ engine = create_async_engine("sqlite+aiosqlite:///test.db", echo=True)
44
+
45
+ # create AsyncSession factory
46
+ AsyncSessionLocal = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
47
+
48
+ class Example(Base):
49
+ __tablename__ = "example"
50
+
51
+ id = Column(Integer, primary_key=True)
52
+ file = Column(FileType(storage=storage))
53
+
54
+ # create tables inside an async context
55
+ @app.on_event("startup")
56
+ async def startup():
57
+ async with engine.begin() as conn:
58
+ await conn.run_sync(Base.metadata.create_all)
59
+
60
+ @app.post("/upload/")
61
+ async def create_upload_file(file: UploadFile):
62
+ file_name = f"uploads/{file.filename}"
63
+ # upload before commit due to the sqlalchemy binding being sync
64
+ await storage.upload(file.file, file_name)
65
+
66
+ example = Example(file=file_name)
67
+ async with AsyncSessionLocal() as session:
68
+ session.add(example)
69
+ await session.commit()
70
+ return {"filename": file_name}
71
+ ```
72
+
73
+ ## License
74
+
75
+ [MIT](LICENSE) © 2025 ^\_^ [`@stabldev`](https://github.com/stabldev)
@@ -0,0 +1,46 @@
1
+ [project]
2
+ name = "fastapi-async-storages"
3
+ version = "0.1.0"
4
+ description = "A powerful, extensible, and async-ready cloud object storage backend for FastAPI."
5
+ readme = "README.md"
6
+ authors = [
7
+ { name = "stabldev", email = "thestabldev@gmail.com" }
8
+ ]
9
+ requires-python = ">=3.12"
10
+ dependencies = []
11
+
12
+ [project.optional-dependencies]
13
+ s3 = ["aioboto3>=15.4.0"]
14
+
15
+ [dependency-groups]
16
+ dev = [
17
+ "aiosqlite>=0.21.0",
18
+ "pillow>=12.0.0",
19
+ "pytest>=8.4.2",
20
+ "pytest-aioboto3>=0.6.0",
21
+ "pytest-asyncio>=1.2.0",
22
+ "sqlalchemy>=2.0.44",
23
+ ]
24
+ docs = [
25
+ "furo>=2025.9.25",
26
+ "sphinx>=8.2.3",
27
+ ]
28
+
29
+ [tool.uv.build-backend]
30
+ module-root = "src"
31
+ module-name = "async_storages"
32
+
33
+ [tool.pyright]
34
+ reportMissingTypeStubs = "none"
35
+ reportUnknownMemberType = "none"
36
+ reportUnusedCallResult = "none"
37
+ reportUnknownVariableType = "none"
38
+ reportAny = "none"
39
+ reportExplicitAny = "none"
40
+
41
+ [tool.pytest.ini_options]
42
+ asyncio_mode = "auto"
43
+
44
+ [build-system]
45
+ requires = ["uv_build>=0.8.20,<0.10.0"]
46
+ build-backend = "uv_build"
@@ -0,0 +1,5 @@
1
from .base import BaseStorage, StorageFile, StorageImage

__version__ = "0.1.0"
__all__ = ["BaseStorage", "StorageFile", "StorageImage", "S3Storage"]


def __getattr__(name: str):
    """Lazily resolve optional package attributes (PEP 562).

    ``async_storages.s3`` raises ``ImportError`` at import time when the
    optional ``aioboto3`` dependency is missing, so importing it eagerly
    here would make ``from async_storages import StorageFile`` fail for
    users who did not install the ``[s3]`` extra.  Deferring the import to
    first attribute access keeps the base package usable without S3.
    """
    if name == "S3Storage":
        from .s3 import S3Storage

        return S3Storage
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
@@ -0,0 +1,202 @@
1
+ # pyright: reportUnusedParameter=none
2
+ from abc import ABC, abstractmethod
3
+ import asyncio
4
+ from io import BytesIO
5
+ from PIL import Image
6
+ from typing import BinaryIO
7
+
8
+
9
class BaseStorage(ABC):
    """
    Contract for asynchronous file-storage backends.

    Concrete backends implement uploading, retrieval, and deletion of files
    against a specific storage system while exposing a uniform, pluggable
    async interface.
    """

    @abstractmethod
    def get_name(self, name: str) -> str:
        """
        Sanitize *name* into a valid storage key.

        :param name: Original file name or path.
        :type name: str
        :return: A sanitized and valid file name or path for storage.
        :rtype: str
        """
        ...

    @abstractmethod
    async def get_size(self, name: str) -> int:
        """
        Report how large a stored file is.

        :param name: Original file name or path.
        :type name: str
        :return: File size in bytes.
        :rtype: int
        """
        ...

    @abstractmethod
    async def get_path(self, name: str) -> str:
        """
        Produce a URL or path at which the stored file can be accessed.

        :param name: Original file name or path.
        :type name: str
        :return: A URL or accessible path to the file.
        :rtype: str
        """
        ...

    @abstractmethod
    async def open(self, name: str) -> BytesIO:
        """
        Fetch a stored file into memory.

        :param name: Original file name or path.
        :type name: str
        :return: A ``BytesIO`` object containing the file's binary data.
        :rtype: BytesIO
        """
        ...

    @abstractmethod
    async def upload(self, file: BinaryIO, name: str) -> str:
        """
        Store a binary file under *name*.

        :param file: A binary file-like object to upload.
        :type file: BinaryIO
        :param name: Original file name or path.
        :type name: str
        :return: The final stored file name or path.
        :rtype: str
        """
        ...

    @abstractmethod
    async def delete(self, name: str) -> None:
        """
        Remove a stored file from the backend.

        :param name: Original file name or path.
        :return: None
        :rtype: None
        """
        ...
90
+
91
+
92
class StorageFile:
    """
    Handle to a single file managed by a storage backend.

    All operations delegate to the backend this handle was created with.

    :param name: The name or identifier of the stored file.
    :type name: str
    :param storage: The storage backend handling file operations.
    :type storage: BaseStorage
    """

    def __init__(self, name: str, storage: BaseStorage) -> None:
        self._name: str = name
        self._storage: BaseStorage = storage

    @property
    def name(self) -> str:
        """
        The file's name within the storage backend.

        :return: The name of the file in storage.
        :rtype: str
        """
        return self._name

    async def get_size(self) -> int:
        """
        Report the file's size.

        :return: The file size in bytes.
        :rtype: int
        """
        return await self._storage.get_size(self.name)

    async def get_path(self) -> str:
        """
        Produce a URL or path at which the file can be accessed.

        :return: A URL or file path string.
        :rtype: str
        """
        return await self._storage.get_path(self.name)

    async def upload(self, file: BinaryIO) -> str:
        """
        Store *file* under this handle's name.

        :param file: A binary file-like object to upload.
        :type file: BinaryIO
        :return: The name or path of the uploaded file.
        :rtype: str
        """
        return await self._storage.upload(file=file, name=self.name)

    async def delete(self) -> None:
        """
        Remove the file from the storage backend.

        :return: None
        :rtype: None
        """
        await self._storage.delete(self.name)
153
+
154
+
155
class StorageImage(StorageFile):
    """
    Handle to a stored image file.

    Extends :class:`StorageFile` with lazily-loaded image metadata
    (width and height in pixels).

    :param name: The name or identifier of the stored image file.
    :type name: str
    :param storage: The storage backend handling file operations.
    :type storage: BaseStorage
    :param width: The width of the image in pixels. Defaults to ``0`` if unknown.
    :type width: int, optional
    :param height: The height of the image in pixels. Defaults to ``0`` if unknown.
    :type height: int, optional
    """

    def __init__(
        self, name: str, storage: BaseStorage, width: int = 0, height: int = 0
    ) -> None:
        super().__init__(name, storage)
        self._width: int = width
        self._height: int = height
        # Dimensions count as already loaded only when both were supplied.
        self._meta_loaded: bool = bool(width and height)

    async def _load_meta(self) -> None:
        # Fetch the raw bytes first, then decode the header in a worker
        # thread: PIL decoding is blocking and would stall the event loop.
        stream = await self._storage.open(self.name)

        def _read_size() -> tuple[int, int]:
            with Image.open(stream) as img:
                return img.size

        self._width, self._height = await asyncio.to_thread(_read_size)
        self._meta_loaded = True

    async def get_dimensions(self) -> tuple[int, int]:
        """
        Return the image's ``(width, height)`` in pixels.

        When the metadata has not been loaded yet, it is read from the
        storage backend on first call and cached on the instance.

        :return: A tuple containing the image width and height in pixels.
        :rtype: tuple[int, int]
        :raises OSError: If the image file cannot be opened or read from storage.
        :raises ValueError: If the image file is not a valid image or dimensions cannot be determined.
        """
        if not self._meta_loaded:
            await self._load_meta()
        return self._width, self._height
@@ -0,0 +1,75 @@
1
+ from typing import Any, override
2
+ from sqlalchemy.engine.interfaces import Dialect
3
+ from sqlalchemy.types import TypeDecorator, TypeEngine, Unicode
4
+
5
+ from async_storages import StorageFile, StorageImage
6
+ from async_storages.base import BaseStorage
7
+
8
+
9
class FileType(TypeDecorator[Any]):
    """
    SQLAlchemy column type for representing stored files.

    Stores the file name in a ``Unicode`` column and wraps values read back
    from the database into :class:`~async_storages.StorageFile` objects
    bound to the configured storage backend.

    :param storage: The storage backend used to manage file operations.
    :type storage: BaseStorage
    :param args: Additional positional arguments passed to ``TypeDecorator``.
    :param kwargs: Additional keyword arguments passed to ``TypeDecorator``.
    """

    impl: TypeEngine[Any] | type[TypeEngine[Any]] = Unicode
    cache_ok: bool | None = True

    def __init__(self, storage: BaseStorage, *args: Any, **kwargs: Any):
        super().__init__(*args, **kwargs)
        self.storage: BaseStorage = storage

    @override
    def process_bind_param(self, value: Any, dialect: Dialect) -> str | None:
        """
        Convert a Python value into the string persisted to the database.

        Accepts ``None``, plain strings, objects exposing a truthy ``name``
        attribute (e.g. ``StorageFile``), or falls back to ``str(value)``.
        """
        # Fixed return annotation: None passes through for nullable columns,
        # so the declared type must be `str | None`, not `str`.
        if value is None:
            return None
        if isinstance(value, str):
            return value

        name = getattr(value, "name", None)
        if name:
            return name
        return str(value)

    @override
    def process_result_value(
        self, value: Any | None, dialect: Dialect
    ) -> StorageFile | None:
        """Wrap a database value (file name) into a ``StorageFile``."""
        if value is None:
            return None
        return StorageFile(name=value, storage=self.storage)
49
+
50
+
51
class ImageType(FileType):
    """
    SQLAlchemy column type for representing stored image files.

    Extends :class:`~.FileType` so that database values (image file names)
    are wrapped into :class:`~async_storages.StorageImage` objects, which
    expose image metadata such as width and height in addition to the base
    file operations.

    :param storage: The storage backend used to manage image file operations.
    :type storage: BaseStorage
    :param args: Additional positional arguments passed to ``FileType``.
    :param kwargs: Additional keyword arguments passed to ``FileType``.
    """

    @override
    def process_result_value(
        self, value: Any | None, dialect: Dialect
    ) -> StorageImage | None:
        """Wrap a database value (image file name) into a ``StorageImage``."""
        # Return annotation narrowed covariantly to the actual type produced,
        # so callers get StorageImage-specific members without casting.
        if value is None:
            return None
        return StorageImage(name=value, storage=self.storage)
@@ -0,0 +1,234 @@
1
+ # pyright: reportPrivateLocalImportUsage=none
2
+ from io import BytesIO
3
+ import mimetypes
4
+ from pathlib import Path
5
+ from typing import Any, BinaryIO, override
6
+
7
+ from async_storages.base import BaseStorage
8
+ from async_storages.utils import secure_filename
9
+
10
+ try:
11
+ import aioboto3
12
+ from botocore.exceptions import ClientError
13
+ except ImportError:
14
+ raise ImportError(
15
+ "'aioboto3' is not installed. Install with 'fastapi-async-storages[s3]'."
16
+ )
17
+
18
+
19
class S3Storage(BaseStorage):
    """
    Asynchronous storage backend for Amazon S3-compatible object storage.

    This class provides async methods for uploading, retrieving, and deleting files
    in an S3 bucket using the ``aioboto3`` client.

    :param bucket_name: Name of the S3 bucket.
    :type bucket_name: str
    :param endpoint_url: The S3 endpoint hostname (without protocol).
    :type endpoint_url: str
    :param aws_access_key_id: AWS access key ID for authentication.
    :type aws_access_key_id: str
    :param aws_secret_access_key: AWS secret access key for authentication.
    :type aws_secret_access_key: str
    :param region_name: AWS region name (optional).
    :type region_name: str or None
    :param use_ssl: Whether to use HTTPS (True) or HTTP (False).
    :type use_ssl: bool
    :param default_acl: Default Access Control List (ACL) to apply when uploading files.
    :type default_acl: str or None
    :param custom_domain: Custom domain for serving files (e.g. CDN).
    :type custom_domain: str or None
    :param querystring_auth: Whether to generate presigned URLs with query parameters.
    :type querystring_auth: bool
    :raises ValueError: If ``endpoint_url`` includes a protocol scheme.
    :raises ImportError: If ``aioboto3`` is not installed (raised at module import).
    """

    def __init__(
        self,
        bucket_name: str,
        endpoint_url: str,
        aws_access_key_id: str,
        aws_secret_access_key: str,
        region_name: str | None = None,
        use_ssl: bool = True,
        default_acl: str | None = None,
        custom_domain: str | None = None,
        querystring_auth: bool = False,
    ) -> None:
        # Raise instead of `assert`: assertions are stripped under `python -O`,
        # which would silently skip this validation.
        if endpoint_url.startswith("http"):
            raise ValueError("Endpoint should not contain protocol")

        self.bucket_name: str = bucket_name
        self.endpoint_url: str = endpoint_url.rstrip("/")
        self.aws_access_key_id: str = aws_access_key_id
        self.aws_secret_access_key: str = aws_secret_access_key
        self.region_name: str | None = region_name
        self.use_ssl: bool = use_ssl
        self.default_acl: str | None = default_acl
        self.custom_domain: str | None = custom_domain
        self.querystring_auth: bool = querystring_auth

        self._http_scheme: str = "https" if self.use_ssl else "http"
        self._url: str = f"{self._http_scheme}://{self.endpoint_url}"
        self._session: "aioboto3.Session" = aioboto3.Session()

    def _get_s3_client(self) -> Any:
        # Build a new async client context manager from the shared session;
        # each storage operation enters/exits it via `async with`.
        return self._session.client(
            "s3",
            region_name=self.region_name,
            use_ssl=self.use_ssl,
            endpoint_url=self._url,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
        )

    @override
    def get_name(self, name: str) -> str:
        """
        Sanitize and normalize a file path before uploading to S3.

        Removes unsafe path components (``..`` or ``.``) and ensures each
        segment is a secure filename.

        :param name: Original file name or path.
        :type name: str
        :return: Sanitized file path.
        :rtype: str
        """
        parts = Path(name).parts
        safe_parts: list[str] = []

        for part in parts:
            if part not in ("..", ".", ""):
                safe_parts.append(secure_filename(part))

        safe_path = Path(*safe_parts)
        return str(safe_path)

    @override
    async def get_size(self, name: str) -> int:
        """
        Retrieve the size of an S3 object in bytes.

        :param name: The object key (path) in the S3 bucket.
        :type name: str
        :return: The file size in bytes, or ``0`` if the object does not exist.
        :rtype: int
        :raises botocore.exceptions.ClientError: If an unexpected S3 error occurs.
        """
        name = self.get_name(name)

        async with self._get_s3_client() as s3_client:
            try:
                res = await s3_client.head_object(Bucket=self.bucket_name, Key=name)
                return int(res.get("ContentLength", 0))
            except ClientError as e:
                code = e.response.get("Error", {}).get("Code")
                status = e.response.get("ResponseMetadata", {}).get("HTTPStatusCode")

                # Treat "missing object" as size 0; anything else is a real error.
                if code in ("NoSuchKey", "NotFound") or status == 404:
                    return 0
                raise

    @override
    async def get_path(self, name: str) -> str:
        """
        Generate a URL for accessing an S3 object.

        If ``custom_domain`` is set, returns a static URL using that domain.
        If ``querystring_auth`` is True, returns a presigned URL with temporary access.

        :param name: The object key (path) in the S3 bucket.
        :type name: str
        :return: A direct or presigned URL for the file.
        :rtype: str
        """
        # Sanitize for consistency with `upload`/`get_size`/`open`, so the URL
        # always references the key that was actually stored.
        name = self.get_name(name)

        if self.custom_domain:
            return f"{self._http_scheme}://{self.custom_domain}/{name}"
        elif self.querystring_auth:
            async with self._get_s3_client() as s3_client:
                params = {"Bucket": self.bucket_name, "Key": name}
                return await s3_client.generate_presigned_url(
                    "get_object", Params=params
                )
        else:
            url = f"{self._http_scheme}://{self.endpoint_url}/{self.bucket_name}/{name}"
            return url

    @override
    async def open(self, name: str) -> BytesIO:
        """
        Open an object from S3 and return it as an in-memory binary stream.

        This method fetches the file contents asynchronously and returns
        a ``BytesIO`` object positioned at the start of the file.

        :param name: The object key (path) in the S3 bucket.
        :type name: str
        :return: A BytesIO object containing the file's contents.
        :rtype: BytesIO
        :raises FileNotFoundError: If the object is not found.
        :raises botocore.exceptions.ClientError: If the object cannot be fetched.
        """
        name = self.get_name(name)

        async with self._get_s3_client() as s3_client:
            try:
                response = await s3_client.get_object(Bucket=self.bucket_name, Key=name)
            except ClientError as e:
                code = e.response.get("Error", {}).get("Code")
                if code in ("NoSuchKey", "NotFound"):
                    raise FileNotFoundError(
                        f"Object not found in bucket: {name}"
                    ) from e
                raise

            async with response["Body"] as stream:
                data = await stream.read()
            return BytesIO(data)

    @override
    async def upload(self, file: BinaryIO, name: str) -> str:
        """
        Upload a file object to the configured S3 bucket.

        :param file: Binary file-like object to upload.
        :type file: BinaryIO
        :param name: Target object key (path) in the S3 bucket.
        :type name: str
        :return: The name or key of the uploaded object.
        :rtype: str
        :raises botocore.exceptions.ClientError: If the upload fails.
        """
        name = self.get_name(name)
        content_type, _ = mimetypes.guess_type(name)
        extra_args = {"ContentType": content_type or "application/octet-stream"}
        if self.default_acl:
            extra_args["ACL"] = self.default_acl

        async with self._get_s3_client() as s3_client:
            # Rewind so the whole stream is uploaded even if it was read before.
            file.seek(0)
            await s3_client.put_object(
                Bucket=self.bucket_name, Key=name, Body=file, **extra_args
            )
            return name

    @override
    async def delete(self, name: str) -> None:
        """
        Delete an object from the S3 bucket.

        :param name: The object key (path) to delete.
        :type name: str
        :return: None
        :rtype: None
        :raises botocore.exceptions.ClientError: If the delete operation fails.
        """
        # Sanitize so we delete the same key `upload` wrote; previously an
        # unsanitized name could target a different (non-existent) key.
        name = self.get_name(name)

        async with self._get_s3_client() as s3_client:
            try:
                await s3_client.delete_object(Bucket=self.bucket_name, Key=name)
            except ClientError as e:
                # Deleting a missing object is not an error.
                if e.response.get("Error", {}).get("Code") != "NoSuchKey":
                    raise
@@ -0,0 +1,16 @@
1
+ import os
2
+ import re
3
+
4
_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]")


# https://werkzeug.palletsprojects.com/en/stable/utils/#werkzeug.utils.secure_filename
# https://github.com/pallets/werkzeug/blob/504a8c4fbda9b8b2fd09e817544ffd228f23458e/src/werkzeug/utils.py#L195
def secure_filename(filename: str) -> str:
    """Return an ASCII-safe version of *filename* usable as a storage key.

    Path separators are treated as whitespace, whitespace runs collapse to
    single underscores, characters outside ``[A-Za-z0-9_.-]`` are dropped,
    and leading/trailing dots and underscores are stripped.
    """
    for separator in (os.path.sep, os.path.altsep):
        if separator:
            filename = filename.replace(separator, " ")

    collapsed = "_".join(filename.split())
    return _filename_ascii_strip_re.sub("", collapsed).strip("._")