python3_commons-0.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of python3-commons might be problematic. Click here for more details.

@@ -0,0 +1,52 @@
1
+ from pydantic import Field, HttpUrl, PostgresDsn, RedisDsn, SecretStr
2
+ from pydantic_settings import BaseSettings, SettingsConfigDict
3
+
4
+
5
class CommonSettings(BaseSettings):
    """Shared logging settings (no env_prefix: variables are matched by field name)."""

    logging_level: str = 'INFO'
    logging_format: str = '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
    # Formatter name; presumably resolved to a formatter class by the logging
    # configuration elsewhere — verify against consumers.
    logging_formatter: str = 'default'
9
+
10
+
11
class OIDCSettings(BaseSettings):
    """OIDC client settings, read from OIDC_* environment variables."""

    model_config = SettingsConfigDict(env_prefix='OIDC_')

    enabled: bool = True
    # Base URL of the identity provider (issuer); None disables discovery.
    authority_url: HttpUrl | None = None
    client_id: str | None = None
17
+
18
+
19
class ValkeySettings(BaseSettings):
    """Valkey/Redis connection settings, read from VALKEY_* environment variables."""

    model_config = SettingsConfigDict(env_prefix='VALKEY_')

    # Direct-connection DSN; sentinel_dsn is the alternative for
    # Sentinel-managed deployments.
    dsn: RedisDsn | None = None
    sentinel_dsn: RedisDsn | None = None
24
+
25
+
26
class DBSettings(BaseSettings):
    """SQLAlchemy engine settings, read from DB_* environment variables."""

    model_config = SettingsConfigDict(env_prefix='DB_')

    # serialization_alias='url': model_dump(by_alias=True) emits the 'url' key
    # that async_engine_from_config(prefix='') expects (see db module).
    dsn: PostgresDsn | None = Field(default=None, serialization_alias='url')
    echo: bool = False
    pool_size: int = 20
    max_overflow: int = 0
    pool_timeout: int = 30
    pool_recycle: int = 1800  # 30 minutes
35
+
36
+
37
class S3Settings(BaseSettings):
    """S3 object-storage settings.

    Field names already carry the s3_ prefix, so the matching environment
    variables are e.g. S3_ENDPOINT_URL (pydantic-settings name matching is
    case-insensitive by default).
    """

    s3_endpoint_url: str | None = None
    s3_region_name: str | None = None
    # Bug fix: wrap defaults in SecretStr. pydantic v2 does not validate
    # defaults unless validate_default is enabled, so a bare '' stayed a plain
    # str and later .get_secret_value() calls raised AttributeError.
    s3_access_key_id: SecretStr = SecretStr('')
    s3_secret_access_key: SecretStr = SecretStr('')
    s3_secure: bool = True
    s3_bucket: str | None = None
    s3_bucket_root: str | None = None
    s3_cert_verify: bool = True
46
+
47
+
48
# Eagerly-built module-level singletons: values are read from the environment
# at import time via pydantic-settings.
settings = CommonSettings()
oidc_settings = OIDCSettings()
valkey_settings = ValkeySettings()
db_settings = DBSettings()
s3_settings = S3Settings()
@@ -0,0 +1,83 @@
1
+ import contextlib
2
+ import logging
3
+ from typing import AsyncGenerator, Callable, Mapping
4
+
5
+ from sqlalchemy import MetaData
6
+ from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_engine_from_config
7
+ from sqlalchemy.ext.asyncio.session import async_sessionmaker
8
+ from sqlalchemy.orm import declarative_base
9
+
10
+ from python3_commons.conf import DBSettings
11
+
12
+ logger = logging.getLogger(__name__)
13
+ metadata = MetaData()
14
+ Base = declarative_base(metadata=metadata)
15
+
16
+
17
class AsyncSessionManager:
    """Lazily builds and caches one async engine and session maker per named database.

    ``db_settings`` maps a logical database name to its DBSettings; engines and
    session makers are created on first use and reused for subsequent calls.
    """

    def __init__(self, db_settings: Mapping[str, DBSettings]):
        self.db_settings: Mapping[str, DBSettings] = db_settings
        self.engines: dict[str, AsyncEngine] = {}
        self.session_makers: dict = {}

    def get_db_settings(self, name: str) -> DBSettings:
        """Return the settings for *name*, logging and re-raising KeyError when missing."""
        try:
            return self.db_settings[name]
        except KeyError:
            logger.error(f'Missing database settings: {name}')

            raise

    def async_engine_from_db_settings(self, name) -> AsyncEngine:
        """Create an AsyncEngine from the named settings.

        model_dump(by_alias=True) renames ``dsn`` to ``url`` (see DBSettings),
        which is the key async_engine_from_config expects with prefix=''.
        """
        db_settings = self.get_db_settings(name)
        configuration = db_settings.model_dump(by_alias=True)
        # The DSN is a pydantic URL object; the engine config needs a plain string.
        configuration['url'] = str(configuration['url'])
        engine = async_engine_from_config(configuration, prefix='')

        return engine

    def get_engine(self, name: str) -> AsyncEngine:
        """Return the cached engine for *name*, creating it on first access."""
        try:
            engine = self.engines[name]
        except KeyError:
            logger.debug(f'Creating engine: {name}')
            engine = self.async_engine_from_db_settings(name)
            self.engines[name] = engine

        return engine

    def get_session_maker(self, name: str):
        """Return the cached async_sessionmaker for *name*, creating it on first access."""
        try:
            session_maker = self.session_makers[name]
        except KeyError:
            logger.debug(f'Creating session maker: {name}')
            engine = self.get_engine(name)
            session_maker = async_sessionmaker(engine, expire_on_commit=False)
            self.session_makers[name] = session_maker

        return session_maker

    def get_async_session(self, name: str) -> Callable[[], AsyncGenerator[AsyncSession, None]]:
        """Return a zero-arg async generator factory yielding a session (dependency-injection style)."""
        async def get_session() -> AsyncGenerator[AsyncSession, None]:
            session_maker = self.get_session_maker(name)

            async with session_maker() as session:
                yield session

        return get_session

    def get_session_context(self, name: str):
        """Return an async context manager wrapping get_async_session(name)."""
        # TODO: cache
        return contextlib.asynccontextmanager(self.get_async_session(name))
72
+
73
+
74
async def is_healthy(engine: AsyncEngine) -> bool:
    """Check database connectivity by round-tripping ``SELECT 1`` on *engine*.

    :param engine: the async engine to probe
    :return: True when the query executes and returns 1, False on any error
    """
    # Local import keeps this fix self-contained; ``text`` is required because
    # SQLAlchemy 2.x rejects raw SQL strings in Connection.execute()
    # (ObjectNotExecutableError), which made the original always report False.
    from sqlalchemy import text

    try:
        async with engine.begin() as conn:
            result = await conn.execute(text('SELECT 1;'))

            return result.scalar() == 1
    except Exception as e:
        logger.error(f'Database connection is not healthy: {e}')

        return False
@@ -0,0 +1,62 @@
1
+ import logging
2
+ from typing import Mapping
3
+
4
+ import sqlalchemy as sa
5
+ from sqlalchemy import asc, desc, func
6
+ from sqlalchemy.sql.elements import BooleanClauseList, UnaryExpression
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def get_query(
    search: Mapping[str, str] | None = None,
    order_by: str | None = None,
    columns: Mapping | None = None,
) -> tuple[BooleanClauseList | None, tuple[UnaryExpression, ...] | None]:
    """
    Build a SQLAlchemy WHERE clause and ORDER BY expressions from request-style parameters.

    :param search: param name -> raw search value; every key must exist in *columns*.
    :param order_by: comma-separated column names; a leading '-' selects descending order.
    :param columns:
        Param name ->
            0: Model column
            1: case-insensitive if True
            2: cast value to type
            3: exact match if True, LIKE %value% if False
    :return: (where_clause or None, tuple of ORDER BY expressions or None)
    """

    order_by_cols = {}

    if order_by:
        for order_by_col in order_by.split(','):
            if order_by_col.startswith('-'):
                direction = desc
                order_by_col = order_by_col[1:]
            else:
                direction = asc

            order_by_cols[order_by_col] = direction

        order_by_clauses = tuple(
            direction(columns[order_by_col][0]) for order_by_col, direction in order_by_cols.items()
        )
    else:
        order_by_clauses = None

    if search:
        where_parts = [
            # Exact-match params: (optionally upper-cased) column == cast(value).
            *(
                (func.upper(columns[k][0]) if columns[k][1] else columns[k][0]) == columns[k][2](v)
                for k, v in search.items()
                if columns[k][3]
            ),
            # NOTE(review): v.upper() is applied here even when columns[k][1]
            # (case-insensitive) is False, while the column is only upper-cased
            # when the flag is True — confirm this asymmetry is intended.
            *(
                (func.upper(columns[k][0]) if columns[k][1] else columns[k][0]).like(f'%{v.upper()}%')
                for k, v in search.items()
                if not columns[k][3]
            ),
        ]
    else:
        where_parts = None

    if where_parts:
        where_clause = sa.and_(*where_parts)
    else:
        where_clause = None

    return where_clause, order_by_clauses
@@ -0,0 +1,2 @@
1
+ from python3_commons.db.models.auth import ApiKey, User, UserGroup
2
+ from python3_commons.db.models.rbac import RBACApiKeyRole, RBACPermission, RBACRole, RBACRolePermission, RBACUserRole
@@ -0,0 +1,35 @@
1
+ import uuid
2
+
3
+ from fastapi_users_db_sqlalchemy import GUID, SQLAlchemyBaseUserTableUUID
4
+ from pydantic import AwareDatetime
5
+ from sqlalchemy import BIGINT, DateTime, ForeignKey, String
6
+ from sqlalchemy.orm import Mapped, mapped_column
7
+
8
+ from python3_commons.db import Base
9
+ from python3_commons.db.models.common import BaseDBModel, BaseDBUUIDModel
10
+
11
+
12
class UserGroup(BaseDBModel, Base):
    """Named group of users; referenced by User.group_id."""

    __tablename__ = 'user_groups'

    name: Mapped[str] = mapped_column(String, nullable=False)
16
+
17
+
18
class User(SQLAlchemyBaseUserTableUUID, Base):
    """Application user; the UUID primary key and auth columns are inherited
    from SQLAlchemyBaseUserTableUUID (fastapi-users)."""

    __tablename__ = 'users'

    # Unique login handle, in addition to whatever the base table provides.
    username: Mapped[str] = mapped_column(String, unique=True, index=True, nullable=False)
    # Optional membership in a UserGroup (FK to user_groups.id).
    group_id: Mapped[int | None] = mapped_column(BIGINT, ForeignKey('user_groups.id'))
23
+
24
+
25
class ApiKey(BaseDBUUIDModel, Base):
    """API key issued to a partner, optionally tied to a User.

    ondelete='RESTRICT' prevents deleting a user that still owns keys.
    """

    __tablename__ = 'api_keys'

    user_id: Mapped[uuid.UUID | None] = mapped_column(
        GUID,
        ForeignKey('users.id', name='fk_api_key_user', ondelete='RESTRICT'),
        index=True,
    )
    partner_name: Mapped[str] = mapped_column(String, unique=True)
    key: Mapped[str] = mapped_column(String, unique=True)
    # NULL presumably means the key never expires — TODO confirm with consumers.
    expires_at: Mapped[AwareDatetime | None] = mapped_column(DateTime(timezone=True))
@@ -0,0 +1,39 @@
1
+ from pydantic import AwareDatetime
2
+ from sqlalchemy import BIGINT, DateTime
3
+ from sqlalchemy.dialects.postgresql import UUID
4
+ from sqlalchemy.ext.compiler import compiles
5
+ from sqlalchemy.orm import Mapped, mapped_column
6
+ from sqlalchemy.sql import expression
7
+ from sqlalchemy.sql.ddl import CreateColumn
8
+
9
+
10
class UTCNow(expression.FunctionElement):
    """SQL function element producing the current UTC timestamp server-side.

    Rendering is dialect-specific; the PostgreSQL form is supplied by a
    @compiles hook in this module.
    """

    type = DateTime(timezone=True)
12
+
13
+
14
@compiles(UTCNow, 'postgresql')
def pg_utcnow(element, compiler, **kw):
    # Render UTCNow as UTC-normalized CURRENT_TIMESTAMP on PostgreSQL.
    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
17
+
18
+
19
@compiles(CreateColumn, 'postgresql')
def use_identity(element, compiler, **kw):
    """Rewrite SERIAL/BIGSERIAL column DDL into standard identity columns.

    The first replace already converts the 'SERIAL' substring inside
    'BIGSERIAL' (yielding 'BIGINT GENERATED BY DEFAULT AS IDENTITY'), so the
    second replace is a defensive no-op in practice.
    """
    result = compiler.visit_create_column(element, **kw).replace('SERIAL', 'INT GENERATED BY DEFAULT AS IDENTITY')

    return result.replace('BIGSERIAL', 'BIGINT GENERATED BY DEFAULT AS IDENTITY')
24
+
25
+
26
class BaseDBModel:
    """Mixin adding a BIGINT surrogate primary key plus audit timestamps.

    Negative sort_order values keep these columns first in generated tables.
    """

    id: Mapped[int] = mapped_column(BIGINT, primary_key=True, sort_order=-3)
    created_at: Mapped[AwareDatetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=UTCNow(), sort_order=-2
    )
    # Made Optional to match BaseDBUUIDModel: the column has no insert default
    # and is only populated via onupdate, so a NOT NULL constraint (inferred
    # from the non-Optional annotation) would break initial inserts.
    updated_at: Mapped[AwareDatetime | None] = mapped_column(DateTime(timezone=True), onupdate=UTCNow(), sort_order=-1)
32
+
33
+
34
class BaseDBUUIDModel:
    """Mixin adding a UUID primary key plus created/updated audit timestamps."""

    # NOTE(review): the annotation uses sqlalchemy's UUID column type rather
    # than uuid.UUID; Mapped[...] normally expects the Python-level type —
    # confirm this is intended.
    uid: Mapped[UUID] = mapped_column(UUID, primary_key=True, sort_order=-3)
    created_at: Mapped[AwareDatetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=UTCNow(), sort_order=-2
    )
    updated_at: Mapped[AwareDatetime | None] = mapped_column(DateTime(timezone=True), onupdate=UTCNow(), sort_order=-1)
@@ -0,0 +1,91 @@
1
+ import uuid
2
+
3
+ from fastapi_users_db_sqlalchemy import GUID
4
+ from pydantic import AwareDatetime
5
+ from sqlalchemy import CheckConstraint, DateTime, ForeignKey, PrimaryKeyConstraint, String
6
+ from sqlalchemy.dialects.postgresql import UUID
7
+ from sqlalchemy.orm import Mapped, mapped_column
8
+
9
+ from python3_commons.db import Base
10
+
11
+
12
class RBACRole(Base):
    """A role users/API keys can be granted; permissions attach via rbac_role_permissions."""

    __tablename__ = 'rbac_roles'

    uid: Mapped[uuid.UUID] = mapped_column(UUID, primary_key=True)
    name: Mapped[str] = mapped_column(String, unique=True, nullable=False)
17
+
18
+
19
class RBACPermission(Base):
    """A named permission; the CHECK constraint restricts names to lowercase
    dotted identifiers ([a-z0-9_.])."""

    __tablename__ = 'rbac_permissions'

    uid: Mapped[uuid.UUID] = mapped_column(UUID, primary_key=True)
    name: Mapped[str] = mapped_column(String, unique=True, nullable=False)

    __table_args__ = (CheckConstraint("name ~ '^[a-z0-9_.]+$'", name='check_rbac_permissions_name'),)
26
+
27
+
28
class RBACRolePermission(Base):
    """Association table linking roles to permissions (composite PK, CASCADE on both FKs)."""

    __tablename__ = 'rbac_role_permissions'

    # Annotated Optional although both columns form the composite PK (and are
    # therefore NOT NULL at the DB level) — kept as-is to avoid schema drift.
    role_uid: Mapped[uuid.UUID | None] = mapped_column(
        UUID,
        ForeignKey('rbac_roles.uid', name='fk_rbac_role_permissions_role', ondelete='CASCADE'),
        index=True,
    )
    permission_uid: Mapped[uuid.UUID | None] = mapped_column(
        UUID,
        ForeignKey('rbac_permissions.uid', name='fk_rbac_role_permissions_permission', ondelete='CASCADE'),
        index=True,
    )

    __table_args__ = (PrimaryKeyConstraint('role_uid', 'permission_uid', name='pk_rbac_role_permissions'),)
43
+
44
+
45
class RBACUserRole(Base):
    """Time-bounded role grant for a user (starts_at mandatory, expires_at optional)."""

    __tablename__ = 'rbac_user_roles'

    # users.id is a fastapi-users GUID, hence the GUID column type here.
    user_id: Mapped[uuid.UUID | None] = mapped_column(
        GUID,
        ForeignKey('users.id', name='fk_rbac_user_roles_user', ondelete='CASCADE'),
        index=True,
    )
    role_uid: Mapped[uuid.UUID | None] = mapped_column(
        UUID,
        ForeignKey('rbac_roles.uid', name='fk_rbac_user_roles_role', ondelete='CASCADE'),
        index=True,
    )
    starts_at: Mapped[AwareDatetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # NULL presumably means the grant never expires — TODO confirm with consumers.
    expires_at: Mapped[AwareDatetime | None] = mapped_column(DateTime(timezone=True))

    __table_args__ = (PrimaryKeyConstraint('user_id', 'role_uid', name='pk_rbac_user_roles'),)
62
+
63
+
64
class RBACApiKeyRole(Base):
    """Time-bounded role grant for an API key (mirrors RBACUserRole)."""

    __tablename__ = 'rbac_api_key_roles'

    # NOTE(review): the FK constraint name ends in '_user' — looks like a
    # copy-paste from RBACUserRole; renaming would require a migration, so it
    # is documented rather than changed here.
    api_key_uid: Mapped[uuid.UUID | None] = mapped_column(
        UUID,
        ForeignKey('api_keys.uid', name='fk_rbac_api_key_roles_user', ondelete='CASCADE'),
        index=True,
    )
    role_uid: Mapped[uuid.UUID | None] = mapped_column(
        UUID,
        ForeignKey('rbac_roles.uid', name='fk_rbac_api_key_roles_role', ondelete='CASCADE'),
        index=True,
    )
    starts_at: Mapped[AwareDatetime] = mapped_column(DateTime(timezone=True), nullable=False)
    expires_at: Mapped[AwareDatetime | None] = mapped_column(DateTime(timezone=True))

    __table_args__ = (PrimaryKeyConstraint('api_key_uid', 'role_uid', name='pk_rbac_api_key_roles'),)
81
+
82
+
83
+ # class RBACRoleRelation(Base):
84
+ # __tablename__ = 'rbac_role_relations'
85
+ #
86
+ # parent_uid: Mapped[uuid.UUID] = mapped_column(UUID)
87
+ # child_uid: Mapped[uuid.UUID] = mapped_column(UUID)
88
+ #
89
+ # __table_args__ = (
90
+ # PrimaryKeyConstraint('parent_uid', 'child_uid', name='pk_rbac_role_relations'),
91
+ # )
python3_commons/fs.py ADDED
@@ -0,0 +1,10 @@
1
+ from pathlib import Path
2
+ from typing import Generator
3
+
4
+
5
def iter_files(root: Path, recursive: bool = True) -> Generator[Path, None, None]:
    """Yield files under *root*, optionally recursing into subdirectories.

    Hidden directories (names starting with '.') are never descended into;
    hidden files directly inside a visited directory are still yielded.

    :param root: directory to scan
    :param recursive: descend into non-hidden subdirectories when True
    """
    for entry in root.iterdir():
        if entry.is_file():
            yield entry
        elif recursive and entry.is_dir() and not entry.name.startswith('.'):
            # Forward ``recursive`` explicitly; the original relied on the
            # parameter's default, which only coincidentally matched.
            yield from iter_files(entry, recursive)
@@ -0,0 +1,108 @@
1
+ import logging
2
+ import shlex
3
+ import threading
4
+ from datetime import date, datetime, timedelta
5
+ from decimal import ROUND_HALF_UP, Decimal
6
+ from json import dumps
7
+ from typing import Literal, Mapping, Sequence
8
+ from urllib.parse import urlencode
9
+
10
+ from python3_commons.serializers.json import CustomJSONEncoder
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class SingletonMeta(type):
    """Metaclass turning each class that uses it into a thread-safe singleton.

    The first call constructs the instance under a shared lock (double-checked
    locking); every later call returns the cached instance unchanged.
    """

    _instances = {}
    _lock = threading.Lock()

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            with cls._lock:
                # Re-check under the lock: another thread may have won the race.
                if cls not in cls._instances:
                    cls._instances[cls] = super().__call__(*args, **kwargs)

        return cls._instances[cls]
35
+
36
+
37
def date_from_string(string: str, fmt: str = '%d.%m.%Y') -> date:
    """Parse *string* with *fmt* ('%d.%m.%Y' by default), falling back to ISO-8601."""
    try:
        parsed = datetime.strptime(string, fmt)
    except ValueError:
        return date.fromisoformat(string)

    return parsed.date()
42
+
43
+
44
def datetime_from_string(string: str) -> datetime:
    """Parse a '%d.%m.%Y %H:%M:%S' timestamp, falling back to ISO-8601."""
    try:
        parsed = datetime.strptime(string, '%d.%m.%Y %H:%M:%S')
    except ValueError:
        parsed = datetime.fromisoformat(string)

    return parsed
49
+
50
+
51
def date_range(start_date, end_date):
    """Yield each date from *start_date* through *end_date* inclusive.

    Yields nothing when end_date precedes start_date.
    """
    day_count = int((end_date - start_date).days + 1)

    for offset in range(day_count):
        yield start_date + timedelta(days=offset)
54
+
55
+
56
def tries(times):
    """Decorator factory retrying an async callable up to *times* attempts.

    The wrapped coroutine is awaited; on exception it is retried, and the final
    attempt's exception is re-raised.

    :param times: maximum number of attempts; values < 1 mean a single attempt
    """
    def func_wrapper(f):
        async def wrapper(*args, **kwargs):
            attempts = times if times > 0 else 1

            for attempt in range(attempts):
                # noinspection PyBroadException
                try:
                    return await f(*args, **kwargs)
                except Exception:
                    # Bug fix: ``attempt`` tops out at attempts - 1, so the
                    # original ``time >= times`` check never fired and the last
                    # failure was silently swallowed (returning None).
                    if attempt >= attempts - 1:
                        raise

        return wrapper

    return func_wrapper
70
+
71
+
72
def round_decimal(value: Decimal, decimal_places=2, rounding_mode=ROUND_HALF_UP) -> Decimal:
    """Round *value* to *decimal_places* using *rounding_mode*.

    Non-Decimal inputs (no .quantize attribute) are returned unchanged.
    """
    try:
        quantize = value.quantize
    except AttributeError:
        return value

    return quantize(Decimal(10) ** -decimal_places, rounding=rounding_mode)
77
+
78
+
79
+ def request_to_curl(
80
+ url: str,
81
+ query: Mapping | None = None,
82
+ method: Literal['get', 'post', 'put', 'patch', 'options', 'head', 'delete'] = 'get',
83
+ headers: Mapping | None = None,
84
+ json: Mapping | Sequence | str | None = None,
85
+ data: bytes | None = None,
86
+ ) -> str:
87
+ if query:
88
+ url = f'{url}?{urlencode(query)}'
89
+
90
+ curl_cmd = ['curl', '-i', '-X', method.upper(), shlex.quote(url)]
91
+
92
+ if headers:
93
+ for key, value in headers.items():
94
+ header_line = f'{key}: {value}'
95
+ curl_cmd.append('-H')
96
+ curl_cmd.append(shlex.quote(header_line))
97
+
98
+ if json:
99
+ curl_cmd.append('-H')
100
+ curl_cmd.append(shlex.quote('Content-Type: application/json'))
101
+
102
+ curl_cmd.append('-d')
103
+ curl_cmd.append(shlex.quote(dumps(json, cls=CustomJSONEncoder)))
104
+ elif data:
105
+ curl_cmd.append('-d')
106
+ curl_cmd.append(shlex.quote(data.decode('utf-8')))
107
+
108
+ return ' '.join(curl_cmd)
File without changes
@@ -0,0 +1,10 @@
1
+ import logging
2
+
3
+
4
def filter_maker(level):
    """Build a logging filter passing only records at or below the named level.

    :param level: a logging level name such as 'WARNING'
    :return: a callable suitable for Handler.addFilter
    """
    threshold = getattr(logging, level)

    def record_filter(record):
        return record.levelno <= threshold

    return record_filter
@@ -0,0 +1,25 @@
1
+ import json
2
+ import logging
3
+ import traceback
4
+ from contextvars import ContextVar
5
+
6
+ from python3_commons.serializers.json import CustomJSONEncoder
7
+
8
+ correlation_id: ContextVar[str | None] = ContextVar('correlation_id', default=None)
9
+
10
+
11
class JSONFormatter(logging.Formatter):
    """Formatter serializing each log record's __dict__ as a JSON document.

    Attaches the ``correlation_id`` context variable (when set) and renders
    exception info into ``exc_text`` before serialization.
    """

    @staticmethod
    def format_exception(exc_info):
        # Render the (type, value, traceback) triple as a single string.
        return ''.join(traceback.format_exception(*exc_info))

    def format(self, record):
        if corr_id := correlation_id.get():
            record.correlation_id = corr_id

        if record.exc_info:
            record.exc_text = self.format_exception(record.exc_info)
        else:
            record.exc_text = None

        # CustomJSONEncoder presumably handles the non-JSON-native values a
        # LogRecord can carry (e.g. args) — verify against its implementation.
        return json.dumps(record.__dict__, cls=CustomJSONEncoder)
@@ -0,0 +1,127 @@
1
+ import io
2
+ import logging
3
+ from contextlib import contextmanager
4
+ from datetime import datetime
5
+ from typing import Generator, Iterable
6
+
7
+ from minio import Minio
8
+ from minio.datatypes import Object
9
+ from minio.deleteobjects import DeleteError, DeleteObject
10
+
11
+ from python3_commons.conf import S3Settings, s3_settings
12
+ from python3_commons.helpers import SingletonMeta
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class ObjectStorage(metaclass=SingletonMeta):
    """Singleton wrapper building a Minio client from S3Settings.

    Because of SingletonMeta, only the first instantiation's settings are
    used; subsequent calls return the same instance.
    """

    def __init__(self, settings: S3Settings):
        # Bug fix: validate the settings actually passed in — the original
        # checked the module-level s3_settings and ignored this argument.
        if not settings.s3_endpoint_url:
            raise ValueError('settings.s3_endpoint_url must be set')

        self._client = Minio(
            settings.s3_endpoint_url,
            region=settings.s3_region_name,
            access_key=settings.s3_access_key_id.get_secret_value(),
            secret_key=settings.s3_secret_access_key.get_secret_value(),
            secure=settings.s3_secure,
            cert_check=settings.s3_cert_verify,
        )

    def get_client(self) -> Minio:
        """Return the configured Minio client."""
        return self._client
33
+
34
+
35
def get_absolute_path(path: str) -> str:
    """Prefix *path* with the configured s3_bucket_root, normalizing slashes.

    Leading slashes are stripped from both parts so the result is a relative
    object key like 'root/sub/file'.
    """
    if path.startswith('/'):
        path = path[1:]

    if bucket_root := s3_settings.s3_bucket_root:
        # Bug fix: strip the leading slash with [1:]; the original sliced
        # [:1], which kept ONLY the slash and discarded the rest of the root.
        root = bucket_root[1:] if bucket_root.startswith('/') else bucket_root
        path = f'{root}/{path}'

    return path
43
+
44
+
45
def put_object(bucket_name: str, path: str, data: io.BytesIO, length: int, part_size: int = 0) -> str | None:
    """Store *data* at *path* in *bucket_name* and return the object location.

    :param length: byte length of *data*
    :param part_size: multipart chunk size; 0 lets the client decide
    :return: the uploaded object's location, or None when no client is available
    """
    if s3_client := ObjectStorage(s3_settings).get_client():
        result = s3_client.put_object(bucket_name, path, data, length, part_size=part_size)

        logger.debug(f'Stored object into object storage: {bucket_name}:{path}')

        return result.location
    else:
        logger.warning('No S3 client available, skipping object put')
54
+
55
+
56
@contextmanager
def get_object_stream(bucket_name: str, path: str):
    """Context manager yielding the HTTP response stream for an object.

    The connection is released on exit even if the consumer raises (the
    original skipped close/release_conn on exceptions, leaking the
    connection). GET failures are logged and re-raised.
    """
    if s3_client := ObjectStorage(s3_settings).get_client():
        logger.debug(f'Getting object from object storage: {bucket_name}:{path}')

        try:
            response = s3_client.get_object(bucket_name, path)
        except Exception as e:
            logger.debug(f'Failed getting object from object storage: {bucket_name}:{path}', exc_info=e)

            raise

        try:
            yield response
        finally:
            response.close()
            response.release_conn()
    else:
        # Message fixed: the original said "put" here (copy-paste).
        logger.warning('No S3 client available, skipping object get')
74
+
75
+
76
def get_object(bucket_name: str, path: str) -> bytes:
    """Read the complete object at *path* from *bucket_name* into memory."""
    with get_object_stream(bucket_name, path) as stream:
        payload = stream.read()

        logger.debug(f'Loaded object from object storage: {bucket_name}:{path}')

        return payload
83
+
84
+
85
def list_objects(bucket_name: str, prefix: str, recursive: bool = True) -> Generator[Object, None, None]:
    """Yield Minio Object entries under *prefix* in *bucket_name*."""
    client = ObjectStorage(s3_settings).get_client()

    for obj in client.list_objects(bucket_name, prefix=prefix, recursive=recursive):
        yield obj
89
+
90
+
91
def get_objects(
    bucket_name: str, path: str, recursive: bool = True
) -> Generator[tuple[str, datetime, bytes], None, None]:
    """Yield (object_name, last_modified, payload) for each object under *path*.

    Zero-size objects yield b'' without issuing a GET request.
    """
    for obj in list_objects(bucket_name, path, recursive):
        name = obj.object_name
        payload = get_object(bucket_name, name) if obj.size else b''

        yield name, obj.last_modified, payload
103
+
104
+
105
def remove_object(bucket_name: str, object_name: str):
    """Delete a single object from the bucket."""
    client = ObjectStorage(s3_settings).get_client()

    client.remove_object(bucket_name, object_name)
108
+
109
+
110
def remove_objects(
    bucket_name: str, prefix: str | None = None, object_names: Iterable[str] | None = None
) -> Iterable[DeleteError] | None:
    """Bulk-delete objects either under *prefix* or from an explicit name list.

    Annotations fixed: the original used implicit-Optional defaults
    (``prefix: str = None``), which PEP 484 disallows.

    :param prefix: delete everything under this prefix (takes precedence)
    :param object_names: explicit object names to delete when no prefix given
    :return: Minio's lazy iterable of DeleteError (consume it to drive the
        deletion), or None when neither prefix nor object_names is provided
    """
    s3_client = ObjectStorage(s3_settings).get_client()

    if prefix:
        delete_object_list = (
            DeleteObject(obj.object_name)
            for obj in s3_client.list_objects(bucket_name, prefix=prefix, recursive=True)
        )
    elif object_names:
        delete_object_list = map(DeleteObject, object_names)
    else:
        return None

    errors = s3_client.remove_objects(bucket_name, delete_object_list)

    return errors