fast-clean 1.3.0-py3-none-any.whl → 1.4.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fast_clean/cli/__init__.py ADDED
@@ -0,0 +1,6 @@
+ """
+ Package containing the Typer commands.
+ """
+
+ from .cryptography import use_cryptography as use_cryptography
+ from .load_seed import use_load_seed as use_load_seed
fast_clean/cli/cryptography.py ADDED
@@ -0,0 +1,53 @@
+ """
+ Module containing cryptography commands for encrypting secret parameters.
+ """
+
+ from typing import Annotated
+
+ import typer
+ from rich import print
+
+ from fast_clean.container import get_container
+ from fast_clean.services import CryptographicAlgorithmEnum, CryptographyServiceFactory
+ from fast_clean.utils import typer_async
+
+
+ @typer_async
+ async def encrypt(
+     data: Annotated[str, typer.Argument(help='Data to encrypt.')],
+     algorithm: Annotated[
+         CryptographicAlgorithmEnum, typer.Option(help='Cryptographic algorithm')
+     ] = CryptographicAlgorithmEnum.AES_GCM,
+ ) -> None:
+     """
+     Encrypt the data.
+     """
+     async with get_container() as container:
+         cryptography_service_factory = await container.get(CryptographyServiceFactory)
+         cryptography_service = await cryptography_service_factory.make(algorithm)
+         print(cryptography_service.encrypt(data))
+
+
+ @typer_async
+ async def decrypt(
+     data: Annotated[str, typer.Argument(help='Data to decrypt.')],
+     algorithm: Annotated[
+         CryptographicAlgorithmEnum, typer.Option(help='Cryptographic algorithm')
+     ] = CryptographicAlgorithmEnum.AES_GCM,
+ ) -> None:
+     """
+     Decrypt the data.
+     """
+     async with get_container() as container:
+         cryptography_service_factory = await container.get(CryptographyServiceFactory)
+         cryptography_service = await cryptography_service_factory.make(algorithm)
+         print(cryptography_service.decrypt(data))
+
+
+ def use_cryptography(app: typer.Typer) -> None:
+     """
+     Register the cryptography commands for encrypting secret parameters.
+     """
+
+     app.command()(encrypt)
+     app.command()(decrypt)
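The `encrypt` and `decrypt` commands above resolve a `CryptographyServiceFactory` from the container and print the result. A minimal sketch of the same round trip without Typer follows; the secret key value is a placeholder, and the assumption that `decrypt` returns the original plaintext is noted in the comments.

import asyncio

from fast_clean.services import CryptographicAlgorithmEnum, CryptographyServiceFactory


async def roundtrip() -> None:
    factory = CryptographyServiceFactory('my-secret-key')  # placeholder secret key
    service = await factory.make(CryptographicAlgorithmEnum.AES_GCM)
    token = service.encrypt('top secret')  # encrypt() is used synchronously, as in the CLI above
    print(service.decrypt(token))          # expected to print 'top secret'


asyncio.run(roundtrip())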
fast_clean/cli/load_seed.py ADDED
@@ -0,0 +1,31 @@
+ """
+ Module containing commands for loading data from files.
+ """
+
+ from typing import Annotated
+
+ import typer
+
+ from fast_clean.container import get_container
+ from fast_clean.services import SeedService
+ from fast_clean.utils import typer_async
+
+
+ @typer_async
+ async def load_seed(
+     path: Annotated[str | None, typer.Argument(help='Path to the directory to load data from.')] = None,
+ ) -> None:
+     """
+     Load data from files.
+     """
+     async with get_container() as container:
+         seed_service = await container.get(SeedService)
+         await seed_service.load_data(path)
+
+
+ def use_load_seed(app: typer.Typer) -> None:
+     """
+     Register the commands for loading data from files.
+     """
+
+     app.command()(load_seed)
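Neither CLI module registers itself anywhere, so an application is expected to call `use_cryptography` and `use_load_seed` on its own Typer instance. A minimal wiring sketch; the module name and `app` object are illustrative, not part of the package:

import typer

from fast_clean.cli import use_cryptography, use_load_seed

app = typer.Typer()
use_cryptography(app)  # adds the `encrypt` and `decrypt` commands
use_load_seed(app)     # adds the `load-seed` command

if __name__ == '__main__':
    app()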
@@ -1,4 +1,3 @@
-
  from fast_clean.schemas.request_response import ResponseSchema
  
  
@@ -3,4 +3,4 @@ from fastapi import FastAPI
  
  
  def use_middleware(app: FastAPI) -> None:
-     app.add_middleware(MetricsMiddleware)  # type: ignore
+     app.add_middleware(MetricsMiddleware)  # type: ignore
fast_clean/db.py CHANGED
@@ -7,7 +7,7 @@ from __future__ import annotations
  import uuid
  from collections.abc import AsyncIterator
  from contextlib import asynccontextmanager
- from typing import TYPE_CHECKING, AsyncContextManager, Protocol, Self
+ from typing import TYPE_CHECKING, Any, AsyncContextManager, Protocol, Self
  
  import sqlalchemy as sa
  from sqlalchemy import MetaData
@@ -18,11 +18,9 @@ from sqlalchemy.ext.asyncio import (
      async_sessionmaker,
      create_async_engine,
  )
- from sqlalchemy.ext.declarative import declared_attr
  from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
  from sqlalchemy.sql import func
- from sqlalchemy_utils.types.uuid import UUIDType
- from stringcase import snakecase
+ from sqlalchemy_utils.types import UUIDType
  
  from .settings import CoreDbSettingsSchema, CoreSettingsSchema
  
@@ -40,24 +38,46 @@ POSTGRES_INDEXES_NAMING_CONVENTION = {
  metadata = MetaData(naming_convention=POSTGRES_INDEXES_NAMING_CONVENTION)
  
  
- def make_async_engine(db_dsn: str, *, scheme: str = 'public', echo: bool = False) -> AsyncEngine:
+ def make_async_engine(
+     db_dsn: str,
+     *,
+     scheme: str = 'public',
+     echo: bool = False,
+     pool_pre_ping: bool = True,
+     disable_prepared_statements: bool = True,
+ ) -> AsyncEngine:
      """
      Create an asynchronous engine.
      """
+     connect_args: dict[str, Any] = {}
+     if disable_prepared_statements:
+         connect_args['prepare_threshold'] = None
      return create_async_engine(
          db_dsn,
-         connect_args={'options': f'-csearch_path={scheme}'},
          echo=echo,
+         pool_pre_ping=pool_pre_ping,
+         connect_args=connect_args,
      )
  
  
  def make_async_session_factory(
-     db_dsn: str, *, scheme: str = 'public', echo: bool = False
+     db_dsn: str,
+     *,
+     scheme: str = 'public',
+     echo: bool = False,
+     pool_pre_ping: bool = True,
+     disable_prepared_statements: bool = True,
  ) -> async_sessionmaker[AsyncSession]:
      """
      Create a factory of asynchronous sessions.
      """
-     asyncio_engine = make_async_engine(db_dsn, scheme=scheme, echo=echo)
+     asyncio_engine = make_async_engine(
+         db_dsn,
+         scheme=scheme,
+         echo=echo,
+         pool_pre_ping=pool_pre_ping,
+         disable_prepared_statements=disable_prepared_statements,
+     )
      return async_sessionmaker(asyncio_engine, expire_on_commit=False, autoflush=False)
  
  
@@ -85,10 +105,6 @@ class BaseUUID(Base):
          server_default=func.gen_random_uuid(),
      )
  
-     @declared_attr.directive
-     def __tablename__(cls) -> str:
-         return snakecase(cls.__name__)
- 
  
  class BaseInt(Base):
      """
@@ -100,10 +116,6 @@ class BaseInt(Base):
      id: Mapped[int] = mapped_column(primary_key=True)
  
  
-     @declared_attr.directive
-     def __tablename__(cls) -> str:
-         return snakecase(cls.__name__)
- 
  class SessionFactory:
      """
      Session factory.
@@ -125,16 +137,13 @@ class SessionFactory:
              yield session
  
      @classmethod
-     @asynccontextmanager
      async def make_async_session_dynamic(
          cls, settings_repository: SettingsRepositoryProtocol
-     ) -> AsyncIterator[AsyncSession]:
+     ) -> async_sessionmaker[AsyncSession]:
          """
         Create an asynchronous session using the dynamic factory.
          """
-         async_session_factory = await cls.make_async_session_factory(settings_repository)
-         async with async_session_factory() as session:
-             yield session
+         return await cls.make_async_session_factory(settings_repository)
  
      @staticmethod
      async def make_async_session_factory(
@@ -145,7 +154,13 @@ class SessionFactory:
          """
          settings = await settings_repository.get(CoreSettingsSchema)
          db_settings = await settings_repository.get(CoreDbSettingsSchema)
-         return make_async_session_factory(db_settings.dsn, scheme=db_settings.scheme, echo=settings.debug)
+         return make_async_session_factory(
+             db_settings.dsn,
+             scheme=db_settings.scheme,
+             echo=settings.debug,
+             pool_pre_ping=db_settings.pool_pre_ping,
+             disable_prepared_statements=db_settings.disable_prepared_statements,
+         )
  
  
  class SessionManagerProtocol(Protocol):
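The new `pool_pre_ping` and `disable_prepared_statements` flags flow from `CoreDbSettingsSchema` through `make_async_session_factory` into `create_async_engine`; setting `disable_prepared_statements=True` translates into `connect_args={'prepare_threshold': None}` for the psycopg driver, and the old `-csearch_path` connect option is gone. A usage sketch with a placeholder DSN:

import sqlalchemy as sa

from fast_clean.db import make_async_session_factory

session_factory = make_async_session_factory(
    'postgresql+psycopg://user:password@localhost:5432/app',  # placeholder DSN
    echo=False,
    pool_pre_ping=True,                # liveness-check pooled connections before reuse
    disable_prepared_statements=True,  # sets prepare_threshold=None in connect_args
)


async def count_users() -> int:
    # `users` is a hypothetical table; the point is the factory call above.
    async with session_factory() as session:
        result = await session.execute(sa.text('SELECT count(*) FROM users'))
        return result.scalar_one()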
fast_clean/depends.py CHANGED
@@ -16,7 +16,6 @@ from stringcase import snakecase
  
  from .broker import BrokerFactory
  from .db import SessionFactory, SessionManagerImpl, SessionManagerProtocol
- from .redis import RedisManager
  from .repositories import (
      CacheManager,
      CacheRepositoryProtocol,
@@ -82,18 +81,14 @@ class CoreProvider(Provider):
      Dependency provider.
      """
  
-     scope = Scope.REQUEST
+     scope = Scope.APP
  
      # --- repositories ---
  
-     settings_repository_factory = provide(
-         SettingsRepositoryFactoryImpl, provides=SettingsRepositoryFactoryProtocol, scope=Scope.APP
-     )
-     storage_repository_factory = provide(
-         StorageRepositoryFactoryImpl, provides=StorageRepositoryFactoryProtocol, scope=Scope.APP
-     )
+     settings_repository_factory = provide(SettingsRepositoryFactoryImpl, provides=SettingsRepositoryFactoryProtocol)
+     storage_repository_factory = provide(StorageRepositoryFactoryImpl, provides=StorageRepositoryFactoryProtocol)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
      async def get_settings_repository(
          settings_repository_factory: SettingsRepositoryFactoryProtocol,
@@ -103,7 +98,7 @@ class CoreProvider(Provider):
          """
          return await settings_repository_factory.make(SettingsSourceEnum.ENV)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
      async def get_settings(settings_repository: SettingsRepositoryProtocol) -> CoreSettingsSchema:
          """
@@ -119,41 +114,38 @@ class CoreProvider(Provider):
          kafka_settings = await settings_repository.get(CoreKafkaSettingsSchema)
          yield BrokerFactory.make_static(kafka_settings)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
      async def get_cache_repository(settings_repository: SettingsRepositoryProtocol) -> CacheRepositoryProtocol:
          """
          Get the cache repository.
          """
-         settings = await settings_repository.get(CoreSettingsSchema)
-         if settings.redis_dsn is not None:
-             RedisManager.init(settings.redis_dsn)
          cache_settings = await settings_repository.get(CoreCacheSettingsSchema)
-         if CacheManager.cache_repository is None:
-             CacheManager.init(cache_settings, RedisManager.redis)
-         if CacheManager.cache_repository is not None:
-             return CacheManager.cache_repository
-         raise ValueError('Cache is not initialized')
+         return CacheManager.init(cache_settings)
  
-     @provide(scope=Scope.APP)
+     @provide(scope=Scope.REQUEST)
      @staticmethod
      async def get_storage_repository(
          settings_repository: SettingsRepositoryProtocol,
          storage_repository_factory: StorageRepositoryFactoryProtocol,
-     ) -> StorageRepositoryProtocol:
+     ) -> AsyncIterator[StorageRepositoryProtocol]:
          """
          Get the file storage repository.
          """
          storage_settings = await settings_repository.get(CoreStorageSettingsSchema)
          if storage_settings.provider == 's3' and storage_settings.s3 is not None:
-             return await storage_repository_factory.make(
+             storage_repository = await storage_repository_factory.make(
                  StorageTypeEnum.S3,
                  S3StorageParamsSchema.model_validate(storage_settings.s3.model_dump()),
              )
-         elif storage_settings.provider == 'local' and storage_settings.dir is not None:
-             return await storage_repository_factory.make(
+             async with storage_repository:
+                 yield storage_repository
+         elif storage_settings.provider == 'local':
+             storage_repository = await storage_repository_factory.make(
                  StorageTypeEnum.LOCAL, LocalStorageParamsSchema(path=storage_settings.dir)
              )
+             async with storage_repository:
+                 yield storage_repository
          raise NotImplementedError(f'Storage {storage_settings.provider} not allowed')
  
      # --- db ---
@@ -164,7 +156,8 @@ class CoreProvider(Provider):
          """
          Get an asynchronous session.
          """
-         async with SessionFactory.make_async_session_static(settings_repository) as session:
+         session_maker = await SessionFactory.make_async_session_dynamic(settings_repository)
+         async with session_maker() as session:
              yield session
  
      @provide
@@ -180,7 +173,7 @@ class CoreProvider(Provider):
      seed_service = provide(SeedService)
      transaction_service = provide(TransactionService)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
      def get_cryptography_service_factory(settings: CoreSettingsSchema) -> CryptographyServiceFactory:
          """
@@ -188,7 +181,7 @@ class CoreProvider(Provider):
          """
          return CryptographyServiceFactory(settings.secret_key)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
      async def get_cryptography_service(
          cryptography_service_factory: CryptographyServiceFactory,
@@ -198,16 +191,14 @@ class CoreProvider(Provider):
          """
          return await cryptography_service_factory.make(CryptographicAlgorithmEnum.AES_GCM)
  
-     @provide(scope=Scope.APP)
+     @provide
      @staticmethod
-     def get_lock_service(settings: CoreSettingsSchema) -> LockServiceProtocol:
+     def get_lock_service(cache_settings: CoreCacheSettingsSchema) -> LockServiceProtocol:
          """
          Get the distributed lock service.
          """
-         assert settings.redis_dsn is not None
-         RedisManager.init(settings.redis_dsn)
-         assert RedisManager.redis is not None
-         return RedisLockService(RedisManager.redis)
+         redis_client = CacheManager.init(cache_settings)
+         return RedisLockService(redis_client)  # type: ignore
  
  
  provider = CoreProvider()
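With the provider's default scope now `Scope.APP` and the Redis wiring folded into `CacheManager.init`, dependencies are still resolved through `get_container()` as in the new CLI modules. A hedged sketch; the import paths follow the module layout in the RECORD below, and whether these names are also re-exported elsewhere is not shown in this diff:

import asyncio

from fast_clean.container import get_container
from fast_clean.repositories import CacheRepositoryProtocol
from fast_clean.services.lock import LockServiceProtocol


async def main() -> None:
    async with get_container() as container:
        cache = await container.get(CacheRepositoryProtocol)
        lock_service = await container.get(LockServiceProtocol)
        # Both are now backed by the Redis client created inside CacheManager.init(),
        # so no separate RedisManager setup is required.
        print(type(cache).__name__, type(lock_service).__name__)


asyncio.run(main())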
fast_clean/middleware.py CHANGED
@@ -2,22 +2,39 @@
  Module containing the middleware.
  """
  
- from fastapi import FastAPI
+ import time
+ from typing import Awaitable, Callable
+
+ from fastapi import FastAPI, Request, Response
  from starlette.middleware.cors import CORSMiddleware
  
- from .contrib.monitoring.middleware import use_middleware as use_monitoring_middleware
+
+ async def add_process_time_header(request: Request, call_next: Callable[[Request], Awaitable[Response]]) -> Response:
+     start_time = time.perf_counter()
+     response = await call_next(request)
+     response.headers['x-process-time'] = f'{time.perf_counter() - start_time}'
+     return response
  
  
- def use_middleware(app: FastAPI, cors_origins: list[str]) -> FastAPI:
+ def use_middleware(
+     app: FastAPI,
+     name: str,
+     cors_origins: list[str],
+     *,
+     allow_methods: list[str] | None = None,
+     allow_headers: list[str] | None = None,
+ ) -> FastAPI:
      """
      Register the middleware.
      """
+
      app.add_middleware(
          CORSMiddleware,
          allow_origins=cors_origins,
          allow_credentials=True,
-         allow_methods=['*'],
-         allow_headers=['*'],
+         allow_methods=allow_methods or ['*'],
+         allow_headers=allow_headers or ['*'],
      )
-     use_monitoring_middleware(app)
+
+     app.middleware('http')(add_process_time_header)
      return app
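`use_middleware` now takes a service `name` plus optional allow-lists, and the monitoring middleware registration is replaced by a process-time header. A usage sketch with placeholder values:

from fastapi import FastAPI

from fast_clean.middleware import use_middleware

app = FastAPI()
use_middleware(
    app,
    'my-service',                    # new required `name` argument
    ['https://example.com'],         # CORS origins
    allow_methods=['GET', 'POST'],   # defaults to ['*'] when omitted
    allow_headers=None,              # falls back to ['*']
)
# Every response now carries an `x-process-time` header set by add_process_time_header.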
fast_clean/models.py CHANGED
@@ -9,9 +9,9 @@ from sqlalchemy.orm import Mapped, mapped_column
  from sqlalchemy.sql import func
  
  
- class TimestampMixin:
+ class CreatedAtMixin:
      """
-     Mixin containing the creation and update date and time of a model.
+     Mixin containing the creation date and time of a record.
      """
  
      created_at: Mapped[dt.datetime] = mapped_column(
@@ -19,15 +19,22 @@ class TimestampMixin:
          default=lambda: dt.datetime.now(dt.UTC),
          server_default=func.now(),
      )
+
+
+ class UpdatedAtMixin:
      """
-     Creation date and time.
+     Mixin containing the update date and time of a record.
      """
+
      updated_at: Mapped[dt.datetime] = mapped_column(
          DateTime(timezone=True),
          default=lambda: dt.datetime.now(dt.UTC),
          server_default=func.now(),
          onupdate=lambda: dt.datetime.now(dt.UTC),
      )
+
+
+ class TimestampMixin(CreatedAtMixin, UpdatedAtMixin):
      """
-     Update date and time.
+     Mixin containing the creation and update date and time of a record.
      """
@@ -9,9 +9,9 @@
  from typing import ClassVar, Protocol, Self, cast
  
  from fastapi_cache import FastAPICache
+ from redis import asyncio as aioredis
  
  from fast_clean.settings import CoreCacheSettingsSchema
- from redis import asyncio as aioredis
  
  from .in_memory import InMemoryCacheRepository as InMemoryCacheRepository
  from .redis import RedisCacheRepository as RedisCacheRepository
@@ -67,7 +67,7 @@ class CacheManager:
      cache_repository: ClassVar[CacheRepositoryProtocol | None] = None
  
      @classmethod
-     def init(cls, cache_settings: CoreCacheSettingsSchema, redis: aioredis.Redis | None) -> None:
+     def init(cls, cache_settings: CoreCacheSettingsSchema):
          """
          Initialize the cache.
          """
@@ -77,7 +77,13 @@ class CacheManager:
              case 'in_memory':
                  cache_backend = InMemoryCacheRepository()
              case 'redis':
-                 assert redis is not None
-                 cache_backend = RedisCacheRepository(redis)
+                 if not cache_settings.redis:
+                     raise ValueError('Redis not configured in settings')
+                 cache_backend = RedisCacheRepository(
+                     aioredis.from_url(url=str(cache_settings.redis.dsn), decode_responses=True)  # type: ignore
+                 )
+             case _:
+                 raise ValueError('Cache is not initialized')
          FastAPICache.init(cache_backend, prefix=cache_settings.prefix)
          cls.cache_repository = cast(CacheRepositoryProtocol, cache_backend)
+         return cls.cache_repository
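`CacheManager.init` now builds the Redis client itself from `cache_settings.redis.dsn` and returns the repository it registered with `FastAPICache`. A sketch of initializing it directly; the `provider` field name is an assumption, since the subject of the `match` statement is outside this hunk:

from fast_clean.repositories import CacheManager
from fast_clean.settings import CoreCacheSettingsSchema, CoreRedisSettingsSchema

cache_settings = CoreCacheSettingsSchema(
    provider='redis',  # assumed field name
    prefix='my-service',
    redis=CoreRedisSettingsSchema(dsn='redis://localhost:6379/0'),
)
cache_repository = CacheManager.init(cache_settings)  # also calls FastAPICache.init()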
@@ -6,7 +6,6 @@ from typing import Self
  
  from fastapi_cache.backends.redis import RedisBackend
  from overrides import override
- 
  from redis.asyncio.client import Redis
  
  
@@ -502,11 +502,11 @@ class DbCrudRepositoryBase(
              count = (await s.execute(count_statement)).scalar_one()
              return PaginationResultSchema(count=count, objects=objects)
  
-     def get_order_by_expr(self: Self, sorting: Iterable[str]) -> list[sa.UnaryExpression]:
+     def get_order_by_expr(self: Self, sorting: Iterable[str]) -> list[sa.UnaryExpression[Any]]:
          """
          Get the sorting expression.
          """
-         order_by_expr: list[sa.UnaryExpression] = []
+         order_by_expr: list[sa.UnaryExpression[Any]] = []
          for st in sorting:
              try:
                  if st[0] == '-':
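Only the typing tightens here; behaviour is unchanged, and a leading `-` in a sorting string still means descending order. Illustrative call, where `repository` stands for any concrete `DbCrudRepositoryBase` subclass instance:

order_by = repository.get_order_by_expr(['-created_at', 'name'])
# descending on created_at, ascending on name; usable as .order_by(*order_by)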
@@ -6,11 +6,13 @@
  - S3
  """
  
+ from collections.abc import AsyncIterator
  from pathlib import Path
  from typing import AsyncContextManager, Protocol, Self
  
  from .enums import StorageTypeEnum
  from .local import LocalStorageRepository
+ from .reader import AsyncStreamReaderProtocol as AsyncStreamReaderProtocol
  from .reader import StreamReaderProtocol, StreamReadProtocol
  from .s3 import S3StorageRepository
  from .schemas import (
@@ -25,6 +27,18 @@ class StorageRepositoryProtocol(Protocol):
      Protocol of the file storage repository.
      """
  
+     async def __aenter__(self: Self) -> Self:
+         """
+         Enter the context manager.
+         """
+         ...
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
+         """
+         Exit the context manager.
+         """
+         ...
+
      async def exists(self: Self, path: str | Path) -> bool:
          """
          Check whether the file exists.
@@ -71,14 +85,18 @@ class StorageRepositoryProtocol(Protocol):
          self: Self,
          path: str | Path,
          stream: StreamReadProtocol,
-         length: int = -1,
-         part_size: int = 0,
      ) -> None:
          """
          Create a file or overwrite an existing one in streaming mode.
          """
          ...
  
+     def straming_read(self: Self, path: str | Path) -> AsyncIterator[bytes]:
+         """
+         Return an asynchronous iterator over a byte stream.
+         """
+         ...
+
      async def delete(self: Self, path: str | Path) -> None:
          """
          Delete the file.
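Storage repositories are now async context managers and expose a byte-stream iterator named `straming_read` (spelled that way in the release). A hedged sketch that resolves the repository through the container, mirroring the provider in `fast_clean/depends.py`; since that provider is REQUEST-scoped, this assumes `get_container()` yields a container able to resolve request-scoped dependencies, and the object path is a placeholder:

from fast_clean.container import get_container
from fast_clean.repositories.storage import StorageRepositoryProtocol


async def dump_file() -> None:
    async with get_container() as container:
        storage = await container.get(StorageRepositoryProtocol)  # entered/exited by the provider
        async for chunk in storage.straming_read('reports/2024.csv'):
            print(len(chunk))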
@@ -4,7 +4,7 @@
  import asyncio
  import os
- from collections.abc import AsyncGenerator, Awaitable, Callable
+ from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable
  from contextlib import asynccontextmanager
  from logging import getLogger
  from pathlib import Path
@@ -28,6 +28,14 @@ class LocalStorageRepository:
          os.makedirs(self.work_dir)
          self.logger = getLogger(__name__)
  
+     async def __aenter__(self: Self) -> Self:
+         """
+         Enter the context manager.
+         """
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+
      async def exists(self: Self, path: str | Path) -> bool:
          """
          Check whether the file exists.
@@ -78,6 +86,12 @@ class LocalStorageRepository:
          async with open(path, 'rb') as f:
              yield AiofilesStreamReader(f)
  
+     async def straming_read(self: Self, path: str | Path) -> AsyncIterator[bytes]:
+         path = self.work_dir / path
+         async with open(path, 'rb') as f:
+             async for chunk in f:
+                 yield chunk
+
      async def write(self: Self, path: str | Path, content: str | bytes) -> None:
          """
          Create a file or overwrite an existing one.
@@ -91,15 +105,11 @@ class LocalStorageRepository:
          self: Self,
          path: str | Path,
          stream: StreamReadProtocol,
-         length: int = -1,
-         part_size: int = 0,
      ) -> None:
          """
          Create a file or overwrite an existing one in streaming mode.
          """
-         if length != -1:
-             self.logger.warning('The length parameter is not used for LocalStorage.')
-         part_size = part_size or 1024
+         part_size = 1024 * 1024
          path = self.work_dir / path
          is_co_function = asyncio.iscoroutinefunction(stream.read)
          async with open(path, 'wb') as f:
@@ -35,6 +35,14 @@ class StreamReadAsyncProtocol(Protocol):
          ...
  
  
+ class AsyncStreamReaderProtocol(Protocol):
+     async def read(self: Self, size: int = -1) -> bytes:
+         """
+         Streaming file read.
+         """
+         ...
+
+
  StreamReadProtocol = StreamReadAsyncProtocol | StreamReadSyncProtocol
  
  
@@ -1,112 +1,163 @@
  """
- Module containing the S3 storage repository.
+ Module containing the repository implementation built on top of the S3 protocol.
  """
  
- import io
- from collections.abc import AsyncGenerator
+ from collections.abc import AsyncIterator
  from contextlib import asynccontextmanager
  from pathlib import Path
- from typing import Self
+ from typing import TYPE_CHECKING, Self, cast
  
- import aiohttp
- import miniopy_async
- from miniopy_async.error import S3Error
+ import aiobotocore
+ import aiobotocore.session
+ from aiobotocore.response import StreamingBody
+ from aiobotocore.session import AioSession
+ from botocore.exceptions import ClientError
  
- from .reader import AiohttpStreamReader, StreamReaderProtocol, StreamReadProtocol
- from .schemas import S3StorageParamsSchema
+ from fast_clean.repositories.storage.schemas import S3StorageParamsSchema
+
+ from .reader import AiofilesStreamReader, StreamReaderProtocol, StreamReadProtocol
+
+ if TYPE_CHECKING:
+     from types_aiobotocore_s3.client import S3Client as AioBaseClient
+ else:
+     from aiobotocore.client import AioBaseClient
  
  
  class S3StorageRepository:
      """
-     S3 storage repository.
+     S3 storage repository based on aiobotocore.
      """
  
-     def __init__(self: Self, params: S3StorageParamsSchema):
+     def __init__(self, params: S3StorageParamsSchema) -> None:
          self.params = params
          self.bucket = self.params.bucket
-         self.client = miniopy_async.Minio(  # type: ignore
-             f'{self.params.endpoint}:{self.params.port}',
-             access_key=self.params.access_key,
-             secret_key=self.params.secret_key,
-             secure=self.params.secure,
-         )
  
-     async def exists(self: Self, path: str | Path) -> bool:
+         self.session: AioSession | None = None
+         self.client: AioBaseClient | None = None
+
+         protocol = 'https' if self.params.secure else 'http'
+         self.endpoint_url = f'{protocol}://{self.params.endpoint}:{self.params.port}'
+
+     async def __aenter__(self: Self) -> Self:
+         self.session = aiobotocore.session.get_session()
+         self.client = await self.session.create_client(
+             's3',
+             endpoint_url=self.endpoint_url,
+             aws_access_key_id=self.params.aws_access_key_id,
+             aws_secret_access_key=self.params.aws_secret_access_key,
+             region_name=self.params.region_name,
+         ).__aenter__()
+         return self
+
+     async def __aexit__(self: Self, exc_type, exc_val, exc_tb) -> None:
          """
-         Check whether the file exists.
+         Exit the session context manager.
          """
+         if self.client:
+             await self.client.__aexit__(exc_type, exc_val, exc_tb)
+             self.client = None
+             self.session = None
+
+     async def exists(self: Self, path: str | Path) -> bool:
+         assert self.client
+         key = self.get_str_path(path)
+         if key == '':
+             key = '/'
          try:
-             await self.client.stat_object(self.bucket, self.get_str_path(path))
+             await self.client.head_object(Bucket=self.bucket, Key=key)
              return True
-         except S3Error:
+         except ClientError:
              return False
  
      async def listdir(self: Self, path: str | Path) -> list[str]:
          """
-         Get the list of files and directories in the given directory.
-         """
-         str_path = self.get_str_path(path)
-         if not str_path or str_path[-1] != '/':
-             str_path += '/'
-         objects = await self.client.list_objects(self.bucket, prefix=str_path if str_path != '/' else None)
-         return [str(obj.object_name) for obj in objects] if objects else []
+         Get the list of files and directories in the specified directory.
+         """
+         assert self.client
+         prefix = self.get_str_path(path)
+         if prefix and not prefix.endswith('/'):
+             prefix += '/'
+         objects = []
+         paginator = self.client.get_paginator('list_objects_v2')
+         async for page in paginator.paginate(Bucket=self.bucket, Prefix=prefix, Delimiter='/'):
+             if 'Contents' in page:
+                 objects.extend([obj.get('Key') for obj in page['Contents'] if obj.get('Key') != prefix])
+             if 'CommonPrefixes' in page:
+                 objects.extend([folder.get('Prefix') for folder in page['CommonPrefixes']])
+         return objects
  
      async def is_file(self: Self, path: str | Path) -> bool:
          """
-         Check whether a file is located at the path.
+         Check whether the path is a file.
          """
-         return not await self.is_dir(path)
+         return await self.exists(path)
  
      async def is_dir(self: Self, path: str | Path) -> bool:
         """
-         Check whether a directory is located at the path.
-         """
-         return len(await self.listdir(path)) > 0
+         Check whether the path is a directory.
+         """
+         assert self.client
+         prefix = self.get_str_path(path)
+         if prefix != '' and not prefix.endswith('/'):
+             prefix += '/'
+         response = await self.client.list_objects_v2(
+             Bucket=self.bucket,
+             Prefix=prefix,
+             MaxKeys=1,
+             Delimiter='/',
+         )
+         return 'Contents' in response or 'CommonPrefixes' in response
  
      async def read(self: Self, path: str | Path) -> bytes:
          """
          Read the contents of the file.
          """
-         async with aiohttp.ClientSession() as session:
-             response: aiohttp.ClientResponse = await self.client.get_object(
-                 self.bucket, self.get_str_path(path), session
-             )
-             return await response.read()
+         assert self.client
+         key = self.get_str_path(path)
+         response = await self.client.get_object(Bucket=self.bucket, Key=key)
+         async with response['Body'] as stream:
+             return await stream.read()
  
-     @asynccontextmanager
-     async def stream_read(self: Self, path: str | Path) -> AsyncGenerator[StreamReaderProtocol, None]:
+     async def write(self: Self, path: str | Path, content: str | bytes) -> None:
          """
-         Read the contents of the file in streaming mode.
+         Create a file or overwrite an existing one.
          """
-         async with aiohttp.ClientSession() as session:
-             reader = await self.client.get_object(self.bucket, self.get_str_path(path), session)
-             yield AiohttpStreamReader(reader)
+         assert self.client
+         key = self.get_str_path(path)
+         content = content.encode('utf-8') if isinstance(content, str) else content
+         await self.client.put_object(Bucket=self.bucket, Key=key, Body=content)
  
-     async def write(self: Self, path: str | Path, content: str | bytes) -> None:
+     @asynccontextmanager
+     async def stream_read(self: Self, path: str | Path) -> AsyncIterator[StreamReaderProtocol]:
          """
-         Create a file or overwrite an existing one.
+         Read the contents in streaming mode.
          """
-         content = content.encode('utf-8') if isinstance(content, str) else content
-         data = io.BytesIO(content)
-         await self.client.put_object(self.bucket, self.get_str_path(path), data, len(content))
+         assert self.client
+         key = self.get_str_path(path)
+         response = await self.client.get_object(Bucket=self.bucket, Key=key)
+         yield AiofilesStreamReader(response['Body'])
+
+     async def straming_read(self: Self, path: str | Path) -> AsyncIterator[bytes]:
+         assert self.client
+         key = self.get_str_path(path)
+         response = await self.client.get_object(Bucket=self.bucket, Key=key)
+         async for chunk in response['Body']:
+             yield chunk
  
-     async def stream_write(
-         self: Self,
-         path: str | Path,
-         stream: StreamReadProtocol,
-         length: int = -1,
-         part_size: int = 0,
-     ) -> None:
+     async def stream_write(self: Self, path: str | Path, stream: StreamReadProtocol) -> None:
          """
-         Create a file or overwrite an existing one in streaming mode.
+         Create a write stream for the file, overwriting the existing one if present.
          """
-         await self.client.put_object(self.bucket, self.get_str_path(path), stream, length=length, part_size=part_size)
+         assert self.client
+         await self.client.put_object(Bucket=self.bucket, Key=self.get_str_path(path), Body=cast(StreamingBody, stream))
  
      async def delete(self: Self, path: str | Path) -> None:
          """
-         Delete the file.
+         Delete a file.
          """
-         await self.client.remove_object(self.bucket, self.get_str_path(path))
+         assert self.client
+         key = self.get_str_path(path)
+         await self.client.delete_object(Bucket=self.bucket, Key=key)
  
      @staticmethod
      def get_str_path(path: str | Path) -> str:
@@ -114,5 +165,5 @@ class S3StorageRepository:
          Get the path as a string.
          """
          if isinstance(path, Path):
-             return '/' if path == Path('') else str(path)
+             return '' if path == Path('') else str(path)
          return path
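The S3 repository now creates its aiobotocore client lazily in `__aenter__`, so it must be used as an async context manager. A hedged sketch; the parameter names come from `S3StorageParamsSchema` in this diff, while the endpoint and credentials are placeholders:

from fast_clean.repositories.storage.s3 import S3StorageRepository
from fast_clean.repositories.storage.schemas import S3StorageParamsSchema

params = S3StorageParamsSchema(
    endpoint='minio.local',
    port=9000,
    bucket='media',
    aws_access_key_id='ACCESS_KEY',
    aws_secret_access_key='SECRET_KEY',
    secure=False,  # the default is now True
)


async def sync_report() -> None:
    async with S3StorageRepository(params) as storage:
        await storage.write('reports/latest.txt', 'hello')
        print(await storage.read('reports/latest.txt'))
        await storage.delete('reports/latest.txt')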
@@ -12,12 +12,17 @@ class S3StorageParamsSchema(BaseModel):
      Settings parameters for S3Storage.
      """
  
+     """
+     Settings parameters for S3Storage.
+     """
+
      endpoint: str
-     access_key: str
-     secret_key: str
+     aws_secret_access_key: str
+     aws_access_key_id: str
      port: int
      bucket: str
-     secure: bool = False
+     secure: bool = True
+     region_name: str = 'us-east-1'
  
  
  class LocalStorageParamsSchema(BaseModel):
@@ -43,4 +43,4 @@ class CryptographyServiceFactory:
              case CryptographicAlgorithmEnum.AES_GCM:
                  return AesGcmCryptographyService(self.secret_key)
              case _:
-                 raise NotImplementedError(algorithm)
+                 raise NotImplementedError(algorithm)
@@ -6,10 +6,11 @@ from collections.abc import AsyncIterator
  from contextlib import asynccontextmanager
  from typing import AsyncContextManager, Protocol
  
- from fast_clean.exceptions import LockError
  from redis import asyncio as aioredis
  from redis.exceptions import LockError as AIORedisLockError
  
+ from fast_clean.exceptions import LockError
+
  
  class LockServiceProtocol(Protocol):
      """
fast_clean/settings.py CHANGED
@@ -27,6 +27,8 @@ class CoreDbSettingsSchema(BaseModel):
      password: str
      name: str
  
+     pool_pre_ping: bool = True
+     disable_prepared_statements: bool = True
      scheme: str = 'public'
  
      @property
@@ -37,6 +39,10 @@ class CoreDbSettingsSchema(BaseModel):
          return f'{self.provider}://{self.user}:{self.password}@{self.host}:{self.port}/{self.name}'
  
  
+ class CoreRedisSettingsSchema(BaseModel):
+     dsn: RedisDsn
+
+
  class CoreCacheSettingsSchema(BaseModel):
      """
      Cache settings schema.
@@ -46,6 +52,8 @@ class CoreCacheSettingsSchema(BaseModel):
  
      prefix: str
  
+     redis: CoreRedisSettingsSchema | None = None
+
  
  class CoreS3SettingsSchema(BaseModel):
      """
@@ -53,9 +61,8 @@ class CoreS3SettingsSchema(BaseModel):
      """
  
      endpoint: str
-     endpoint_url: str
-     access_key: str
-     secret_key: str
+     aws_access_key_id: str
+     aws_secret_access_key: str
      port: int
      bucket: str
      secure: bool = False
@@ -177,7 +184,6 @@ class CoreSettingsSchema(BaseSettingsSchema):
      base_dir: Path = Path(os.getcwd())
      secret_key: str
      cors_origins: Annotated[list[str], Field(default_factory=list)]
-     redis_dsn: RedisDsn | None = None
      sentry_dsn: str | None = None
  
      model_config = SettingsConfigDict(
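The database settings gain the two pooling flags, the top-level `redis_dsn` is replaced by the nested `redis` block on the cache settings, and the S3 credentials are renamed to the `aws_*` fields. A construction sketch with placeholder values; the `provider`, `user`, `host` and `port` field names on the DB schema are inferred from the `dsn` property above, and any other required fields are omitted:

from fast_clean.settings import CoreDbSettingsSchema, CoreS3SettingsSchema

db = CoreDbSettingsSchema(
    provider='postgresql+psycopg',
    host='localhost',
    port=5432,
    user='app',
    password='secret',
    name='app',
    pool_pre_ping=True,
    disable_prepared_statements=True,
)

s3 = CoreS3SettingsSchema(
    endpoint='minio.local',
    port=9000,
    bucket='media',
    aws_access_key_id='ACCESS_KEY',
    aws_secret_access_key='SECRET_KEY',
)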
fast_clean/utils/toml.py ADDED
@@ -0,0 +1,34 @@
+ import tomllib
+ from functools import lru_cache
+ from pathlib import Path
+ from typing import Any
+
+ from pydantic import BaseModel
+
+ __all__ = (
+     'use_toml_info',
+     'ProjectInfo',
+ )
+
+
+ class ProjectInfo(BaseModel):
+     """
+     Schema for retrieving project information.
+     """
+
+     name: str
+     version: str
+     description: str | None = None
+
+
+ def use_toml(dir: Path) -> dict[str, Any]:
+     with open(Path(dir) / 'pyproject.toml', 'rb') as f:
+         return tomllib.load(f)
+
+
+ @lru_cache(maxsize=1)
+ def use_toml_info(dir: Path) -> ProjectInfo:
+     """
+     Get the application version from pyproject.toml.
+     """
+     return ProjectInfo.model_validate(use_toml(dir)['project'])
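The helper reads the `[project]` table of a `pyproject.toml` and caches the parsed result. A small usage sketch; the current working directory is just an example location:

from pathlib import Path

from fast_clean.utils.toml import ProjectInfo, use_toml_info

info: ProjectInfo = use_toml_info(Path.cwd())  # cached via lru_cache(maxsize=1)
print(info.name, info.version, info.description)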
@@ -1,10 +1,11 @@
  Metadata-Version: 2.4
  Name: fast-clean
- Version: 1.3.0
+ Version: 1.4.1
  Summary: FastAPI Clean Architecture implementation
  Author-email: Luferov Victor <luferovvs@yandex.ru>, Orlov Artem <squakrazv@yandex.ru>, Kashapov Rustam <hardtechnik91@gmail.com>
  Requires-Python: >=3.13
  Description-Content-Type: text/markdown
+ Requires-Dist: aiobotocore>=2.23.2
  Requires-Dist: aiofiles>=24.1.0
  Requires-Dist: aiokafka>=0.12.0
  Requires-Dist: aioprometheus>=23.12.0
@@ -15,14 +16,12 @@ Requires-Dist: fastapi>=0.115.8
  Requires-Dist: fastapi-cache2[redis]>=0.2.2
  Requires-Dist: faststream>=0.5.34
  Requires-Dist: flatten-dict>=0.4.2
- Requires-Dist: miniopy-async>=1.21.1
  Requires-Dist: overrides>=7.7.0
  Requires-Dist: psycopg[binary]>=3.2.4
  Requires-Dist: pydantic>=2.10.6
  Requires-Dist: pydantic-settings>=2.8.0
  Requires-Dist: pyyaml>=6.0.2
  Requires-Dist: sentry-sdk[fastapi]>=2.32.0
- Requires-Dist: snakecase>=1.0.1
  Requires-Dist: sqlalchemy-utils>=0.41.2
  Requires-Dist: sqlalchemy[asyncio]>=2.0.38
  Requires-Dist: stringcase>=1.2.0
@@ -1,34 +1,36 @@
  fast_clean/__init__.py,sha256=sT4tb75t5PXws8W_7wpA0jNtNxkWPFLAMrPlDGS7RHw,51
  fast_clean/broker.py,sha256=CHnL4Jd6jF5gKgtUXi33j9QFG2EUM4uqhVqdLuxIrZs,4474
  fast_clean/container.py,sha256=E1e0H1JqGOacH4uBNwkjTDXYhzN56yZi0AmWXQ3DkEQ,3535
- fast_clean/db.py,sha256=8lfs9SS41naPkvyKcsStdBAXo07p_6Kwr_6zx7--oGU,5552
- fast_clean/depends.py,sha256=nSsn1-c7A0VmtudCI70z5wi0b7TmSUqvHjyZpTE3_wk,7750
+ fast_clean/db.py,sha256=uZHXXHdLstqhyzGtBL5Z7VvXwIe6mxuPOUheJEzSMyM,5776
+ fast_clean/depends.py,sha256=InU6x629CyRGskMbA4LzX5btzw_a1YeVwUckBH5tJNg,7330
  fast_clean/enums.py,sha256=lPhC_2_r6YFby7Mq-9u_JSiuyZ0e57F2VxBfUwnBZ18,826
  fast_clean/exceptions.py,sha256=Sp-k-a5z1Gedu0slzj1-rORnr4GP1FXDHKCKRaJq-7o,9485
  fast_clean/loggers.py,sha256=hVvZSDMMxYnK-p_yyjd4R7SyHpmxQF3eKQEeMu9Q-jo,705
- fast_clean/middleware.py,sha256=G75qKJ7WxSySTIFUFa7ZwlvnZ16lm4giS0Hx4Ogu1SM,584
- fast_clean/models.py,sha256=qnNUSwLf0gOW8C98PMIs6vTw7UP3-Nk-k6YoFvHstVM,880
+ fast_clean/middleware.py,sha256=p0Tv_qu89ZQtzKZ10tNepArJyHlFX2II9UmivGWnycw,1037
+ fast_clean/models.py,sha256=H7-Hk3gZP9i2TiJHu0u2Nex76c8ZbDhMR4lF41_PMyI,1057
  fast_clean/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fast_clean/redis.py,sha256=H_SLnDhY_ute8pYHdhIypUGlCwMcVfFA4S2j8vLUph0,578
- fast_clean/settings.py,sha256=NPWBmCB8eL6EiHPtlryW5rQyrE1EqeytnBLrcz5pQLI,4792
+ fast_clean/settings.py,sha256=Ox2aDEOxw4LgrszCNpUMNlxCqozc6kcv7406786ADAw,4938
+ fast_clean/cli/__init__.py,sha256=m8n09uN47JGtAfgWVbXCJOxpzlrUazogqtLo6xPWe3s,181
+ fast_clean/cli/cryptography.py,sha256=ACaYAOn4KEKIdsTuCYkX1m6g2YpMczNCjJcVfLE2Rzo,1936
+ fast_clean/cli/load_seed.py,sha256=Tm5_r_myrC5dl_WyC6Bx2WKFAkfLf-Pch4ZK6zWN2Qg,867
  fast_clean/contrib/__init__.py,sha256=AcFNyhc0QGsOnYvzQGanDN3QIAsKpn4d8RIj73F-sGc,63
  fast_clean/contrib/healthcheck/__init__.py,sha256=p8hUCLdv2qGngTwAeTGIV4h_ZGDm9ZNWMrA5_k3Yi0E,106
  fast_clean/contrib/healthcheck/router.py,sha256=7uq0D6ldhxB3Jsa9Ia1zpiRAyC3hQgUv8jF4W8SPi88,398
- fast_clean/contrib/healthcheck/schemas.py,sha256=nMdUezXvwyJdlHiJbPiLUIa6bJ_JeF1Tr3S-TllMENU,200
+ fast_clean/contrib/healthcheck/schemas.py,sha256=s9HcXDWYUsFXHownoem5qOEL741IZWp5yOLu2LKtkkU,199
  fast_clean/contrib/logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fast_clean/contrib/logging/enums.py,sha256=a-Tz3k4j0aAbUXDvTV6sl2pKKEGuKG94qc3plXixezU,154
  fast_clean/contrib/logging/sentry.py,sha256=gey6ynlkZtrU2qzwdKvpkYy0JO0AEyHDpiiRcIzfiDg,593
  fast_clean/contrib/monitoring/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fast_clean/contrib/monitoring/middleware.py,sha256=F6EDY_hNBmFlji-cCJwzsm5Wl1S0CsO373rjxfqjlGA,191
+ fast_clean/contrib/monitoring/middleware.py,sha256=nRhiARjpHm21qwBgkdh2Sdkuc8maDQcb6ofCRthv_O0,190
  fast_clean/contrib/monitoring/router.py,sha256=94gffX34VE_Yb6TLaQOP4YyXDQsClzOn4APb45V_HyA,153
  fast_clean/contrib/sqlalchemy_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fast_clean/contrib/sqlalchemy_utils/utils.py,sha256=5xktmJlQifGpIXMruhe7qofEEu_ExncBTUmlrtFK1YQ,1061
  fast_clean/repositories/__init__.py,sha256=mHJ6CW1fYkkiSnnYiO3GRAa5YVCPN1otOOKkjbutuhs,1753
- fast_clean/repositories/cache/__init__.py,sha256=pD7qIS6H8DrnhOptJiXrlGcWYUCIU3VmVQCLccyxx4Q,2511
+ fast_clean/repositories/cache/__init__.py,sha256=aVl1ReoU0doH1i78-Ef8rS-LaVQacF0SS9xLFWzmcgQ,2811
  fast_clean/repositories/cache/in_memory.py,sha256=Hb68UrTmQozALcyLrmYPBIfJfi67NvsCTDe1RfqwBHQ,2259
- fast_clean/repositories/cache/redis.py,sha256=UjrA2CXQtMfHTpowz6Ot952y73YjTEr6zJlBbWblaws,1908
+ fast_clean/repositories/cache/redis.py,sha256=-iX6x-sfXj-pDYfcRIyKjq9nCjnlRQKeJtfeMmDpT-4,1907
  fast_clean/repositories/crud/__init__.py,sha256=z_-zY3esEbUEHSGb9WInU-vvRuTpTu4M-Qe5UhCN0Pc,4359
- fast_clean/repositories/crud/db.py,sha256=KzqQXt0srcBFt2ftlkDcGPEKC_dvwAybFi5UvRTyIq0,23202
+ fast_clean/repositories/crud/db.py,sha256=wyvDvEjvncfSVHlaguhrgCP7wIsiKRoGZEesxHzDVHI,23212
  fast_clean/repositories/crud/in_memory.py,sha256=37VBQJTIV4z1_Om9DhYqpa1t98hGGhY8gumoyV-fhDg,13172
  fast_clean/repositories/crud/type_vars.py,sha256=Gb4ew1T1NkitL87hJ75KtpTjOi6PuML5fU_zFAsVUqA,1318
  fast_clean/repositories/settings/__init__.py,sha256=ZxrncvTDs8pNkhWSy2cxV3a8uElTnrM-b1-vq4ouJok,1485
@@ -36,22 +38,22 @@ fast_clean/repositories/settings/enums.py,sha256=coqZg6xe_mRFWeihBfnSkCByLuD0pT8
  fast_clean/repositories/settings/env.py,sha256=maQttYENMJyTf4vnSXa4L3R6tKiLmb-d0Q5VS-r9ZuE,2153
  fast_clean/repositories/settings/exceptions.py,sha256=SKU45z-ahPzI_G6k4A9twupx1v3GaXDj2pbFkg3YgFE,348
  fast_clean/repositories/settings/type_vars.py,sha256=_Oe8x4JwwrN9WOVjLA05BN6gv7cBcBmq2YR2ZI4Hz5w,197
- fast_clean/repositories/storage/__init__.py,sha256=mP_2NTx_Ec19WCmxecJsbjvNjhy8Oj8001lJC-BTGB8,3582
+ fast_clean/repositories/storage/__init__.py,sha256=HoSOCWbntw74W0OlXM0Nn4QN29AXftKbo5ZtVcfycQU,4155
  fast_clean/repositories/storage/enums.py,sha256=bS4L63aEXNaGnJql8A1jmsK4KY916cWnzTW5p_PyLmg,375
- fast_clean/repositories/storage/local.py,sha256=s5REPU7xczvzin13sKyZtFdiocrgAMk8bnDIbJ90KT4,4270
- fast_clean/repositories/storage/reader.py,sha256=yAxj51ITWJf0u-KGC3DJ0iTB3pDI1p9ixi_h0ZcWoZ4,3299
- fast_clean/repositories/storage/s3.py,sha256=z3YHGk89Ac87qAy3FApCXDpEx_CRdLfLzXBIng-BfUQ,4318
- fast_clean/repositories/storage/schemas.py,sha256=etlogfK_1uUZPQjHWQN6LWy6-8YY2Sago3Zbf6p0KcQ,623
+ fast_clean/repositories/storage/local.py,sha256=W7aV-vRbN1E2Sn-V-n7ztDZiZe1xFmFa9AJucDM1XJc,4525
+ fast_clean/repositories/storage/reader.py,sha256=T-5BLkiUSg-3fo3ACrHO0qqE-OKi40L6Wu2fXJJO9L4,3491
+ fast_clean/repositories/storage/s3.py,sha256=70bkMd48YNYyZxLKIOD8vwoaePfD9mUNkHJtMo93JwU,6445
+ fast_clean/repositories/storage/schemas.py,sha256=zJcjl3jrxcO0A6d7ohD5GRLsgRL5f2mcGZQ95pJimc8,755
  fast_clean/schemas/__init__.py,sha256=VgJKIY20qoZZOV55zLGnH2FYWoHpPfJS31HJAj_nGIo,1283
  fast_clean/schemas/exceptions.py,sha256=E7G9jv4G82Ede7OQ3619vPGwEywc7tKmXW6EolOGRFQ,723
  fast_clean/schemas/pagination.py,sha256=GEQ-Tbhx6xkMMXhDNWrTEhPv8IdnAOJxH2P1tscmn60,1384
  fast_clean/schemas/repository.py,sha256=ASxMJb23H3zwavr7P0_ZpCWZX7FjqAuC75qAIYqejvQ,889
  fast_clean/schemas/request_response.py,sha256=i4HTpjelWl4DxJ1sQaeanTWB_PThlhVJRhtMMGqRAiQ,693
  fast_clean/services/__init__.py,sha256=Lvdb5ZibRGwoMn_WENrk9wERUViTsPrU8E_71XtPFJc,617
- fast_clean/services/lock.py,sha256=SLF9_wRx3rgHMw829XwflJgAlGJyXj57o4iVPvGwe78,1653
+ fast_clean/services/lock.py,sha256=SlO-wkULBtm9X7jvtUTiEpmDWuiNTscMD-73PPlmI0o,1654
  fast_clean/services/seed.py,sha256=M0yA2I5z-jLM2UcW_x7287mwIFW5Vt0fPFaplakGFc0,2836
  fast_clean/services/transaction.py,sha256=djXR6e6ukgpBXDbVmU095MvRJAIqdOPMgAcege52Qxg,762
- fast_clean/services/cryptography/__init__.py,sha256=4bey1z11YvSv1V6PgBNYI6brgE1zMvDYh-1ZYhqDFYU,1560
+ fast_clean/services/cryptography/__init__.py,sha256=XK9-z6HT1Jgfc4-IpNY6fZihjW2dqeO83cz5ZvjJIbo,1559
  fast_clean/services/cryptography/aes.py,sha256=_k0WtnKDaEKdUBegfwmqerE75ER44307CEQ-I2W0abo,4616
  fast_clean/services/cryptography/enums.py,sha256=cLibSGv6LNVTUI2rm3_DtDwU68GYIAf4kY3GGbtnw1A,494
  fast_clean/utils/__init__.py,sha256=Q3OiJNdWl51Vd_wSP7iuZQIq4_SjM1mYkqIWPaw94WU,709
@@ -61,9 +63,10 @@ fast_clean/utils/ssl_context.py,sha256=I3tM9bDB6LVMaKCDcrpREzBE4AoTWr3NQDU3_A0Kt
  fast_clean/utils/string.py,sha256=8Dy3MeDHn-V9SUknuYZp8M6iakuU_UAmkMC9UreoN8k,630
  fast_clean/utils/thread.py,sha256=ChEWBLupnSEMq4Wro_aiW0QvCLUKedKc0TQFMu7Zg4g,565
  fast_clean/utils/time.py,sha256=nvavbtG4zR_gkrGSbsqKAsBdePxO3LuTeoISbFZIgn0,307
+ fast_clean/utils/toml.py,sha256=NbP7EfgKNYQ18LH8Hc-DmY1gks92bUSBW3D3-tMrY4E,737
  fast_clean/utils/type_converters.py,sha256=bMEJeoQB9Q6Qok1-ppn4Ii8ZpIkZwJbD2IzCydSStHw,523
  fast_clean/utils/typer.py,sha256=1O7BsNGn68bBzNbj0-Ycfhv35WpLzwvYTKn510YNXQQ,663
- fast_clean-1.3.0.dist-info/METADATA,sha256=RIsstyzjbBZFKcJmzekGf1Hb_d8ZQfjhG5I3P_DtPIU,1234
- fast_clean-1.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- fast_clean-1.3.0.dist-info/top_level.txt,sha256=QfsGs-QLmPCZWWPFOukD0zhMnokH68FoO2KeObl6ZIA,11
- fast_clean-1.3.0.dist-info/RECORD,,
+ fast_clean-1.4.1.dist-info/METADATA,sha256=mXmRInGHMorgthwk9OC2GgE0O3zeelJbj5j9_JvG8pA,1200
+ fast_clean-1.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ fast_clean-1.4.1.dist-info/top_level.txt,sha256=QfsGs-QLmPCZWWPFOukD0zhMnokH68FoO2KeObl6ZIA,11
+ fast_clean-1.4.1.dist-info/RECORD,,
fast_clean/redis.py DELETED
@@ -1,23 +0,0 @@
- """
- Module containing Redis-related functionality.
- """
-
- from pydantic import RedisDsn
-
- from redis import asyncio as aioredis
-
-
- class RedisManager:
-     """
-     Manager for the Redis client.
-     """
-
-     redis: aioredis.Redis | None = None
-
-     @classmethod
-     def init(cls, redis_dsn: RedisDsn) -> None:
-         """
-         Initialize the Redis client.
-         """
-         if cls.redis is None:
-             cls.redis = aioredis.from_url(url=str(redis_dsn), decode_responses=True)
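Code that previously initialized Redis through `RedisManager` now goes through `CacheManager.init(cache_settings)`, as the updated `fast_clean/depends.py` does. A migration sketch; `cache_settings` is assumed to be a `CoreCacheSettingsSchema` with its `redis` block filled in:

# 1.3.0 (removed):
#     from fast_clean.redis import RedisManager
#     RedisManager.init(settings.redis_dsn)
#     redis_client = RedisManager.redis

# 1.4.1:
from fast_clean.repositories import CacheManager

cache_repository = CacheManager.init(cache_settings)  # creates the aioredis client internally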