fast-clean 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fast_clean/__init__.py +3 -0
- fast_clean/broker.py +123 -0
- fast_clean/container.py +235 -0
- fast_clean/contrib/__init__.py +3 -0
- fast_clean/contrib/healthcheck/__init__.py +3 -0
- fast_clean/contrib/healthcheck/router.py +17 -0
- fast_clean/db.py +179 -0
- fast_clean/depends.py +255 -0
- fast_clean/enums.py +39 -0
- fast_clean/exceptions.py +281 -0
- fast_clean/loggers.py +26 -0
- fast_clean/middleware.py +20 -0
- fast_clean/models.py +33 -0
- fast_clean/py.typed +0 -0
- fast_clean/redis.py +23 -0
- fast_clean/repositories/__init__.py +30 -0
- fast_clean/repositories/cache/__init__.py +83 -0
- fast_clean/repositories/cache/in_memory.py +62 -0
- fast_clean/repositories/cache/redis.py +58 -0
- fast_clean/repositories/crud/__init__.py +149 -0
- fast_clean/repositories/crud/db.py +559 -0
- fast_clean/repositories/crud/in_memory.py +369 -0
- fast_clean/repositories/crud/type_vars.py +35 -0
- fast_clean/repositories/settings/__init__.py +52 -0
- fast_clean/repositories/settings/enums.py +16 -0
- fast_clean/repositories/settings/env.py +55 -0
- fast_clean/repositories/settings/exceptions.py +13 -0
- fast_clean/repositories/settings/type_vars.py +9 -0
- fast_clean/repositories/storage/__init__.py +114 -0
- fast_clean/repositories/storage/enums.py +20 -0
- fast_clean/repositories/storage/local.py +118 -0
- fast_clean/repositories/storage/reader.py +122 -0
- fast_clean/repositories/storage/s3.py +118 -0
- fast_clean/repositories/storage/schemas.py +31 -0
- fast_clean/schemas/__init__.py +25 -0
- fast_clean/schemas/exceptions.py +32 -0
- fast_clean/schemas/pagination.py +65 -0
- fast_clean/schemas/repository.py +43 -0
- fast_clean/schemas/request_response.py +36 -0
- fast_clean/schemas/status_response.py +13 -0
- fast_clean/services/__init__.py +16 -0
- fast_clean/services/cryptography/__init__.py +57 -0
- fast_clean/services/cryptography/aes.py +120 -0
- fast_clean/services/cryptography/enums.py +20 -0
- fast_clean/services/lock.py +57 -0
- fast_clean/services/seed.py +91 -0
- fast_clean/services/transaction.py +40 -0
- fast_clean/settings.py +189 -0
- fast_clean/tools/__init__.py +6 -0
- fast_clean/tools/cryptography.py +56 -0
- fast_clean/tools/load_seed.py +31 -0
- fast_clean/use_cases.py +38 -0
- fast_clean/utils/__init__.py +15 -0
- fast_clean/utils/process.py +31 -0
- fast_clean/utils/pydantic.py +23 -0
- fast_clean/utils/ssl_context.py +31 -0
- fast_clean/utils/string.py +28 -0
- fast_clean/utils/thread.py +21 -0
- fast_clean/utils/time.py +14 -0
- fast_clean/utils/type_converters.py +18 -0
- fast_clean/utils/typer.py +23 -0
- fast_clean-0.4.0.dist-info/METADATA +38 -0
- fast_clean-0.4.0.dist-info/RECORD +65 -0
- fast_clean-0.4.0.dist-info/WHEEL +5 -0
- fast_clean-0.4.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,118 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий репозиторий локального файлового хранилища.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import asyncio
|
6
|
+
import os
|
7
|
+
from collections.abc import AsyncGenerator, Awaitable, Callable
|
8
|
+
from contextlib import asynccontextmanager
|
9
|
+
from logging import getLogger
|
10
|
+
from pathlib import Path
|
11
|
+
from typing import Self, cast
|
12
|
+
|
13
|
+
from aiofiles import open
|
14
|
+
from aiofiles import os as aos
|
15
|
+
|
16
|
+
from .reader import AiofilesStreamReader, StreamReaderProtocol, StreamReadProtocol
|
17
|
+
from .schemas import LocalStorageParamsSchema
|
18
|
+
|
19
|
+
|
20
|
+
class LocalStorageRepository:
    """
    Local filesystem storage repository.

    All paths passed to the public methods are resolved relative to the
    working directory supplied via ``LocalStorageParamsSchema``.
    """

    def __init__(self: Self, params: LocalStorageParamsSchema) -> None:
        self.work_dir = Path(params.path)
        # exist_ok=True avoids the TOCTOU race the previous
        # "if not exists: makedirs" check had.
        os.makedirs(self.work_dir, exist_ok=True)
        self.logger = getLogger(__name__)

    async def exists(self: Self, path: str | Path) -> bool:
        """
        Check whether a file or directory exists.
        """
        return await aos.path.exists(self.work_dir / path)

    async def listdir(self: Self, path: str | Path) -> list[str]:
        """
        List files and directories inside the given directory.

        Directory entries are suffixed with ``/`` to distinguish them from files.
        """
        paths: list[str] = []
        for item in await aos.listdir(self.work_dir / path):
            item_path = str(Path(path) / item)
            if await aos.path.isdir(self.work_dir / item_path):
                item_path += '/'
            paths.append(item_path)
        return paths

    async def is_file(self: Self, path: str | Path) -> bool:
        """
        Check whether the path points to a regular file.
        """
        return await aos.path.isfile(self.work_dir / path)

    async def is_dir(self: Self, path: str | Path) -> bool:
        """
        Check whether the path points to a directory.
        """
        return await aos.path.isdir(self.work_dir / path)

    async def read(self: Self, path: str | Path) -> bytes:
        """
        Read the whole content of a file.
        """
        async with open(self.work_dir / path, 'rb') as f:
            return await f.read()

    @asynccontextmanager
    async def stream_read(self: Self, path: str | Path) -> AsyncGenerator[StreamReaderProtocol, None]:
        """
        Read the content of a file as a stream.
        """
        async with open(self.work_dir / path, 'rb') as f:
            yield AiofilesStreamReader(f)

    async def write(self: Self, path: str | Path, content: str | bytes) -> None:
        """
        Create a file or overwrite an existing one.
        """
        path = self.work_dir / path
        await aos.makedirs(path.parent, exist_ok=True)
        async with open(path, mode='wb') as f:
            await f.write(content.encode('utf-8') if isinstance(content, str) else content)

    async def stream_write(
        self: Self,
        path: str | Path,
        stream: StreamReadProtocol,
        length: int = -1,
        part_size: int = 0,
    ) -> None:
        """
        Create a file or overwrite an existing one from a stream.

        :param stream: object exposing a sync or async ``read(size)`` method.
        :param length: ignored for local storage; kept for interface parity with S3.
        :param part_size: chunk size in bytes; defaults to 1024 when falsy.
        """
        if length != -1:
            self.logger.warning('Параметр length не используется для LocalStorage.')
        part_size = part_size or 1024
        path = self.work_dir / path
        # Consistency fix: create parent directories the same way ``write`` does,
        # so streaming into a not-yet-existing subdirectory no longer fails.
        await aos.makedirs(path.parent, exist_ok=True)
        # The stream may expose either a sync or an async read(); detect once.
        is_co_function = asyncio.iscoroutinefunction(stream.read)
        async with open(path, 'wb') as f:
            while chunk := (
                await cast(Callable[[int], Awaitable[bytes]], stream.read)(part_size)
                if is_co_function
                else cast(Callable[[int], bytes], stream.read)(part_size)
            ):
                await f.write(chunk)

    async def delete(self: Self, path: str | Path) -> None:
        """
        Remove the file.
        """
        await aos.remove(self.work_dir / path)
|
@@ -0,0 +1,122 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий классы для потокового чтения данных.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from collections.abc import AsyncIterator
|
6
|
+
from typing import Protocol, Self
|
7
|
+
|
8
|
+
from aiofiles.threadpool.binary import AsyncBufferedReader
|
9
|
+
from aiohttp import ClientResponse
|
10
|
+
|
11
|
+
# Default chunk size for streamed reads: 5 MiB.
READ_SIZE = 5 * 1024 * 1024
|
12
|
+
|
13
|
+
|
14
|
+
class StreamReadSyncProtocol(Protocol):
    """
    Synchronous protocol for streamed data reading.
    """

    def read(self: Self, size: int | None = READ_SIZE) -> bytes:
        """
        Read data.
        """
        ...
|
24
|
+
|
25
|
+
|
26
|
+
class StreamReadAsyncProtocol(Protocol):
    """
    Asynchronous protocol for streamed data reading.
    """

    async def read(self: Self, size: int | None = READ_SIZE) -> bytes:
        """
        Read data.
        """
        ...
|
36
|
+
|
37
|
+
|
38
|
+
# Either flavour of reader (sync or async ``read``) is accepted where a
# stream source is consumed; see LocalStorageRepository.stream_write.
StreamReadProtocol = StreamReadAsyncProtocol | StreamReadSyncProtocol
|
39
|
+
|
40
|
+
|
41
|
+
class StreamReaderProtocol(Protocol):
    """
    Protocol for streamed data reading, usable as an asynchronous iterator.
    """

    async def read(self: Self, size: int = READ_SIZE) -> bytes:
        """
        Read data.
        """
        ...

    def __aiter__(self: Self) -> AsyncIterator[bytes]:
        """
        Return the asynchronous iterator over chunks (conventionally self).
        """
        ...

    async def __anext__(self: Self) -> bytes:
        """
        Read the next chunk of data.
        """
        ...
|
63
|
+
|
64
|
+
|
65
|
+
class AiofilesStreamReader:
    """
    Streamed-reading implementation backed by the `aiofiles` library.

    Wraps an open buffered reader and exposes it both as a ``read(size)``
    source and as an asynchronous iterator over fixed-size chunks.
    """

    def __init__(self, reader: AsyncBufferedReader) -> None:
        self.reader = reader

    async def read(self: Self, size: int = READ_SIZE) -> bytes:
        """
        Read up to ``size`` bytes from the underlying reader.
        """
        return await self.reader.read(size)

    def __aiter__(self: Self) -> AsyncIterator[bytes]:
        """
        Return the asynchronous iterator over chunks.
        """
        return self

    async def __anext__(self: Self) -> bytes:
        """
        Return the next chunk, stopping once the file is exhausted.
        """
        if chunk := await self.reader.read(READ_SIZE):
            return chunk
        raise StopAsyncIteration()
|
93
|
+
|
94
|
+
|
95
|
+
class AiohttpStreamReader:
    """
    Streamed-reading implementation backed by the `aiohttp` library.

    Wraps a client response and exposes its body both as a ``read(size)``
    source and as an asynchronous iterator over fixed-size chunks.
    """

    def __init__(self, response: ClientResponse) -> None:
        self.response = response

    async def read(self: Self, size: int = READ_SIZE) -> bytes:
        """
        Read up to ``size`` bytes of the response body.
        """
        return await self.response.content.read(size)

    def __aiter__(self: Self) -> AsyncIterator[bytes]:
        """
        Return the asynchronous iterator over chunks.
        """
        return self

    async def __anext__(self: Self) -> bytes:
        """
        Return the next chunk, stopping once the body is exhausted.
        """
        if chunk := await self.response.content.read(READ_SIZE):
            return chunk
        raise StopAsyncIteration()
|
@@ -0,0 +1,118 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий репозиторий хранилища S3.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import io
|
6
|
+
from collections.abc import AsyncGenerator
|
7
|
+
from contextlib import asynccontextmanager
|
8
|
+
from pathlib import Path
|
9
|
+
from typing import Self
|
10
|
+
|
11
|
+
import aiohttp
|
12
|
+
import miniopy_async
|
13
|
+
from miniopy_async.error import S3Error
|
14
|
+
|
15
|
+
from .reader import AiohttpStreamReader, StreamReaderProtocol, StreamReadProtocol
|
16
|
+
from .schemas import S3StorageParamsSchema
|
17
|
+
|
18
|
+
|
19
|
+
class S3StorageRepository:
    """
    S3 storage repository backed by the `miniopy_async` client.
    """

    def __init__(self: Self, params: S3StorageParamsSchema):
        self.params = params
        self.bucket = self.params.bucket
        self.client = miniopy_async.Minio(
            f'{self.params.endpoint}:{self.params.port}',
            access_key=self.params.access_key,
            secret_key=self.params.secret_key,
            secure=self.params.secure,
        )

    async def exists(self: Self, path: str | Path) -> bool:
        """
        Check whether the object exists.

        NOTE(review): any ``S3Error`` (not just "no such key") is treated as
        "does not exist" — access/permission errors are silently mapped to
        False. Confirm this is intended.
        """
        try:
            await self.client.stat_object(self.bucket, self.get_str_path(path))
            return True
        except S3Error:
            return False

    async def listdir(self: Self, path: str | Path) -> list[str]:
        """
        List files and directories under the given prefix.
        """
        str_path = self.get_str_path(path)
        # Normalize to a trailing-slash prefix; the bucket root uses no prefix.
        if not str_path or str_path[-1] != '/':
            str_path += '/'
        objects = await self.client.list_objects(self.bucket, prefix=str_path if str_path != '/' else None)
        return [str(obj.object_name) for obj in objects] if objects else []

    async def is_file(self: Self, path: str | Path) -> bool:
        """
        Check whether a file is located at the path.

        NOTE(review): implemented as "not a directory", so a nonexistent path
        also reports True. Confirm callers only pass existing paths here.
        """
        return not await self.is_dir(path)

    async def is_dir(self: Self, path: str | Path) -> bool:
        """
        Check whether a directory (non-empty prefix) is located at the path.
        """
        return len(await self.listdir(path)) > 0

    async def read(self: Self, path: str | Path) -> bytes:
        """
        Read the whole content of the object.
        """
        async with aiohttp.ClientSession() as session:
            response: aiohttp.ClientResponse = await self.client.get_object(
                self.bucket, self.get_str_path(path), session
            )
            return await response.read()

    @asynccontextmanager
    async def stream_read(self: Self, path: str | Path) -> AsyncGenerator[StreamReaderProtocol, None]:
        """
        Read the content of the object as a stream.

        The HTTP session stays open for the lifetime of the context manager,
        so the reader must be consumed inside the ``with`` block.
        """
        async with aiohttp.ClientSession() as session:
            reader = await self.client.get_object(self.bucket, self.get_str_path(path), session)
            yield AiohttpStreamReader(reader)

    async def write(self: Self, path: str | Path, content: str | bytes) -> None:
        """
        Create an object or overwrite an existing one.
        """
        content = content.encode('utf-8') if isinstance(content, str) else content
        data = io.BytesIO(content)
        await self.client.put_object(self.bucket, self.get_str_path(path), data, len(content))

    async def stream_write(
        self: Self,
        path: str | Path,
        stream: StreamReadProtocol,
        length: int = -1,
        part_size: int = 0,
    ) -> None:
        """
        Create an object or overwrite an existing one from a stream.

        ``length`` and ``part_size`` are forwarded verbatim to the MinIO
        client (``length=-1`` means "unknown size, use multipart upload").
        """
        await self.client.put_object(self.bucket, self.get_str_path(path), stream, length=length, part_size=part_size)

    async def delete(self: Self, path: str | Path) -> None:
        """
        Remove the object.
        """
        await self.client.remove_object(self.bucket, self.get_str_path(path))

    @staticmethod
    def get_str_path(path: str | Path) -> str:
        """
        Return the path as a string; an empty ``Path`` maps to the bucket root '/'.
        """
        if isinstance(path, Path):
            return '/' if path == Path('') else str(path)
        return path
|
@@ -0,0 +1,31 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий схемы файлового хранилища.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from pathlib import Path
|
6
|
+
|
7
|
+
from pydantic import BaseModel
|
8
|
+
|
9
|
+
|
10
|
+
class S3StorageParamsSchema(BaseModel):
    """
    Settings parameters for S3Storage.
    """

    endpoint: str  # host name without scheme; combined with ``port`` by the client
    access_key: str
    secret_key: str
    port: int
    bucket: str
    secure: bool = False  # use HTTPS when True
|
21
|
+
|
22
|
+
|
23
|
+
class LocalStorageParamsSchema(BaseModel):
    """
    Settings parameters for LocalStorage.
    """

    path: Path  # working directory all storage paths are resolved against
|
29
|
+
|
30
|
+
|
31
|
+
# Union used to select the storage backend from configuration.
StorageParamsSchema = S3StorageParamsSchema | LocalStorageParamsSchema
|
@@ -0,0 +1,25 @@
|
|
1
|
+
"""
|
2
|
+
Пакет, содержащий схемы.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from .exceptions import BusinessLogicExceptionSchema as BusinessLogicExceptionSchema
|
6
|
+
from .exceptions import ModelAlreadyExistsErrorSchema as ModelAlreadyExistsErrorSchema
|
7
|
+
from .exceptions import ValidationErrorSchema as ValidationErrorSchema
|
8
|
+
from .pagination import (
|
9
|
+
AppliedPaginationResponseSchema as AppliedPaginationResponseSchema,
|
10
|
+
)
|
11
|
+
from .pagination import PaginationRequestSchema as PaginationRequestSchema
|
12
|
+
from .pagination import PaginationResponseSchema as PaginationResponseSchema
|
13
|
+
from .pagination import PaginationResultSchema as PaginationResultSchema
|
14
|
+
from .pagination import PaginationSchema as PaginationSchema
|
15
|
+
from .repository import CreateSchema as CreateSchema
|
16
|
+
from .repository import CreateSchemaOld as CreateSchemaOld
|
17
|
+
from .repository import ReadSchema as ReadSchema
|
18
|
+
from .repository import ReadSchemaOld as ReadSchemaOld
|
19
|
+
from .repository import UpdateSchema as UpdateSchema
|
20
|
+
from .repository import UpdateSchemaOld as UpdateSchemaOld
|
21
|
+
from .request_response import RemoteRequestSchema as RemoteRequestSchema
|
22
|
+
from .request_response import RemoteResponseSchema as RemoteResponseSchema
|
23
|
+
from .request_response import RequestSchema as RequestSchema
|
24
|
+
from .request_response import ResponseSchema as ResponseSchema
|
25
|
+
from .status_response import StatusOkResponseSchema as StatusOkResponseSchema
|
@@ -0,0 +1,32 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий схемы исключений.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from pydantic import BaseModel
|
6
|
+
|
7
|
+
|
8
|
+
class BusinessLogicExceptionSchema(BaseModel):
    """
    Schema of the base business-logic exception.
    """

    type: str  # machine-readable exception type identifier
    msg: str  # human-readable message
    traceback: str | None  # formatted traceback, when available
|
16
|
+
|
17
|
+
|
18
|
+
class ModelAlreadyExistsErrorSchema(BusinessLogicExceptionSchema):
    """
    Schema of the error raised when creating a model whose unique field
    value already exists.
    """

    field: str  # name of the conflicting unique field
|
25
|
+
|
26
|
+
|
27
|
+
class ValidationErrorSchema(BusinessLogicExceptionSchema):
    """
    Schema of a validation error.
    """

    fields: list[str]  # names of the fields that failed validation
|
@@ -0,0 +1,65 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий схемы пагинации.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from __future__ import annotations
|
6
|
+
|
7
|
+
from typing import Generic, Self, TypeVar
|
8
|
+
|
9
|
+
from pydantic import BaseModel, Field
|
10
|
+
|
11
|
+
from .request_response import ResponseSchema
|
12
|
+
|
13
|
+
|
14
|
+
class PaginationRequestSchema(BaseModel):
    """
    Pagination request schema (1-based page numbering).
    """

    page: int = Field(gt=0)
    page_size: int = Field(gt=0)

    def to_pagination_schema(self: Self) -> PaginationSchema:
        """
        Convert to the limit/offset pagination schema.
        """
        return PaginationSchema(limit=self.page_size, offset=(self.page - 1) * self.page_size)
|
27
|
+
|
28
|
+
|
29
|
+
class AppliedPaginationResponseSchema(ResponseSchema):
    """
    Response schema describing the pagination that was applied.
    """

    page: int
    page_size: int
    count: int  # total number of matching objects
|
37
|
+
|
38
|
+
|
39
|
+
class PaginationResponseSchema(ResponseSchema):
    """
    Paginated response schema.
    """

    pagination: AppliedPaginationResponseSchema
|
45
|
+
|
46
|
+
|
47
|
+
class PaginationSchema(BaseModel):
    """
    Pagination schema expressed as limit and offset.
    """

    limit: int
    offset: int
|
54
|
+
|
55
|
+
|
56
|
+
# Element type of a paginated result.
T = TypeVar('T')


class PaginationResultSchema(BaseModel, Generic[T]):
    """
    Pagination result schema.
    """

    objects: list[T]  # objects of the current page
    count: int  # total number of matching objects
|
@@ -0,0 +1,43 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий схемы репозиториев.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import uuid
|
6
|
+
from typing import Generic, TypeVar
|
7
|
+
|
8
|
+
from pydantic import BaseModel
|
9
|
+
|
10
|
+
# Type of a model's primary key (e.g. int or uuid.UUID).
IdType = TypeVar('IdType')


class CreateSchemaGeneric(BaseModel, Generic[IdType]):
    """
    Schema for creating a model.
    """

    # None lets the persistence layer assign the identifier.
    id: IdType | None = None
|
19
|
+
|
20
|
+
|
21
|
+
class ReadSchemaGeneric(BaseModel, Generic[IdType]):
    """
    Schema for reading a model.
    """

    id: IdType
|
27
|
+
|
28
|
+
|
29
|
+
class UpdateSchemaGeneric(BaseModel, Generic[IdType]):
    """
    Schema for updating a model.
    """

    id: IdType
|
35
|
+
|
36
|
+
|
37
|
+
# Legacy aliases for models with integer primary keys.
CreateSchemaOld = CreateSchemaGeneric[int]
ReadSchemaOld = ReadSchemaGeneric[int]
UpdateSchemaOld = UpdateSchemaGeneric[int]

# Current aliases for models with UUID primary keys.
CreateSchema = CreateSchemaGeneric[uuid.UUID]
ReadSchema = ReadSchemaGeneric[uuid.UUID]
UpdateSchema = UpdateSchemaGeneric[uuid.UUID]
|
@@ -0,0 +1,36 @@
|
|
1
|
+
"""
|
2
|
+
Модуль, содержащий схемы запросов и ответов.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from pydantic import AliasGenerator, BaseModel, ConfigDict
|
6
|
+
from pydantic.alias_generators import to_camel
|
7
|
+
|
8
|
+
|
9
|
+
class RequestSchema(BaseModel):
    """
    Request schema.

    Accepts camelCase field names on input (validation aliases).
    """

    model_config = ConfigDict(
        alias_generator=AliasGenerator(
            validation_alias=to_camel,
        )
    )


# A response received from a remote service is parsed like an incoming request.
RemoteResponseSchema = RequestSchema
|
22
|
+
|
23
|
+
|
24
|
+
class ResponseSchema(BaseModel):
    """
    Response schema.

    Emits camelCase field names on output (serialization aliases).
    """

    model_config = ConfigDict(
        alias_generator=AliasGenerator(
            serialization_alias=to_camel,
        )
    )


# A request sent to a remote service is serialized like an outgoing response.
RemoteRequestSchema = ResponseSchema
|
@@ -0,0 +1,16 @@
|
|
1
|
+
"""
|
2
|
+
Пакет, содержащий сервисы.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from .cryptography import AesCbcCryptographyService as AesCbcCryptographyService
|
6
|
+
from .cryptography import AesGcmCryptographyService as AesGcmCryptographyService
|
7
|
+
from .cryptography import CryptographicAlgorithmEnum as CryptographicAlgorithmEnum
|
8
|
+
from .cryptography import CryptographyServiceFactoryImpl as CryptographyServiceFactoryImpl
|
9
|
+
from .cryptography import CryptographyServiceFactoryProtocol as CryptographyServiceFactoryProtocol
|
10
|
+
from .cryptography import CryptographyServiceProtocol as CryptographyServiceProtocol
|
11
|
+
from .lock import LockServiceProtocol as LockServiceProtocol
|
12
|
+
from .lock import RedisLockService as RedisLockService
|
13
|
+
from .seed import SeedServiceImpl as SeedServiceImpl
|
14
|
+
from .seed import SeedServiceProtocol as SeedServiceProtocol
|
15
|
+
from .transaction import TransactionServiceImpl as TransactionServiceImpl
|
16
|
+
from .transaction import TransactionServiceProtocol as TransactionServiceProtocol
|
@@ -0,0 +1,57 @@
|
|
1
|
+
"""
|
2
|
+
Пакет, содержащий сервис криптографии для шифрования секретных параметров.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from typing import Protocol, Self
|
6
|
+
|
7
|
+
from .aes import AesCbcCryptographyService, AesGcmCryptographyService
|
8
|
+
from .enums import CryptographicAlgorithmEnum
|
9
|
+
|
10
|
+
|
11
|
+
class CryptographyServiceProtocol(Protocol):
    """
    Protocol of the cryptography service for encrypting secret parameters.
    """

    def encrypt(self: Self, data: str) -> str:
        """
        Encrypt the data.
        """
        ...

    def decrypt(self: Self, encrypted_data: str) -> str:
        """
        Decrypt the data.
        """
        ...
|
27
|
+
|
28
|
+
|
29
|
+
class CryptographyServiceFactoryProtocol(Protocol):
    """
    Protocol of the factory of cryptography services for encrypting secret
    parameters.
    """

    async def make(self: Self, algorithm: CryptographicAlgorithmEnum) -> CryptographyServiceProtocol:
        """
        Create a cryptography service for encrypting secret parameters.
        """
        ...
|
39
|
+
|
40
|
+
|
41
|
+
class CryptographyServiceFactoryImpl:
    """
    Factory implementation producing cryptography services for encrypting
    secret parameters.
    """

    def __init__(self, secret_key: str) -> None:
        self.secret_key = secret_key

    async def make(self: Self, algorithm: CryptographicAlgorithmEnum) -> CryptographyServiceProtocol:
        """
        Create a cryptography service for the requested algorithm.
        """
        if algorithm == CryptographicAlgorithmEnum.AES_GCM:
            return AesGcmCryptographyService(self.secret_key)
        if algorithm == CryptographicAlgorithmEnum.AES_CBC:
            return AesCbcCryptographyService(self.secret_key)
|