sotkalib 0.0.4__tar.gz → 0.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sotkalib-0.0.4 → sotkalib-0.0.5}/PKG-INFO +5 -5
- {sotkalib-0.0.4 → sotkalib-0.0.5}/pyproject.toml +5 -5
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/__init__.py +1 -1
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/config/__init__.py +2 -2
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/config/field.py +3 -3
- sotkalib-0.0.5/src/sotkalib/enum/mixins.py +59 -0
- sotkalib-0.0.5/src/sotkalib/exceptions/__init__.py +3 -0
- sotkalib-0.0.5/src/sotkalib/exceptions/api/__init__.py +1 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/exceptions/handlers/__init__.py +1 -1
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/exceptions/handlers/args_incl_error.py +1 -1
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/http/__init__.py +12 -0
- sotkalib-0.0.5/src/sotkalib/http/client_session.py +457 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/log/factory.py +2 -2
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/redis/__init__.py +1 -5
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/sqla/__init__.py +1 -1
- sotkalib-0.0.5/src/sotkalib/sqla/db.py +102 -0
- sotkalib-0.0.4/src/sotkalib/enum/mixins.py +0 -59
- sotkalib-0.0.4/src/sotkalib/exceptions/__init__.py +0 -3
- sotkalib-0.0.4/src/sotkalib/exceptions/api/__init__.py +0 -1
- sotkalib-0.0.4/src/sotkalib/http/client_session.py +0 -258
- sotkalib-0.0.4/src/sotkalib/sqla/db.py +0 -101
- {sotkalib-0.0.4 → sotkalib-0.0.5}/README.md +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/config/struct.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/enum/__init__.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/exceptions/api/exc.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/exceptions/handlers/core.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/log/__init__.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/py.typed +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/redis/client.py +0 -0
- {sotkalib-0.0.4 → sotkalib-0.0.5}/src/sotkalib/redis/lock.py +0 -0
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: sotkalib
|
|
3
|
-
Version: 0.0.
|
|
3
|
+
Version: 0.0.5
|
|
4
4
|
Summary:
|
|
5
5
|
Author: alexey
|
|
6
6
|
Author-email: alexey <me@pyrorhythm.dev>
|
|
7
|
-
Requires-Dist: aiohttp>=3.13.
|
|
7
|
+
Requires-Dist: aiohttp>=3.13.0
|
|
8
8
|
Requires-Dist: dotenv>=0.9.9
|
|
9
9
|
Requires-Dist: loguru>=0.7.3
|
|
10
|
-
Requires-Dist: pydantic>=2.12.
|
|
11
|
-
Requires-Dist: redis>=
|
|
12
|
-
Requires-Dist: sqlalchemy[asyncio]>=2.0.
|
|
10
|
+
Requires-Dist: pydantic>=2.12.0
|
|
11
|
+
Requires-Dist: redis>=6.4.0,<8.0.0
|
|
12
|
+
Requires-Dist: sqlalchemy[asyncio]>=2.0.0
|
|
13
13
|
Requires-Python: >=3.13
|
|
14
14
|
Description-Content-Type: text/markdown
|
|
15
15
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "sotkalib"
|
|
3
|
-
version = "0.0.
|
|
3
|
+
version = "0.0.5"
|
|
4
4
|
description = ""
|
|
5
5
|
authors = [
|
|
6
6
|
{ email = "me@pyrorhythm.dev", name = "alexey" }
|
|
@@ -8,12 +8,12 @@ authors = [
|
|
|
8
8
|
readme = "README.md"
|
|
9
9
|
requires-python = ">=3.13"
|
|
10
10
|
dependencies = [
|
|
11
|
-
"aiohttp>=3.13.
|
|
11
|
+
"aiohttp>=3.13.0",
|
|
12
12
|
"dotenv>=0.9.9",
|
|
13
13
|
"loguru>=0.7.3",
|
|
14
|
-
"pydantic>=2.12.
|
|
15
|
-
"redis>=
|
|
16
|
-
"sqlalchemy[asyncio]>=2.0.
|
|
14
|
+
"pydantic>=2.12.0",
|
|
15
|
+
"redis>=6.4.0,<8.0.0",
|
|
16
|
+
"sqlalchemy[asyncio]>=2.0.0",
|
|
17
17
|
]
|
|
18
18
|
|
|
19
19
|
|
|
@@ -6,6 +6,6 @@ type AllowedTypes = int | float | complex | str | bool | None
|
|
|
6
6
|
|
|
7
7
|
@dataclass(init=True, slots=True, frozen=True)
|
|
8
8
|
class SettingsField[T: AllowedTypes]:
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
9
|
+
default: T | None = None
|
|
10
|
+
factory: Callable[[], T] | str | None = None
|
|
11
|
+
nullable: bool = False
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from collections.abc import Sequence
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Any, Literal, Self, overload
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class UppercaseStrEnumMixin(str, Enum):
|
|
7
|
+
@staticmethod
|
|
8
|
+
def _generate_next_value_(name: str, start: int, count: int, last_values: Sequence) -> str: # noqa
|
|
9
|
+
return name.upper()
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ValidatorStrEnumMixin(str, Enum):
|
|
13
|
+
@classmethod
|
|
14
|
+
def _normalize_value(cls, val: Any) -> str:
|
|
15
|
+
if isinstance(val, (str, bytes, bytearray)):
|
|
16
|
+
return val.decode("utf-8") if isinstance(val, (bytes, bytearray)) else val
|
|
17
|
+
raise TypeError("value must be str-like")
|
|
18
|
+
|
|
19
|
+
@overload
|
|
20
|
+
@classmethod
|
|
21
|
+
def validate(cls, *, val: Any, req: Literal[False] = False) -> Self | None: ...
|
|
22
|
+
|
|
23
|
+
@overload
|
|
24
|
+
@classmethod
|
|
25
|
+
def validate(cls, *, val: Any, req: Literal[True]) -> Self: ...
|
|
26
|
+
|
|
27
|
+
@classmethod
|
|
28
|
+
def validate(cls, *, val: Any, req: bool = False) -> Self | None:
|
|
29
|
+
if val is None:
|
|
30
|
+
if req:
|
|
31
|
+
raise ValueError("value is None and req=True")
|
|
32
|
+
return None
|
|
33
|
+
normalized = cls._normalize_value(val)
|
|
34
|
+
try:
|
|
35
|
+
return cls(normalized)
|
|
36
|
+
except ValueError as e:
|
|
37
|
+
raise TypeError(f"{normalized=} not valid: {e}") from e
|
|
38
|
+
|
|
39
|
+
@overload
|
|
40
|
+
@classmethod
|
|
41
|
+
def get(cls, val: Any, default: Literal[None] = None) -> Self | None: ...
|
|
42
|
+
|
|
43
|
+
@overload
|
|
44
|
+
@classmethod
|
|
45
|
+
def get(cls, val: Any, default: Self) -> Self: ...
|
|
46
|
+
|
|
47
|
+
@classmethod
|
|
48
|
+
def get(cls, val: Any, default: Self | None = None) -> Self | None:
|
|
49
|
+
try:
|
|
50
|
+
return cls.validate(val=val, req=False) or default
|
|
51
|
+
except (ValueError, TypeError):
|
|
52
|
+
return default
|
|
53
|
+
|
|
54
|
+
def in_(self, *enum_values: Self) -> bool:
|
|
55
|
+
return self in enum_values
|
|
56
|
+
|
|
57
|
+
@classmethod
|
|
58
|
+
def values(cls) -> Sequence[Self]:
|
|
59
|
+
return list(cls)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .exc import APIError, ErrorSchema
|
|
@@ -11,5 +11,5 @@ class ArgsIncludedError(Exception):
|
|
|
11
11
|
args, _, _, values = inspect.getargvalues(frame)
|
|
12
12
|
f_locals = frame.f_locals
|
|
13
13
|
args_with_values = {arg: values[arg] for arg in args}
|
|
14
|
-
stack_args_to_exc.append(args_with_values | f_locals | {
|
|
14
|
+
stack_args_to_exc.append(args_with_values | f_locals | {"frame_name": frame.f_code.co_name})
|
|
15
15
|
super().__init__(*_args, *stack_args_to_exc)
|
|
@@ -1,17 +1,29 @@
|
|
|
1
1
|
from .client_session import (
|
|
2
2
|
ClientSettings,
|
|
3
|
+
# Exceptions
|
|
4
|
+
CriticalStatusError,
|
|
3
5
|
ExceptionSettings,
|
|
4
6
|
Handler,
|
|
5
7
|
HTTPSession,
|
|
6
8
|
Middleware,
|
|
9
|
+
Next,
|
|
10
|
+
RanOutOfAttemptsError,
|
|
11
|
+
RequestContext,
|
|
12
|
+
StatusRetryError,
|
|
7
13
|
StatusSettings,
|
|
8
14
|
)
|
|
9
15
|
|
|
10
16
|
__all__ = (
|
|
11
17
|
"HTTPSession",
|
|
18
|
+
"RequestContext",
|
|
12
19
|
"ExceptionSettings",
|
|
13
20
|
"StatusSettings",
|
|
14
21
|
"ClientSettings",
|
|
15
22
|
"Handler",
|
|
16
23
|
"Middleware",
|
|
24
|
+
"Next",
|
|
25
|
+
# Exceptions
|
|
26
|
+
"CriticalStatusError",
|
|
27
|
+
"RanOutOfAttemptsError",
|
|
28
|
+
"StatusRetryError",
|
|
17
29
|
)
|
|
@@ -0,0 +1,457 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import ssl
|
|
3
|
+
import time
|
|
4
|
+
from collections.abc import Awaitable, Callable, Mapping, Sequence
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from http import HTTPStatus
|
|
7
|
+
from typing import Any, Literal, Self
|
|
8
|
+
|
|
9
|
+
import aiohttp
|
|
10
|
+
from aiohttp import client_exceptions
|
|
11
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
12
|
+
|
|
13
|
+
from sotkalib.log import get_logger
|
|
14
|
+
|
|
15
|
+
MAXIMUM_BACKOFF: float = 120
|
|
16
|
+
|
|
17
|
+
try:
|
|
18
|
+
import certifi
|
|
19
|
+
except ImportError:
|
|
20
|
+
certifi = None
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class RanOutOfAttemptsError(Exception):
|
|
24
|
+
pass
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CriticalStatusError(Exception):
|
|
28
|
+
pass
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class StatusRetryError(Exception):
|
|
32
|
+
status: int
|
|
33
|
+
context: str
|
|
34
|
+
|
|
35
|
+
def __init__(self, status: int, context: str) -> None:
|
|
36
|
+
super().__init__(f"{status}: {context}")
|
|
37
|
+
self.status = status
|
|
38
|
+
self.context = context
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
|
|
42
|
+
class RequestContext:
|
|
43
|
+
method: str
|
|
44
|
+
url: str
|
|
45
|
+
params: dict[str, Any] | None = None
|
|
46
|
+
headers: dict[str, Any] | None = None
|
|
47
|
+
data: Any = None
|
|
48
|
+
json: Any = None
|
|
49
|
+
kwargs: dict[str, Any] = field(default_factory=dict)
|
|
50
|
+
|
|
51
|
+
attempt: int = 0
|
|
52
|
+
max_attempts: int = 1
|
|
53
|
+
|
|
54
|
+
response: aiohttp.ClientResponse | None = None
|
|
55
|
+
response_body: Any = None
|
|
56
|
+
response_text: str | None = None
|
|
57
|
+
response_json: Any = None
|
|
58
|
+
|
|
59
|
+
started_at: float | None = None
|
|
60
|
+
finished_at: float | None = None
|
|
61
|
+
attempt_started_at: float | None = None
|
|
62
|
+
|
|
63
|
+
errors: list[Exception] = field(default_factory=list)
|
|
64
|
+
last_error: Exception | None = None
|
|
65
|
+
|
|
66
|
+
state: dict[str, Any] = field(default_factory=dict)
|
|
67
|
+
|
|
68
|
+
@property
|
|
69
|
+
def elapsed(self) -> float | None:
|
|
70
|
+
if self.started_at is None:
|
|
71
|
+
return None
|
|
72
|
+
end = self.finished_at if self.finished_at else time.monotonic()
|
|
73
|
+
return end - self.started_at
|
|
74
|
+
|
|
75
|
+
@property
|
|
76
|
+
def attempt_elapsed(self) -> float | None:
|
|
77
|
+
if self.attempt_started_at is None:
|
|
78
|
+
return None
|
|
79
|
+
return time.monotonic() - self.attempt_started_at
|
|
80
|
+
|
|
81
|
+
@property
|
|
82
|
+
def is_retry(self) -> bool:
|
|
83
|
+
return self.attempt > 0
|
|
84
|
+
|
|
85
|
+
@property
|
|
86
|
+
def status(self) -> int | None:
|
|
87
|
+
return self.response.status if self.response else None
|
|
88
|
+
|
|
89
|
+
def merge_headers(self, headers: dict[str, str]) -> None:
|
|
90
|
+
if self.headers is None:
|
|
91
|
+
self.headers = {}
|
|
92
|
+
self.headers.update(headers)
|
|
93
|
+
|
|
94
|
+
def to_request_kwargs(self) -> dict[str, Any]:
|
|
95
|
+
kw = dict(self.kwargs)
|
|
96
|
+
if self.params is not None:
|
|
97
|
+
kw["params"] = self.params
|
|
98
|
+
if self.headers is not None:
|
|
99
|
+
kw["headers"] = self.headers
|
|
100
|
+
if self.data is not None:
|
|
101
|
+
kw["data"] = self.data
|
|
102
|
+
if self.json is not None:
|
|
103
|
+
kw["json"] = self.json
|
|
104
|
+
return kw
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
type Next[T] = Callable[[RequestContext], Awaitable[T]]
|
|
108
|
+
type Middleware[T, R] = Callable[[RequestContext, Next[T]], Awaitable[R]]
|
|
109
|
+
|
|
110
|
+
type ExcArgFunc = Callable[..., tuple[Sequence[Any], Mapping[str, Any] | None]]
|
|
111
|
+
type StatArgFunc = Callable[..., Any]
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
async def default_stat_arg_func(resp: aiohttp.ClientResponse) -> tuple[Sequence[Any], None]:
|
|
115
|
+
return (f"[{resp.status}]; {await resp.text()=}",), None
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def default_exc_arg_func(exc: Exception, attempt: int, url: str, method: str, **kw) -> tuple[Sequence[Any], None]:
|
|
119
|
+
return (f"exception {type(exc)}: ({exc=}) {attempt=}; {url=} {method=} {kw=}",), None
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
class StatusSettings(BaseModel):
|
|
123
|
+
model_config = ConfigDict(arbitrary_types_allowed=True)
|
|
124
|
+
|
|
125
|
+
to_raise: set[HTTPStatus] = Field(default={HTTPStatus.FORBIDDEN})
|
|
126
|
+
to_retry: set[HTTPStatus] = Field(default={HTTPStatus.TOO_MANY_REQUESTS, HTTPStatus.FORBIDDEN})
|
|
127
|
+
exc_to_raise: type[Exception] = Field(default=CriticalStatusError)
|
|
128
|
+
not_found_as_none: bool = Field(default=True)
|
|
129
|
+
args_for_exc_func: StatArgFunc = Field(default=default_stat_arg_func)
|
|
130
|
+
unspecified: Literal["retry", "raise"] = Field(default="retry")
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
class ExceptionSettings(BaseModel):
|
|
134
|
+
model_config = ConfigDict(arbitrary_types_allowed=True)
|
|
135
|
+
|
|
136
|
+
to_raise: tuple[type[Exception], ...] = Field(
|
|
137
|
+
default=(
|
|
138
|
+
client_exceptions.ConnectionTimeoutError,
|
|
139
|
+
client_exceptions.ClientProxyConnectionError,
|
|
140
|
+
client_exceptions.ContentTypeError,
|
|
141
|
+
),
|
|
142
|
+
)
|
|
143
|
+
|
|
144
|
+
to_retry: tuple[type[Exception], ...] = Field(
|
|
145
|
+
default=(
|
|
146
|
+
TimeoutError,
|
|
147
|
+
client_exceptions.ServerDisconnectedError,
|
|
148
|
+
client_exceptions.ClientConnectionResetError,
|
|
149
|
+
client_exceptions.ClientOSError,
|
|
150
|
+
client_exceptions.ClientHttpProxyError,
|
|
151
|
+
),
|
|
152
|
+
)
|
|
153
|
+
|
|
154
|
+
exc_to_raise: type[Exception] | None = Field(default=None)
|
|
155
|
+
args_for_exc_func: ExcArgFunc = Field(default=default_exc_arg_func)
|
|
156
|
+
unspecified: Literal["retry", "raise"] = Field(default="retry")
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
class ClientSettings(BaseModel):
|
|
160
|
+
timeout: float = Field(default=5.0, gt=0)
|
|
161
|
+
base: float = Field(default=1.0, gt=0)
|
|
162
|
+
backoff: float = Field(default=2.0, gt=0)
|
|
163
|
+
maximum_retries: int = Field(default=3, ge=1)
|
|
164
|
+
|
|
165
|
+
useragent_factory: Callable[[], str] | None = Field(default=None)
|
|
166
|
+
|
|
167
|
+
status_settings: StatusSettings = Field(default_factory=StatusSettings)
|
|
168
|
+
exception_settings: ExceptionSettings = Field(default_factory=ExceptionSettings)
|
|
169
|
+
|
|
170
|
+
session_kwargs: dict[str, Any] = Field(default_factory=dict)
|
|
171
|
+
use_cookies_from_response: bool = Field(default=False)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
# ============================================================================
|
|
175
|
+
# SSL Context
|
|
176
|
+
# ============================================================================
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def _make_ssl_context(disable_tls13: bool = False) -> ssl.SSLContext:
|
|
180
|
+
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
|
181
|
+
ctx.load_default_certs()
|
|
182
|
+
|
|
183
|
+
if certifi:
|
|
184
|
+
ctx.load_verify_locations(certifi.where())
|
|
185
|
+
|
|
186
|
+
ctx.minimum_version = ssl.TLSVersion.TLSv1_2
|
|
187
|
+
ctx.maximum_version = ssl.TLSVersion.TLSv1_2 if disable_tls13 else ssl.TLSVersion.TLSv1_3
|
|
188
|
+
|
|
189
|
+
ctx.set_ciphers(
|
|
190
|
+
"TLS_AES_256_GCM_SHA384:TLS_AES_128_GCM_SHA256:"
|
|
191
|
+
"TLS_CHACHA20_POLY1305_SHA256:"
|
|
192
|
+
"ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:"
|
|
193
|
+
"ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256"
|
|
194
|
+
)
|
|
195
|
+
|
|
196
|
+
ctx.check_hostname = True
|
|
197
|
+
ctx.verify_mode = ssl.CERT_REQUIRED
|
|
198
|
+
|
|
199
|
+
return ctx
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
# ============================================================================
|
|
203
|
+
# HTTP Session
|
|
204
|
+
# ============================================================================
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
class HTTPSession[R = aiohttp.ClientResponse | None]:
|
|
208
|
+
config: ClientSettings
|
|
209
|
+
_session: aiohttp.ClientSession | None
|
|
210
|
+
_middlewares: list[Middleware[Any, Any]]
|
|
211
|
+
_logger: Any
|
|
212
|
+
|
|
213
|
+
def __init__(
|
|
214
|
+
self,
|
|
215
|
+
config: ClientSettings | None = None,
|
|
216
|
+
_middlewares: list[Middleware[Any, Any]] | None = None,
|
|
217
|
+
) -> None:
|
|
218
|
+
self.config = config if config is not None else ClientSettings()
|
|
219
|
+
self._session = None
|
|
220
|
+
self._middlewares = _middlewares or []
|
|
221
|
+
self._logger = get_logger("http.client_session")
|
|
222
|
+
|
|
223
|
+
def use[NewR](self, middleware: Middleware[R, NewR]) -> HTTPSession[NewR]:
|
|
224
|
+
return HTTPSession[NewR](
|
|
225
|
+
config=self.config,
|
|
226
|
+
_middlewares=[*self._middlewares, middleware],
|
|
227
|
+
)
|
|
228
|
+
|
|
229
|
+
async def __aenter__(self) -> Self:
|
|
230
|
+
ctx = _make_ssl_context(disable_tls13=False)
|
|
231
|
+
|
|
232
|
+
session_kwargs = dict(self.config.session_kwargs)
|
|
233
|
+
if session_kwargs.get("connector") is None:
|
|
234
|
+
session_kwargs["connector"] = aiohttp.TCPConnector(ssl=ctx)
|
|
235
|
+
if session_kwargs.get("trust_env") is None:
|
|
236
|
+
session_kwargs["trust_env"] = False
|
|
237
|
+
|
|
238
|
+
self._session = aiohttp.ClientSession(
|
|
239
|
+
timeout=aiohttp.ClientTimeout(total=self.config.timeout),
|
|
240
|
+
**session_kwargs,
|
|
241
|
+
)
|
|
242
|
+
|
|
243
|
+
self._logger.debug(f"HTTPSession initialized with timeout: {self.config.timeout}")
|
|
244
|
+
return self
|
|
245
|
+
|
|
246
|
+
async def __aexit__(
|
|
247
|
+
self,
|
|
248
|
+
exc_type: type[BaseException] | None,
|
|
249
|
+
exc_val: BaseException | None,
|
|
250
|
+
exc_tb: Any,
|
|
251
|
+
) -> None:
|
|
252
|
+
if self._session:
|
|
253
|
+
await self._session.close()
|
|
254
|
+
|
|
255
|
+
def _build_pipeline(self) -> Next[R]:
|
|
256
|
+
"""Build the middleware pipeline with the core request at the end."""
|
|
257
|
+
|
|
258
|
+
async def core_request(ctx: RequestContext) -> aiohttp.ClientResponse | None:
|
|
259
|
+
"""The innermost handler that actually makes the HTTP request."""
|
|
260
|
+
return await self._execute_request(ctx)
|
|
261
|
+
|
|
262
|
+
pipeline: Next[Any] = core_request
|
|
263
|
+
for middleware in reversed(self._middlewares):
|
|
264
|
+
pipeline = (lambda mw, nxt: lambda c: mw(c, nxt))(middleware, pipeline)
|
|
265
|
+
|
|
266
|
+
return pipeline
|
|
267
|
+
|
|
268
|
+
async def _execute_request(self, ctx: RequestContext) -> aiohttp.ClientResponse | None:
|
|
269
|
+
"""Execute the actual HTTP request and handle status codes."""
|
|
270
|
+
if self._session is None:
|
|
271
|
+
raise RuntimeError("HTTPSession must be used as async context manager")
|
|
272
|
+
|
|
273
|
+
response = await self._session.request(ctx.method, ctx.url, **ctx.to_request_kwargs())
|
|
274
|
+
ctx.response = response
|
|
275
|
+
|
|
276
|
+
return await self._handle_status(ctx, response)
|
|
277
|
+
|
|
278
|
+
async def _handle_status(
|
|
279
|
+
self,
|
|
280
|
+
ctx: RequestContext,
|
|
281
|
+
response: aiohttp.ClientResponse,
|
|
282
|
+
) -> aiohttp.ClientResponse | None:
|
|
283
|
+
"""Handle HTTP status codes according to settings."""
|
|
284
|
+
status = response.status
|
|
285
|
+
settings = self.config.status_settings
|
|
286
|
+
|
|
287
|
+
if self.config.use_cookies_from_response and self._session:
|
|
288
|
+
self._session.cookie_jar.update_cookies(response.cookies)
|
|
289
|
+
|
|
290
|
+
if HTTPStatus(status) in settings.to_retry:
|
|
291
|
+
text = await response.text()
|
|
292
|
+
ctx.response_text = text
|
|
293
|
+
raise StatusRetryError(status=status, context=text)
|
|
294
|
+
|
|
295
|
+
if HTTPStatus(status) in settings.to_raise:
|
|
296
|
+
exc_cls = settings.exc_to_raise
|
|
297
|
+
args, kwargs = await settings.args_for_exc_func(response)
|
|
298
|
+
if kwargs is None:
|
|
299
|
+
raise exc_cls(*args)
|
|
300
|
+
raise exc_cls(*args, **kwargs)
|
|
301
|
+
|
|
302
|
+
if settings.not_found_as_none and status == HTTPStatus.NOT_FOUND:
|
|
303
|
+
return None
|
|
304
|
+
|
|
305
|
+
return response
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
async def _request_with_retry(self, ctx: RequestContext) -> R:
|
|
309
|
+
"""Execute request with retry logic."""
|
|
310
|
+
ctx.started_at = time.monotonic()
|
|
311
|
+
ctx.max_attempts = self.config.maximum_retries + 1
|
|
312
|
+
|
|
313
|
+
pipeline = self._build_pipeline()
|
|
314
|
+
|
|
315
|
+
for attempt in range(ctx.max_attempts):
|
|
316
|
+
ctx.attempt = attempt
|
|
317
|
+
ctx.attempt_started_at = time.monotonic()
|
|
318
|
+
ctx.response = None
|
|
319
|
+
|
|
320
|
+
try:
|
|
321
|
+
result = await pipeline(ctx)
|
|
322
|
+
ctx.finished_at = time.monotonic()
|
|
323
|
+
return result
|
|
324
|
+
|
|
325
|
+
except merge_tuples(self.config.exception_settings.to_retry, (StatusRetryError,)) as e:
|
|
326
|
+
ctx.errors.append(e)
|
|
327
|
+
ctx.last_error = e
|
|
328
|
+
await self._handle_retry(ctx, e)
|
|
329
|
+
|
|
330
|
+
except self.config.exception_settings.to_raise as e:
|
|
331
|
+
ctx.errors.append(e)
|
|
332
|
+
ctx.last_error = e
|
|
333
|
+
ctx.finished_at = time.monotonic()
|
|
334
|
+
await self._handle_to_raise(ctx, e)
|
|
335
|
+
|
|
336
|
+
except Exception as e:
|
|
337
|
+
ctx.errors.append(e)
|
|
338
|
+
ctx.last_error = e
|
|
339
|
+
await self._handle_exception(ctx, e)
|
|
340
|
+
|
|
341
|
+
ctx.finished_at = time.monotonic()
|
|
342
|
+
raise RanOutOfAttemptsError(
|
|
343
|
+
f"failed after {self.config.maximum_retries} retries: {type(ctx.last_error).__name__}: {ctx.last_error}"
|
|
344
|
+
)
|
|
345
|
+
|
|
346
|
+
async def _handle_retry(self, ctx: RequestContext, e: Exception) -> None:
|
|
347
|
+
if ctx.attempt >= self.config.maximum_retries:
|
|
348
|
+
raise RanOutOfAttemptsError(
|
|
349
|
+
f"failed after {self.config.maximum_retries} retries: {type(e).__name__}: {e}"
|
|
350
|
+
) from e
|
|
351
|
+
|
|
352
|
+
delay = self.config.base * min(MAXIMUM_BACKOFF, self.config.backoff**ctx.attempt)
|
|
353
|
+
self._logger.debug(
|
|
354
|
+
f"Retry {ctx.attempt + 1}/{ctx.max_attempts} for {ctx.method} {ctx.url} "
|
|
355
|
+
f"after {delay:.2f}s (error: {type(e).__name__})"
|
|
356
|
+
)
|
|
357
|
+
await asyncio.sleep(delay)
|
|
358
|
+
|
|
359
|
+
async def _handle_to_raise(self, ctx: RequestContext, e: Exception) -> None:
|
|
360
|
+
"""Handle exceptions that should be re-raised (possibly wrapped)."""
|
|
361
|
+
exc_cls = self.config.exception_settings.exc_to_raise
|
|
362
|
+
if exc_cls is None:
|
|
363
|
+
raise e
|
|
364
|
+
|
|
365
|
+
args, kwargs = self.config.exception_settings.args_for_exc_func(
|
|
366
|
+
e, ctx.attempt, ctx.url, ctx.method, **ctx.to_request_kwargs()
|
|
367
|
+
)
|
|
368
|
+
if kwargs is None:
|
|
369
|
+
raise exc_cls(*args) from e
|
|
370
|
+
raise exc_cls(*args, **kwargs) from e
|
|
371
|
+
|
|
372
|
+
async def _handle_exception(self, ctx: RequestContext, e: Exception) -> None:
|
|
373
|
+
"""Handle unspecified exceptions according to settings."""
|
|
374
|
+
if self.config.exception_settings.unspecified == "raise":
|
|
375
|
+
raise e
|
|
376
|
+
await self._handle_retry(ctx, e)
|
|
377
|
+
|
|
378
|
+
def _create_context(
|
|
379
|
+
self,
|
|
380
|
+
method: str,
|
|
381
|
+
url: str,
|
|
382
|
+
params: dict[str, Any] | None = None,
|
|
383
|
+
headers: dict[str, Any] | None = None,
|
|
384
|
+
data: Any = None,
|
|
385
|
+
json: Any = None,
|
|
386
|
+
**kwargs: Any,
|
|
387
|
+
) -> RequestContext:
|
|
388
|
+
"""Create a RequestContext for the given request parameters."""
|
|
389
|
+
# Apply user agent if configured
|
|
390
|
+
if self.config.useragent_factory is not None:
|
|
391
|
+
if headers is None:
|
|
392
|
+
headers = {}
|
|
393
|
+
headers["User-Agent"] = self.config.useragent_factory()
|
|
394
|
+
|
|
395
|
+
return RequestContext(
|
|
396
|
+
method=method,
|
|
397
|
+
url=url,
|
|
398
|
+
params=params,
|
|
399
|
+
headers=headers,
|
|
400
|
+
data=data,
|
|
401
|
+
json=json,
|
|
402
|
+
kwargs=kwargs,
|
|
403
|
+
)
|
|
404
|
+
|
|
405
|
+
async def request(
|
|
406
|
+
self,
|
|
407
|
+
method: str,
|
|
408
|
+
url: str,
|
|
409
|
+
*,
|
|
410
|
+
params: dict[str, Any] | None = None,
|
|
411
|
+
headers: dict[str, Any] | None = None,
|
|
412
|
+
data: Any = None,
|
|
413
|
+
json: Any = None,
|
|
414
|
+
**kwargs: Any,
|
|
415
|
+
) -> R:
|
|
416
|
+
ctx = self._create_context(method, url, params, headers, data, json, **kwargs)
|
|
417
|
+
return await self._request_with_retry(ctx)
|
|
418
|
+
|
|
419
|
+
async def get(self, url: str, **kwargs: Any) -> R:
|
|
420
|
+
"""Make a GET request."""
|
|
421
|
+
return await self.request("GET", url, **kwargs)
|
|
422
|
+
|
|
423
|
+
async def post(self, url: str, **kwargs: Any) -> R:
|
|
424
|
+
"""Make a POST request."""
|
|
425
|
+
return await self.request("POST", url, **kwargs)
|
|
426
|
+
|
|
427
|
+
async def put(self, url: str, **kwargs: Any) -> R:
|
|
428
|
+
"""Make a PUT request."""
|
|
429
|
+
return await self.request("PUT", url, **kwargs)
|
|
430
|
+
|
|
431
|
+
async def delete(self, url: str, **kwargs: Any) -> R:
|
|
432
|
+
"""Make a DELETE request."""
|
|
433
|
+
return await self.request("DELETE", url, **kwargs)
|
|
434
|
+
|
|
435
|
+
async def patch(self, url: str, **kwargs: Any) -> R:
|
|
436
|
+
"""Make a PATCH request."""
|
|
437
|
+
return await self.request("PATCH", url, **kwargs)
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
def merge_tuples[T](t1: tuple[T, ...], t2: tuple[T, ...]) -> tuple[T, ...]:
|
|
441
|
+
return t1 + t2
|
|
442
|
+
# ============================================================================
|
|
443
|
+
# Legacy compatibility aliases
|
|
444
|
+
# ============================================================================
|
|
445
|
+
|
|
446
|
+
# Old Handler protocol - kept for backwards compatibility but deprecated
|
|
447
|
+
from typing import Protocol
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
class Handler[**P, T](Protocol):
|
|
451
|
+
"""
|
|
452
|
+
DEPRECATED: Use Middleware type instead.
|
|
453
|
+
|
|
454
|
+
Old handler protocol for backwards compatibility.
|
|
455
|
+
"""
|
|
456
|
+
|
|
457
|
+
async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
|
|
@@ -4,9 +4,9 @@ from typing import TYPE_CHECKING
|
|
|
4
4
|
from loguru import logger
|
|
5
5
|
|
|
6
6
|
if TYPE_CHECKING:
|
|
7
|
-
|
|
7
|
+
from loguru import Logger
|
|
8
8
|
|
|
9
9
|
|
|
10
10
|
@lru_cache
|
|
11
11
|
def get_logger(logger_name: str | None = None) -> Logger:
|
|
12
|
-
|
|
12
|
+
return logger if logger_name is None else logger.bind(name=logger_name.replace(".", " -> "))
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field
|
|
2
|
+
from sqlalchemy import Engine, create_engine
|
|
3
|
+
from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
|
|
4
|
+
from sqlalchemy.ext.asyncio.session import AsyncSession
|
|
5
|
+
from sqlalchemy.orm import Session, sessionmaker
|
|
6
|
+
|
|
7
|
+
from sotkalib.log import get_logger
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ConnectionTimeoutError(Exception):
|
|
11
|
+
pass
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class DatabaseSettings(BaseModel):
|
|
15
|
+
uri: str = Field(examples=["postgresql://username:password@localhost:5432/database"])
|
|
16
|
+
async_driver: str = "asyncpg"
|
|
17
|
+
echo: bool = False
|
|
18
|
+
pool_size: int = 10
|
|
19
|
+
|
|
20
|
+
@property
|
|
21
|
+
def async_uri(self) -> str:
|
|
22
|
+
return self.uri.replace("postgresql://", "postgresql" + self.async_driver + "://")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Database:
|
|
26
|
+
_sync_engine: Engine | None
|
|
27
|
+
_async_engine: AsyncEngine | None
|
|
28
|
+
_sync_session_factory: sessionmaker = None
|
|
29
|
+
_async_session_factory: async_sessionmaker = None
|
|
30
|
+
|
|
31
|
+
logger = get_logger("sqldb.instance")
|
|
32
|
+
|
|
33
|
+
def __init__(self, settings: DatabaseSettings):
|
|
34
|
+
self.__async_uri = settings.async_uri
|
|
35
|
+
self.__sync_uri = settings.uri
|
|
36
|
+
self.echo = settings.echo
|
|
37
|
+
self.pool_size = settings.pool_size
|
|
38
|
+
|
|
39
|
+
def __enter__(self):
|
|
40
|
+
return self
|
|
41
|
+
|
|
42
|
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
43
|
+
if self._sync_engine:
|
|
44
|
+
self._sync_engine.dispose()
|
|
45
|
+
self.logger.info("closed sync db connection")
|
|
46
|
+
|
|
47
|
+
async def __aenter__(self):
|
|
48
|
+
return self
|
|
49
|
+
|
|
50
|
+
async def __aexit__(self, *args):
|
|
51
|
+
if self._async_engine:
|
|
52
|
+
await self._async_engine.dispose()
|
|
53
|
+
self.logger.info("closed async db connection")
|
|
54
|
+
|
|
55
|
+
def __async_init(self):
|
|
56
|
+
self._async_engine = create_async_engine(
|
|
57
|
+
url=self.__async_uri,
|
|
58
|
+
echo=self.echo,
|
|
59
|
+
pool_size=self.pool_size,
|
|
60
|
+
)
|
|
61
|
+
self._async_session_factory = async_sessionmaker(bind=self._async_engine, expire_on_commit=False)
|
|
62
|
+
self.logger.debug( # noqa: PLE1205
|
|
63
|
+
"successfully initialized async db connection, engine.status = {} sessionmaker.status = {}",
|
|
64
|
+
self._async_engine.name is not None,
|
|
65
|
+
self._async_session_factory is not None,
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
@property
|
|
69
|
+
def async_session(self) -> async_sessionmaker[AsyncSession]:
|
|
70
|
+
if self._async_engine is None or self._async_session_factory is None:
|
|
71
|
+
self.logger.debug("async_sf not found, initializing")
|
|
72
|
+
self.__async_init()
|
|
73
|
+
if self._async_engine is None or self._async_session_factory is None:
|
|
74
|
+
self.logger.error(c := "could not asynchronously connect to pgsql")
|
|
75
|
+
raise ConnectionTimeoutError(c)
|
|
76
|
+
self.logger.debug("success getting (asyncmaker)")
|
|
77
|
+
return self._async_session_factory
|
|
78
|
+
|
|
79
|
+
def __sync_init(self):
|
|
80
|
+
self._sync_engine = create_engine(
|
|
81
|
+
url=self.__sync_uri,
|
|
82
|
+
echo=self.echo,
|
|
83
|
+
pool_size=self.pool_size,
|
|
84
|
+
)
|
|
85
|
+
self._sync_session_factory = sessionmaker(bind=self._sync_engine, expire_on_commit=False)
|
|
86
|
+
self.logger.debug( # noqa
|
|
87
|
+
" -> (__sync_init) successfully initialized sync db connection,\n"
|
|
88
|
+
"\t\t\t\tengine.status = {} sessionmaker.status = {}",
|
|
89
|
+
self._sync_engine.name is not None,
|
|
90
|
+
self._sync_session_factory is not None,
|
|
91
|
+
)
|
|
92
|
+
|
|
93
|
+
@property
|
|
94
|
+
def session(self) -> sessionmaker[Session]:
|
|
95
|
+
if self._sync_engine is None or self._sync_session_factory is None:
|
|
96
|
+
self.logger.debug("not found, initializing...")
|
|
97
|
+
self.__sync_init()
|
|
98
|
+
if self._sync_engine is None or self._sync_session_factory is None:
|
|
99
|
+
self.logger.error(c := "could not synchronously connect to pgsql")
|
|
100
|
+
raise ConnectionTimeoutError(c)
|
|
101
|
+
self.logger.debug("success getting (syncmaker)")
|
|
102
|
+
return self._sync_session_factory
|
|
@@ -1,59 +0,0 @@
|
|
|
1
|
-
from collections.abc import Sequence
|
|
2
|
-
from enum import Enum
|
|
3
|
-
from typing import Any, Literal, Self, overload
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
class UppercaseStrEnumMixin(str, Enum):
|
|
7
|
-
@staticmethod
|
|
8
|
-
def _generate_next_value_(name: str, start: int, count: int, last_values: Sequence) -> str: # noqa
|
|
9
|
-
return name.upper()
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class ValidatorStrEnumMixin(str, Enum):
|
|
13
|
-
@classmethod
|
|
14
|
-
def _normalize_value(cls, val: Any) -> str:
|
|
15
|
-
if isinstance(val, (str, bytes, bytearray)):
|
|
16
|
-
return val.decode("utf-8") if isinstance(val, (bytes, bytearray)) else val
|
|
17
|
-
raise TypeError("value must be str-like")
|
|
18
|
-
|
|
19
|
-
@overload
|
|
20
|
-
@classmethod
|
|
21
|
-
def validate(cls, *, val: Any, req: Literal[False] = False) -> Self | None: ...
|
|
22
|
-
|
|
23
|
-
@overload
|
|
24
|
-
@classmethod
|
|
25
|
-
def validate(cls, *, val: Any, req: Literal[True]) -> Self: ...
|
|
26
|
-
|
|
27
|
-
@classmethod
|
|
28
|
-
def validate(cls, *, val: Any, req: bool = False) -> Self | None:
|
|
29
|
-
if val is None:
|
|
30
|
-
if req:
|
|
31
|
-
raise ValueError("value is None and req=True")
|
|
32
|
-
return None
|
|
33
|
-
normalized = cls._normalize_value(val)
|
|
34
|
-
try:
|
|
35
|
-
return cls(normalized)
|
|
36
|
-
except ValueError as e:
|
|
37
|
-
raise TypeError(f"{normalized=} not valid: {e}") from e
|
|
38
|
-
|
|
39
|
-
@overload
|
|
40
|
-
@classmethod
|
|
41
|
-
def get(cls, val: Any, default: Literal[None] = None) -> Self | None: ...
|
|
42
|
-
|
|
43
|
-
@overload
|
|
44
|
-
@classmethod
|
|
45
|
-
def get(cls, val: Any, default: Self) -> Self: ...
|
|
46
|
-
|
|
47
|
-
@classmethod
|
|
48
|
-
def get(cls, val: Any, default: Self | None = None) -> Self | None:
|
|
49
|
-
try:
|
|
50
|
-
return cls.validate(val=val, req=False) or default
|
|
51
|
-
except (ValueError, TypeError):
|
|
52
|
-
return default
|
|
53
|
-
|
|
54
|
-
def in_(self, *enum_values: Self) -> bool:
|
|
55
|
-
return self in enum_values
|
|
56
|
-
|
|
57
|
-
@classmethod
|
|
58
|
-
def values(cls) -> Sequence[Self]:
|
|
59
|
-
return list(cls)
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
from .exc import APIError, ErrorSchema
|
|
@@ -1,258 +0,0 @@
|
|
|
1
|
-
import asyncio
|
|
2
|
-
import ssl
|
|
3
|
-
from collections.abc import Callable, Mapping, Sequence
|
|
4
|
-
from functools import reduce
|
|
5
|
-
from http import HTTPStatus
|
|
6
|
-
from typing import Any, Literal, Protocol, Self
|
|
7
|
-
|
|
8
|
-
import aiohttp
|
|
9
|
-
from aiohttp import client_exceptions
|
|
10
|
-
from pydantic import BaseModel, ConfigDict, Field
|
|
11
|
-
|
|
12
|
-
from sotkalib.log import get_logger
|
|
13
|
-
|
|
14
|
-
# Upper bound (seconds-scale factor) applied to the exponential backoff term between retries.
MAXIMUM_BACKOFF: float = 120
|
|
15
|
-
|
|
16
|
-
# certifi is optional: when importable, its CA bundle is loaded into the TLS
# context built by _make_ssl_context; otherwise only system defaults are used.
try:
    import certifi
except ImportError:
    certifi = None
|
|
20
|
-
|
|
21
|
-
class RanOutOfAttemptsError(Exception):
    """Raised when a request still fails after the configured number of retries."""

    pass
|
|
23
|
-
|
|
24
|
-
class CriticalStatusError(Exception):
    """Default exception raised for HTTP statuses listed in StatusSettings.to_raise."""

    pass
|
|
26
|
-
|
|
27
|
-
class StatusRetryError(Exception):
    """Internal retry signal for statuses in StatusSettings.to_retry.

    Carries the HTTP status code and the response body text as context.
    """

    status: int
    context: str

    def __init__(self, status: int, context: str) -> None:
        message = f"{status}: {context}"
        super().__init__(message)
        self.status = status
        self.context = context
|
|
35
|
-
|
|
36
|
-
# Builds (positional_args, keyword_args_or_None) for a configured exception type
# from a caught exception plus request context.
type ExcArgFunc = Callable[..., tuple[Sequence[Any], Mapping[str, Any] | None]]
# Same idea for status-based exceptions; it is awaited, so it may be an async callable.
type StatArgFunc = Callable[..., Any]
|
|
38
|
-
|
|
39
|
-
async def default_stat_arg_func(resp: aiohttp.ClientResponse) -> tuple[Sequence[Any], None]:
    """Default StatArgFunc: one positional message with status and body text, no kwargs."""
    return (f"[{resp.status}]; {await resp.text()=}",), None
|
|
41
|
-
|
|
42
|
-
class StatusSettings(BaseModel):
    """Per-status-code policy applied to each HTTP response."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Statuses that abort the request by raising exc_to_raise.
    # NOTE(review): FORBIDDEN is also in to_retry, and _handle_statuses checks
    # to_retry first — with these defaults FORBIDDEN is retried, never raised; confirm intended.
    to_raise: set[HTTPStatus] = Field(default={HTTPStatus.FORBIDDEN})
    # Statuses treated as transient: trigger StatusRetryError and a retry with backoff.
    to_retry: set[HTTPStatus] = Field(default={HTTPStatus.TOO_MANY_REQUESTS, HTTPStatus.FORBIDDEN})
    # Exception type raised for to_raise statuses.
    exc_to_raise: type[Exception] = Field(default=CriticalStatusError)
    # When True, a 404 response yields None instead of the response object.
    not_found_as_none: bool = Field(default=True)
    # Builds exc_to_raise arguments from the response; awaited, so it may be async.
    args_for_exc_func: StatArgFunc = Field(default=default_stat_arg_func)
    # Policy for statuses in neither set (currently informational; see _handle_statuses).
    unspecified: Literal["retry", "raise"] = Field(default="retry")
|
|
51
|
-
|
|
52
|
-
def default_exc_arg_func(exc: Exception, attempt: int, url: str, method: str, **kw) -> tuple[Sequence[Any], None]:
    """Default ExcArgFunc: one positional diagnostic message, no kwargs."""
    return (f"exception {type(exc)}: ({exc=}) {attempt=}; {url=} {method=} {kw=}",), None
|
|
54
|
-
|
|
55
|
-
class ExceptionSettings(BaseModel):
    """Retry/raise policy for exceptions raised while performing a request.

    Bug fix: ``to_raise`` and ``to_retry`` were annotated ``tuple[type[Exception]]``,
    which declares a fixed-length 1-tuple — pydantic validation would reject any
    user-supplied tuple with more than one exception type, even though the
    defaults themselves contain three and five entries. The annotations are now
    variadic (``tuple[type[Exception], ...]``).
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Exceptions that abort immediately (optionally translated via exc_to_raise).
    to_raise: tuple[type[Exception], ...] = Field(
        default=(
            client_exceptions.ConnectionTimeoutError,
            client_exceptions.ClientProxyConnectionError,
            client_exceptions.ContentTypeError,
        ),
    )

    # Exceptions considered transient: the request is retried with backoff.
    to_retry: tuple[type[Exception], ...] = Field(
        default=(
            TimeoutError,
            client_exceptions.ServerDisconnectedError,
            client_exceptions.ClientConnectionResetError,
            client_exceptions.ClientOSError,
            client_exceptions.ClientHttpProxyError,
        ),
    )

    # When set, caught to_raise exceptions are re-raised as this type with
    # arguments built by args_for_exc_func; when None the original is re-raised.
    exc_to_raise: type[Exception] | None = Field(default=None)
    args_for_exc_func: ExcArgFunc = Field(default=default_exc_arg_func)
    # Policy for exceptions in neither tuple: retry them or raise as-is.
    unspecified: Literal["retry", "raise"] = Field(default="retry")
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
class ClientSettings(BaseModel):
    """Top-level configuration for HTTPSession."""

    # Total aiohttp request timeout, seconds.
    timeout: float = Field(default=5.0, gt=0)
    # Backoff delay is base * min(MAXIMUM_BACKOFF, backoff ** attempt).
    base: float = Field(default=1.0, gt=0)
    backoff: float = Field(default=2.0, gt=0)
    maximum_retries: int = Field(default=3, ge=1)

    # When set, called per request to produce a fresh User-Agent header value.
    useragent_factory: Callable[[], str] | None = Field(default=None)

    status_settings: StatusSettings = Field(default_factory=StatusSettings)
    exception_settings: ExceptionSettings = Field(default_factory=ExceptionSettings)

    # Extra kwargs forwarded to aiohttp.ClientSession (connector/trust_env are
    # defaulted in __aenter__ when absent).
    session_kwargs: dict[str, Any] = Field(default_factory=dict)
    # When True, response cookies are merged back into the session cookie jar.
    use_cookies_from_response: bool = Field(default=False)
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
class Handler[**P, T](Protocol):
    """Structural type of the terminal async request callable wrapped by middlewares."""

    async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
# A middleware takes a Handler producing T and returns a Handler producing R.
type Middleware[**P, T, R] = Callable[[Handler[P, T]], Handler[P, R]]
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
def _make_ssl_context(disable_tls13: bool = False) -> ssl.SSLContext:
|
|
104
|
-
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
|
105
|
-
ctx.load_default_certs()
|
|
106
|
-
|
|
107
|
-
if certifi:
|
|
108
|
-
ctx.load_verify_locations(certifi.where())
|
|
109
|
-
|
|
110
|
-
ctx.minimum_version = ssl.TLSVersion.TLSv1_2
|
|
111
|
-
ctx.maximum_version = ssl.TLSVersion.TLSv1_2 if disable_tls13 else ssl.TLSVersion.TLSv1_3
|
|
112
|
-
|
|
113
|
-
ctx.set_ciphers(
|
|
114
|
-
"TLS_AES_256_GCM_SHA384:TLS_AES_128_GCM_SHA256:"
|
|
115
|
-
"TLS_CHACHA20_POLY1305_SHA256:"
|
|
116
|
-
"ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:"
|
|
117
|
-
"ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256"
|
|
118
|
-
)
|
|
119
|
-
|
|
120
|
-
ctx.check_hostname = True
|
|
121
|
-
ctx.verify_mode = ssl.CERT_REQUIRED
|
|
122
|
-
|
|
123
|
-
return ctx
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
class HTTPSession[R = aiohttp.ClientResponse | None]:
    """aiohttp-based session with retries, backoff, status policy and middlewares.

    Type parameter R is the value produced after the middleware chain runs; by
    default it is the raw response (or None for a 404 when not_found_as_none is set).
    """

    config: ClientSettings
    _session: aiohttp.ClientSession
    _middlewares: list[Middleware]

    def __init__(
        self,
        config: ClientSettings | None = None,
        _middlewares: list[Middleware] | None = None,
    ) -> None:
        self.config = config if config is not None else ClientSettings()
        # NOTE(review): annotated as aiohttp.ClientSession but set to None until
        # __aenter__ runs; requests made outside the context manager will fail.
        self._session = None
        self._middlewares = _middlewares or []

    def use[**P, NewR](self, mw: Middleware[P, R, NewR]) -> HTTPSession[NewR]:
        """Return a new session with *mw* appended to the middleware chain (immutable style)."""
        new_session: HTTPSession[NewR] = HTTPSession(
            config=self.config,
            _middlewares=[*self._middlewares, mw],
        )
        return new_session

    async def __aenter__(self) -> Self:
        """Open the underlying aiohttp session with a hardened TLS context."""
        ctx = _make_ssl_context(disable_tls13=False)

        # Respect caller-supplied connector/trust_env; otherwise apply safe defaults.
        if self.config.session_kwargs.get("connector") is None:
            self.config.session_kwargs["connector"] = aiohttp.TCPConnector(ssl=ctx)
        if self.config.session_kwargs.get("trust_env") is None:
            self.config.session_kwargs["trust_env"] = False

        self._session = aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=self.config.timeout),
            **self.config.session_kwargs,
        )

        get_logger("http.client_session").debug(
            f"RetryableClientSession initialized with timeout: {self.config.timeout}"
        )

        return self

    async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any) -> None:
        # Close the aiohttp session if it was opened.
        if self._session:
            await self._session.close()

    async def _handle_statuses(self, response: aiohttp.ClientResponse) -> aiohttp.ClientResponse | None:
        """Apply the configured status policy to *response*.

        Raises StatusRetryError for retryable statuses, the configured exception
        for fatal ones; returns None for 404 when not_found_as_none is set,
        otherwise returns the response unchanged.
        """
        sc = response.status
        exc, argfunc = self.config.status_settings.exc_to_raise, self.config.status_settings.args_for_exc_func
        if self.config.use_cookies_from_response:
            self._session.cookie_jar.update_cookies(response.cookies)
        # to_retry is checked before to_raise: a status present in both sets is retried.
        if sc in self.config.status_settings.to_retry:
            raise StatusRetryError(status=sc, context=(await response.text()))
        elif sc in self.config.status_settings.to_raise:
            a, kw = await argfunc(response)
            if kw is None:
                raise exc(*a)
            raise exc(*a, **kw)
        elif self.config.status_settings.not_found_as_none and sc == HTTPStatus.NOT_FOUND:
            return None

        return response

    def _get_make_request_func(self) -> Callable[..., Any]:
        """Compose the middleware chain around the raw request + status handling."""
        async def _make_request(*args: Any, **kwargs: Any) -> aiohttp.ClientResponse | None:
            return await self._handle_statuses(await self._session.request(*args, **kwargs))

        # Reversed fold: the first-added middleware ends up outermost.
        return reduce(lambda t, s: s(t), reversed(self._middlewares), _make_request)

    async def _handle_request(
        self,
        method: str,
        url: str,
        make_request_func: Callable[..., Any],
        **kw: Any,
    ) -> R:
        # Inject a fresh User-Agent per request when a factory is configured.
        if self.config.useragent_factory is not None:
            user_agent_header = {"User-Agent": self.config.useragent_factory()}
            kw["headers"] = kw.get("headers", {}) | user_agent_header

        return await make_request_func(method, url, **kw)

    async def _handle_retry(self, e: Exception, attempt: int, url: str, method: str, **kws: Any) -> None:
        """Sleep with capped exponential backoff, or give up on the final attempt."""
        if attempt == self.config.maximum_retries:
            raise RanOutOfAttemptsError(f"failed after {self.config.maximum_retries} retries: {type(e)} {e}") from e

        await asyncio.sleep(self.config.base * min(MAXIMUM_BACKOFF, self.config.backoff**attempt))

    async def _handle_to_raise(self, e: Exception, attempt: int, url: str, method: str, **kw: Any) -> None:
        """Re-raise *e*, translated to the configured exception type when one is set."""
        if self.config.exception_settings.exc_to_raise is None:
            raise e

        exc, argfunc = self.config.exception_settings.exc_to_raise, self.config.exception_settings.args_for_exc_func

        a, exckw = argfunc(e, attempt, url, method, **kw)
        if exckw is None:
            raise exc(*a) from e

        raise exc(*a, **exckw) from e

    async def _handle_exception(self, e: Exception, attempt: int, url: str, method: str, **kw: Any) -> None:
        """Policy for exceptions listed in neither to_retry nor to_raise."""
        if self.config.exception_settings.unspecified == "raise":
            raise e

        await self._handle_retry(e, attempt, url, method, **kw)

    async def _request_with_retry(self, method: str, url: str, **kw: Any) -> R:
        """Perform *method* *url* through the middleware chain with retry/backoff."""
        _make_request = self._get_make_request_func()
        for attempt in range(self.config.maximum_retries + 1):
            try:
                return await self._handle_request(method, url, _make_request, **kw)
            except self.config.exception_settings.to_retry + (StatusRetryError,) as e:
                await self._handle_retry(e, attempt, url, method, **kw)
            except self.config.exception_settings.to_raise as e:
                await self._handle_to_raise(e, attempt, url, method, **kw)
            except Exception as e:
                await self._handle_exception(e, attempt, url, method, **kw)

        # NOTE(review): this fallback calls _make_request without method/url args;
        # it appears unreachable (the final attempt always raises) — confirm.
        return await _make_request()

    async def get(self, url: str, **kwargs: Any) -> R:
        return await self._request_with_retry("GET", url, **kwargs)

    async def post(self, url: str, **kwargs: Any) -> R:
        return await self._request_with_retry("POST", url, **kwargs)

    async def put(self, url: str, **kwargs: Any) -> R:
        return await self._request_with_retry("PUT", url, **kwargs)

    async def delete(self, url: str, **kwargs: Any) -> R:
        return await self._request_with_retry("DELETE", url, **kwargs)

    async def patch(self, url: str, **kwargs: Any) -> R:
        return await self._request_with_retry("PATCH", url, **kwargs)
|
|
@@ -1,101 +0,0 @@
|
|
|
1
|
-
from pydantic import BaseModel, Field
|
|
2
|
-
from sqlalchemy import Engine, create_engine
|
|
3
|
-
from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
|
|
4
|
-
from sqlalchemy.ext.asyncio.session import AsyncSession
|
|
5
|
-
from sqlalchemy.orm import Session, sessionmaker
|
|
6
|
-
|
|
7
|
-
from sotkalib.log import get_logger
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
# Raised when a lazily-initialized engine/session factory is still unset after init.
class ConnectionTimeoutError(Exception): pass
|
|
11
|
-
|
|
12
|
-
class DatabaseSettings(BaseModel):
    """Connection settings for the Postgres database."""

    # Synchronous SQLAlchemy URI.
    uri: str = Field(examples=[
        "postgresql://username:password@localhost:5432/database"
    ])
    # Async driver name appended to the dialect for the async engine.
    async_driver: str = "asyncpg"
    echo: bool = False
    pool_size: int = 10

    @property
    def async_uri(self) -> str:
        """Return *uri* rewritten for the configured async driver.

        Bug fix: SQLAlchemy async URLs use the "dialect+driver" scheme
        ("postgresql+asyncpg://"). The previous code concatenated without the
        "+", producing "postgresqlasyncpg://", which SQLAlchemy rejects as an
        unknown dialect.
        """
        return self.uri.replace("postgresql://", f"postgresql+{self.async_driver}://")
|
|
23
|
-
|
|
24
|
-
class Database:
    """Lazy holder for sync and async SQLAlchemy engines and session factories."""

    _sync_engine: Engine | None
    _async_engine: AsyncEngine | None
    # NOTE(review): the engine attributes above are annotations only (no default),
    # so __exit__/__aexit__ raise AttributeError when the matching init never ran;
    # the factories below default to None despite non-Optional hints — confirm.
    _sync_session_factory: sessionmaker = None
    _async_session_factory: async_sessionmaker = None

    logger = get_logger("sqldb.instance")

    def __init__(self, settings: DatabaseSettings):
        # Engines are created lazily by the session/async_session properties.
        self.__async_uri = settings.async_uri
        self.__sync_uri = settings.uri
        self.echo = settings.echo
        self.pool_size = settings.pool_size

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Disposes only the sync engine; the async engine is handled by __aexit__.
        if self._sync_engine:
            self._sync_engine.dispose()
            self.logger.info("closed sync db connection")

    async def __aenter__(self):
        return self

    async def __aexit__(self, *args):
        if self._async_engine:
            await self._async_engine.dispose()
            self.logger.info("closed async db connection")

    def __async_init(self):
        """Create the async engine and session factory from the stored settings."""
        self._async_engine = create_async_engine(
            url=self.__async_uri,
            echo=self.echo,
            pool_size=self.pool_size,
        )
        self._async_session_factory = async_sessionmaker(bind=self._async_engine, expire_on_commit=False)
        # NOTE(review): '{}' placeholders with positional args assume a loguru-style
        # logger; stdlib logging uses %-style — confirm get_logger's backend.
        self.logger.debug(  # noqa: PLE1205
            "successfully initialized async db connection, engine.status = {} sessionmaker.status = {}",
            self._async_engine.name is not None,
            self._async_session_factory is not None,
        )

    @property
    def async_session(self) -> async_sessionmaker[AsyncSession]:
        """Return the async session factory, initializing the engine on first use.

        Raises:
            ConnectionTimeoutError: if engine/factory are still unset after init.
        """
        if self._async_engine is None or self._async_session_factory is None:
            self.logger.debug("async_sf not found, initializing")
            self.__async_init()
            # Re-check: __async_init should have populated both attributes.
            if self._async_engine is None or self._async_session_factory is None:
                self.logger.error(c := "could not asynchronously connect to pgsql")
                raise ConnectionTimeoutError(c)
        self.logger.debug("success getting (asyncmaker)")
        return self._async_session_factory

    def __sync_init(self):
        """Create the sync engine and session factory from the stored settings."""
        self._sync_engine = create_engine(
            url=self.__sync_uri,
            echo=self.echo,
            pool_size=self.pool_size,
        )
        self._sync_session_factory = sessionmaker(bind=self._sync_engine, expire_on_commit=False)
        # NOTE(review): same '{}'-placeholder assumption as __async_init — confirm.
        self.logger.debug(  # noqa
            " -> (__sync_init) successfully initialized sync db connection,\n"
            "\t\t\t\tengine.status = {} sessionmaker.status = {}",
            self._sync_engine.name is not None,
            self._sync_session_factory is not None,
        )

    @property
    def session(self) -> sessionmaker[Session]:
        """Return the sync session factory, initializing the engine on first use.

        Raises:
            ConnectionTimeoutError: if engine/factory are still unset after init.
        """
        if self._sync_engine is None or self._sync_session_factory is None:
            self.logger.debug("not found, initializing...")
            self.__sync_init()
            # Re-check: __sync_init should have populated both attributes.
            if self._sync_engine is None or self._sync_session_factory is None:
                self.logger.error(c := "could not synchronously connect to pgsql")
                raise ConnectionTimeoutError(c)
        self.logger.debug("success getting (syncmaker)")
        return self._sync_session_factory
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|