sotkalib 0.0.4.post1__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff compares the contents of two package versions as publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
sotkalib/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from . import config, enum, exceptions, http, log, redis, sqla
 
- __all__ = ["config", "enum", "exceptions", "http", "log", "redis", "sqla"]
+ __all__ = ["config", "enum", "exceptions", "http", "log", "redis", "sqla"]
sotkalib/config/__init__.py CHANGED
@@ -1,6 +1,6 @@
  from .struct import AppSettings, SettingsField
 
  __all__ = [
- "AppSettings",
- "SettingsField",
+ "AppSettings",
+ "SettingsField",
  ]
sotkalib/config/field.py CHANGED
@@ -6,6 +6,6 @@ type AllowedTypes = int | float | complex | str | bool | None
 
  @dataclass(init=True, slots=True, frozen=True)
  class SettingsField[T: AllowedTypes]:
- default: T | None = None
- factory: Callable[[], T] | str | None = None
- nullable: bool = False
+ default: T | None = None
+ factory: Callable[[], T] | str | None = None
+ nullable: bool = False
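For orientation, a minimal sketch of constructing the frozen `SettingsField` declarations above. The field names `port` and `api_token` are hypothetical; only `default`, `factory`, and `nullable` come from `field.py`:

```python
from sotkalib.config import SettingsField

# Hypothetical settings fields; default/factory/nullable are the dataclass fields shown above.
port: SettingsField[int] = SettingsField(default=8080)
api_token: SettingsField[str] = SettingsField(factory=lambda: "dev-token", nullable=True)

print(port.default, api_token.nullable)  # 8080 True
```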
sotkalib/enum/mixins.py CHANGED
@@ -4,56 +4,56 @@ from typing import Any, Literal, Self, overload
 
 
  class UppercaseStrEnumMixin(str, Enum):
- @staticmethod
- def _generate_next_value_(name: str, start: int, count: int, last_values: Sequence) -> str: # noqa
- return name.upper()
+ @staticmethod
+ def _generate_next_value_(name: str, start: int, count: int, last_values: Sequence) -> str: # noqa
+ return name.upper()
 
 
  class ValidatorStrEnumMixin(str, Enum):
- @classmethod
- def _normalize_value(cls, val: Any) -> str:
- if isinstance(val, (str, bytes, bytearray)):
- return val.decode("utf-8") if isinstance(val, (bytes, bytearray)) else val
- raise TypeError("value must be str-like")
-
- @overload
- @classmethod
- def validate(cls, *, val: Any, req: Literal[False] = False) -> Self | None: ...
-
- @overload
- @classmethod
- def validate(cls, *, val: Any, req: Literal[True]) -> Self: ...
-
- @classmethod
- def validate(cls, *, val: Any, req: bool = False) -> Self | None:
- if val is None:
- if req:
- raise ValueError("value is None and req=True")
- return None
- normalized = cls._normalize_value(val)
- try:
- return cls(normalized)
- except ValueError as e:
- raise TypeError(f"{normalized=} not valid: {e}") from e
-
- @overload
- @classmethod
- def get(cls, val: Any, default: Literal[None] = None) -> Self | None: ...
-
- @overload
- @classmethod
- def get(cls, val: Any, default: Self) -> Self: ...
-
- @classmethod
- def get(cls, val: Any, default: Self | None = None) -> Self | None:
- try:
- return cls.validate(val=val, req=False) or default
- except (ValueError, TypeError):
- return default
-
- def in_(self, *enum_values: Self) -> bool:
- return self in enum_values
-
- @classmethod
- def values(cls) -> Sequence[Self]:
- return list(cls)
+ @classmethod
+ def _normalize_value(cls, val: Any) -> str:
+ if isinstance(val, (str, bytes, bytearray)):
+ return val.decode("utf-8") if isinstance(val, (bytes, bytearray)) else val
+ raise TypeError("value must be str-like")
+
+ @overload
+ @classmethod
+ def validate(cls, *, val: Any, req: Literal[False] = False) -> Self | None: ...
+
+ @overload
+ @classmethod
+ def validate(cls, *, val: Any, req: Literal[True]) -> Self: ...
+
+ @classmethod
+ def validate(cls, *, val: Any, req: bool = False) -> Self | None:
+ if val is None:
+ if req:
+ raise ValueError("value is None and req=True")
+ return None
+ normalized = cls._normalize_value(val)
+ try:
+ return cls(normalized)
+ except ValueError as e:
+ raise TypeError(f"{normalized=} not valid: {e}") from e
+
+ @overload
+ @classmethod
+ def get(cls, val: Any, default: Literal[None] = None) -> Self | None: ...
+
+ @overload
+ @classmethod
+ def get(cls, val: Any, default: Self) -> Self: ...
+
+ @classmethod
+ def get(cls, val: Any, default: Self | None = None) -> Self | None:
+ try:
+ return cls.validate(val=val, req=False) or default
+ except (ValueError, TypeError):
+ return default
+
+ def in_(self, *enum_values: Self) -> bool:
+ return self in enum_values
+
+ @classmethod
+ def values(cls) -> Sequence[Self]:
+ return list(cls)
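A short usage sketch of the two mixins. The `Channel` and `Status` enums below are hypothetical; the `validate`, `get`, `in_`, and `values` signatures are taken from the diff:

```python
from enum import auto

from sotkalib.enum.mixins import UppercaseStrEnumMixin, ValidatorStrEnumMixin


# Hypothetical enums built on the mixins shown above.
class Channel(UppercaseStrEnumMixin):
    EMAIL = auto()  # value becomes "EMAIL" via _generate_next_value_
    SMS = auto()


class Status(ValidatorStrEnumMixin):
    ACTIVE = "active"
    BLOCKED = "blocked"


assert Channel.EMAIL.value == "EMAIL"
assert Status.validate(val=b"active", req=True) is Status.ACTIVE  # bytes are decoded
assert Status.validate(val=None, req=False) is None
assert Status.get("unknown", Status.BLOCKED) is Status.BLOCKED    # invalid value falls back to default
assert Status.ACTIVE.in_(Status.ACTIVE, Status.BLOCKED)
print(Status.values())  # [<Status.ACTIVE: 'active'>, <Status.BLOCKED: 'blocked'>]
```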
sotkalib/exceptions/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from . import api, handlers
 
- __all__ = ["api", "handlers"]
+ __all__ = ["api", "handlers"]
sotkalib/exceptions/api/__init__.py CHANGED
@@ -1 +1 @@
- from .exc import APIError, ErrorSchema
+ from .exc import APIError, ErrorSchema
sotkalib/exceptions/handlers/__init__.py CHANGED
@@ -1,4 +1,4 @@
  from .args_incl_error import ArgsIncludedError
  from .core import aexception_handler, exception_handler
 
- __all__ = ["aexception_handler", "exception_handler", "ArgsIncludedError"]
+ __all__ = ["aexception_handler", "exception_handler", "ArgsIncludedError"]
sotkalib/exceptions/handlers/args_incl_error.py CHANGED
@@ -11,5 +11,5 @@ class ArgsIncludedError(Exception):
  args, _, _, values = inspect.getargvalues(frame)
  f_locals = frame.f_locals
  args_with_values = {arg: values[arg] for arg in args}
- stack_args_to_exc.append(args_with_values | f_locals | { "frame_name": frame.f_code.co_name })
+ stack_args_to_exc.append(args_with_values | f_locals | {"frame_name": frame.f_code.co_name})
  super().__init__(*_args, *stack_args_to_exc)
sotkalib/http/__init__.py CHANGED
@@ -1,17 +1,29 @@
  from .client_session import (
  ClientSettings,
+ # Exceptions
+ CriticalStatusError,
  ExceptionSettings,
  Handler,
  HTTPSession,
  Middleware,
+ Next,
+ RanOutOfAttemptsError,
+ RequestContext,
+ StatusRetryError,
  StatusSettings,
  )
 
  __all__ = (
  "HTTPSession",
+ "RequestContext",
  "ExceptionSettings",
  "StatusSettings",
  "ClientSettings",
  "Handler",
  "Middleware",
+ "Next",
+ # Exceptions
+ "CriticalStatusError",
+ "RanOutOfAttemptsError",
+ "StatusRetryError",
  )
sotkalib/http/client_session.py CHANGED
@@ -1,9 +1,10 @@
  import asyncio
  import ssl
- from collections.abc import Callable, Mapping, Sequence
- from functools import reduce
+ import time
+ from collections.abc import Awaitable, Callable, Mapping, Sequence
+ from dataclasses import dataclass, field
  from http import HTTPStatus
- from typing import Any, Literal, Protocol, Self
+ from typing import Any, Literal, Self
 
  import aiohttp
  from aiohttp import client_exceptions
@@ -18,12 +19,15 @@ try:
  except ImportError:
  certifi = None
 
+
  class RanOutOfAttemptsError(Exception):
  pass
 
+
  class CriticalStatusError(Exception):
  pass
 
+
  class StatusRetryError(Exception):
  status: int
  context: str
@@ -33,12 +37,88 @@ class StatusRetryError(Exception):
  self.status = status
  self.context = context
 
+
+ @dataclass
+ class RequestContext:
+ method: str
+ url: str
+ params: dict[str, Any] | None = None
+ headers: dict[str, Any] | None = None
+ data: Any = None
+ json: Any = None
+ kwargs: dict[str, Any] = field(default_factory=dict)
+
+ attempt: int = 0
+ max_attempts: int = 1
+
+ response: aiohttp.ClientResponse | None = None
+ response_body: Any = None
+ response_text: str | None = None
+ response_json: Any = None
+
+ started_at: float | None = None
+ finished_at: float | None = None
+ attempt_started_at: float | None = None
+
+ errors: list[Exception] = field(default_factory=list)
+ last_error: Exception | None = None
+
+ state: dict[str, Any] = field(default_factory=dict)
+
+ @property
+ def elapsed(self) -> float | None:
+ if self.started_at is None:
+ return None
+ end = self.finished_at if self.finished_at else time.monotonic()
+ return end - self.started_at
+
+ @property
+ def attempt_elapsed(self) -> float | None:
+ if self.attempt_started_at is None:
+ return None
+ return time.monotonic() - self.attempt_started_at
+
+ @property
+ def is_retry(self) -> bool:
+ return self.attempt > 0
+
+ @property
+ def status(self) -> int | None:
+ return self.response.status if self.response else None
+
+ def merge_headers(self, headers: dict[str, str]) -> None:
+ if self.headers is None:
+ self.headers = {}
+ self.headers.update(headers)
+
+ def to_request_kwargs(self) -> dict[str, Any]:
+ kw = dict(self.kwargs)
+ if self.params is not None:
+ kw["params"] = self.params
+ if self.headers is not None:
+ kw["headers"] = self.headers
+ if self.data is not None:
+ kw["data"] = self.data
+ if self.json is not None:
+ kw["json"] = self.json
+ return kw
+
+
+ type Next[T] = Callable[[RequestContext], Awaitable[T]]
+ type Middleware[T, R] = Callable[[RequestContext, Next[T]], Awaitable[R]]
+
  type ExcArgFunc = Callable[..., tuple[Sequence[Any], Mapping[str, Any] | None]]
  type StatArgFunc = Callable[..., Any]
 
+
  async def default_stat_arg_func(resp: aiohttp.ClientResponse) -> tuple[Sequence[Any], None]:
  return (f"[{resp.status}]; {await resp.text()=}",), None
 
+
+ def default_exc_arg_func(exc: Exception, attempt: int, url: str, method: str, **kw) -> tuple[Sequence[Any], None]:
+ return (f"exception {type(exc)}: ({exc=}) {attempt=}; {url=} {method=} {kw=}",), None
+
+
  class StatusSettings(BaseModel):
  model_config = ConfigDict(arbitrary_types_allowed=True)
 
@@ -49,13 +129,11 @@ class StatusSettings(BaseModel):
  args_for_exc_func: StatArgFunc = Field(default=default_stat_arg_func)
  unspecified: Literal["retry", "raise"] = Field(default="retry")
 
- def default_exc_arg_func(exc: Exception, attempt: int, url: str, method: str, **kw) -> tuple[Sequence[Any], None]:
- return (f"exception {type(exc)}: ({exc=}) {attempt=}; {url=} {method=} {kw=}",), None
 
  class ExceptionSettings(BaseModel):
  model_config = ConfigDict(arbitrary_types_allowed=True)
 
- to_raise: tuple[type[Exception]] = Field(
+ to_raise: tuple[type[Exception], ...] = Field(
  default=(
  client_exceptions.ConnectionTimeoutError,
  client_exceptions.ClientProxyConnectionError,
@@ -63,7 +141,7 @@ class ExceptionSettings(BaseModel):
  ),
  )
 
- to_retry: tuple[type[Exception]] = Field(
+ to_retry: tuple[type[Exception], ...] = Field(
  default=(
  TimeoutError,
  client_exceptions.ServerDisconnectedError,
@@ -93,11 +171,9 @@ class ClientSettings(BaseModel):
  use_cookies_from_response: bool = Field(default=False)
 
 
- class Handler[**P, T](Protocol):
- async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
-
-
- type Middleware[**P, T, R] = Callable[[Handler[P, T]], Handler[P, R]]
+ # ============================================================================
+ # SSL Context
+ # ============================================================================
 
 
  def _make_ssl_context(disable_tls13: bool = False) -> ssl.SSLContext:
@@ -123,136 +199,259 @@ def _make_ssl_context(disable_tls13: bool = False) -> ssl.SSLContext:
  return ctx
 
 
+ # ============================================================================
+ # HTTP Session
+ # ============================================================================
+
 
  class HTTPSession[R = aiohttp.ClientResponse | None]:
  config: ClientSettings
- _session: aiohttp.ClientSession
- _middlewares: list[Middleware]
+ _session: aiohttp.ClientSession | None
+ _middlewares: list[Middleware[Any, Any]]
+ _logger: Any
 
  def __init__(
  self,
  config: ClientSettings | None = None,
- _middlewares: list[Middleware] | None = None,
+ _middlewares: list[Middleware[Any, Any]] | None = None,
  ) -> None:
  self.config = config if config is not None else ClientSettings()
  self._session = None
  self._middlewares = _middlewares or []
+ self._logger = get_logger("http.client_session")
 
- def use[**P, NewR](self, mw: Middleware[P, R, NewR]) -> HTTPSession[NewR]:
- new_session: HTTPSession[NewR] = HTTPSession(
+ def use[NewR](self, middleware: Middleware[R, NewR]) -> HTTPSession[NewR]:
+ return HTTPSession[NewR](
  config=self.config,
- _middlewares=[*self._middlewares, mw],
+ _middlewares=[*self._middlewares, middleware],
  )
- return new_session
 
  async def __aenter__(self) -> Self:
  ctx = _make_ssl_context(disable_tls13=False)
 
- if self.config.session_kwargs.get("connector") is None:
- self.config.session_kwargs["connector"] = aiohttp.TCPConnector(ssl=ctx)
- if self.config.session_kwargs.get("trust_env") is None:
- self.config.session_kwargs["trust_env"] = False
+ session_kwargs = dict(self.config.session_kwargs)
+ if session_kwargs.get("connector") is None:
+ session_kwargs["connector"] = aiohttp.TCPConnector(ssl=ctx)
+ if session_kwargs.get("trust_env") is None:
+ session_kwargs["trust_env"] = False
 
  self._session = aiohttp.ClientSession(
  timeout=aiohttp.ClientTimeout(total=self.config.timeout),
- **self.config.session_kwargs,
- )
-
- get_logger("http.client_session").debug(
- f"RetryableClientSession initialized with timeout: {self.config.timeout}"
+ **session_kwargs,
  )
 
+ self._logger.debug(f"HTTPSession initialized with timeout: {self.config.timeout}")
  return self
 
- async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any) -> None:
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: Any,
+ ) -> None:
  if self._session:
  await self._session.close()
 
- async def _handle_statuses(self, response: aiohttp.ClientResponse) -> aiohttp.ClientResponse | None:
- sc = response.status
- exc, argfunc = self.config.status_settings.exc_to_raise, self.config.status_settings.args_for_exc_func
- if self.config.use_cookies_from_response:
+ def _build_pipeline(self) -> Next[R]:
+ """Build the middleware pipeline with the core request at the end."""
+
+ async def core_request(ctx: RequestContext) -> aiohttp.ClientResponse | None:
+ """The innermost handler that actually makes the HTTP request."""
+ return await self._execute_request(ctx)
+
+ pipeline: Next[Any] = core_request
+ for middleware in reversed(self._middlewares):
+ pipeline = (lambda mw, nxt: lambda c: mw(c, nxt))(middleware, pipeline)
+
+ return pipeline
+
+ async def _execute_request(self, ctx: RequestContext) -> aiohttp.ClientResponse | None:
+ """Execute the actual HTTP request and handle status codes."""
+ if self._session is None:
+ raise RuntimeError("HTTPSession must be used as async context manager")
+
+ response = await self._session.request(ctx.method, ctx.url, **ctx.to_request_kwargs())
+ ctx.response = response
+
+ return await self._handle_status(ctx, response)
+
+ async def _handle_status(
+ self,
+ ctx: RequestContext,
+ response: aiohttp.ClientResponse,
+ ) -> aiohttp.ClientResponse | None:
+ """Handle HTTP status codes according to settings."""
+ status = response.status
+ settings = self.config.status_settings
+
+ if self.config.use_cookies_from_response and self._session:
  self._session.cookie_jar.update_cookies(response.cookies)
- if sc in self.config.status_settings.to_retry:
- raise StatusRetryError(status=sc, context=(await response.text()))
- elif sc in self.config.status_settings.to_raise:
- a, kw = await argfunc(response)
- if kw is None:
- raise exc(*a)
- raise exc(*a, **kw)
- elif self.config.status_settings.not_found_as_none and sc == HTTPStatus.NOT_FOUND:
+
+ if HTTPStatus(status) in settings.to_retry:
+ text = await response.text()
+ ctx.response_text = text
+ raise StatusRetryError(status=status, context=text)
+
+ if HTTPStatus(status) in settings.to_raise:
+ exc_cls = settings.exc_to_raise
+ args, kwargs = await settings.args_for_exc_func(response)
+ if kwargs is None:
+ raise exc_cls(*args)
+ raise exc_cls(*args, **kwargs)
+
+ if settings.not_found_as_none and status == HTTPStatus.NOT_FOUND:
  return None
 
  return response
 
- def _get_make_request_func(self) -> Callable[..., Any]:
- async def _make_request(*args: Any, **kwargs: Any) -> aiohttp.ClientResponse | None:
- return await self._handle_statuses(await self._session.request(*args, **kwargs))
 
- return reduce(lambda t, s: s(t), reversed(self._middlewares), _make_request)
+ async def _request_with_retry(self, ctx: RequestContext) -> R:
+ """Execute request with retry logic."""
+ ctx.started_at = time.monotonic()
+ ctx.max_attempts = self.config.maximum_retries + 1
 
- async def _handle_request(
- self,
- method: str,
- url: str,
- make_request_func: Callable[..., Any],
- **kw: Any,
- ) -> R:
- if self.config.useragent_factory is not None:
- user_agent_header = {"User-Agent": self.config.useragent_factory()}
- kw["headers"] = kw.get("headers", {}) | user_agent_header
+ pipeline = self._build_pipeline()
+
+ for attempt in range(ctx.max_attempts):
+ ctx.attempt = attempt
+ ctx.attempt_started_at = time.monotonic()
+ ctx.response = None
 
- return await make_request_func(method, url, **kw)
+ try:
+ result = await pipeline(ctx)
+ ctx.finished_at = time.monotonic()
+ return result
 
- async def _handle_retry(self, e: Exception, attempt: int, url: str, method: str, **kws: Any) -> None:
- if attempt == self.config.maximum_retries:
- raise RanOutOfAttemptsError(f"failed after {self.config.maximum_retries} retries: {type(e)} {e}") from e
+ except merge_tuples(self.config.exception_settings.to_retry, (StatusRetryError,)) as e:
+ ctx.errors.append(e)
+ ctx.last_error = e
+ await self._handle_retry(ctx, e)
 
- await asyncio.sleep(self.config.base * min(MAXIMUM_BACKOFF, self.config.backoff**attempt))
+ except self.config.exception_settings.to_raise as e:
+ ctx.errors.append(e)
+ ctx.last_error = e
+ ctx.finished_at = time.monotonic()
+ await self._handle_to_raise(ctx, e)
 
- async def _handle_to_raise(self, e: Exception, attempt: int, url: str, method: str, **kw: Any) -> None:
- if self.config.exception_settings.exc_to_raise is None:
- raise e
+ except Exception as e:
+ ctx.errors.append(e)
+ ctx.last_error = e
+ await self._handle_exception(ctx, e)
 
- exc, argfunc = self.config.exception_settings.exc_to_raise, self.config.exception_settings.args_for_exc_func
+ ctx.finished_at = time.monotonic()
+ raise RanOutOfAttemptsError(
+ f"failed after {self.config.maximum_retries} retries: {type(ctx.last_error).__name__}: {ctx.last_error}"
+ )
 
- a, exckw = argfunc(e, attempt, url, method, **kw)
- if exckw is None:
- raise exc(*a) from e
+ async def _handle_retry(self, ctx: RequestContext, e: Exception) -> None:
+ if ctx.attempt >= self.config.maximum_retries:
+ raise RanOutOfAttemptsError(
+ f"failed after {self.config.maximum_retries} retries: {type(e).__name__}: {e}"
+ ) from e
 
- raise exc(*a, **exckw) from e
+ delay = self.config.base * min(MAXIMUM_BACKOFF, self.config.backoff**ctx.attempt)
+ self._logger.debug(
+ f"Retry {ctx.attempt + 1}/{ctx.max_attempts} for {ctx.method} {ctx.url} "
+ f"after {delay:.2f}s (error: {type(e).__name__})"
+ )
+ await asyncio.sleep(delay)
 
- async def _handle_exception(self, e: Exception, attempt: int, url: str, method: str, **kw: Any) -> None:
- if self.config.exception_settings.unspecified == "raise":
+ async def _handle_to_raise(self, ctx: RequestContext, e: Exception) -> None:
+ """Handle exceptions that should be re-raised (possibly wrapped)."""
+ exc_cls = self.config.exception_settings.exc_to_raise
+ if exc_cls is None:
  raise e
 
- await self._handle_retry(e, attempt, url, method, **kw)
+ args, kwargs = self.config.exception_settings.args_for_exc_func(
+ e, ctx.attempt, ctx.url, ctx.method, **ctx.to_request_kwargs()
+ )
+ if kwargs is None:
+ raise exc_cls(*args) from e
+ raise exc_cls(*args, **kwargs) from e
 
- async def _request_with_retry(self, method: str, url: str, **kw: Any) -> R:
- _make_request = self._get_make_request_func()
- for attempt in range(self.config.maximum_retries + 1):
- try:
- return await self._handle_request(method, url, _make_request, **kw)
- except self.config.exception_settings.to_retry + (StatusRetryError,) as e:
- await self._handle_retry(e, attempt, url, method, **kw)
- except self.config.exception_settings.to_raise as e:
- await self._handle_to_raise(e, attempt, url, method, **kw)
- except Exception as e:
- await self._handle_exception(e, attempt, url, method, **kw)
+ async def _handle_exception(self, ctx: RequestContext, e: Exception) -> None:
+ """Handle unspecified exceptions according to settings."""
+ if self.config.exception_settings.unspecified == "raise":
+ raise e
+ await self._handle_retry(ctx, e)
 
- return await _make_request()
+ def _create_context(
+ self,
+ method: str,
+ url: str,
+ params: dict[str, Any] | None = None,
+ headers: dict[str, Any] | None = None,
+ data: Any = None,
+ json: Any = None,
+ **kwargs: Any,
+ ) -> RequestContext:
+ """Create a RequestContext for the given request parameters."""
+ # Apply user agent if configured
+ if self.config.useragent_factory is not None:
+ if headers is None:
+ headers = {}
+ headers["User-Agent"] = self.config.useragent_factory()
+
+ return RequestContext(
+ method=method,
+ url=url,
+ params=params,
+ headers=headers,
+ data=data,
+ json=json,
+ kwargs=kwargs,
+ )
+
+ async def request(
+ self,
+ method: str,
+ url: str,
+ *,
+ params: dict[str, Any] | None = None,
+ headers: dict[str, Any] | None = None,
+ data: Any = None,
+ json: Any = None,
+ **kwargs: Any,
+ ) -> R:
+ ctx = self._create_context(method, url, params, headers, data, json, **kwargs)
+ return await self._request_with_retry(ctx)
 
  async def get(self, url: str, **kwargs: Any) -> R:
- return await self._request_with_retry("GET", url, **kwargs)
+ """Make a GET request."""
+ return await self.request("GET", url, **kwargs)
 
  async def post(self, url: str, **kwargs: Any) -> R:
- return await self._request_with_retry("POST", url, **kwargs)
+ """Make a POST request."""
+ return await self.request("POST", url, **kwargs)
 
  async def put(self, url: str, **kwargs: Any) -> R:
- return await self._request_with_retry("PUT", url, **kwargs)
+ """Make a PUT request."""
+ return await self.request("PUT", url, **kwargs)
 
  async def delete(self, url: str, **kwargs: Any) -> R:
- return await self._request_with_retry("DELETE", url, **kwargs)
+ """Make a DELETE request."""
+ return await self.request("DELETE", url, **kwargs)
 
  async def patch(self, url: str, **kwargs: Any) -> R:
- return await self._request_with_retry("PATCH", url, **kwargs)
+ """Make a PATCH request."""
+ return await self.request("PATCH", url, **kwargs)
+
+
+ def merge_tuples[T](t1: tuple[T, ...], t2: tuple[T, ...]) -> tuple[T, ...]:
+ return t1 + t2
+ # ============================================================================
+ # Legacy compatibility aliases
+ # ============================================================================
+
+ # Old Handler protocol - kept for backwards compatibility but deprecated
+ from typing import Protocol
+
+
+ class Handler[**P, T](Protocol):
+ """
+ DEPRECATED: Use Middleware type instead.
+
+ Old handler protocol for backwards compatibility.
+ """
+
+ async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
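To illustrate the new middleware pipeline, here is a minimal usage sketch. The `log_attempts` middleware and the target URL are hypothetical; `HTTPSession`, `RequestContext`, `Next`, `.use()`, and the verb helpers come from the diff, and the default `ClientSettings` values are assumed to be sufficient:

```python
import asyncio

from sotkalib.http import HTTPSession, Next, RequestContext


# Hypothetical middleware: receives the RequestContext and the next handler in the pipeline.
async def log_attempts(ctx: RequestContext, nxt: Next):
    print(f"attempt {ctx.attempt + 1}/{ctx.max_attempts}: {ctx.method} {ctx.url}")
    response = await nxt(ctx)  # delegate to the next middleware / the core request
    print(f"-> status={ctx.status}, elapsed={ctx.elapsed}")
    return response


async def main() -> None:
    # use() does not mutate the session; it returns a new HTTPSession with the
    # middleware appended, so compose the pipeline before entering the context manager.
    session = HTTPSession().use(log_attempts)
    async with session as http:
        resp = await http.get("https://example.org", headers={"Accept": "text/html"})
        if resp is not None:  # StatusSettings.not_found_as_none may map 404 to None
            print(resp.status)


asyncio.run(main())
```

Because `use()` returns a fresh `HTTPSession` carrying the extended middleware list, the pipeline is fixed before `__aenter__` opens the underlying `aiohttp.ClientSession`.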
sotkalib/log/factory.py CHANGED
@@ -4,9 +4,9 @@ from typing import TYPE_CHECKING
  from loguru import logger
 
  if TYPE_CHECKING:
- from loguru import Logger
+ from loguru import Logger
 
 
  @lru_cache
  def get_logger(logger_name: str | None = None) -> Logger:
- return logger if logger_name is None else logger.bind(name=logger_name.replace(".", " -> "))
+ return logger if logger_name is None else logger.bind(name=logger_name.replace(".", " -> "))
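For reference, a minimal usage sketch of the cached logger factory (the name below is arbitrary; the dot-to-`" -> "` binding comes from `factory.py`):

```python
from sotkalib.log import get_logger

# lru_cache means repeated calls with the same name return the same bound logger;
# dots in the name become " -> " in the bound "name" field.
log = get_logger("http.client_session")
log.debug("session ready")
```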
sotkalib/redis/__init__.py CHANGED
@@ -1,8 +1,4 @@
  from .client import RedisPool, RedisPoolSettings
  from .lock import redis_context_lock
 
- __all__ = [
- "redis_context_lock",
- "RedisPool",
- "RedisPoolSettings"
- ]
+ __all__ = ["redis_context_lock", "RedisPool", "RedisPoolSettings"]
sotkalib/sqla/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from .db import Database, DatabaseSettings
 
- __all__ = ("Database", "DatabaseSettings")
+ __all__ = ("Database", "DatabaseSettings")
sotkalib/sqla/db.py CHANGED
@@ -7,95 +7,96 @@ from sqlalchemy.orm import Session, sessionmaker
  from sotkalib.log import get_logger
 
 
- class ConnectionTimeoutError(Exception): pass
+ class ConnectionTimeoutError(Exception):
+ pass
+
 
  class DatabaseSettings(BaseModel):
- uri: str = Field(examples=[
- "postgresql://username:password@localhost:5432/database"
- ])
- async_driver: str = "asyncpg"
- echo: bool = False
- pool_size: int = 10
+ uri: str = Field(examples=["postgresql://username:password@localhost:5432/database"])
+ async_driver: str = "asyncpg"
+ echo: bool = False
+ pool_size: int = 10
+
+ @property
+ def async_uri(self) -> str:
+ return self.uri.replace("postgresql://", "postgresql" + self.async_driver + "://")
 
- @property
- def async_uri(self) -> str:
- return self.uri.replace("postgresql://", "postgresql" + self.async_driver + "://")
 
  class Database:
- _sync_engine: Engine | None
- _async_engine: AsyncEngine | None
- _sync_session_factory: sessionmaker = None
- _async_session_factory: async_sessionmaker = None
-
- logger = get_logger("sqldb.instance")
-
- def __init__(self, settings: DatabaseSettings):
- self.__async_uri = settings.async_uri
- self.__sync_uri = settings.uri
- self.echo = settings.echo
- self.pool_size = settings.pool_size
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- if self._sync_engine:
- self._sync_engine.dispose()
- self.logger.info("closed sync db connection")
-
- async def __aenter__(self):
- return self
-
- async def __aexit__(self, *args):
- if self._async_engine:
- await self._async_engine.dispose()
- self.logger.info("closed async db connection")
-
- def __async_init(self):
- self._async_engine = create_async_engine(
- url=self.__async_uri,
- echo=self.echo,
- pool_size=self.pool_size,
- )
- self._async_session_factory = async_sessionmaker(bind=self._async_engine, expire_on_commit=False)
- self.logger.debug( # noqa: PLE1205
- "successfully initialized async db connection, engine.status = {} sessionmaker.status = {}",
- self._async_engine.name is not None,
- self._async_session_factory is not None,
- )
-
- @property
- def async_session(self) -> async_sessionmaker[AsyncSession]:
- if self._async_engine is None or self._async_session_factory is None:
- self.logger.debug("async_sf not found, initializing")
- self.__async_init()
- if self._async_engine is None or self._async_session_factory is None:
- self.logger.error(c := "could not asynchronously connect to pgsql")
- raise ConnectionTimeoutError(c)
- self.logger.debug("success getting (asyncmaker)")
- return self._async_session_factory
-
- def __sync_init(self):
- self._sync_engine = create_engine(
- url=self.__sync_uri,
- echo=self.echo,
- pool_size=self.pool_size,
- )
- self._sync_session_factory = sessionmaker(bind=self._sync_engine, expire_on_commit=False)
- self.logger.debug( # noqa
- " -> (__sync_init) successfully initialized sync db connection,\n"
- "\t\t\t\tengine.status = {} sessionmaker.status = {}",
- self._sync_engine.name is not None,
- self._sync_session_factory is not None,
- )
-
- @property
- def session(self) -> sessionmaker[Session]:
- if self._sync_engine is None or self._sync_session_factory is None:
- self.logger.debug("not found, initializing...")
- self.__sync_init()
- if self._sync_engine is None or self._sync_session_factory is None:
- self.logger.error(c := "could not synchronously connect to pgsql")
- raise ConnectionTimeoutError(c)
- self.logger.debug("success getting (syncmaker)")
- return self._sync_session_factory
+ _sync_engine: Engine | None
+ _async_engine: AsyncEngine | None
+ _sync_session_factory: sessionmaker = None
+ _async_session_factory: async_sessionmaker = None
+
+ logger = get_logger("sqldb.instance")
+
+ def __init__(self, settings: DatabaseSettings):
+ self.__async_uri = settings.async_uri
+ self.__sync_uri = settings.uri
+ self.echo = settings.echo
+ self.pool_size = settings.pool_size
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if self._sync_engine:
+ self._sync_engine.dispose()
+ self.logger.info("closed sync db connection")
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, *args):
+ if self._async_engine:
+ await self._async_engine.dispose()
+ self.logger.info("closed async db connection")
+
+ def __async_init(self):
+ self._async_engine = create_async_engine(
+ url=self.__async_uri,
+ echo=self.echo,
+ pool_size=self.pool_size,
+ )
+ self._async_session_factory = async_sessionmaker(bind=self._async_engine, expire_on_commit=False)
+ self.logger.debug( # noqa: PLE1205
+ "successfully initialized async db connection, engine.status = {} sessionmaker.status = {}",
+ self._async_engine.name is not None,
+ self._async_session_factory is not None,
+ )
+
+ @property
+ def async_session(self) -> async_sessionmaker[AsyncSession]:
+ if self._async_engine is None or self._async_session_factory is None:
+ self.logger.debug("async_sf not found, initializing")
+ self.__async_init()
+ if self._async_engine is None or self._async_session_factory is None:
+ self.logger.error(c := "could not asynchronously connect to pgsql")
+ raise ConnectionTimeoutError(c)
+ self.logger.debug("success getting (asyncmaker)")
+ return self._async_session_factory
+
+ def __sync_init(self):
+ self._sync_engine = create_engine(
+ url=self.__sync_uri,
+ echo=self.echo,
+ pool_size=self.pool_size,
+ )
+ self._sync_session_factory = sessionmaker(bind=self._sync_engine, expire_on_commit=False)
+ self.logger.debug( # noqa
+ " -> (__sync_init) successfully initialized sync db connection,\n"
+ "\t\t\t\tengine.status = {} sessionmaker.status = {}",
+ self._sync_engine.name is not None,
+ self._sync_session_factory is not None,
+ )
+
+ @property
+ def session(self) -> sessionmaker[Session]:
+ if self._sync_engine is None or self._sync_session_factory is None:
+ self.logger.debug("not found, initializing...")
+ self.__sync_init()
+ if self._sync_engine is None or self._sync_session_factory is None:
+ self.logger.error(c := "could not synchronously connect to pgsql")
+ raise ConnectionTimeoutError(c)
+ self.logger.debug("success getting (syncmaker)")
+ return self._sync_session_factory
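A minimal usage sketch of the `Database` API shown above (the DSN is a placeholder; `DatabaseSettings`, `async_session`, and the lazy engine initialization are as in the diff):

```python
import asyncio

from sqlalchemy import text

from sotkalib.sqla import Database, DatabaseSettings

# Placeholder connection string; async_uri rewrites the scheme for the configured async driver.
settings = DatabaseSettings(uri="postgresql://user:password@localhost:5432/app")


async def main() -> None:
    # The async engine and sessionmaker are created lazily on first access to async_session.
    async with Database(settings) as db:
        async with db.async_session() as session:
            await session.execute(text("SELECT 1"))


asyncio.run(main())
```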
sotkalib-0.0.4.post1.dist-info/METADATA → sotkalib-0.0.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: sotkalib
- Version: 0.0.4.post1
+ Version: 0.0.5
  Summary:
  Author: alexey
  Author-email: alexey <me@pyrorhythm.dev>
sotkalib-0.0.5.dist-info/RECORD ADDED
@@ -0,0 +1,25 @@
+ sotkalib/__init__.py,sha256=TDJPQ_pOk73TTDkjgNpvn4nvn3siktb1sTogogLCwa0,139
+ sotkalib/config/__init__.py,sha256=_F7rSYgBsSxnNL1JtxrJYlw3lBXyVg0JdsOrxbWtcDA,96
+ sotkalib/config/field.py,sha256=vbKGAEevEmdvyw4eaZprfR2g7ZVAB-5AbYPx0f4uusc,317
+ sotkalib/config/struct.py,sha256=gv1jFrSRytMC6bZTUDOUQf00Zo1iKxQvuTxGv9qnyHI,5679
+ sotkalib/enum/__init__.py,sha256=pKpLPm8fqHO4Et21TWIybIPRiehN1KrmxcBh6hPRsxM,127
+ sotkalib/enum/mixins.py,sha256=CQrgKftnmZSWkNb-56Z9PZ3um0_lHGEsnEYy9GwCmhM,1611
+ sotkalib/exceptions/__init__.py,sha256=r-DwSwJIkuQ2UGAorKvkIVv87n4Yt8H0mk_uxKcBGTw,59
+ sotkalib/exceptions/api/__init__.py,sha256=yTbg2p5mB0-8ZHtzlLL6e0ZkC3LRUZmjmWMxU9Uh8-Q,39
+ sotkalib/exceptions/api/exc.py,sha256=gqx4GrHXUvKcR7tEmJpRqPbDWOT2AgKoyck8-FovQCc,1329
+ sotkalib/exceptions/handlers/__init__.py,sha256=Pz1akT2x3SaRsPezNPYnCoTcejxy4n4_cO4cXRJUBIk,179
+ sotkalib/exceptions/handlers/args_incl_error.py,sha256=rYiBximsXVw1YDUBbdsqeqsfTWxshyX4EdISXWYkPDE,533
+ sotkalib/exceptions/handlers/core.py,sha256=5fhusoxBhUz59TaVWobplBvD-sbkZKBnmmu-fcSyRk4,836
+ sotkalib/http/__init__.py,sha256=TGjBT5pgmNuBO1TpXU_YqSfXdWsGTEHpGO8Qwf2x2w4,457
+ sotkalib/http/client_session.py,sha256=LI1D0i7vezQhYRbSnlUq3q654JcEXZSL9TT_VLBz-ZI,13578
+ sotkalib/log/__init__.py,sha256=xrBx--c8QU5xkb3_n61LuqF8ySUaxlQkHCxHyH_D8aE,58
+ sotkalib/log/factory.py,sha256=oyvHOum8jwLGr_XC0c44VIVLzWQQqHSbQOnf85bP9Co,303
+ sotkalib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ sotkalib/redis/__init__.py,sha256=-0ZXD-cC-Xi6RmAvnrAgU-8Z6g4l88XmXf3kvIgBD9k,154
+ sotkalib/redis/client.py,sha256=0TWe-gYqFiuCjqimCQrVrnTHSM0EROIoJL36M3qwOtQ,1118
+ sotkalib/redis/lock.py,sha256=nEZjIyXmgq3vH-Urs8qXC_N8lmXNho00SaTZ7wJIEIo,2528
+ sotkalib/sqla/__init__.py,sha256=n-I_hoRS-N7XN02yYCTtw6Dh4BBSQRmolS19tEB2KMM,87
+ sotkalib/sqla/db.py,sha256=6ZckKQ8kmRlYrwCAzKidc_JNPwqp38tSGEy3XMGnv08,3376
+ sotkalib-0.0.5.dist-info/WHEEL,sha256=5DEXXimM34_d4Gx1AuF9ysMr1_maoEtGKjaILM3s4w4,80
+ sotkalib-0.0.5.dist-info/METADATA,sha256=0iajfVojpZXymiZhrv_vWzgPVENv36UaAB1KLumy30g,386
+ sotkalib-0.0.5.dist-info/RECORD,,
sotkalib-0.0.4.post1.dist-info/RECORD DELETED
@@ -1,25 +0,0 @@
- sotkalib/__init__.py,sha256=t3dEAlrtHABFRZdF7RM5K2N1fDUTVwUPmBPG4jFdvSY,138
- sotkalib/config/__init__.py,sha256=CSjn02NCnBPO14QOg4OzKI-lTxyKoBxQ4ODsiWamlIM,102
- sotkalib/config/field.py,sha256=596_6DLnUIMN86h2uob9YN5IrHnc7fguWScSbIYsTH4,326
- sotkalib/config/struct.py,sha256=gv1jFrSRytMC6bZTUDOUQf00Zo1iKxQvuTxGv9qnyHI,5679
- sotkalib/enum/__init__.py,sha256=pKpLPm8fqHO4Et21TWIybIPRiehN1KrmxcBh6hPRsxM,127
- sotkalib/enum/mixins.py,sha256=rgXb0eXaBSozrviOMJo1671x4DiN9SELtw3-x6PvhDM,1821
- sotkalib/exceptions/__init__.py,sha256=H2h-yW1o0_X1Z9O-hWgj6h2spFxpzJJQ_N2ITMr600A,58
- sotkalib/exceptions/api/__init__.py,sha256=tIFOiRlbPkgCRNv5OPZ1M98nRnAMkFIuqSK7dZpKMRI,38
- sotkalib/exceptions/api/exc.py,sha256=gqx4GrHXUvKcR7tEmJpRqPbDWOT2AgKoyck8-FovQCc,1329
- sotkalib/exceptions/handlers/__init__.py,sha256=rA6o6_LVa-0TToyPhT1vB_Lz2E0U8EUfHICAYgAUd78,178
- sotkalib/exceptions/handlers/args_incl_error.py,sha256=DB8TMhZdSkwZwEahOJ99zXWknqBTkSC4IKZsZ5psxdg,535
- sotkalib/exceptions/handlers/core.py,sha256=5fhusoxBhUz59TaVWobplBvD-sbkZKBnmmu-fcSyRk4,836
- sotkalib/http/__init__.py,sha256=HxOuGbHbz39MD0ICjOSh4zv-nGPZca9TgxP4rCceamw,241
- sotkalib/http/client_session.py,sha256=OtGTpwr8I7tsgOxRGS8c7tby5sGT_T6B0Zc8HTC9Sxs,8838
- sotkalib/log/__init__.py,sha256=xrBx--c8QU5xkb3_n61LuqF8ySUaxlQkHCxHyH_D8aE,58
- sotkalib/log/factory.py,sha256=Wl8qY2-vimpctRlRYSWPLjC0KgeEGgSSuDDJaxWtvK8,309
- sotkalib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sotkalib/redis/__init__.py,sha256=wv3AIRw3aXXLgTTrt6my8C8jSBl4-R3dWRfGLbGTHng,167
- sotkalib/redis/client.py,sha256=0TWe-gYqFiuCjqimCQrVrnTHSM0EROIoJL36M3qwOtQ,1118
- sotkalib/redis/lock.py,sha256=nEZjIyXmgq3vH-Urs8qXC_N8lmXNho00SaTZ7wJIEIo,2528
- sotkalib/sqla/__init__.py,sha256=fYT8O-bPcdXxJ3QVu3KbypwbZ_hpm8Eq1CuQgjvyNJ8,86
- sotkalib/sqla/db.py,sha256=lxE6XLgX-CowyhfRwTAA_FGJybiihaGWWzlNe8M41CE,3866
- sotkalib-0.0.4.post1.dist-info/WHEEL,sha256=5DEXXimM34_d4Gx1AuF9ysMr1_maoEtGKjaILM3s4w4,80
- sotkalib-0.0.4.post1.dist-info/METADATA,sha256=aHmKbSM7SWEwCVkr-557q0o_Q5_EJ59H5d5F5QMvnbs,392
- sotkalib-0.0.4.post1.dist-info/RECORD,,