qena-shared-lib 0.1.17__tar.gz → 0.1.18__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/.gitignore +1 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/CHANGELOG.md +21 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/PKG-INFO +4 -1
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/pyproject.toml +20 -1
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/__init__.py +3 -2
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/application.py +4 -4
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/background.py +9 -7
- qena_shared_lib-0.1.18/src/qena_shared_lib/exception_handling.py +409 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/exceptions.py +170 -57
- qena_shared_lib-0.1.18/src/qena_shared_lib/http/__init__.py +90 -0
- qena_shared_lib-0.1.17/src/qena_shared_lib/http.py → qena_shared_lib-0.1.18/src/qena_shared_lib/http/_base.py +36 -36
- qena_shared_lib-0.1.18/src/qena_shared_lib/http/_exception_handlers.py +202 -0
- qena_shared_lib-0.1.18/src/qena_shared_lib/kafka/__init__.py +21 -0
- qena_shared_lib-0.1.18/src/qena_shared_lib/kafka/_base.py +233 -0
- qena_shared_lib-0.1.18/src/qena_shared_lib/kafka/_consumer.py +597 -0
- qena_shared_lib-0.1.18/src/qena_shared_lib/kafka/_exception_handlers.py +124 -0
- qena_shared_lib-0.1.18/src/qena_shared_lib/kafka/_producer.py +133 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/logging.py +17 -13
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/__init__.py +4 -6
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_base.py +68 -132
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_channel.py +2 -4
- qena_shared_lib-0.1.18/src/qena_shared_lib/rabbitmq/_exception_handlers.py +109 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_listener.py +246 -157
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_publisher.py +5 -5
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/_base.py +20 -20
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/logstash/_base.py +2 -2
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/scheduler.py +24 -15
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/security.py +39 -32
- qena_shared_lib-0.1.18/src/qena_shared_lib/utils.py +46 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/conftest.py +7 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_application.py +11 -14
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_background.py +2 -2
- qena_shared_lib-0.1.18/tests/test_kafka.py +1211 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_logstash.py +27 -27
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_rabbitmq.py +514 -103
- qena_shared_lib-0.1.18/tests/test_security.py +1244 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/uv.lock +114 -3
- qena_shared_lib-0.1.17/src/qena_shared_lib/exception_handlers.py +0 -235
- qena_shared_lib-0.1.17/src/qena_shared_lib/rabbitmq/_exception_handlers.py +0 -182
- qena_shared_lib-0.1.17/src/qena_shared_lib/utils.py +0 -44
- qena_shared_lib-0.1.17/tests/test_security.py +0 -1004
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/.pre-commit-config.yaml +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/README.md +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/dependencies/__init__.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/dependencies/http.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/dependencies/miscellaneous.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/py.typed +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/rabbitmq/_pool.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/__init__.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/src/qena_shared_lib/remotelogging/logstash/__init__.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_dependencies.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/test_scheduler.py +0 -0
- {qena_shared_lib-0.1.17 → qena_shared_lib-0.1.18}/tests/utils.py +0 -0
--- qena_shared_lib-0.1.17/CHANGELOG.md
+++ qena_shared_lib-0.1.18/CHANGELOG.md
@@ -1,5 +1,25 @@
 # Changelog

+## [0.1.18] - 2025-10-10
+
+### Added
+
+- Added a kafka client wrapper
+- Added `GenericServiceExceptionFactory`
+
+### Changed
+
+- Rename LoggerProvider to LoggerFactory
+- Renamed log record labels to extra to make it more generalized
+
+### Fixed
+
+- Added check for blocked broker before reply or redeliver
+- Fix infinite check for cause or context to retry message consumption
+- Fix rpc client return type not accepting union type
+- Added validation for method decorator for rpc worker and regular consumer
+
+
 ## [0.1.17] - 2025-07-16

 ### Changed
@@ -52,6 +72,7 @@
 - Added a re-export for rabbitmq channel pool (ChannelPool) class.


+[0.1.18]: https://github.com/Qena-Digital-Lending/qena-shared-kernel/compare/v0.1.17...v0.1.18
 [0.1.17]: https://github.com/Qena-Digital-Lending/qena-shared-kernel/compare/v0.1.16...v0.1.17
 [0.1.16]: https://github.com/Qena-Digital-Lending/qena-shared-kernel/compare/v0.1.15...v0.1.16
 [0.1.15]: https://github.com/Qena-Digital-Lending/qena-shared-kernel/compare/v0.1.14...v0.1.15
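Note: the LoggerProvider → LoggerFactory rename above is the change most likely to touch downstream code. A minimal caller-side sketch of the 0.1.18 shape; the 0.1.17 equivalent (LoggerProvider.get_logger, per the changelog entry) is an assumption since the old API is not shown in this diff, and the "payments" logger name is a placeholder:

from qena_shared_lib.logging import LoggerFactory

# 0.1.17 callers presumably did: LoggerProvider.get_logger("payments")
logger = LoggerFactory.get_logger("payments")
logger.info("payment worker started")

The new name matches the LoggerFactory.get_logger(...) usage visible in the background.py and exception_handling.py hunks below.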
--- qena_shared_lib-0.1.17/PKG-INFO
+++ qena_shared_lib-0.1.18/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qena-shared-lib
-Version: 0.1.17
+Version: 0.1.18
 Summary: A shared tools for other services
 Requires-Python: >=3.10
 Requires-Dist: fastapi[all]==0.115.6
@@ -12,10 +12,13 @@ Requires-Dist: pydantic==2.10.3
 Requires-Dist: starlette==0.41.3
 Requires-Dist: typing-extensions==4.12.2
 Provides-Extra: all
+Requires-Dist: aiokafka==0.12.0; extra == 'all'
 Requires-Dist: cronsim==2.6; extra == 'all'
 Requires-Dist: jwt==1.3.1; extra == 'all'
 Requires-Dist: passlib[bcrypt]==1.7.4; extra == 'all'
 Requires-Dist: pika==1.3.2; extra == 'all'
+Provides-Extra: kafka
+Requires-Dist: aiokafka==0.12.0; extra == 'kafka'
 Provides-Extra: rabbitmq
 Requires-Dist: pika==1.3.2; extra == 'rabbitmq'
 Provides-Extra: scheduler
--- qena_shared_lib-0.1.17/pyproject.toml
+++ qena_shared_lib-0.1.18/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "qena-shared-lib"
-version = "0.1.17"
+version = "0.1.18"
 description = "A shared tools for other services"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -16,6 +16,9 @@ dependencies = [
 ]

 [project.optional-dependencies]
+kafka = [
+    "aiokafka==0.12.0",
+]
 rabbitmq = [
     "pika==1.3.2",
 ]
@@ -27,6 +30,7 @@ security = [
     "passlib[bcrypt]==1.7.4",
 ]
 all = [
+    "aiokafka==0.12.0",
     "cronsim==2.6",
     "jwt==1.3.1",
     "passlib[bcrypt]==1.7.4",
@@ -44,6 +48,8 @@ dev-dependencies = [
     "pytest==8.3.3",
     "testcontainers==4.8.2",
     "pytest-cov==6.0.0",
+    "pytest-profiling==1.8.1",
+    "pytest-env==1.1.5",
 ]

 [tool.ruff]
@@ -96,3 +102,16 @@ explicit_package_bases = true
 disallow_untyped_decorators = false
 disallow_subclassing_any = false
 warn_unused_ignores = false
+
+[tool.pytest.ini_options]
+addopts = "--capture no --profile --cov"
+testpaths = ["tests"]
+env = [
+    "QENA_SHARED_LIB_LOGGING_LOGGER_NAME=qena-shared-lib",
+    "QENA_SHARED_LIB_SECURITY_UNAUTHORIZED_RESPONSE_CODE=0",
+    "QENA_SHARED_LIB_SECURITY_TOKEN_HEADER=x-security-token-header",
+]
+asyncio_default_fixture_loop_scope = "session"
+
+[tool.coverage.run]
+source = ["src/qena_shared_lib"]
--- qena_shared_lib-0.1.17/src/qena_shared_lib/__init__.py
+++ qena_shared_lib-0.1.18/src/qena_shared_lib/__init__.py
@@ -1,5 +1,5 @@
 try:
-    from . import rabbitmq, scheduler, security
+    from . import kafka, rabbitmq, scheduler, security
 except NameError:
     pass
 from . import (
@@ -19,9 +19,10 @@ __all__ = [
     "dependencies",
     "exceptions",
     "http",
+    "kafka",
     "logging",
-    "remotelogging",
     "rabbitmq",
+    "remotelogging",
     "scheduler",
     "security",
     "utils",
--- qena_shared_lib-0.1.17/src/qena_shared_lib/application.py
+++ qena_shared_lib-0.1.18/src/qena_shared_lib/application.py
@@ -7,13 +7,13 @@ from punq import Container, Scope, empty
 from starlette.types import Lifespan
 from typing_extensions import Self

-from .
+from .http import ControllerBase
+from .http._exception_handlers import (
     AbstractHttpExceptionHandler,
-
+    HttpGeneralExceptionHandler,
     HTTPServiceExceptionHandler,
     RequestValidationErrorHandler,
 )
-from .http import ControllerBase

 __all__ = [
     "Builder",
@@ -127,7 +127,7 @@ class Builder:

     def with_default_exception_handlers(self) -> Self:
         self.with_exception_handlers(
-
+            HttpGeneralExceptionHandler,
             HTTPServiceExceptionHandler,
             RequestValidationErrorHandler,
         )
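Note: with_default_exception_handlers() now registers HttpGeneralExceptionHandler from the new http._exception_handlers module in place of the handler that previously came from the removed top-level exception_handlers module. A hedged sketch of the builder usage implied by this hunk; only with_exception_handlers and with_default_exception_handlers are visible here, and constructing Builder with no arguments is an assumption:

from qena_shared_lib.application import Builder

builder = Builder()  # assumed no-arg construction; the real constructor is not shown in this hunk

# wires HttpGeneralExceptionHandler, HTTPServiceExceptionHandler and
# RequestValidationErrorHandler, exactly as the method body above does
builder.with_default_exception_handlers()

# extra handler classes can still be passed through the same call that
# with_default_exception_handlers() delegates to:
# builder.with_exception_handlers(MyCustomHttpExceptionHandler)  # hypothetical handler class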
--- qena_shared_lib-0.1.17/src/qena_shared_lib/background.py
+++ qena_shared_lib-0.1.18/src/qena_shared_lib/background.py
@@ -9,7 +9,7 @@ from uuid import uuid4
 from prometheus_client import Enum as PrometheusEnum
 from starlette.background import BackgroundTask

-from .logging import
+from .logging import LoggerFactory
 from .remotelogging import BaseRemoteLogSender
 from .utils import AsyncEventLoopMixin

@@ -20,7 +20,7 @@ __all__ = [


 class Background(AsyncEventLoopMixin):
-
+    _BACKGROUND_RUNNER_STATE = PrometheusEnum(
         name="background_runner_state",
         documentation="Background runner state",
         states=["running", "stopped"],
@@ -34,7 +34,7 @@ class Background(AsyncEventLoopMixin):
         self._started = False
         self._stopped = False
         self._remote_logger = remote_logger
-        self._logger =
+        self._logger = LoggerFactory.get_logger("background")
         self._tasks: dict[str, Task[Any]] = {}

     async def _task_manager(
@@ -54,8 +54,10 @@ class Background(AsyncEventLoopMixin):

             await self._tasks[task_id]
         except Exception:
-            self._remote_logger.
-                "exception occured
+            self._remote_logger.exception(
+                message=f"exception occured while running background task {task.func.__name__} with id {task_id}",
+                tags=["background", "task_execution_failed", task_id],
+                extra={"serviceType": "background", "taskId": task_id},
             )
         finally:
             self._logger.info("finished running %s", task.func.__name__)
@@ -91,7 +93,7 @@ class Background(AsyncEventLoopMixin):
             raise RuntimeError("background runner already running")

         self.loop.create_task(self._run_tasks())
-        self.
+        self._BACKGROUND_RUNNER_STATE.state("running")

         self._started = True

@@ -101,7 +103,7 @@ class Background(AsyncEventLoopMixin):

         self._stopped = True
         self._queue.put_nowait((None, None))
-        self.
+        self._BACKGROUND_RUNNER_STATE.state("stopped")

     def is_alive(self, task_id: str) -> bool:
         if task_id in self._tasks and not self._tasks[task_id].done():
--- /dev/null
+++ qena_shared_lib-0.1.18/src/qena_shared_lib/exception_handling.py
@@ -0,0 +1,409 @@
+from asyncio import Future, Task, iscoroutinefunction
+from dataclasses import dataclass
+from enum import Enum
+from functools import partial
+from typing import Any, Callable, TypeVar, cast
+
+from prometheus_client import Counter
+from punq import Container, Scope
+from pydantic import ValidationError
+from pydantic.alias_generators import to_snake
+from typing_extensions import Self
+
+from .exceptions import (
+    HTTPServiceError,
+    RabbitMQServiceException,
+    ServiceException,
+    Severity,
+)
+from .logging import LoggerFactory
+from .remotelogging import BaseRemoteLogSender
+from .utils import AsyncEventLoopMixin
+
+__all__ = [
+    "AbstractServiceExceptionHandler",
+    "ExceptionHandlerServiceType",
+    "ExceptionHandlingManager",
+    "GeneralExceptionHandler",
+    "ServiceContext",
+    "ServiceInformation",
+    "ServiceExceptionHandler",
+    "ValidationErrorHandler",
+]
+
+
+ServiceContextDataType = TypeVar("ServiceContextDataType")
+
+
+class ExceptionHandlerServiceType(str, Enum):
+    RABBIT_MQ = "RABBITMQ"
+    HTTP = "HTTP"
+    KAFKA = "KAFKA"
+
+
+class ServiceContext:
+    def add_data(
+        self,
+        data_type: type[ServiceContextDataType],
+        value: ServiceContextDataType,
+    ) -> Self:
+        if getattr(self, "_data", None):
+            self._data = {}
+
+        self._data[data_type] = value
+
+        return self
+
+    def get_data(
+        self, data_type: type[ServiceContextDataType]
+    ) -> ServiceContextDataType | None:
+        if getattr(self, "_data", None) is None:
+            return None
+
+        return cast(
+            dict[type[ServiceContextDataType], ServiceContextDataType],
+            self._data,
+        )[data_type]
+
+    def set_labels(self, labels: dict[str, Any]) -> Self:
+        self._labels = labels
+
+        return self
+
+    def get_labels(self) -> dict[str, Any]:
+        if getattr(self, "_labels", None) is None:
+            raise ValueError("service context labels not set")
+
+        return self._labels
+
+
+@dataclass
+class ServiceInformation:
+    service_type: ExceptionHandlerServiceType
+    tags: list[str]
+    extra: dict[str, str]
+    message: str | None = None
+
+
+class AbstractServiceExceptionHandler:
+    @property
+    def exception(self) -> type[Exception]:
+        raise NotImplementedError()
+
+    def handle(
+        self, service_information: ServiceInformation, exception: BaseException
+    ) -> None:
+        del service_information, exception
+
+        raise NotImplementedError()
+
+
+@dataclass
+class ExceptionHandlerMetadata:
+    exception_handler: AbstractServiceExceptionHandler
+
+    def __post_init__(self) -> None:
+        self._is_async_exception_handler = self._check_async_exception_handler(
+            self.exception_handler
+        )
+
+    def _check_async_exception_handler(
+        self, exception_handler: AbstractServiceExceptionHandler
+    ) -> bool:
+        exception_handler_callable = getattr(
+            exception_handler, "__call__", None
+        )
+
+        if exception_handler_callable is None:
+            raise RuntimeError(
+                "exception handler has no `__call__(ServiceContext, BaseException)` method"
+            )
+
+        return iscoroutinefunction(exception_handler_callable)
+
+    @property
+    def is_async_listener(self) -> bool:
+        return self._is_async_exception_handler
+
+
+class ExceptionHandlingManager(AsyncEventLoopMixin):
+    _HANDLER_EXCEPTIONS_COUNTER_METRICS: dict[
+        ExceptionHandlerServiceType, Counter
+    ] = {}
+
+    def __init__(
+        self,
+        service_type: ExceptionHandlerServiceType,
+        container: Container,
+        remote_logger: BaseRemoteLogSender,
+        label_name: list[str],
+    ):
+        self._service_type = service_type
+        self._container = container
+        self._exception_handlers: dict[
+            type[Exception], ExceptionHandlerMetadata
+        ] = {}
+        self._remote_logger = remote_logger
+        self._exception_handling_done_hook: (
+            Callable[[ServiceContext], None] | None
+        ) = None
+
+        if service_type not in self._HANDLER_EXCEPTIONS_COUNTER_METRICS:
+            self._HANDLER_EXCEPTIONS_COUNTER_METRICS[service_type] = Counter(
+                name=f"{to_snake(service_type.name)}_handled_exceptions",
+                documentation=f"{service_type.name.capitalize()} handled exceptions",
+                labelnames=label_name,
+            )
+
+    def set_exception_handlers(
+        self, *exception_handlers: type[AbstractServiceExceptionHandler]
+    ) -> None:
+        for index, exception_handler in enumerate(exception_handlers):
+            if not isinstance(exception_handler, type) or not issubclass(
+                exception_handler, AbstractServiceExceptionHandler
+            ):
+                raise TypeError(
+                    f"exception handler {index} is {type(exception_handler)}, expected instance of type or subclass of `AbstractServiceExceptionHandler`"
+                )
+
+            self._container.register(
+                service=AbstractServiceExceptionHandler,
+                factory=exception_handler,
+                scope=Scope.singleton,
+            )
+
+    def set_exception_handling_done_hook(
+        self, exception_handling_done_hook: Callable[[ServiceContext], None]
+    ) -> None:
+        if not callable(exception_handling_done_hook):
+            raise ValueError("`exception_handler_done_hook` is not a callable")
+
+        self._exception_handling_done_hook = exception_handling_done_hook
+
+    def resolve_exception_handlers(self) -> None:
+        for exception_handler in self._container.resolve_all(
+            AbstractServiceExceptionHandler
+        ):
+            exception_handler = cast(
+                AbstractServiceExceptionHandler, exception_handler
+            )
+
+            if not callable(exception_handler):
+                raise ValueError(
+                    f"exception handler {exception_handler.__class__.__name__} is not callable"
+                )
+
+            self._exception_handlers[exception_handler.exception] = (
+                ExceptionHandlerMetadata(exception_handler)
+            )
+
+    def submit_exception(
+        self,
+        context: ServiceContext,
+        exception: BaseException,
+    ) -> bool:
+        exception_handler_metadata = None
+
+        for exception_type in type(exception).mro():
+            exception_handler_metadata = self._exception_handlers.get(
+                exception_type
+            )
+
+            if exception_handler_metadata is not None:
+                break
+
+        if exception_handler_metadata is None:
+            return False
+
+        assert callable(exception_handler_metadata.exception_handler)
+
+        if exception_handler_metadata.is_async_listener:
+            self.loop.create_task(
+                exception_handler_metadata.exception_handler(context, exception)
+            ).add_done_callback(
+                partial(self._on_exception_handler_done, context)
+            )
+        else:
+            self.loop.run_in_executor(
+                executor=None,
+                func=partial(
+                    exception_handler_metadata.exception_handler,
+                    context,
+                    exception,
+                ),
+            ).add_done_callback(
+                partial(self._on_exception_handler_done, context)
+            )
+
+        self._HANDLER_EXCEPTIONS_COUNTER_METRICS[self._service_type].labels(
+            *context.get_labels()
+        ).inc()
+
+        return True
+
+    def _on_exception_handler_done(
+        self, context: ServiceContext, task_or_future: Task[Any] | Future[Any]
+    ) -> None:
+        if task_or_future.cancelled():
+            return
+
+        exception = task_or_future.exception()
+        service_information = context.get_data(ServiceInformation)
+
+        if service_information is not None:
+            service_type = service_information.service_type.name.lower()
+            tags = service_information.tags
+            extra = service_information.extra
+        else:
+            service_type = "unknown"
+            tags = ["exception_handling"]
+            extra = {"serviceType": "exception_handling"}
+
+        if exception is not None:
+            self._remote_logger.error(
+                message=f"error occured in {service_type} service exception handler",
+                tags=tags,
+                extra=extra,
+                exception=exception,
+            )
+
+        if self._exception_handling_done_hook is None:
+            return
+
+        try:
+            self._exception_handling_done_hook(context)
+        except:
+            tags.append("exception_handler_done_hook")
+            self._remote_logger.exception(
+                message="error occured while executing `exception_handler_done_hook`",
+                tags=tags,
+                extra=extra,
+            )
+
+
+EXCEPTION_HANDLING_LOGGER_NAME = "exception_handling"
+
+
+class ServiceExceptionHandler(AbstractServiceExceptionHandler):
+    @property
+    def exception(self) -> type[Exception]:
+        return cast(type[Exception], ServiceException)
+
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        self._logger = LoggerFactory.get_logger(EXCEPTION_HANDLING_LOGGER_NAME)
+        self._remote_logger = remote_logger
+
+    def handle(
+        self,
+        service_information: ServiceInformation,
+        exception: BaseException,
+    ) -> None:
+        if not isinstance(exception, ServiceException):
+            self._logger.warning(
+                "%s cannot be handled by handler", exception.__class__.__name__
+            )
+
+            return
+
+        match exception:
+            case HTTPServiceError() as http_service_error:
+                if http_service_error.status_code is not None:
+                    str_status_code = str(http_service_error.status_code)
+                    service_information.extra["statusCode"] = str_status_code
+
+                    service_information.tags.append(str_status_code)
+
+                if http_service_error.response_code is not None:
+                    str_response_code = str(http_service_error.response_code)
+                    service_information.extra["responseCode"] = (
+                        str_response_code
+                    )
+
+                    service_information.tags.append(str_response_code)
+            case RabbitMQServiceException() as rabbitmq_service_exception:
+                str_error_code = str(rabbitmq_service_exception.code)
+                service_information.extra["code"] = str_error_code
+
+                service_information.tags.append(str_error_code)
+
+        if exception.tags:
+            service_information.tags.extend(exception.tags)
+
+        if exception.extra:
+            service_information.extra.update(exception.extra)
+
+        exc_info = (
+            (type(exception), exception, exception.__traceback__)
+            if exception.extract_exc_info
+            else None
+        )
+
+        match exception.severity:
+            case Severity.HIGH:
+                remote_logger_method = self._remote_logger.error
+                logger_method = self._logger.error
+            case Severity.MEDIUM:
+                remote_logger_method = self._remote_logger.warning
+                logger_method = self._logger.warning
+            case _:
+                remote_logger_method = self._remote_logger.info
+                logger_method = self._logger.info
+
+        if exception.remote_logging:
+            remote_logger_method(
+                message=service_information.message or exception.message,
+                tags=service_information.tags,
+                extra=service_information.extra,
+                exception=exception if exception.extract_exc_info else None,
+            )
+        else:
+            logger_method(
+                "[service_type = `%s`] `%s`",
+                service_information.service_type.name.lower(),
+                service_information.message or exception.message,
+                exc_info=exc_info,
+            )
+
+
+class ValidationErrorHandler(AbstractServiceExceptionHandler):
+    @property
+    def exception(self) -> type[Exception]:
+        return cast(type[Exception], ValidationError)
+
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        self._remote_logger = remote_logger
+
+    def handle(
+        self,
+        service_information: ServiceInformation,
+        exception: ValidationError,
+    ) -> None:
+        self._remote_logger.error(
+            message=service_information.message
+            or f"invalid request data for {service_information.service_type.name.lower()} service",
+            tags=service_information.tags,
+            extra=service_information.extra,
+            exception=exception,
+        )
+
+
+class GeneralExceptionHandler(AbstractServiceExceptionHandler):
+    @property
+    def exception(self) -> type[Exception]:
+        return Exception
+
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        self._remote_logger = remote_logger
+
+    def handle(
+        self,
+        service_information: ServiceInformation,
+        exception: BaseException,
+    ) -> None:
+        self._remote_logger.error(
+            message=service_information.message
+            or f"something went wrong while processing data for {service_information.service_type.name.lower()} service",
+            tags=service_information.tags,
+            extra=service_information.extra,
+            exception=exception,
+        )
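Note: ExceptionHandlingManager resolves handlers out of the punq container and dispatches by walking the raised exception's MRO, and ExceptionHandlerMetadata requires each handler to be callable as __call__(ServiceContext, BaseException); handlers may be sync or async, with sync ones run in an executor. A minimal custom-handler sketch under those rules; the built-in handlers that actually implement __call__ live in the http, kafka and rabbitmq _exception_handlers modules not shown here, so the __call__ body below is an assumption:

from qena_shared_lib.exception_handling import (
    AbstractServiceExceptionHandler,
    ServiceContext,
    ServiceInformation,
)


class TimeoutErrorHandler(AbstractServiceExceptionHandler):
    # hypothetical handler: catches TimeoutError and its subclasses via the MRO lookup
    @property
    def exception(self) -> type[Exception]:
        return TimeoutError

    def __call__(
        self, context: ServiceContext, exception: BaseException
    ) -> None:
        # the manager passes the ServiceContext given to submit_exception();
        # ServiceInformation is assumed to have been attached via context.add_data(...)
        service_information = context.get_data(ServiceInformation)

        if service_information is not None:
            self.handle(service_information, exception)

    def handle(
        self,
        service_information: ServiceInformation,
        exception: BaseException,
    ) -> None:
        service_information.tags.append("timeout")

Registration would go through ExceptionHandlingManager.set_exception_handlers(TimeoutErrorHandler) followed by resolve_exception_handlers(), per the methods in the file above.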