qena_shared_lib-0.1.16-py3-none-any.whl → qena_shared_lib-0.1.18-py3-none-any.whl
This diff compares the contents of two package versions as published to their public registry. It is provided for informational purposes only.
- qena_shared_lib/__init__.py +3 -2
- qena_shared_lib/application.py +4 -4
- qena_shared_lib/background.py +9 -7
- qena_shared_lib/exception_handling.py +409 -0
- qena_shared_lib/exceptions.py +170 -57
- qena_shared_lib/http/__init__.py +90 -0
- qena_shared_lib/{http.py → http/_base.py} +36 -36
- qena_shared_lib/http/_exception_handlers.py +202 -0
- qena_shared_lib/kafka/__init__.py +21 -0
- qena_shared_lib/kafka/_base.py +233 -0
- qena_shared_lib/kafka/_consumer.py +597 -0
- qena_shared_lib/kafka/_exception_handlers.py +124 -0
- qena_shared_lib/kafka/_producer.py +133 -0
- qena_shared_lib/logging.py +17 -13
- qena_shared_lib/rabbitmq/__init__.py +4 -6
- qena_shared_lib/rabbitmq/_base.py +68 -132
- qena_shared_lib/rabbitmq/_channel.py +2 -4
- qena_shared_lib/rabbitmq/_exception_handlers.py +69 -142
- qena_shared_lib/rabbitmq/_listener.py +246 -157
- qena_shared_lib/rabbitmq/_publisher.py +5 -5
- qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
- qena_shared_lib/remotelogging/_base.py +20 -20
- qena_shared_lib/remotelogging/logstash/_base.py +2 -2
- qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
- qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
- qena_shared_lib/scheduler.py +24 -15
- qena_shared_lib/security.py +39 -32
- qena_shared_lib/utils.py +13 -11
- {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/METADATA +9 -1
- qena_shared_lib-0.1.18.dist-info/RECORD +38 -0
- qena_shared_lib/exception_handlers.py +0 -235
- qena_shared_lib-0.1.16.dist-info/RECORD +0 -31
- {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/WHEEL +0 -0
qena_shared_lib/http/_exception_handlers.py
@@ -0,0 +1,202 @@
from collections.abc import Iterable
from typing import Any, cast

from fastapi import Request, Response, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from pydantic_core import to_jsonable_python

from ..exception_handling import (
    ExceptionHandlerServiceType,
    GeneralExceptionHandler,
    ServiceExceptionHandler,
    ServiceInformation,
)
from ..exceptions import (
    HTTPServiceError,
    ServiceException,
    Severity,
)
from ..logging import LoggerFactory
from ..remotelogging import BaseRemoteLogSender

__all__ = [
    "AbstractHttpExceptionHandler",
    "HttpGeneralExceptionHandler",
    "HTTPServiceExceptionHandler",
    "RequestValidationErrorHandler",
]

HTTP_EXCEPTION_HANDLER_LOGGER_NAME = "http.exception_handler"


class AbstractHttpExceptionHandler:
    @property
    def exception(self) -> type[Exception]:
        raise NotImplementedError()


class HTTPServiceExceptionHandler(
    ServiceExceptionHandler, AbstractHttpExceptionHandler
):
    def __init__(self, remote_logger: BaseRemoteLogSender):
        super().__init__(remote_logger)

        self._logger = LoggerFactory.get_logger(
            HTTP_EXCEPTION_HANDLER_LOGGER_NAME
        )

    def __call__(
        self, request: Request, exception: ServiceException
    ) -> Response:
        severity = exception.severity or Severity.LOW
        user_agent = request.headers.get("user-agent", "__unknown__")
        tags = [
            "HTTP",
            request.method,
            request.url.path,
            exception.__class__.__name__,
        ]
        extra = {
            "serviceType": "HTTP",
            "method": request.method,
            "path": request.url.path,
            "userAgent": user_agent,
            "exception": exception.__class__.__name__,
        }
        message = exception.message

        if severity is Severity.HIGH:
            message = "something went wrong"

        content: dict[str, Any] = {
            "severity": severity.name,
            "message": message,
        }
        status_code = self._status_code_from_severity(exception.severity)
        headers = None

        if isinstance(exception, HTTPServiceError):
            if exception.body is not None:
                extra_body = to_jsonable_python(exception.body)
                is_updated = False

                try:
                    if isinstance(extra_body, Iterable):
                        content.update(extra_body)

                        is_updated = True
                except:
                    pass

                if not is_updated:
                    content["data"] = extra_body

            if exception.response_code is not None:
                content["code"] = exception.response_code
                str_response_code = str(exception.response_code)
                extra["responseCode"] = str_response_code

                tags.append(str_response_code)

            if exception.corrective_action is not None:
                content["correctiveAction"] = exception.corrective_action

            if exception.status_code is not None:
                status_code = exception.status_code
                str_status_code = str(status_code)
                extra["statusCode"] = str_status_code

                tags.append(str_status_code)

            if exception.headers is not None:
                headers = exception.headers

        self.handle(
            service_information=ServiceInformation(
                service_type=ExceptionHandlerServiceType.HTTP,
                tags=tags,
                extra=extra,
            ),
            exception=exception,
        )

        return JSONResponse(
            content=content,
            status_code=status_code,
            headers=headers,
        )

    def _status_code_from_severity(self, severity: Severity | None) -> int:
        if (
            severity is None
            or severity is Severity.LOW
            or severity is Severity.MEDIUM
        ):
            return cast(int, status.HTTP_400_BAD_REQUEST)

        return cast(int, status.HTTP_500_INTERNAL_SERVER_ERROR)


class RequestValidationErrorHandler(AbstractHttpExceptionHandler):
    @property
    def exception(self) -> type[Exception]:
        return cast(type[Exception], RequestValidationError)

    def __init__(self) -> None:
        self._logger = LoggerFactory.get_logger("http.exception_handler")

    def __call__(
        self, request: Request, error: RequestValidationError
    ) -> Response:
        message = "invalid request data"

        self._logger.warning(
            "\n%s %s\n%s", request.method, request.url.path, message
        )

        return JSONResponse(
            content={
                "severity": Severity.MEDIUM.name,
                "message": message,
                "code": 100,
                "detail": to_jsonable_python(error.errors()),
            },
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        )


class HttpGeneralExceptionHandler(
    GeneralExceptionHandler, AbstractHttpExceptionHandler
):
    def __call__(self, request: Request, exception: Exception) -> Response:
        user_agent = request.get("user-agent", "__unknown__")

        self.handle(
            service_information=ServiceInformation(
                service_type=ExceptionHandlerServiceType.HTTP,
                tags=[
                    "HTTP",
                    request.method,
                    request.url.path,
                    exception.__class__.__name__,
                ],
                extra={
                    "serviceType": "HTTP",
                    "method": request.method,
                    "path": request.url.path,
                    "userAgent": user_agent,
                    "exception": exception.__class__.__name__,
                },
                message=f"something went wrong on endpoint `{request.method} {request.url.path}`",
            ),
            exception=exception,
        )

        return JSONResponse(
            content={
                "severity": Severity.HIGH.name,
                "message": "something went wrong",
            },
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
qena_shared_lib/kafka/__init__.py
@@ -0,0 +1,21 @@
from ._base import KafkaManager, SaslMechanism, SecurityProtocol
from ._consumer import (
    CONSUMER_ATTRIBUTE,
    Consumer,
    ConsumerBase,
    ConsumerContext,
    consume,
    consumer,
)

__all__ = [
    "consume",
    "CONSUMER_ATTRIBUTE",
    "consumer",
    "Consumer",
    "ConsumerBase",
    "ConsumerContext",
    "KafkaManager",
    "SaslMechanism",
    "SecurityProtocol",
]
qena_shared_lib/kafka/_base.py
@@ -0,0 +1,233 @@
from asyncio import gather
from dataclasses import dataclass
from enum import Enum
from typing import Any, cast

from prometheus_client import Enum as PrometheusEnum
from punq import Container, Scope

from ..exception_handling import (
    AbstractServiceExceptionHandler,
    ExceptionHandlerServiceType,
    ExceptionHandlingManager,
)
from ..exceptions import KafkaDisconnectedError
from ..logging import LoggerFactory
from ..remotelogging import BaseRemoteLogSender
from ._consumer import (
    CONSUMER_ATTRIBUTE,
    Consumer,
    ConsumerBase,
    ConsumerConfigs,
)
from ._exception_handlers import (
    KafkaGeneralExceptionHandler,
    KafkaServiceExceptionHandler,
    KafkaValidationErrorHandler,
)
from ._producer import Producer, ProducerConfigs, ProducerManager


class SecurityProtocol(str, Enum):
    PLAINTEXT = "PLAINTEXT"
    SSL = "SSL"
    SASL_PLAINTEXT = "SASL_PLAINTEXT"
    SASL_SSL = "SASL_SSL"


class SaslMechanism(str, Enum):
    PLAIN = "PLAIN"
    GSSAPI = "GSSAPI"
    SCRAM_SHA_256 = "SCRAM-SHA-256"
    SCRAM_SHA_512 = "SCRAM-SHA-512"
    OAUTHBEARER = "OAUTHBEARER"


@dataclass
class KafkaCommonConfigs:
    bootstrap_servers: str
    security_protocol: SecurityProtocol = SecurityProtocol.PLAINTEXT
    sasl_mechanism: SaslMechanism = SaslMechanism.PLAIN
    sasl_plain_username: str | None = None
    sasl_plain_password: str | None = None
    extra_consumer_configs: dict[str, Any] | None = None
    extra_producer_configs: dict[str, Any] | None = None


class KafkaManager:
    _KAFKA_CONNECTION_STATE = PrometheusEnum(
        name="kafka_connection_state",
        documentation="Kafka connection state",
        states=["connected", "disconnected"],
    )

    def __init__(
        self,
        remote_logger: BaseRemoteLogSender,
        bootstrap_servers: str,
        security_protocol: SecurityProtocol = SecurityProtocol.PLAINTEXT,
        sasl_mechanism: SaslMechanism = SaslMechanism.PLAIN,
        sasl_plain_username: str | None = None,
        sasl_plain_password: str | None = None,
        extra_consumer_configs: dict[str, Any] | None = None,
        extra_producer_configs: dict[str, Any] | None = None,
        container: Container | None = None,
    ):
        self._kafka_common_configs = KafkaCommonConfigs(
            bootstrap_servers=bootstrap_servers,
            security_protocol=security_protocol,
            sasl_mechanism=sasl_mechanism,
            sasl_plain_username=sasl_plain_username,
            sasl_plain_password=sasl_plain_password,
            extra_consumer_configs=extra_consumer_configs,
            extra_producer_configs=extra_producer_configs,
        )
        self._remote_logger = remote_logger
        self._connected = False
        self._disconnected = False
        self._container = container or Container()
        self._exception_handler = ExceptionHandlingManager(
            service_type=ExceptionHandlerServiceType.KAFKA,
            container=self._container,
            remote_logger=self._remote_logger,
            label_name=["topics", "group_id", "target", "exception"],
        )
        self._consumers: list[Consumer] = []
        self._producer_manager = ProducerManager()
        self._logger = LoggerFactory.get_logger("rabbitmq")

    @property
    def container(self) -> Container:
        return self._container

    def init_default_exception_handlers(self) -> None:
        self._exception_handler.set_exception_handlers(
            KafkaGeneralExceptionHandler,
            KafkaServiceExceptionHandler,
            KafkaValidationErrorHandler,
        )

    def include_consumer(self, consumer: Consumer | type[ConsumerBase]) -> None:
        if isinstance(consumer, Consumer):
            self._consumers.append(consumer)

            return

        if isinstance(consumer, type) and issubclass(consumer, ConsumerBase):
            self._register_consumer_classes(consumer)

            return

        raise TypeError(
            f"consumer is {type(consumer)}, expected instance of type or subclass of `ConsumerBase` or `type[ConsumerBase]`"
        )

    def set_exception_handlers(
        self, *exception_handlers: type[AbstractServiceExceptionHandler]
    ) -> None:
        self._exception_handler.set_exception_handlers(*exception_handlers)

    def _register_consumer_classes(self, consumer_class: type) -> None:
        inner_consumer = getattr(consumer_class, CONSUMER_ATTRIBUTE, None)

        if inner_consumer is None:
            raise AttributeError("consumer is not valid")

        if not isinstance(inner_consumer, Consumer):
            raise TypeError(
                f"consumer class {type(consumer_class)} is not a type `Consumer`"
            )

        self._container.register(
            service=ConsumerBase, factory=consumer_class, scope=Scope.singleton
        )

    async def connect(self) -> None:
        self._resolve_consumer_classes()
        self._exception_handler.resolve_exception_handlers()
        consumer_configs = ConsumerConfigs(
            bootstrap_servers=self._kafka_common_configs.bootstrap_servers,
            security_protocol=self._kafka_common_configs.security_protocol,
            sasl_mechanism=self._kafka_common_configs.sasl_mechanism,
            sasl_plain_username=self._kafka_common_configs.sasl_plain_username,
            sasl_plain_password=self._kafka_common_configs.sasl_plain_password,
            extra_configs=self._kafka_common_configs.extra_consumer_configs
            or {},
        )

        for consumer in self._consumers:
            await consumer.configure(
                configs=consumer_configs,
                container=self._container,
                remote_logger=self._remote_logger,
                on_exception_callback=self._exception_handler.submit_exception,
            )

        self._connected = True
        consumer_count = 0
        consumer_label = "consumer"

        if (consumer_count := len(self._consumers)) > 1:
            consumer_label = "consumers"

        self._logger.info(
            "connected to kafka, `%s` with `%d` `%s`",
            self._kafka_common_configs.bootstrap_servers,
            consumer_count,
            consumer_label,
        )
        self._KAFKA_CONNECTION_STATE.state("connected")

    async def disconnect(self) -> None:
        if self._disconnected:
            raise RuntimeError("already disconnected from kafka")

        if not self._connected:
            raise RuntimeError("not connected to kafka yet")

        self._disconnected = True

        await self._wait_for_consumers()
        self._KAFKA_CONNECTION_STATE.state("disconnected")

    async def _wait_for_consumers(self) -> None:
        _ = await gather(
            *(consumer.cancel() for consumer in self._consumers),
            return_exceptions=True,
        )

    def _resolve_consumer_classes(self) -> None:
        self._consumers.extend(
            consumer.register_consumer_methods()
            for consumer in cast(
                list[ConsumerBase], self._container.resolve_all(ConsumerBase)
            )
        )

    async def producer(
        self,
        topic: str,
        target: str | None = None,
        partition: int | None = None,
        timestamp_ms: int | None = None,
        headers: dict[str, Any] | None = None,
    ) -> Producer:
        if not self._connected or self._disconnected:
            raise KafkaDisconnectedError("not connected to kafka yet")

        return await self._producer_manager.get_producer(
            configs=ProducerConfigs(
                bootstrap_servers=self._kafka_common_configs.bootstrap_servers,
                security_protocol=self._kafka_common_configs.security_protocol,
                sasl_mechanism=self._kafka_common_configs.sasl_mechanism,
                sasl_plain_username=self._kafka_common_configs.sasl_plain_username,
                sasl_plain_password=self._kafka_common_configs.sasl_plain_password,
                extra_configs=self._kafka_common_configs.extra_producer_configs
                or {},
            ),
            topic=topic,
            target=target or "__default__",
            partition=partition,
            timestamp_ms=timestamp_ms,
            headers=headers,
        )
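To make the new `KafkaManager` lifecycle concrete, here is a hedged usage sketch that uses only the API visible in the hunk above. The `BaseRemoteLogSender` instance, the example consumer class, and the `Producer` interface live in parts of the package not shown in this diff and are assumed here.

```python
# Hypothetical usage sketch; only KafkaManager methods visible in the hunk above
# are called, and all other names are assumptions.
from qena_shared_lib.kafka import KafkaManager, SaslMechanism, SecurityProtocol


async def run(remote_logger) -> None:
    # `remote_logger` is assumed to be a configured BaseRemoteLogSender instance.
    manager = KafkaManager(
        remote_logger=remote_logger,
        bootstrap_servers="localhost:9092",
        security_protocol=SecurityProtocol.SASL_PLAINTEXT,
        sasl_mechanism=SaslMechanism.SCRAM_SHA_256,
        sasl_plain_username="app",
        sasl_plain_password="change-me",
    )

    # Install the bundled Kafka exception handlers before connecting.
    manager.init_default_exception_handlers()

    # Consumers may be registered as Consumer instances or ConsumerBase subclasses:
    # manager.include_consumer(MyOrdersConsumer)  # hypothetical consumer class

    await manager.connect()

    # Producers are obtained per topic/target once connected; sending goes through
    # the returned Producer, whose interface is not part of this hunk.
    producer = await manager.producer(topic="orders", target="order.created")

    await manager.disconnect()


# Entry point, e.g.: asyncio.run(run(remote_logger))
```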