qena-shared-lib 0.1.16__py3-none-any.whl → 0.1.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. qena_shared_lib/__init__.py +3 -2
  2. qena_shared_lib/application.py +4 -4
  3. qena_shared_lib/background.py +9 -7
  4. qena_shared_lib/exception_handling.py +409 -0
  5. qena_shared_lib/exceptions.py +170 -57
  6. qena_shared_lib/http/__init__.py +90 -0
  7. qena_shared_lib/{http.py → http/_base.py} +36 -36
  8. qena_shared_lib/http/_exception_handlers.py +202 -0
  9. qena_shared_lib/kafka/__init__.py +21 -0
  10. qena_shared_lib/kafka/_base.py +233 -0
  11. qena_shared_lib/kafka/_consumer.py +597 -0
  12. qena_shared_lib/kafka/_exception_handlers.py +124 -0
  13. qena_shared_lib/kafka/_producer.py +133 -0
  14. qena_shared_lib/logging.py +17 -13
  15. qena_shared_lib/rabbitmq/__init__.py +4 -6
  16. qena_shared_lib/rabbitmq/_base.py +68 -132
  17. qena_shared_lib/rabbitmq/_channel.py +2 -4
  18. qena_shared_lib/rabbitmq/_exception_handlers.py +69 -142
  19. qena_shared_lib/rabbitmq/_listener.py +246 -157
  20. qena_shared_lib/rabbitmq/_publisher.py +5 -5
  21. qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
  22. qena_shared_lib/remotelogging/_base.py +20 -20
  23. qena_shared_lib/remotelogging/logstash/_base.py +2 -2
  24. qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
  25. qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
  26. qena_shared_lib/scheduler.py +24 -15
  27. qena_shared_lib/security.py +39 -32
  28. qena_shared_lib/utils.py +13 -11
  29. {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/METADATA +9 -1
  30. qena_shared_lib-0.1.18.dist-info/RECORD +38 -0
  31. qena_shared_lib/exception_handlers.py +0 -235
  32. qena_shared_lib-0.1.16.dist-info/RECORD +0 -31
  33. {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/WHEEL +0 -0
qena_shared_lib/kafka/_exception_handlers.py
@@ -0,0 +1,124 @@
+from pydantic import ValidationError
+
+from ..exception_handling import (
+    ExceptionHandlerServiceType,
+    GeneralExceptionHandler,
+    ServiceExceptionHandler,
+    ServiceInformation,
+    ValidationErrorHandler,
+)
+from ..exceptions import ServiceException
+from ..logging import LoggerFactory
+from ..remotelogging import BaseRemoteLogSender
+from ._consumer import ConsumerContext, group_id_repr, topics_repr
+
+__all__ = [
+    "KafkaServiceExceptionHandler",
+    "KafkaValidationErrorHandler",
+    "KafkaGeneralExceptionHandler",
+]
+
+KAFKA_EXCEPTION_HANDLER_LOGGER_NAME = "kafka.exception_handler"
+
+
+class KafkaServiceExceptionHandler(ServiceExceptionHandler):
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        super().__init__(remote_logger)
+
+        self._logger = LoggerFactory.get_logger(
+            KAFKA_EXCEPTION_HANDLER_LOGGER_NAME
+        )
+
+    def __call__(
+        self,
+        context: ConsumerContext,
+        exception: ServiceException,
+    ) -> None:
+        topics = topics_repr(context.topics)
+        group_id = group_id_repr(context.group_id)
+
+        self.handle(
+            service_information=ServiceInformation(
+                service_type=ExceptionHandlerServiceType.KAFKA,
+                tags=[
+                    "Kafka",
+                    *context.topics,
+                    group_id,
+                    context.target,
+                    exception.__class__.__name__,
+                ],
+                extra={
+                    "serviceType": "Kafka",
+                    "topics": topics,
+                    "groupId": group_id,
+                    "target": context.target,
+                    "exception": exception.__class__.__name__,
+                },
+                message=f"topics = `{topics}` , group id = `{group_id}` , target = `{context.target}` {exception.message}",
+            ),
+            exception=exception,
+        )
+
+
+class KafkaValidationErrorHandler(ValidationErrorHandler):
+    def __call__(
+        self,
+        context: ConsumerContext,
+        exception: ValidationError,
+    ) -> None:
+        topics = topics_repr(context.topics)
+        group_id = group_id_repr(context.group_id)
+
+        self.handle(
+            service_information=ServiceInformation(
+                service_type=ExceptionHandlerServiceType.KAFKA,
+                tags=[
+                    "Kafka",
+                    *context.topics,
+                    group_id,
+                    context.target,
+                    "ValidationError",
+                ],
+                extra={
+                    "serviceType": "Kafka",
+                    "topics": topics,
+                    "groupId": group_id,
+                    "target": context.target,
+                    "exception": "ValidationError",
+                },
+                message=f"invalid kafka event at topics `{topics}` , group id `{group_id}` and target `{context.target}`",
+            ),
+            exception=exception,
+        )
+
+
+class KafkaGeneralExceptionHandler(GeneralExceptionHandler):
+    def __call__(
+        self,
+        context: ConsumerContext,
+        exception: Exception,
+    ) -> None:
+        topics = topics_repr(context.topics)
+        group_id = group_id_repr(context.group_id)
+
+        self.handle(
+            service_information=ServiceInformation(
+                service_type=ExceptionHandlerServiceType.KAFKA,
+                tags=[
+                    "Kafka",
+                    *context.topics,
+                    group_id,
+                    context.target,
+                    exception.__class__.__name__,
+                ],
+                extra={
+                    "serviceType": "Kafka",
+                    "topics": topics,
+                    "groupId": group_id,
+                    "target": context.target,
+                    "exception": exception.__class__.__name__,
+                },
+                message=f"something went wrong while consuming event on topics `{topics}` , group id `{group_id}` and target `{context.target}`",
+            ),
+            exception=exception,
+        )
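
Each of the three handlers above follows the same pattern: translate a ConsumerContext plus an exception into a ServiceInformation record (tags, structured extras, a human-readable message) and delegate to the base class's handle. This hunk does not show how handlers are attached to a consumer, so the sketch below only illustrates the call shape; remote_logger, context, and consume_event are stand-ins, and the assumption that the base GeneralExceptionHandler accepts a remote logger like its sibling does is not confirmed by this diff.

    # Illustrative call shape only, under the assumptions stated above.
    # `context` is the ConsumerContext for the failing consumer; the library's
    # own dispatch to these handlers is not visible in this hunk.
    handler = KafkaGeneralExceptionHandler(remote_logger)

    try:
        await consume_event()  # stand-in for the consumer's target callable
    except Exception as exc:
        handler(context, exc)  # reports topics, group id, target, exception name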
qena_shared_lib/kafka/_producer.py
@@ -0,0 +1,133 @@
+from asyncio import Lock
+from dataclasses import dataclass
+from inspect import Traceback
+from typing import Any
+
+from aiokafka.producer import AIOKafkaProducer
+from pydantic_core import to_json
+from typing_extensions import Self
+
+from ..utils import yield_now
+
+
+@dataclass
+class ProducerConfigs:
+    bootstrap_servers: str
+    security_protocol: str
+    sasl_mechanism: str
+    sasl_plain_username: str | None
+    sasl_plain_password: str | None
+    extra_configs: dict[str, Any]
+
+
+class KafkaProducerContainer:
+    def __init__(self, configs: ProducerConfigs) -> None:
+        self._lock = Lock()
+        self._kafka_producer = AIOKafkaProducer(
+            bootstrap_servers=configs.bootstrap_servers,
+            security_protocol=configs.security_protocol,
+            sasl_mechanism=configs.sasl_mechanism,
+            sasl_plain_username=configs.sasl_plain_username,
+            sasl_plain_password=configs.sasl_plain_password,
+            **configs.extra_configs,
+        )
+
+    @classmethod
+    async def create(cls, configs: ProducerConfigs) -> "KafkaProducerContainer":
+        kafka_producer_container = KafkaProducerContainer(configs)
+
+        await kafka_producer_container.start()
+
+        return kafka_producer_container
+
+    async def start(self) -> None:
+        await self._kafka_producer.start()
+
+    async def aquire(self) -> None:
+        await self._lock.acquire()
+
+    def get_kafka_producer(self) -> AIOKafkaProducer:
+        return self._kafka_producer
+
+    def release(self) -> None:
+        self._lock.release()
+
+
+class Producer:
+    def __init__(
+        self,
+        topic: str,
+        target: str,
+        partition: int | None,
+        timestamp_ms: int | None,
+        headers: dict[str, Any] | None,
+        kafka_producer_container: KafkaProducerContainer,
+    ) -> None:
+        self._topic = topic
+        self._partition = partition
+        self._timestamp_ms = timestamp_ms
+        self._headers = headers or {}
+        self._headers["target"] = target
+        self._kafka_producer_container = kafka_producer_container
+
+    async def __aenter__(self) -> Self:
+        await self._kafka_producer_container.aquire()
+
+        return self
+
+    async def send(self, key: Any, value: Any) -> None:
+        await self._kafka_producer_container.get_kafka_producer().send_and_wait(
+            topic=self._topic,
+            key=to_json(key),
+            value=to_json(value),
+            partition=self._partition,
+            timestamp_ms=self._timestamp_ms,
+            headers=[(k, to_json(v)) for k, v in self._headers.items()],
+        )
+
+    async def __aexit__(
+        self,
+        exception_type: type[Exception],
+        exception: Exception,
+        traceback: Traceback,
+    ) -> None:
+        del exception_type, exception, traceback
+
+        await yield_now()
+        self._kafka_producer_container.release()
+
+
+class ProducerManager:
+    def __init__(self) -> None:
+        self._producers: dict[str, KafkaProducerContainer] = {}
+
+    async def get_producer(
+        self,
+        configs: ProducerConfigs,
+        topic: str,
+        target: str,
+        partition: int | None,
+        timestamp_ms: int | None,
+        headers: dict[str, Any] | None,
+    ) -> Producer:
+        if not self._kafka_producer_exits(topic):
+            await self._register_kafka_producer(configs=configs, topic=topic)
+
+        kafka_producer_container = self._producers[topic]
+
+        return Producer(
+            topic=topic,
+            target=target,
+            partition=partition,
+            timestamp_ms=timestamp_ms,
+            headers=headers,
+            kafka_producer_container=kafka_producer_container,
+        )
+
+    def _kafka_producer_exits(self, topic: str) -> bool:
+        return topic in self._producers
+
+    async def _register_kafka_producer(
+        self, configs: ProducerConfigs, topic: str
+    ) -> None:
+        self._producers[topic] = await KafkaProducerContainer.create(configs)
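
Taken together, ProducerManager caches one started KafkaProducerContainer per topic, and Producer is an async context manager whose __aenter__ takes the container's asyncio.Lock, so sends through the shared AIOKafkaProducer are serialized and both keys and values go over the wire as JSON via pydantic_core.to_json. A minimal usage sketch under stated assumptions: the import path assumes these names are re-exported from qena_shared_lib.kafka (that __init__.py hunk is not shown here), and the connection settings are placeholders, not values required by the package.

    # Usage sketch based on the classes above; configuration is illustrative.
    from qena_shared_lib.kafka import ProducerConfigs, ProducerManager  # assumed re-exports

    configs = ProducerConfigs(
        bootstrap_servers="localhost:9092",
        security_protocol="PLAINTEXT",
        sasl_mechanism="PLAIN",
        sasl_plain_username=None,
        sasl_plain_password=None,
        extra_configs={},
    )

    manager = ProducerManager()

    async def publish_order(order_id: str, payload: dict) -> None:
        producer = await manager.get_producer(
            configs=configs,
            topic="orders",          # placeholder topic
            target="order_created",  # ends up in the "target" header
            partition=None,
            timestamp_ms=None,
            headers=None,
        )
        # `async with` holds the container's lock for the duration of the send,
        # serializing access to the one AIOKafkaProducer cached for "orders".
        async with producer:
            await producer.send(key=order_id, value=payload)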
qena_shared_lib/logging.py
@@ -1,4 +1,3 @@
-from functools import lru_cache
 from logging import (
     Formatter,
     Handler,
@@ -10,7 +9,7 @@ from os import environ
 from typing import Optional
 
 __all__ = [
-    "LoggerProvider",
+    "LoggerFactory",
 ]
 
 ROOT_LOGGER_NAME = (
@@ -18,14 +17,11 @@ ROOT_LOGGER_NAME = (
 )
 
 
-class LoggerProvider:
-    @lru_cache
-    @staticmethod
-    def default() -> "LoggerProvider":
-        return LoggerProvider()
+class LoggerFactory:
+    _LOGGERS: dict[str, Logger] = {}
 
-    @lru_cache
-    def get_logger(self, name: str | None = None) -> Logger:
+    @classmethod
+    def get_logger(cls, name: str | None = None) -> Logger:
         logger_name = ROOT_LOGGER_NAME
 
         if name:
@@ -35,7 +31,7 @@ class LoggerProvider:
         handlers = [handler.__class__ for handler in logger.handlers]
 
         if logger.parent is not None:
-            self._check_handler(handlers=handlers, logger=logger.parent)
+            cls._check_handler(handlers=handlers, logger=logger.parent)
 
         if StreamHandler not in handlers:
             stream_handler = StreamHandler()
@@ -47,13 +43,21 @@ class LoggerProvider:
             )
             logger.addHandler(stream_handler)
 
-        return logger
+        return cls._set_and_get_logger(logger_name=logger_name, logger=logger)
 
+    @classmethod
+    def _set_and_get_logger(cls, logger_name: str, logger: Logger) -> Logger:
+        if logger_name not in cls._LOGGERS:
+            cls._LOGGERS[logger_name] = logger
+
+        return cls._LOGGERS[logger_name]
+
+    @classmethod
     def _check_handler(
-        self, handlers: list[type[Handler]], logger: Optional[Logger] = None
+        cls, handlers: list[type[Handler]], logger: Optional[Logger] = None
    ) -> None:
         if logger is None:
             return
 
         handlers.extend([handler.__class__ for handler in logger.handlers])
-        self._check_handler(handlers=handlers, logger=logger.parent)
+        cls._check_handler(handlers=handlers, logger=logger.parent)
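
This rewrite replaces the lru_cache-based LoggerProvider (a fragile pattern: staticmethod objects are not callable under @lru_cache before Python 3.10, and caching an instance method keys the cache on self) with a purely class-level factory backed by an explicit _LOGGERS dict. A small behavior sketch of the new API, using the module path confirmed by the file list:

    # Repeated lookups for the same name return the same Logger, and the
    # StreamHandler guard means a handler is attached at most once per logger.
    from qena_shared_lib.logging import LoggerFactory

    first = LoggerFactory.get_logger("kafka.exception_handler")
    second = LoggerFactory.get_logger("kafka.exception_handler")

    assert first is second  # the second call is returned from LoggerFactory._LOGGERS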
qena_shared_lib/rabbitmq/__init__.py
@@ -1,10 +1,9 @@
 from ._base import AbstractRabbitMQService, RabbitMqManager
 from ._channel import BaseChannel
 from ._exception_handlers import (
-    AbstractRabbitMqExceptionHandler,
-    GeneralMqExceptionHandler,
+    RabbitMqGeneralExceptionHandler,
     RabbitMqServiceExceptionHandler,
-    ValidationErrorHandler,
+    RabbitMqValidationErrorHandler,
 )
 from ._listener import (
     CONSUMER_ATTRIBUTE,
@@ -28,7 +27,6 @@ from ._publisher import Publisher
 from ._rpc_client import RpcClient
 
 __all__ = [
-    "AbstractRabbitMqExceptionHandler",
     "AbstractRabbitMQService",
     "BackoffRetryDelay",
     "BaseChannel",
@@ -39,18 +37,18 @@ __all__ = [
     "Consumer",
     "execute",
     "FixedRetryDelay",
-    "GeneralMqExceptionHandler",
     "LISTENER_ATTRIBUTE",
     "ListenerBase",
     "ListenerContext",
     "Publisher",
+    "RabbitMqGeneralExceptionHandler",
     "RabbitMqManager",
     "RabbitMqServiceExceptionHandler",
+    "RabbitMqValidationErrorHandler",
     "RetryDelayJitter",
     "RetryPolicy",
     "RPC_WORKER_ATTRIBUTE",
     "rpc_worker",
     "RpcClient",
     "RpcWorker",
-    "ValidationErrorHandler",
 ]
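
For downstream code this hunk is a breaking rename: AbstractRabbitMqExceptionHandler leaves the public surface, and the two remaining concrete handlers gain a RabbitMq prefix, matching the naming of the new Kafka handlers added in this release. The import migration it implies:

    # Before, with qena-shared-lib 0.1.16
    from qena_shared_lib.rabbitmq import (
        GeneralMqExceptionHandler,
        ValidationErrorHandler,
    )

    # After, with qena-shared-lib 0.1.18
    from qena_shared_lib.rabbitmq import (
        RabbitMqGeneralExceptionHandler,
        RabbitMqValidationErrorHandler,
    )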