buz 2.13.1rc8__py3-none-any.whl → 2.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- buz/event/async_event_bus.py +15 -0
- buz/event/event_bus.py +2 -2
- buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py +107 -0
- buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py +5 -10
- buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py +3 -4
- buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py +2 -4
- buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py +4 -6
- buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py +2 -7
- buz/event/infrastructure/kombu/kombu_consumer.py +1 -0
- buz/event/infrastructure/kombu/kombu_event_bus.py +6 -7
- buz/event/middleware/async_publish_middleware.py +13 -0
- buz/event/middleware/async_publish_middleware_chain_resolver.py +22 -0
- buz/event/sync/sync_event_bus.py +2 -2
- buz/event/transactional_outbox/outbox_repository.py +5 -1
- buz/event/transactional_outbox/transactional_outbox_event_bus.py +12 -11
- buz/kafka/__init__.py +28 -2
- buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py +8 -0
- buz/kafka/domain/exceptions/topic_not_found_exception.py +6 -0
- buz/kafka/domain/models/__init__.py +0 -0
- buz/kafka/domain/models/kafka_supported_compression_type.py +8 -0
- buz/kafka/domain/services/__init__.py +0 -0
- buz/kafka/domain/services/async_kafka_producer.py +21 -0
- buz/kafka/domain/services/kafka_admin_client.py +16 -1
- buz/kafka/domain/services/kafka_producer.py +3 -1
- buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py +12 -9
- buz/kafka/infrastructure/aiokafka/aiokafka_producer.py +98 -0
- buz/kafka/infrastructure/interfaces/__init__.py +0 -0
- buz/kafka/infrastructure/interfaces/async_connection_manager.py +11 -0
- buz/kafka/infrastructure/interfaces/connection_manager.py +11 -0
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py +132 -16
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py +11 -2
- buz/kafka/infrastructure/kafka_python/kafka_python_producer.py +51 -22
- {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/METADATA +1 -1
- {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/RECORD +37 -24
- buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py +0 -20
- /buz/kafka/{infrastructure/kafka_python/factories → domain/exceptions}/__init__.py +0 -0
- {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/LICENSE +0 -0
- {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/WHEEL +0 -0
buz/event/async_event_bus.py
ADDED

@@ -0,0 +1,15 @@
+from abc import ABC, abstractmethod
+from typing import Collection
+
+from buz.event import Event
+from buz.kafka.infrastructure.interfaces.async_connection_manager import AsyncConnectionManager
+
+
+class AsyncEventBus(AsyncConnectionManager, ABC):
+    @abstractmethod
+    async def publish(self, event: Event) -> None:
+        pass
+
+    @abstractmethod
+    async def bulk_publish(self, events: Collection[Event]) -> None:
+        pass
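
The new interface couples publishing with an explicit async connection lifecycle. As a minimal sketch (assuming AsyncConnectionManager declares async connect/disconnect methods, which the implementations later in this diff suggest), a hypothetical in-memory implementation looks like this:

from typing import Collection

from buz.event import Event
from buz.event.async_event_bus import AsyncEventBus


class InMemoryAsyncEventBus(AsyncEventBus):
    """Hypothetical in-memory bus, used only to illustrate the contract."""

    def __init__(self) -> None:
        self.published: list[Event] = []

    async def connect(self) -> None:  # from AsyncConnectionManager (assumed signature)
        pass

    async def disconnect(self) -> None:  # from AsyncConnectionManager (assumed signature)
        pass

    async def publish(self, event: Event) -> None:
        self.published.append(event)

    async def bulk_publish(self, events: Collection[Event]) -> None:
        for event in events:
            await self.publish(event)
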
buz/event/event_bus.py
CHANGED
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import
+from typing import Iterable

 from buz.event import Event


@@ -10,5 +10,5 @@ class EventBus(ABC):
         pass

     @abstractmethod
-    def bulk_publish(self, events:
+    def bulk_publish(self, events: Iterable[Event]) -> None:
        pass
buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py
ADDED

@@ -0,0 +1,107 @@
+from logging import Logger
+from typing import Collection, Optional
+
+from buz.event import Event
+from buz.event.async_event_bus import AsyncEventBus
+from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
+from buz.event.infrastructure.buz_kafka.exceptions.kafka_event_bus_config_not_valid_exception import (
+    KafkaEventBusConfigNotValidException,
+)
+from buz.event.infrastructure.buz_kafka.publish_strategy.publish_strategy import KafkaPublishStrategy
+from buz.event.middleware.async_publish_middleware import AsyncPublishMiddleware
+from buz.event.middleware.async_publish_middleware_chain_resolver import AsyncPublishMiddlewareChainResolver
+from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
+from buz.kafka.domain.services.async_kafka_producer import AsyncKafkaProducer
+from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
+
+
+class AsyncBuzKafkaEventBus(AsyncEventBus):
+    def __init__(
+        self,
+        *,
+        publish_strategy: KafkaPublishStrategy,
+        producer: AsyncKafkaProducer,
+        logger: Logger,
+        kafka_admin_client: Optional[KafkaAdminClient] = None,
+        publish_middlewares: Optional[list[AsyncPublishMiddleware]] = None,
+        auto_create_topic_configuration: Optional[AutoCreateTopicConfiguration] = None,
+    ):
+        self.__publish_middleware_chain_resolver = AsyncPublishMiddlewareChainResolver(publish_middlewares or [])
+        self.__publish_strategy = publish_strategy
+        self.__producer = producer
+        self.__topics_checked: dict[str, bool] = {}
+        self.__kafka_admin_client = kafka_admin_client
+        self.__auto_create_topic_configuration = auto_create_topic_configuration
+        self.__logger = logger
+        self.__check_kafka_admin_client_is_needed()
+
+    def __check_kafka_admin_client_is_needed(self) -> None:
+        if self.__kafka_admin_client is None and self.__auto_create_topic_configuration is not None:
+            raise KafkaEventBusConfigNotValidException(
+                "A KafkaAdminClient is needed to create topics when 'auto_create_topic_configuration' is set."
+            )
+
+    async def publish(self, event: Event) -> None:
+        await self.__publish_middleware_chain_resolver.resolve(event, self.__perform_publish)
+
+    async def __perform_publish(self, event: Event) -> None:
+        try:
+            topic = self.__publish_strategy.get_topic(event)
+
+            if self.__auto_create_topic_configuration is not None and self.__is_topic_created(topic) is False:
+                try:
+                    self.__logger.info(f"Creating missing topic: {topic}..")
+                    self.__get_kafka_admin_client().create_topics(
+                        topics=[
+                            CreateKafkaTopic(
+                                name=topic,
+                                partitions=self.__auto_create_topic_configuration.partitions,
+                                replication_factor=self.__auto_create_topic_configuration.replication_factor,
+                                configs=self.__auto_create_topic_configuration.configs,
+                            )
+                        ]
+                    )
+                    self.__logger.info(f"Created missing topic: {topic}")
+                    self.__topics_checked[topic] = True
+                except KafkaTopicsAlreadyCreatedException:
+                    pass
+
+            headers = self.__get_event_headers(event)
+            await self.__producer.produce(
+                message=event,
+                headers=headers,
+                topic=topic,
+            )
+        except Exception as exc:
+            raise EventNotPublishedException(event) from exc
+
+    def __get_kafka_admin_client(self) -> KafkaAdminClient:
+        if self.__kafka_admin_client is None:
+            raise KafkaEventBusConfigNotValidException("KafkaAdminClient is not set.")
+        return self.__kafka_admin_client
+
+    def __is_topic_created(self, topic: str) -> bool:
+        is_topic_created = self.__topics_checked.get(topic, None)
+
+        if is_topic_created is not None:
+            return is_topic_created
+
+        is_topic_created = self.__get_kafka_admin_client().is_topic_created(topic)
+        self.__topics_checked[topic] = is_topic_created
+
+        return is_topic_created
+
+    async def bulk_publish(self, events: Collection[Event]) -> None:
+        for event in events:
+            await self.publish(event)
+
+    def __get_event_headers(self, event: Event) -> dict:
+        return {"id": event.id}
+
+    async def connect(self) -> None:
+        await self.__producer.connect()
+
+    async def disconnect(self) -> None:
+        await self.__producer.disconnect()
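
Putting the pieces together, a plausible lifecycle for this bus is connect once, publish, and disconnect on shutdown. The wiring below is a hedged sketch: my_producer and my_publish_strategy stand in for concrete instances this diff does not show.

import logging

from buz.kafka import AsyncBuzKafkaEventBus  # re-exported from buz/kafka/__init__.py below


async def run(my_producer, my_publish_strategy, some_event) -> None:
    bus = AsyncBuzKafkaEventBus(
        publish_strategy=my_publish_strategy,  # hypothetical KafkaPublishStrategy instance
        producer=my_producer,                  # hypothetical AsyncKafkaProducer instance
        logger=logging.getLogger("buz"),
    )
    await bus.connect()  # delegates to producer.connect()
    try:
        await bus.publish(some_event)
    finally:
        await bus.disconnect()  # delegates to producer.disconnect()
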
buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py
CHANGED

@@ -17,11 +17,9 @@ from buz.event.infrastructure.buz_kafka.consume_strategy.consume_strategy import
 from buz.event.infrastructure.buz_kafka.kafka_event_subscriber_executor import KafkaEventSubscriberExecutor
 from buz.event.infrastructure.models.consuming_task import ConsumingTask
 from buz.event.meta_subscriber import MetaSubscriber
-from buz.kafka import (
-    KafkaConnectionConfig,
-    ConsumerInitialOffsetPosition,
-)
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
 from buz.kafka.domain.models.kafka_poll_record import KafkaPollRecord
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
 from buz.kafka.infrastructure.aiokafka.aiokafka_consumer import AIOKafkaConsumer

@@ -107,12 +105,9 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):

         if self.__exceptions_are_thrown(worker_errors):
             consume_events_exception, polling_task_exception = worker_errors
-
-
-
-            self._logger.exception(polling_task_exception)
-
-            raise WorkerExecutionException("The worker was closed by an unexpected exception")
+            raise WorkerExecutionException(
+                "The worker was closed by an unexpected exception"
+            ) from consume_events_exception or polling_task_exception

     async def __run_worker(self) -> tuple[Optional[Exception], Optional[Exception]]:
         consume_events_task = create_task(self.__consume_events_task())
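
A detail worth spelling out in the new raise form: the from clause binds to the whole expression consume_events_exception or polling_task_exception, so the WorkerExecutionException is chained to the first non-None worker error. A standalone illustration:

from __future__ import annotations


def demo(consume_exc: Exception | None, polling_exc: Exception | None) -> None:
    # `consume_exc or polling_exc` picks the first non-None exception;
    # `raise ... from <exc>` records it as __cause__ in the traceback.
    raise RuntimeError("The worker was closed by an unexpected exception") from consume_exc or polling_exc


# demo(None, ValueError("poll failed"))  # RuntimeError whose __cause__ is the ValueError
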
buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py
CHANGED

@@ -15,11 +15,10 @@ from buz.event.meta_subscriber import MetaSubscriber
 from buz.event.middleware.async_consume_middleware import AsyncConsumeMiddleware
 from buz.event.strategies.retry.consume_retrier import ConsumeRetrier
 from buz.event.strategies.retry.reject_callback import RejectCallback
-from buz.kafka import (
-    KafkaConnectionConfig,
-    ConsumerInitialOffsetPosition,
-)
+
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
 from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
 from buz.kafka.infrastructure.deserializers.implementations.json_bytes_to_message_deserializer import (
buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py
CHANGED

@@ -15,11 +15,9 @@ from buz.event.meta_subscriber import MetaSubscriber
 from buz.event.middleware.consume_middleware import ConsumeMiddleware
 from buz.event.strategies.retry.consume_retrier import ConsumeRetrier
 from buz.event.strategies.retry.reject_callback import RejectCallback
-from buz.kafka import (
-    KafkaConnectionConfig,
-    ConsumerInitialOffsetPosition,
-)
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
 from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
 from buz.kafka.infrastructure.deserializers.implementations.json_bytes_to_message_deserializer import (
buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py
CHANGED

@@ -1,5 +1,5 @@
 from logging import Logger
-from typing import
+from typing import Optional, Iterable

 from buz.event import Event, EventBus
 from buz.event.exceptions.event_not_published_exception import EventNotPublishedException

@@ -11,13 +11,11 @@ from buz.event.middleware import (
     PublishMiddleware,
 )
 from buz.event.middleware.publish_middleware_chain_resolver import PublishMiddlewareChainResolver
-from buz.kafka import (
-    KafkaPythonProducer,
-)
 from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
+from buz.kafka.domain.services.kafka_producer import KafkaProducer


 class BuzKafkaEventBus(EventBus):

@@ -25,7 +23,7 @@ class BuzKafkaEventBus(EventBus):
         self,
         *,
         publish_strategy: KafkaPublishStrategy,
-        producer:
+        producer: KafkaProducer,
         logger: Logger,
         kafka_admin_client: Optional[KafkaAdminClient] = None,
         publish_middlewares: Optional[list[PublishMiddleware]] = None,

@@ -96,7 +94,7 @@ class BuzKafkaEventBus(EventBus):

         return is_topic_created

-    def bulk_publish(self, events:
+    def bulk_publish(self, events: Iterable[Event]) -> None:
         for event in events:
             self.publish(event)
buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py
CHANGED

@@ -67,11 +67,6 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
            self.__logger.error(
                f'The message "{str(kafka_poll_record.value)}" is not valid, it will be consumed but not processed'
            )
-        except Exception as exception:
-            if self.__on_fail_strategy == KafkaOnFailStrategy.CONSUME_ON_FAIL:
-                self.__logger.error(f"Error consuming event: {exception}")
-                return
-            raise exception

     def __execution_callback(self, subscriber: Subscriber, message: KafkaConsumerRecord[Event]) -> None:
         self.__consume_middleware_chain_resolver.resolve(

@@ -86,13 +81,13 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                return
            except Exception as exception:
                self.__logger.warning(f"Event {event.id} could not be consumed by the subscriber {subscriber.fqn}")
-                self.__logger.error(exception, exc_info=True)
-
                if self.__should_retry(event, subscriber) is True:
                    self.__register_retry(event, subscriber)
                    time.sleep(self.__seconds_between_retires)
                    continue

+                self.__logger.exception(exception)
+
                if self.__reject_callback:
                    self.__reject_callback.on_reject(event=event, subscribers=[subscriber], exception=exception)

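
Two things change in the retry path above: the traceback is now logged only after retries are exhausted, and logger.error(exception, exc_info=True) becomes logger.exception(exception). The two logging calls are equivalent in output; logger.exception is simply the idiomatic spelling inside an except block:

import logging

logging.basicConfig()
logger = logging.getLogger(__name__)

try:
    1 / 0
except ZeroDivisionError as exception:
    # Both emit an ERROR record with the active traceback attached:
    logger.error(exception, exc_info=True)
    logger.exception(exception)
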
buz/event/infrastructure/kombu/kombu_event_bus.py
CHANGED

@@ -1,21 +1,20 @@
 from dataclasses import asdict
-from typing import
+from typing import Optional, Iterable

 from kombu import Connection, Exchange, Producer
 from kombu.entity import PERSISTENT_DELIVERY_MODE

 from buz.event import Event, EventBus
-from buz.event.
+from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
 from buz.event.infrastructure.kombu.allowed_kombu_serializer import AllowedKombuSerializer
+from buz.event.infrastructure.kombu.publish_strategy import PublishStrategy
+from buz.event.infrastructure.kombu.retry_strategy.publish_retry_policy import PublishRetryPolicy
+from buz.event.infrastructure.kombu.retry_strategy.simple_publish_retry_policy import SimplePublishRetryPolicy
 from buz.event.middleware import (
     PublishMiddleware,
     PublishMiddlewareChainResolver,
 )

-from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
-from buz.event.infrastructure.kombu.retry_strategy.publish_retry_policy import PublishRetryPolicy
-from buz.event.infrastructure.kombu.retry_strategy.simple_publish_retry_policy import SimplePublishRetryPolicy
-

 class KombuEventBus(EventBus):
     def __init__(

@@ -96,6 +95,6 @@ class KombuEventBus(EventBus):
     def __get_headers(self, event: Event) -> dict:
         return {"fqn": event.fqn()}

-    def bulk_publish(self, events:
+    def bulk_publish(self, events: Iterable[Event]) -> None:
         for event in events:
             self.publish(event)
buz/event/middleware/async_publish_middleware.py
ADDED

@@ -0,0 +1,13 @@
+from abc import abstractmethod
+from typing import Awaitable, Callable
+
+from buz.event import Event
+from buz.middleware import Middleware
+
+AsyncPublishCallable = Callable[[Event], Awaitable[None]]
+
+
+class AsyncPublishMiddleware(Middleware):
+    @abstractmethod
+    async def on_publish(self, event: Event, publish: AsyncPublishCallable) -> None:
+        pass
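
A concrete middleware decides whether and when to await the rest of the chain. The timing middleware below is a hypothetical example, not part of the package:

import time

from buz.event import Event
from buz.event.middleware.async_publish_middleware import AsyncPublishCallable, AsyncPublishMiddleware


class TimingPublishMiddleware(AsyncPublishMiddleware):
    """Hypothetical: measures how long the downstream chain takes to publish."""

    async def on_publish(self, event: Event, publish: AsyncPublishCallable) -> None:
        start = time.monotonic()
        await publish(event)  # hand off to the next middleware or the real publish
        print(f"published {event.fqn()} in {time.monotonic() - start:.3f}s")
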
buz/event/middleware/async_publish_middleware_chain_resolver.py
ADDED

@@ -0,0 +1,22 @@
+from buz.event import Event
+from buz.event.middleware.async_publish_middleware import AsyncPublishCallable, AsyncPublishMiddleware
+from buz.middleware import MiddlewareChainBuilder
+
+
+class AsyncPublishMiddlewareChainResolver:
+    def __init__(self, middlewares: list[AsyncPublishMiddleware]):
+        self.__middlewares = middlewares
+        self.__middleware_chain_builder: MiddlewareChainBuilder[
+            AsyncPublishCallable, AsyncPublishMiddleware
+        ] = MiddlewareChainBuilder(middlewares)
+
+    async def resolve(self, event: Event, publish: AsyncPublishCallable) -> None:
+        chain_callable: AsyncPublishCallable = self.__middleware_chain_builder.get_chain_callable(
+            publish, self.__get_middleware_callable
+        )
+        await chain_callable(event)
+
+    def __get_middleware_callable(
+        self, middleware: AsyncPublishMiddleware, publish_callable: AsyncPublishCallable
+    ) -> AsyncPublishCallable:
+        return lambda event: middleware.on_publish(event, publish_callable)
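
Usage mirrors the synchronous PublishMiddlewareChainResolver: resolve folds the middleware list and the terminal publish callable into one awaitable chain (the exact nesting order comes from MiddlewareChainBuilder, which this diff does not show). Reusing the hypothetical middleware from above:

from buz.event.middleware.async_publish_middleware_chain_resolver import AsyncPublishMiddlewareChainResolver


async def publish_through_chain(event, perform_publish) -> None:
    # perform_publish: async (Event) -> None, the terminal publish operation.
    resolver = AsyncPublishMiddlewareChainResolver([TimingPublishMiddleware()])
    await resolver.resolve(event, perform_publish)
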
buz/event/sync/sync_event_bus.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import
+from typing import Optional, Iterable

 from buz.event import Event, EventBus, Subscriber
 from buz.event.middleware import (

@@ -32,6 +32,6 @@ class SyncEventBus(EventBus):
     def __perform_consume(self, event: Event, subscriber: Subscriber) -> None:
         subscriber.consume(event)

-    def bulk_publish(self, events:
+    def bulk_publish(self, events: Iterable[Event]) -> None:
         for event in events:
             self.publish(event)
buz/event/transactional_outbox/outbox_repository.py
CHANGED

@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Sequence
+from typing import Sequence, Iterable

 from buz.event.transactional_outbox import OutboxRecord
 from buz.event.transactional_outbox import OutboxCriteria

@@ -10,6 +10,10 @@ class OutboxRepository(ABC):
     def save(self, outbox_record: OutboxRecord) -> None:
         pass

+    @abstractmethod
+    def bulk_create(self, outbox_records: Iterable[OutboxRecord]) -> None:
+        pass
+
     @abstractmethod
     def find(self, criteria: OutboxCriteria) -> Sequence[OutboxRecord]:
         pass

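
Every repository implementation now has to provide bulk_create as well. A hypothetical in-memory repository, covering only the methods visible in this hunk (the real interface may declare more):

from typing import Iterable, Sequence

from buz.event.transactional_outbox import OutboxCriteria, OutboxRecord
from buz.event.transactional_outbox.outbox_repository import OutboxRepository


class InMemoryOutboxRepository(OutboxRepository):
    """Hypothetical: a real implementation would batch a single INSERT."""

    def __init__(self) -> None:
        self.__records: list[OutboxRecord] = []

    def save(self, outbox_record: OutboxRecord) -> None:
        self.__records.append(outbox_record)

    def bulk_create(self, outbox_records: Iterable[OutboxRecord]) -> None:
        # The caller may pass a lazy iterator (see TransactionalOutboxEventBus below),
        # so consume it exactly once.
        self.__records.extend(outbox_records)

    def find(self, criteria: OutboxCriteria) -> Sequence[OutboxRecord]:
        return list(self.__records)
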
buz/event/transactional_outbox/transactional_outbox_event_bus.py
CHANGED

@@ -1,6 +1,7 @@
-from typing import
+from typing import Optional, Iterable

 from buz.event import Event, EventBus
+from buz.event.transactional_outbox import OutboxRecord
 from buz.event.transactional_outbox.event_to_outbox_record_translator import EventToOutboxRecordTranslator
 from buz.event.transactional_outbox.outbox_record_validation.outbox_record_validator import OutboxRecordValidator
 from buz.event.transactional_outbox.outbox_repository import OutboxRepository

@@ -18,16 +19,16 @@ class TransactionalOutboxEventBus(EventBus):
         self.__outbox_record_validator = outbox_record_validator

     def publish(self, event: Event) -> None:
-
-
-
-
+        outbox_record = self.__translate_and_validate(event)
+        self.__outbox_repository.save(outbox_record)
+
+    def bulk_publish(self, events: Iterable[Event]) -> None:
+        outbox_records = map(self.__translate_and_validate, events)
+        self.__outbox_repository.bulk_create(outbox_records)
+
+    # Raises OutboxRecordValidationException: If any validation inside outbox_record_validator fails
+    def __translate_and_validate(self, event: Event) -> OutboxRecord:
         outbox_record = self.__event_to_outbox_record_translator.translate(event)
         if self.__outbox_record_validator is not None:
             self.__outbox_record_validator.validate(record=outbox_record)
-
-        self.__outbox_repository.save(outbox_record)
-
-    def bulk_publish(self, events: Collection[Event]) -> None:
-        for event in events:
-            self.publish(event)
+        return outbox_record
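
Note that map(self.__translate_and_validate, events) builds a lazy iterator, so translation and validation run only while bulk_create iterates it; a validation failure therefore surfaces inside the repository call rather than before it. A small demonstration of that ordering:

def failing_translate(event: str) -> str:
    raise ValueError(f"invalid event {event!r}")


records = map(failing_translate, ["e1", "e2"])  # no validation has run yet
try:
    list(records)  # the error fires here, i.e. during bulk_create's iteration
except ValueError as error:
    print(error)   # -> invalid event 'e1'
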
buz/kafka/__init__.py
CHANGED
@@ -1,18 +1,32 @@
+from buz.kafka.domain.exceptions.not_all_partition_assigned_exception import NotAllPartitionAssignedException
+from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException
 from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+from buz.kafka.domain.exceptions.topic_not_found_exception import TopicNotFoundException
+from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
 from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
 from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+from buz.kafka.domain.models.kafka_connection_credentials import KafkaConnectionCredentials
+from buz.kafka.domain.models.kafka_connection_plain_text_credentials import KafkaConnectionPlainTextCredentials
+from buz.kafka.domain.models.kafka_connection_sasl_credentials import KafkaConnectionSaslCredentials
 from buz.kafka.domain.models.kafka_consumer_record import KafkaConsumerRecord
+from buz.kafka.domain.models.kafka_supported_sasl_mechanisms import KafkaSupportedSaslMechanisms
 from buz.kafka.domain.models.kafka_supported_security_protocols import KafkaSupportedSecurityProtocols
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
 from buz.kafka.domain.services.kafka_admin_test_client import KafkaAdminTestClient
 from buz.kafka.domain.services.kafka_producer import KafkaProducer
-from buz.kafka.infrastructure.
+from buz.kafka.infrastructure.aiokafka.aiokafka_producer import AIOKafkaProducer
 from buz.kafka.infrastructure.kafka_python.kafka_python_admin_client import KafkaPythonAdminClient
 from buz.kafka.infrastructure.kafka_python.kafka_python_admin_test_client import KafkaPythonAdminTestClient
 from buz.kafka.infrastructure.kafka_python.kafka_python_producer import KafkaPythonProducer
 from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
 from buz.kafka.infrastructure.serializers.implementations.json_byte_serializer import JSONByteSerializer
+from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
+from buz.event.infrastructure.buz_kafka.exceptions.kafka_event_bus_config_not_valid_exception import (
+    KafkaEventBusConfigNotValidException,
+)
+from buz.event.infrastructure.buz_kafka.async_buz_kafka_event_bus import AsyncBuzKafkaEventBus
+from buz.event.infrastructure.buz_kafka.buz_kafka_event_bus import BuzKafkaEventBus


 __all__ = [

@@ -22,7 +36,6 @@ __all__ = [
     "KafkaAdminTestClient",
     "KafkaPythonAdminClient",
     "KafkaPythonAdminTestClient",
-    "KafkaPythonProducerFactory",
     "KafkaTopicsAlreadyCreatedException",
     "KafkaConsumerRecord",
     "CreateKafkaTopic",

@@ -31,4 +44,17 @@ __all__ = [
     "ByteSerializer",
     "JSONByteSerializer",
     "ConsumerInitialOffsetPosition",
+    "KafkaSupportedCompressionType",
+    "KafkaEventBusConfigNotValidException",
+    "AsyncBuzKafkaEventBus",
+    "BuzKafkaEventBus",
+    "AutoCreateTopicConfiguration",
+    "NotAllPartitionAssignedException",
+    "NotValidKafkaMessageException",
+    "TopicNotFoundException",
+    "KafkaConnectionCredentials",
+    "KafkaConnectionPlainTextCredentials",
+    "KafkaConnectionSaslCredentials",
+    "KafkaSupportedSaslMechanisms",
+    "AIOKafkaProducer",
 ]
buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py
ADDED

@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+
+class NotAllPartitionAssignedException(Exception):
+    def __init__(self, topic_name: str) -> None:
+        super().__init__(
+            f'Not all the partition were assigned for the topic "{topic_name}", please disconnect the rest of subscribers'
+        )
buz/kafka/domain/models/__init__.py
File without changes

buz/kafka/domain/services/__init__.py
File without changes
buz/kafka/domain/services/async_kafka_producer.py
ADDED

@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from abc import abstractmethod, ABC
+from typing import Generic, Optional, TypeVar
+
+from buz.kafka.infrastructure.interfaces.async_connection_manager import AsyncConnectionManager
+
+T = TypeVar("T")
+
+
+class AsyncKafkaProducer(AsyncConnectionManager, ABC, Generic[T]):
+    @abstractmethod
+    async def produce(
+        self,
+        *,
+        topic: str,
+        message: T,
+        partition_key: Optional[str] = None,
+        headers: Optional[dict[str, str]] = None,
+    ) -> None:
+        pass
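
All produce parameters are keyword-only (note the bare *), so call sites name every field. A hedged usage sketch against this interface; the topic, payload, and header values are illustrative only:

from buz.kafka.domain.services.async_kafka_producer import AsyncKafkaProducer


async def send_signup(producer: AsyncKafkaProducer[dict]) -> None:
    await producer.connect()  # assumed AsyncConnectionManager method
    try:
        await producer.produce(
            topic="user.signed_up",     # hypothetical topic name
            message={"user_id": "42"},
            partition_key="42",         # optional: keeps per-key ordering
            headers={"id": "evt-123"},  # optional metadata headers
        )
    finally:
        await producer.disconnect()
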
buz/kafka/domain/services/kafka_admin_client.py
CHANGED

@@ -1,14 +1,16 @@
 from __future__ import annotations

 from abc import abstractmethod, ABC
+from datetime import datetime
 from typing import Sequence

 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
+from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager

 DEFAULT_NUMBER_OF_MESSAGES_TO_POLLING = 999


-class KafkaAdminClient(ABC):
+class KafkaAdminClient(ConnectionManager, ABC):
     @abstractmethod
     def create_topics(
         self,

@@ -37,3 +39,16 @@ class KafkaAdminClient(ABC):
         self,
     ) -> set[str]:
         pass
+
+    # This function moves the following offset from the provided date
+    # if there are no messages with a date greater than the provided offset
+    # the offset will be moved to the end
+    @abstractmethod
+    def move_offsets_to_datetime(
+        self,
+        *,
+        consumer_group: str,
+        topic: str,
+        target_datetime: datetime,
+    ) -> None:
+        pass

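
The comment block documents the edge case: partitions with no message newer than the target datetime are moved to the end. A typical use is replaying a consumer group from a point in time; a hedged sketch (group and topic names are illustrative, and the group should generally be stopped first, since Kafka rejects offset changes for a group with active members):

from datetime import datetime, timezone

from buz.kafka import KafkaAdminClient


def replay_since(admin_client: KafkaAdminClient) -> None:
    # Rewind the group so it re-consumes everything published since 2024-01-01 UTC.
    admin_client.move_offsets_to_datetime(
        consumer_group="my-service",  # hypothetical consumer group id
        topic="user.signed_up",       # hypothetical topic
        target_datetime=datetime(2024, 1, 1, tzinfo=timezone.utc),
    )
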
buz/kafka/domain/services/kafka_producer.py
CHANGED

@@ -3,10 +3,12 @@ from __future__ import annotations
 from abc import abstractmethod, ABC
 from typing import Generic, Optional, TypeVar

+from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager
+
 T = TypeVar("T")


-class KafkaProducer(ABC, Generic[T]):
+class KafkaProducer(ConnectionManager, ABC, Generic[T]):
     @abstractmethod
     def produce(
         self,
buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py
CHANGED

@@ -1,7 +1,6 @@
 from __future__ import annotations

 from logging import Logger
-from ssl import SSLContext
 from typing import Awaitable, Callable, Optional, Sequence, cast

 from aiokafka import AIOKafkaConsumer as AIOKafkaNativeConsumer, TopicPartition, OffsetAndMetadata

@@ -69,21 +68,25 @@ class AIOKafkaConsumer:
         )

     def __generate_consumer(self) -> AIOKafkaNativeConsumer:
-        sasl_mechanism: Optional[str] = None
-        ssl_context: Optional[SSLContext] = None
-
         if self.__auto_create_topic_configuration is not None:
             self.__ensure_topics_are_created(self.__auto_create_topic_configuration)

-
-
+        sasl_mechanism = (
+            self.__connection_config.credentials.sasl_mechanism.value
+            if self.__connection_config.credentials.sasl_mechanism
+            else "PLAIN"
+        )
+
+        ssl_context = (
+            create_ssl_context()
+            if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL
+            else None
+        )

-        if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL:
-            ssl_context = create_ssl_context()
         consumer = AIOKafkaNativeConsumer(
             None,
             ssl_context=ssl_context,
-            bootstrap_servers=self.__connection_config.bootstrap_servers,
+            bootstrap_servers=",".join(self.__connection_config.bootstrap_servers),
             security_protocol=self.__connection_config.credentials.security_protocol.value,
             sasl_mechanism=sasl_mechanism,
             sasl_plain_username=self.__connection_config.credentials.user,