buz 2.13.1rc9__py3-none-any.whl → 2.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. buz/event/async_event_bus.py +15 -0
  2. buz/event/event_bus.py +2 -2
  3. buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py +107 -0
  4. buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py +5 -10
  5. buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py +3 -4
  6. buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py +2 -4
  7. buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py +4 -6
  8. buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py +2 -2
  9. buz/event/infrastructure/kombu/kombu_consumer.py +1 -0
  10. buz/event/infrastructure/kombu/kombu_event_bus.py +6 -7
  11. buz/event/middleware/async_publish_middleware.py +13 -0
  12. buz/event/middleware/async_publish_middleware_chain_resolver.py +22 -0
  13. buz/event/sync/sync_event_bus.py +2 -2
  14. buz/event/transactional_outbox/outbox_repository.py +5 -1
  15. buz/event/transactional_outbox/transactional_outbox_event_bus.py +12 -11
  16. buz/kafka/__init__.py +28 -2
  17. buz/kafka/domain/models/__init__.py +0 -0
  18. buz/kafka/domain/models/kafka_supported_compression_type.py +8 -0
  19. buz/kafka/domain/services/__init__.py +0 -0
  20. buz/kafka/domain/services/async_kafka_producer.py +21 -0
  21. buz/kafka/domain/services/kafka_admin_client.py +2 -1
  22. buz/kafka/domain/services/kafka_producer.py +3 -1
  23. buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py +12 -9
  24. buz/kafka/infrastructure/aiokafka/aiokafka_producer.py +98 -0
  25. buz/kafka/infrastructure/interfaces/__init__.py +0 -0
  26. buz/kafka/infrastructure/interfaces/async_connection_manager.py +11 -0
  27. buz/kafka/infrastructure/interfaces/connection_manager.py +11 -0
  28. buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py +35 -12
  29. buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py +2 -2
  30. buz/kafka/infrastructure/kafka_python/kafka_python_producer.py +51 -22
  31. {buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/METADATA +1 -1
  32. {buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/RECORD +35 -24
  33. buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py +0 -20
  34. /buz/kafka/{infrastructure/kafka_python/factories → domain/exceptions}/__init__.py +0 -0
  35. {buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/LICENSE +0 -0
  36. {buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/WHEEL +0 -0
buz/event/async_event_bus.py ADDED
@@ -0,0 +1,15 @@
+ from abc import ABC, abstractmethod
+ from typing import Collection
+
+ from buz.event import Event
+ from buz.kafka.infrastructure.interfaces.async_connection_manager import AsyncConnectionManager
+
+
+ class AsyncEventBus(AsyncConnectionManager, ABC):
+     @abstractmethod
+     async def publish(self, event: Event) -> None:
+         pass
+
+     @abstractmethod
+     async def bulk_publish(self, events: Collection[Event]) -> None:
+         pass
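
The new AsyncEventBus pairs the publishing contract with the AsyncConnectionManager lifecycle. A minimal sketch of a concrete implementation (InMemoryAsyncEventBus is a hypothetical example class, not part of the package):

from typing import Collection

from buz.event import Event
from buz.event.async_event_bus import AsyncEventBus


class InMemoryAsyncEventBus(AsyncEventBus):
    def __init__(self) -> None:
        self.published: list[Event] = []

    async def connect(self) -> None:
        # inherited from AsyncConnectionManager; nothing to open here
        pass

    async def disconnect(self) -> None:
        pass

    async def publish(self, event: Event) -> None:
        self.published.append(event)

    async def bulk_publish(self, events: Collection[Event]) -> None:
        for event in events:
            await self.publish(event)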
buz/event/event_bus.py CHANGED
@@ -1,5 +1,5 @@
  from abc import ABC, abstractmethod
- from typing import Collection
+ from typing import Iterable

  from buz.event import Event

@@ -10,5 +10,5 @@ class EventBus(ABC):
          pass

      @abstractmethod
-     def bulk_publish(self, events: Collection[Event]) -> None:
+     def bulk_publish(self, events: Iterable[Event]) -> None:
          pass
buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py ADDED
@@ -0,0 +1,107 @@
+ from logging import Logger
+ from typing import Collection, Optional
+
+ from buz.event import Event
+ from buz.event.async_event_bus import AsyncEventBus
+ from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
+ from buz.event.infrastructure.buz_kafka.exceptions.kafka_event_bus_config_not_valid_exception import (
+     KafkaEventBusConfigNotValidException,
+ )
+ from buz.event.infrastructure.buz_kafka.publish_strategy.publish_strategy import KafkaPublishStrategy
+ from buz.event.middleware.async_publish_middleware import AsyncPublishMiddleware
+ from buz.event.middleware.async_publish_middleware_chain_resolver import AsyncPublishMiddlewareChainResolver
+ from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+ from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+ from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
+ from buz.kafka.domain.services.async_kafka_producer import AsyncKafkaProducer
+ from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
+
+
+ class AsyncBuzKafkaEventBus(AsyncEventBus):
+     def __init__(
+         self,
+         *,
+         publish_strategy: KafkaPublishStrategy,
+         producer: AsyncKafkaProducer,
+         logger: Logger,
+         kafka_admin_client: Optional[KafkaAdminClient] = None,
+         publish_middlewares: Optional[list[AsyncPublishMiddleware]] = None,
+         auto_create_topic_configuration: Optional[AutoCreateTopicConfiguration] = None,
+     ):
+         self.__publish_middleware_chain_resolver = AsyncPublishMiddlewareChainResolver(publish_middlewares or [])
+         self.__publish_strategy = publish_strategy
+         self.__producer = producer
+         self.__topics_checked: dict[str, bool] = {}
+         self.__kafka_admin_client = kafka_admin_client
+         self.__auto_create_topic_configuration = auto_create_topic_configuration
+         self.__logger = logger
+         self.__check_kafka_admin_client_is_needed()
+
+     def __check_kafka_admin_client_is_needed(self) -> None:
+         if self.__kafka_admin_client is None and self.__auto_create_topic_configuration is not None:
+             raise KafkaEventBusConfigNotValidException(
+                 "A KafkaAdminClient is needed to create topics when 'auto_create_topic_configuration' is set."
+             )
+
+     async def publish(self, event: Event) -> None:
+         await self.__publish_middleware_chain_resolver.resolve(event, self.__perform_publish)
+
+     async def __perform_publish(self, event: Event) -> None:
+         try:
+             topic = self.__publish_strategy.get_topic(event)
+
+             if self.__auto_create_topic_configuration is not None and self.__is_topic_created(topic) is False:
+                 try:
+                     self.__logger.info(f"Creating missing topic: {topic}..")
+                     self.__get_kafka_admin_client().create_topics(
+                         topics=[
+                             CreateKafkaTopic(
+                                 name=topic,
+                                 partitions=self.__auto_create_topic_configuration.partitions,
+                                 replication_factor=self.__auto_create_topic_configuration.replication_factor,
+                                 configs=self.__auto_create_topic_configuration.configs,
+                             )
+                         ]
+                     )
+                     self.__logger.info(f"Created missing topic: {topic}")
+                     self.__topics_checked[topic] = True
+                 except KafkaTopicsAlreadyCreatedException:
+                     pass
+
+             headers = self.__get_event_headers(event)
+             await self.__producer.produce(
+                 message=event,
+                 headers=headers,
+                 topic=topic,
+             )
+         except Exception as exc:
+             raise EventNotPublishedException(event) from exc
+
+     def __get_kafka_admin_client(self) -> KafkaAdminClient:
+         if self.__kafka_admin_client is None:
+             raise KafkaEventBusConfigNotValidException("KafkaAdminClient is not set.")
+         return self.__kafka_admin_client
+
+     def __is_topic_created(self, topic: str) -> bool:
+         is_topic_created = self.__topics_checked.get(topic, None)
+
+         if is_topic_created is not None:
+             return is_topic_created
+
+         is_topic_created = self.__get_kafka_admin_client().is_topic_created(topic)
+         self.__topics_checked[topic] = is_topic_created
+
+         return is_topic_created
+
+     async def bulk_publish(self, events: Collection[Event]) -> None:
+         for event in events:
+             await self.publish(event)
+
+     def __get_event_headers(self, event: Event) -> dict:
+         return {"id": event.id}
+
+     async def connect(self) -> None:
+         await self.__producer.connect()
+
+     async def disconnect(self) -> None:
+         await self.__producer.disconnect()
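
A wiring sketch for the new async bus, assuming only the constructor signature shown above (publish_strategy, serializer and event are placeholders supplied by the caller):

import logging

from buz.kafka import AIOKafkaProducer, KafkaConnectionConfig
from buz.event.infrastructure.buz_kafka.async_buz_kafka_event_bus import AsyncBuzKafkaEventBus


async def publish_one(connection_config: KafkaConnectionConfig, publish_strategy, serializer, event) -> None:
    producer = AIOKafkaProducer(connection_config=connection_config, byte_serializer=serializer)
    bus = AsyncBuzKafkaEventBus(
        publish_strategy=publish_strategy,
        producer=producer,
        logger=logging.getLogger("buz"),
    )
    await bus.connect()  # starts the underlying aiokafka producer
    try:
        await bus.publish(event)
    finally:
        await bus.disconnect()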
buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py CHANGED
@@ -17,11 +17,9 @@ from buz.event.infrastructure.buz_kafka.consume_strategy.consume_strategy import
  from buz.event.infrastructure.buz_kafka.kafka_event_subscriber_executor import KafkaEventSubscriberExecutor
  from buz.event.infrastructure.models.consuming_task import ConsumingTask
  from buz.event.meta_subscriber import MetaSubscriber
- from buz.kafka import (
-     KafkaConnectionConfig,
-     ConsumerInitialOffsetPosition,
- )
  from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+ from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+ from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
  from buz.kafka.domain.models.kafka_poll_record import KafkaPollRecord
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
  from buz.kafka.infrastructure.aiokafka.aiokafka_consumer import AIOKafkaConsumer
@@ -107,12 +105,9 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):

          if self.__exceptions_are_thrown(worker_errors):
              consume_events_exception, polling_task_exception = worker_errors
-             if consume_events_exception:
-                 self._logger.exception(consume_events_exception)
-             if polling_task_exception:
-                 self._logger.exception(polling_task_exception)
-
-             raise WorkerExecutionException("The worker was closed by an unexpected exception")
+             raise WorkerExecutionException(
+                 "The worker was closed by an unexpected exception"
+             ) from consume_events_exception or polling_task_exception

      async def __run_worker(self) -> tuple[Optional[Exception], Optional[Exception]]:
          consume_events_task = create_task(self.__consume_events_task())
buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py CHANGED
@@ -15,11 +15,10 @@ from buz.event.meta_subscriber import MetaSubscriber
  from buz.event.middleware.async_consume_middleware import AsyncConsumeMiddleware
  from buz.event.strategies.retry.consume_retrier import ConsumeRetrier
  from buz.event.strategies.retry.reject_callback import RejectCallback
- from buz.kafka import (
-     KafkaConnectionConfig,
-     ConsumerInitialOffsetPosition,
- )
+
  from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+ from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+ from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
  from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
  from buz.kafka.infrastructure.deserializers.implementations.json_bytes_to_message_deserializer import (
buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py CHANGED
@@ -15,11 +15,9 @@ from buz.event.meta_subscriber import MetaSubscriber
  from buz.event.middleware.consume_middleware import ConsumeMiddleware
  from buz.event.strategies.retry.consume_retrier import ConsumeRetrier
  from buz.event.strategies.retry.reject_callback import RejectCallback
- from buz.kafka import (
-     KafkaConnectionConfig,
-     ConsumerInitialOffsetPosition,
- )
  from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
+ from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
+ from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
  from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
  from buz.kafka.infrastructure.deserializers.implementations.json_bytes_to_message_deserializer import (
buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py CHANGED
@@ -1,5 +1,5 @@
  from logging import Logger
- from typing import Collection, Optional
+ from typing import Optional, Iterable

  from buz.event import Event, EventBus
  from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
@@ -11,13 +11,11 @@ from buz.event.middleware import (
      PublishMiddleware,
  )
  from buz.event.middleware.publish_middleware_chain_resolver import PublishMiddlewareChainResolver
- from buz.kafka import (
-     KafkaPythonProducer,
- )
  from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
  from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
  from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
+ from buz.kafka.domain.services.kafka_producer import KafkaProducer


  class BuzKafkaEventBus(EventBus):
@@ -25,7 +23,7 @@ class BuzKafkaEventBus(EventBus):
          self,
          *,
          publish_strategy: KafkaPublishStrategy,
-         producer: KafkaPythonProducer,
+         producer: KafkaProducer,
          logger: Logger,
          kafka_admin_client: Optional[KafkaAdminClient] = None,
          publish_middlewares: Optional[list[PublishMiddleware]] = None,
@@ -96,7 +94,7 @@ class BuzKafkaEventBus(EventBus):

          return is_topic_created

-     def bulk_publish(self, events: Collection[Event]) -> None:
+     def bulk_publish(self, events: Iterable[Event]) -> None:
          for event in events:
              self.publish(event)
buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py CHANGED
@@ -81,13 +81,13 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                  return
              except Exception as exception:
                  self.__logger.warning(f"Event {event.id} could not be consumed by the subscriber {subscriber.fqn}")
-                 self.__logger.error(exception, exc_info=True)
-
                  if self.__should_retry(event, subscriber) is True:
                      self.__register_retry(event, subscriber)
                      time.sleep(self.__seconds_between_retires)
                      continue

+                 self.__logger.exception(exception)
+
                  if self.__reject_callback:
                      self.__reject_callback.on_reject(event=event, subscribers=[subscriber], exception=exception)
buz/event/infrastructure/kombu/kombu_consumer.py CHANGED
@@ -135,6 +135,7 @@ class KombuConsumer(ConsumerMixin, Consumer):
              message.requeue()
              return

+         self.__logger.exception(exception)
          self.__reject_message(message, event, subscribers, exception)

      def __reject_message(
buz/event/infrastructure/kombu/kombu_event_bus.py CHANGED
@@ -1,21 +1,20 @@
  from dataclasses import asdict
- from typing import Collection, Optional
+ from typing import Optional, Iterable

  from kombu import Connection, Exchange, Producer
  from kombu.entity import PERSISTENT_DELIVERY_MODE

  from buz.event import Event, EventBus
- from buz.event.infrastructure.kombu.publish_strategy import PublishStrategy
+ from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
  from buz.event.infrastructure.kombu.allowed_kombu_serializer import AllowedKombuSerializer
+ from buz.event.infrastructure.kombu.publish_strategy import PublishStrategy
+ from buz.event.infrastructure.kombu.retry_strategy.publish_retry_policy import PublishRetryPolicy
+ from buz.event.infrastructure.kombu.retry_strategy.simple_publish_retry_policy import SimplePublishRetryPolicy
  from buz.event.middleware import (
      PublishMiddleware,
      PublishMiddlewareChainResolver,
  )

- from buz.event.exceptions.event_not_published_exception import EventNotPublishedException
- from buz.event.infrastructure.kombu.retry_strategy.publish_retry_policy import PublishRetryPolicy
- from buz.event.infrastructure.kombu.retry_strategy.simple_publish_retry_policy import SimplePublishRetryPolicy
-

  class KombuEventBus(EventBus):
      def __init__(
@@ -96,6 +95,6 @@ class KombuEventBus(EventBus):
      def __get_headers(self, event: Event) -> dict:
          return {"fqn": event.fqn()}

-     def bulk_publish(self, events: Collection[Event]) -> None:
+     def bulk_publish(self, events: Iterable[Event]) -> None:
          for event in events:
              self.publish(event)
buz/event/middleware/async_publish_middleware.py ADDED
@@ -0,0 +1,13 @@
+ from abc import abstractmethod
+ from typing import Awaitable, Callable
+
+ from buz.event import Event
+ from buz.middleware import Middleware
+
+ AsyncPublishCallable = Callable[[Event], Awaitable[None]]
+
+
+ class AsyncPublishMiddleware(Middleware):
+     @abstractmethod
+     async def on_publish(self, event: Event, publish: AsyncPublishCallable) -> None:
+         pass
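
A sketch of a concrete middleware (LoggingAsyncPublishMiddleware is a hypothetical example, and it assumes buz.middleware.Middleware adds no further abstract requirements):

from logging import Logger

from buz.event import Event
from buz.event.middleware.async_publish_middleware import AsyncPublishCallable, AsyncPublishMiddleware


class LoggingAsyncPublishMiddleware(AsyncPublishMiddleware):
    def __init__(self, logger: Logger) -> None:
        self.__logger = logger

    async def on_publish(self, event: Event, publish: AsyncPublishCallable) -> None:
        self.__logger.info(f"publishing {event.id}")
        await publish(event)  # hand off to the next middleware, or to the bus itself
        self.__logger.info(f"published {event.id}")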
buz/event/middleware/async_publish_middleware_chain_resolver.py ADDED
@@ -0,0 +1,22 @@
+ from buz.event import Event
+ from buz.event.middleware.async_publish_middleware import AsyncPublishCallable, AsyncPublishMiddleware
+ from buz.middleware import MiddlewareChainBuilder
+
+
+ class AsyncPublishMiddlewareChainResolver:
+     def __init__(self, middlewares: list[AsyncPublishMiddleware]):
+         self.__middlewares = middlewares
+         self.__middleware_chain_builder: MiddlewareChainBuilder[
+             AsyncPublishCallable, AsyncPublishMiddleware
+         ] = MiddlewareChainBuilder(middlewares)
+
+     async def resolve(self, event: Event, publish: AsyncPublishCallable) -> None:
+         chain_callable: AsyncPublishCallable = self.__middleware_chain_builder.get_chain_callable(
+             publish, self.__get_middleware_callable
+         )
+         await chain_callable(event)
+
+     def __get_middleware_callable(
+         self, middleware: AsyncPublishMiddleware, publish_callable: AsyncPublishCallable
+     ) -> AsyncPublishCallable:
+         return lambda event: middleware.on_publish(event, publish_callable)
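
And how a bus would use the resolver, reusing the LoggingAsyncPublishMiddleware sketch above (perform_publish stands in for the innermost publish callable):

from logging import getLogger

from buz.event import Event
from buz.event.middleware.async_publish_middleware_chain_resolver import AsyncPublishMiddlewareChainResolver


async def perform_publish(event: Event) -> None:
    ...  # actually deliver the event


async def publish(event: Event) -> None:
    resolver = AsyncPublishMiddlewareChainResolver([LoggingAsyncPublishMiddleware(getLogger("buz"))])
    await resolver.resolve(event, perform_publish)  # middlewares wrap perform_publish, outermost first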
buz/event/sync/sync_event_bus.py CHANGED
@@ -1,4 +1,4 @@
- from typing import Collection, Optional
+ from typing import Optional, Iterable

  from buz.event import Event, EventBus, Subscriber
  from buz.event.middleware import (
@@ -32,6 +32,6 @@ class SyncEventBus(EventBus):
      def __perform_consume(self, event: Event, subscriber: Subscriber) -> None:
          subscriber.consume(event)

-     def bulk_publish(self, events: Collection[Event]) -> None:
+     def bulk_publish(self, events: Iterable[Event]) -> None:
          for event in events:
              self.publish(event)
buz/event/transactional_outbox/outbox_repository.py CHANGED
@@ -1,5 +1,5 @@
  from abc import ABC, abstractmethod
- from typing import Sequence
+ from typing import Sequence, Iterable

  from buz.event.transactional_outbox import OutboxRecord
  from buz.event.transactional_outbox import OutboxCriteria
@@ -10,6 +10,10 @@ class OutboxRepository(ABC):
      def save(self, outbox_record: OutboxRecord) -> None:
          pass

+     @abstractmethod
+     def bulk_create(self, outbox_records: Iterable[OutboxRecord]) -> None:
+         pass
+
      @abstractmethod
      def find(self, criteria: OutboxCriteria) -> Sequence[OutboxRecord]:
          pass
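
A sketch of a repository honoring the new method (an in-memory example; only the abstract methods visible in this diff are implemented, the real interface may declare more):

from typing import Iterable, Sequence

from buz.event.transactional_outbox import OutboxCriteria, OutboxRecord
from buz.event.transactional_outbox.outbox_repository import OutboxRepository


class InMemoryOutboxRepository(OutboxRepository):
    def __init__(self) -> None:
        self.__records: list[OutboxRecord] = []

    def save(self, outbox_record: OutboxRecord) -> None:
        self.__records.append(outbox_record)

    def bulk_create(self, outbox_records: Iterable[OutboxRecord]) -> None:
        # iterating here is what drives the lazy map(...) built by bulk_publish
        self.__records.extend(outbox_records)

    def find(self, criteria: OutboxCriteria) -> Sequence[OutboxRecord]:
        return list(self.__records)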
buz/event/transactional_outbox/transactional_outbox_event_bus.py CHANGED
@@ -1,6 +1,7 @@
- from typing import Collection, Optional
+ from typing import Optional, Iterable

  from buz.event import Event, EventBus
+ from buz.event.transactional_outbox import OutboxRecord
  from buz.event.transactional_outbox.event_to_outbox_record_translator import EventToOutboxRecordTranslator
  from buz.event.transactional_outbox.outbox_record_validation.outbox_record_validator import OutboxRecordValidator
  from buz.event.transactional_outbox.outbox_repository import OutboxRepository
@@ -18,16 +19,16 @@ class TransactionalOutboxEventBus(EventBus):
          self.__outbox_record_validator = outbox_record_validator

      def publish(self, event: Event) -> None:
-         """
-         Raises:
-             OutboxRecordValidationException: If any validation inside outbox_record_validator fails.
-         """
+         outbox_record = self.__translate_and_validate(event)
+         self.__outbox_repository.save(outbox_record)
+
+     def bulk_publish(self, events: Iterable[Event]) -> None:
+         outbox_records = map(self.__translate_and_validate, events)
+         self.__outbox_repository.bulk_create(outbox_records)
+
+     # Raises OutboxRecordValidationException: If any validation inside outbox_record_validator fails
+     def __translate_and_validate(self, event: Event) -> OutboxRecord:
          outbox_record = self.__event_to_outbox_record_translator.translate(event)
          if self.__outbox_record_validator is not None:
              self.__outbox_record_validator.validate(record=outbox_record)
-
-         self.__outbox_repository.save(outbox_record)
-
-     def bulk_publish(self, events: Collection[Event]) -> None:
-         for event in events:
-             self.publish(event)
+         return outbox_record
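
Note that bulk_publish now hands bulk_create a lazy map(...), so translation and validation run only as the repository iterates the records, not when bulk_publish is called. Illustrated with placeholder names:

def bulk_publish_behaviour(events, translate_and_validate, repository):
    records = map(translate_and_validate, events)  # nothing translated or validated yet
    repository.bulk_create(records)  # OutboxRecordValidationException surfaces while iterating here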
buz/kafka/__init__.py CHANGED
@@ -1,18 +1,32 @@
+ from buz.kafka.domain.exceptions.not_all_partition_assigned_exception import NotAllPartitionAssignedException
+ from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException
  from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+ from buz.kafka.domain.exceptions.topic_not_found_exception import TopicNotFoundException
+ from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
  from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
  from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+ from buz.kafka.domain.models.kafka_connection_credentials import KafkaConnectionCredentials
+ from buz.kafka.domain.models.kafka_connection_plain_text_credentials import KafkaConnectionPlainTextCredentials
+ from buz.kafka.domain.models.kafka_connection_sasl_credentials import KafkaConnectionSaslCredentials
  from buz.kafka.domain.models.kafka_consumer_record import KafkaConsumerRecord
+ from buz.kafka.domain.models.kafka_supported_sasl_mechanisms import KafkaSupportedSaslMechanisms
  from buz.kafka.domain.models.kafka_supported_security_protocols import KafkaSupportedSecurityProtocols
  from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
  from buz.kafka.domain.services.kafka_admin_test_client import KafkaAdminTestClient
  from buz.kafka.domain.services.kafka_producer import KafkaProducer
- from buz.kafka.infrastructure.kafka_python.factories.kafka_python_producer_factory import KafkaPythonProducerFactory
+ from buz.kafka.infrastructure.aiokafka.aiokafka_producer import AIOKafkaProducer
  from buz.kafka.infrastructure.kafka_python.kafka_python_admin_client import KafkaPythonAdminClient
  from buz.kafka.infrastructure.kafka_python.kafka_python_admin_test_client import KafkaPythonAdminTestClient
  from buz.kafka.infrastructure.kafka_python.kafka_python_producer import KafkaPythonProducer
  from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
  from buz.kafka.infrastructure.serializers.implementations.json_byte_serializer import JSONByteSerializer
+ from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
+ from buz.event.infrastructure.buz_kafka.exceptions.kafka_event_bus_config_not_valid_exception import (
+     KafkaEventBusConfigNotValidException,
+ )
+ from buz.event.infrastructure.buz_kafka.async_buz_kafka_event_bus import AsyncBuzKafkaEventBus
+ from buz.event.infrastructure.buz_kafka.buz_kafka_event_bus import BuzKafkaEventBus


  __all__ = [
@@ -22,7 +36,6 @@ __all__ = [
      "KafkaAdminTestClient",
      "KafkaPythonAdminClient",
      "KafkaPythonAdminTestClient",
-     "KafkaPythonProducerFactory",
      "KafkaTopicsAlreadyCreatedException",
      "KafkaConsumerRecord",
      "CreateKafkaTopic",
@@ -31,4 +44,17 @@ __all__ = [
      "ByteSerializer",
      "JSONByteSerializer",
      "ConsumerInitialOffsetPosition",
+     "KafkaSupportedCompressionType",
+     "KafkaEventBusConfigNotValidException",
+     "AsyncBuzKafkaEventBus",
+     "BuzKafkaEventBus",
+     "AutoCreateTopicConfiguration",
+     "NotAllPartitionAssignedException",
+     "NotValidKafkaMessageException",
+     "TopicNotFoundException",
+     "KafkaConnectionCredentials",
+     "KafkaConnectionPlainTextCredentials",
+     "KafkaConnectionSaslCredentials",
+     "KafkaSupportedSaslMechanisms",
+     "AIOKafkaProducer",
  ]
buz/kafka/domain/models/__init__.py ADDED
File without changes
buz/kafka/domain/models/kafka_supported_compression_type.py ADDED
@@ -0,0 +1,8 @@
+ from enum import Enum
+
+
+ class KafkaSupportedCompressionType(Enum):
+     GZIP = "gzip"
+     SNAPPY = "snappy"
+     LZ4 = "lz4"
+     ZSTD = "zstd"
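
The enum feeds the compression_type keyword now accepted by both producers in this release; for example (connection_config and serializer are placeholders):

from buz.kafka import KafkaPythonProducer, KafkaSupportedCompressionType


def build_gzip_producer(connection_config, serializer) -> KafkaPythonProducer:
    return KafkaPythonProducer(
        connection_config=connection_config,
        byte_serializer=serializer,
        compression_type=KafkaSupportedCompressionType.GZIP,
    )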
buz/kafka/domain/services/__init__.py ADDED
File without changes
buz/kafka/domain/services/async_kafka_producer.py ADDED
@@ -0,0 +1,21 @@
+ from __future__ import annotations
+
+ from abc import abstractmethod, ABC
+ from typing import Generic, Optional, TypeVar
+
+ from buz.kafka.infrastructure.interfaces.async_connection_manager import AsyncConnectionManager
+
+ T = TypeVar("T")
+
+
+ class AsyncKafkaProducer(AsyncConnectionManager, ABC, Generic[T]):
+     @abstractmethod
+     async def produce(
+         self,
+         *,
+         topic: str,
+         message: T,
+         partition_key: Optional[str] = None,
+         headers: Optional[dict[str, str]] = None,
+     ) -> None:
+         pass
buz/kafka/domain/services/kafka_admin_client.py CHANGED
@@ -5,11 +5,12 @@ from datetime import datetime
  from typing import Sequence

  from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
+ from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager

  DEFAULT_NUMBER_OF_MESSAGES_TO_POLLING = 999


- class KafkaAdminClient(ABC):
+ class KafkaAdminClient(ConnectionManager, ABC):
      @abstractmethod
      def create_topics(
          self,
buz/kafka/domain/services/kafka_producer.py CHANGED
@@ -3,10 +3,12 @@ from __future__ import annotations
  from abc import abstractmethod, ABC
  from typing import Generic, Optional, TypeVar

+ from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager
+
  T = TypeVar("T")


- class KafkaProducer(ABC, Generic[T]):
+ class KafkaProducer(ConnectionManager, ABC, Generic[T]):
      @abstractmethod
      def produce(
          self,
buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py CHANGED
@@ -1,7 +1,6 @@
  from __future__ import annotations

  from logging import Logger
- from ssl import SSLContext
  from typing import Awaitable, Callable, Optional, Sequence, cast

  from aiokafka import AIOKafkaConsumer as AIOKafkaNativeConsumer, TopicPartition, OffsetAndMetadata
@@ -69,21 +68,25 @@ class AIOKafkaConsumer:
      )

      def __generate_consumer(self) -> AIOKafkaNativeConsumer:
-         sasl_mechanism: Optional[str] = None
-         ssl_context: Optional[SSLContext] = None
-
          if self.__auto_create_topic_configuration is not None:
              self.__ensure_topics_are_created(self.__auto_create_topic_configuration)

-         if self.__connection_config.credentials.sasl_mechanism is not None:
-             sasl_mechanism = self.__connection_config.credentials.sasl_mechanism.value
+         sasl_mechanism = (
+             self.__connection_config.credentials.sasl_mechanism.value
+             if self.__connection_config.credentials.sasl_mechanism
+             else "PLAIN"
+         )
+
+         ssl_context = (
+             create_ssl_context()
+             if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL
+             else None
+         )

-         if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL:
-             ssl_context = create_ssl_context()
          consumer = AIOKafkaNativeConsumer(
              None,
              ssl_context=ssl_context,
-             bootstrap_servers=self.__connection_config.bootstrap_servers,
+             bootstrap_servers=",".join(self.__connection_config.bootstrap_servers),
              security_protocol=self.__connection_config.credentials.security_protocol.value,
              sasl_mechanism=sasl_mechanism,
              sasl_plain_username=self.__connection_config.credentials.user,
buz/kafka/infrastructure/aiokafka/aiokafka_producer.py ADDED
@@ -0,0 +1,98 @@
+ from __future__ import annotations
+
+ from ssl import SSLContext
+ from typing import Generic, Optional, TypeVar
+
+ from aiokafka import AIOKafkaProducer as NativeAIOKafkaProducer
+ from aiokafka.helpers import create_ssl_context
+
+ from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+ from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
+ from buz.kafka.domain.models.kafka_supported_security_protocols import KafkaSupportedSecurityProtocols
+ from buz.kafka.domain.services.async_kafka_producer import AsyncKafkaProducer
+ from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
+ from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer
+
+ T = TypeVar("T")
+
+
+ class AIOKafkaProducer(AsyncKafkaProducer, Generic[T]):
+     __DEFAULT_REQUEST_TIMEOUT_MS = 5000
+     __kafka_producer: Optional[NativeAIOKafkaProducer] = None
+
+     def __init__(
+         self,
+         *,
+         connection_config: KafkaConnectionConfig,
+         byte_serializer: ByteSerializer[T],
+         compression_type: Optional[KafkaSupportedCompressionType] = None,
+         retry_backoff_ms: int = 100,
+     ) -> None:
+         self.__connection_config = connection_config
+         self.__byte_serializer = byte_serializer
+         self.__header_serializer = KafkaHeaderSerializer()
+         self.__compression_type = compression_type
+         self.__retry_backoff_ms = retry_backoff_ms
+
+     async def _get_aiokafka_producer(self) -> NativeAIOKafkaProducer:
+         if self.__kafka_producer:
+             return self.__kafka_producer
+
+         ssl_context: Optional[SSLContext] = None
+
+         sasl_mechanism = (
+             self.__connection_config.credentials.sasl_mechanism.value
+             if self.__connection_config.credentials.sasl_mechanism
+             else "PLAIN"
+         )
+
+         if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL:
+             ssl_context = create_ssl_context()
+
+         self.__kafka_producer = NativeAIOKafkaProducer(
+             client_id=self.__connection_config.client_id,
+             bootstrap_servers=",".join(self.__connection_config.bootstrap_servers),
+             sasl_mechanism=sasl_mechanism,
+             ssl_context=ssl_context,
+             sasl_plain_username=self.__connection_config.credentials.user,
+             sasl_plain_password=self.__connection_config.credentials.password,
+             retry_backoff_ms=self.__retry_backoff_ms,
+             request_timeout_ms=self.__DEFAULT_REQUEST_TIMEOUT_MS,
+             compression_type=self.__compression_type.value if self.__compression_type else None,
+         )
+
+         await self.__kafka_producer.start()
+
+         return self.__kafka_producer
+
+     async def connect(self) -> None:
+         await self._get_aiokafka_producer()
+
+     async def disconnect(self) -> None:
+         if self.__kafka_producer is None:
+             return None
+         await self.__kafka_producer.stop()
+         self.__kafka_producer = None
+
+     async def produce(
+         self,
+         *,
+         topic: str,
+         message: T,
+         partition_key: Optional[str] = None,
+         headers: Optional[dict[str, str]] = None,
+     ) -> None:
+         serialized_headers = self.__header_serializer.serialize(headers) if headers is not None else None
+         kafka_producer = await self._get_aiokafka_producer()
+
+         await kafka_producer.send_and_wait(
+             topic=topic,
+             value=self.__byte_serializer.serialize(message),
+             headers=serialized_headers,
+             key=partition_key.encode("utf-8") if partition_key else None,
+         )
+
+     async def close(self) -> None:
+         if self.__kafka_producer is not None:
+             await self.__kafka_producer.stop()
+             self.__kafka_producer = None
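
A usage sketch for the new async producer (assuming JSONByteSerializer serializes plain dicts; the topic and payload are illustrative):

from buz.kafka import AIOKafkaProducer, JSONByteSerializer, KafkaConnectionConfig


async def produce_one(connection_config: KafkaConnectionConfig) -> None:
    producer = AIOKafkaProducer(connection_config=connection_config, byte_serializer=JSONByteSerializer())
    await producer.connect()  # lazily builds and starts the native aiokafka producer
    try:
        await producer.produce(topic="demo-topic", message={"hello": "world"})
    finally:
        await producer.disconnect()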
buz/kafka/infrastructure/interfaces/__init__.py ADDED
File without changes
buz/kafka/infrastructure/interfaces/async_connection_manager.py ADDED
@@ -0,0 +1,11 @@
+ from abc import ABC, abstractmethod
+
+
+ class AsyncConnectionManager(ABC):
+     @abstractmethod
+     async def connect(self) -> None:
+         pass
+
+     @abstractmethod
+     async def disconnect(self) -> None:
+         pass
buz/kafka/infrastructure/interfaces/connection_manager.py ADDED
@@ -0,0 +1,11 @@
+ from abc import ABC, abstractmethod
+
+
+ class ConnectionManager(ABC):
+     @abstractmethod
+     def connect(self) -> None:
+         pass
+
+     @abstractmethod
+     def disconnect(self) -> None:
+         pass
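
Both interfaces define an explicit connect/disconnect lifecycle. A hypothetical helper (not part of the package) shows the intended calling pattern for the synchronous variant:

from contextlib import contextmanager
from typing import Iterator

from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager


@contextmanager
def managed(manager: ConnectionManager) -> Iterator[ConnectionManager]:
    manager.connect()
    try:
        yield manager
    finally:
        manager.disconnect()  # always release the underlying clients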
buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py CHANGED
@@ -30,18 +30,19 @@ TOPIC_CACHE_KEY = "topics"
  class KafkaPythonAdminClient(KafkaAdminClient):
      __PYTHON_KAFKA_DUPLICATED_TOPIC_ERROR_CODE = 36

+     _kafka_admin: Optional[KafkaPythonLibraryAdminClient] = None
+     _kafka_client: Optional[KafkaClient] = None
+
      def __init__(
          self,
          *,
          logger: Logger,
-         config: KafkaConnectionConfig,
+         connection_config: KafkaConnectionConfig,
          cache_ttl_seconds: int = 0,
      ):
          self._logger = logger
-         self._config = config
-         self._config_in_library_format = self.__get_kafka_config_in_library_format(config)
-         self._kafka_admin = KafkaPythonLibraryAdminClient(**self._config_in_library_format)
-         self._kafka_client = KafkaClient(**self._config_in_library_format)
+         self.__connection_config = connection_config
+         self._config_in_library_format = self.__get_kafka_config_in_library_format(self.__connection_config)
          self.__ttl_cache: TTLCache[str, Any] = TTLCache(maxsize=1, ttl=cache_ttl_seconds)

      def __get_kafka_config_in_library_format(self, config: KafkaConnectionConfig) -> dict:
@@ -54,6 +55,28 @@ class KafkaPythonAdminClient(KafkaAdminClient):
              "sasl_plain_password": config.credentials.password,
          }

+     def connect(self):
+         self._get_kafka_admin()
+         self._get_kafka_client()
+
+     def disconnect(self):
+         if self._kafka_admin is not None:
+             self._kafka_admin.close()
+             self._kafka_admin = None
+         if self._kafka_client is not None:
+             self._kafka_client.close()
+             self._kafka_client = None
+
+     def _get_kafka_admin(self) -> KafkaPythonLibraryAdminClient:
+         if not self._kafka_admin:
+             self._kafka_admin = KafkaPythonLibraryAdminClient(**self._config_in_library_format)
+         return self._kafka_admin
+
+     def _get_kafka_client(self) -> KafkaClient:
+         if not self._kafka_client:
+             self._kafka_client = KafkaClient(**self._config_in_library_format)
+         return self._kafka_client
+
      def create_topics(
          self,
          *,
@@ -70,7 +93,7 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          ]

          try:
-             self._kafka_admin.create_topics(new_topics=new_topics)
+             self._get_kafka_admin().create_topics(new_topics=new_topics)
          except TopicAlreadyExistsError as error:
              topic_names = self.__get_list_of_kafka_topics_from_topic_already_exists_error(error)
              raise KafkaTopicsAlreadyCreatedException(topic_names=topic_names)
@@ -94,7 +117,7 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          self,
      ) -> set[str]:
          return self.__resolve_cached_property(
-             TOPIC_CACHE_KEY, lambda: set(self._kafka_admin.list_topics()) - INTERNAL_KAFKA_TOPICS
+             TOPIC_CACHE_KEY, lambda: set(self._get_kafka_admin().list_topics()) - INTERNAL_KAFKA_TOPICS
          )

      def __resolve_cached_property(self, property_key: str, callback: Callable) -> Any:
@@ -110,7 +133,7 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          *,
          topics: set[str],
      ) -> None:
-         self._kafka_admin.delete_topics(
+         self._get_kafka_admin().delete_topics(
              topics=topics,
          )
          self.__remove_cache_property(TOPIC_CACHE_KEY)
@@ -123,18 +146,18 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          *,
          subscription_groups: set[str],
      ) -> None:
-         self._kafka_admin.delete_consumer_groups(
+         self._get_kafka_admin().delete_consumer_groups(
              group_ids=subscription_groups,
          )

      def get_subscription_groups(
          self,
      ) -> set[str]:
-         return set(self._kafka_admin.list_consumer_groups())
+         return set(self._get_kafka_admin().list_consumer_groups())

      def _wait_for_cluster_update(self) -> None:
-         future = self._kafka_client.cluster.request_update()
-         self._kafka_client.poll(future=future)
+         future = self._get_kafka_client().cluster.request_update()
+         self._get_kafka_client().poll(future=future)

      def move_offsets_to_datetime(
          self,
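
The admin client now builds its kafka-python clients lazily, so constructing it no longer opens connections. A calling sketch (note the keyword rename from config to connection_config; get_subscription_groups is used because it appears in this diff):

import logging

from buz.kafka import KafkaConnectionConfig, KafkaPythonAdminClient


def list_subscription_groups(connection_config: KafkaConnectionConfig) -> set[str]:
    admin = KafkaPythonAdminClient(
        logger=logging.getLogger("buz"),
        connection_config=connection_config,
    )
    admin.connect()  # optional: the first admin call would create the clients anyway
    try:
        return admin.get_subscription_groups()
    finally:
        admin.disconnect()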
buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py CHANGED
@@ -27,10 +27,10 @@ class KafkaPythonAdminTestClient(KafkaPythonAdminClient, KafkaAdminTestClient):
          self,
          *,
          logger: Logger,
-         config: KafkaConnectionConfig,
+         connection_config: KafkaConnectionConfig,
      ):
          super().__init__(
-             config=config,
+             connection_config=connection_config,
              logger=logger,
          )

buz/kafka/infrastructure/kafka_python/kafka_python_producer.py CHANGED
@@ -1,10 +1,12 @@
  from __future__ import annotations

- from typing import Generic, Optional, TypeVar
+ from typing import Generic, Optional, TypeVar, cast

  from kafka import KafkaProducer as KafkaPythonLibraryProducer
+ from kafka.producer.future import FutureRecordMetadata

  from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+ from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
  from buz.kafka.domain.services.kafka_producer import KafkaProducer
  from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
  from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer
@@ -13,33 +15,55 @@ T = TypeVar("T")


  class KafkaPythonProducer(KafkaProducer, Generic[T]):
+     __kafka_producer: Optional[KafkaPythonLibraryProducer] = None
+     __SEND_TIMEOUT_SECONDS = 5
+
      def __init__(
          self,
          *,
-         config: KafkaConnectionConfig,
+         connection_config: KafkaConnectionConfig,
          byte_serializer: ByteSerializer[T],
          retries: int = 0,
          retry_backoff_ms: int = 100,
+         compression_type: Optional[KafkaSupportedCompressionType] = None,
      ) -> None:
-         self.__config = config
+         self.__connection_config = connection_config
          self.__byte_serializer = byte_serializer
          self.__header_serializer = KafkaHeaderSerializer()
+         self.__retries = retries
+         self.__retry_backoff_ms = retry_backoff_ms
+         self.__compression_type = compression_type

-         sasl_mechanism: Optional[str] = None
+     def _get_kafka_producer(self) -> KafkaPythonLibraryProducer:
+         if self.__kafka_producer is None:
+             sasl_mechanism = (
+                 self.__connection_config.credentials.sasl_mechanism.value
+                 if self.__connection_config.credentials.sasl_mechanism
+                 else None
+             )
+             compression_type = self.__compression_type.value if self.__compression_type else None

-         if self.__config.credentials.sasl_mechanism is not None:
-             sasl_mechanism = self.__config.credentials.sasl_mechanism.value
+             self.__kafka_producer = KafkaPythonLibraryProducer(
+                 client_id=self.__connection_config.client_id,
+                 bootstrap_servers=self.__connection_config.bootstrap_servers,
+                 security_protocol=self.__connection_config.credentials.security_protocol.value,
+                 sasl_mechanism=sasl_mechanism,
+                 sasl_plain_username=self.__connection_config.credentials.user,
+                 sasl_plain_password=self.__connection_config.credentials.password,
+                 retries=self.__retries,
+                 retry_backoff_ms=self.__retry_backoff_ms,
+                 compression_type=compression_type,
+             )

-         self.__kafka_producer = KafkaPythonLibraryProducer(
-             client_id=self.__config.client_id,
-             bootstrap_servers=self.__config.bootstrap_servers,
-             security_protocol=self.__config.credentials.security_protocol.value,
-             sasl_mechanism=sasl_mechanism,
-             sasl_plain_username=self.__config.credentials.user,
-             sasl_plain_password=self.__config.credentials.password,
-             retries=retries,
-             retry_backoff_ms=retry_backoff_ms,
-         )
+         return self.__kafka_producer
+
+     def connect(self):
+         self._get_kafka_producer()
+
+     def disconnect(self) -> None:
+         if self.__kafka_producer is not None:
+             self.__kafka_producer.close()
+             self.__kafka_producer = None

      def produce(
          self,
@@ -50,12 +74,17 @@ class KafkaPythonProducer(KafkaProducer, Generic[T]):
          headers: Optional[dict[str, str]] = None,
      ) -> None:
          serialized_headers = self.__header_serializer.serialize(headers) if headers is not None else None
+         kafka_producer = self._get_kafka_producer()

-         self.__kafka_producer.send(
-             topic=topic,
-             value=self.__byte_serializer.serialize(message),
-             headers=serialized_headers,
-             key=partition_key,
-         )
+         message_future = cast(
+             FutureRecordMetadata,
+             kafka_producer.send(
+                 topic=topic,
+                 value=self.__byte_serializer.serialize(message),
+                 headers=serialized_headers,
+                 key=partition_key,
+             ),
+         )
+
          # We are forcing a flush because the task related with the send is asynchronous, and we want that the event to be sent after call produce
-         self.__kafka_producer.flush()
+         message_future.get(self.__SEND_TIMEOUT_SECONDS)
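
produce() no longer flushes the whole producer; it blocks on the per-record future with a five-second timeout, so a delivery failure now raises from produce() itself. A hypothetical calling pattern (the wrapper and names are illustrative):

def produce_or_raise(producer, topic, message):
    try:
        producer.produce(topic=topic, message=message)  # returns once the record is acknowledged
    except Exception as exc:  # kafka-python surfaces KafkaError subclasses via FutureRecordMetadata.get()
        raise RuntimeError(f"delivery to {topic} failed") from exc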
{buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: buz
- Version: 2.13.1rc9
+ Version: 2.14.0
  Summary: Buz is a set of light, simple and extensible implementations of event, command and query buses.
  License: MIT
  Author: Luis Pintado Lozano
{buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/RECORD CHANGED
@@ -26,6 +26,7 @@ buz/command/synchronous/synced_async/__init__.py,sha256=GXPmTcVC0ouP5bvpLXqcrzmy
  buz/command/synchronous/synced_async/synced_async_command_bus.py,sha256=8tvD1zR8j9AE6AmgkYfP5wDSPNHxr1Nx2X7CC74c3Q0,459
  buz/event/__init__.py,sha256=ey3c3fY85XpcWFlmIlbpanJfxv1BZI42Ia1njAtjcEs,588
  buz/event/async_consumer.py,sha256=k6v_WqQ8A8vWJzO_sMcjU75mroA_Il9D-rE-E-pu_lM,200
+ buz/event/async_event_bus.py,sha256=l627YtPplBprVO0Ccepgt4hkwtMJyI8uaqx6TzCQ9Lw,430
  buz/event/async_subscriber.py,sha256=GNenWsj1CZj1F1nJV0KQ_7L1elYToyT9K8sY7O4Gayk,248
  buz/event/async_worker.py,sha256=OR7g6cYWOWTh9DbfAfWwS6U6bZ1CDzScJHfH52PYj_k,881
  buz/event/base_async_subscriber.py,sha256=QCVSD36lR_FuX_B4R32nOZrGmq_Y24otb_GCrH_fL8k,218
@@ -36,7 +37,7 @@ buz/event/dead_letter_queue/dlq_criteria.py,sha256=hxcV-BMayKTEc5suEfQZhEYkc14H7
  buz/event/dead_letter_queue/dlq_record.py,sha256=wEa9CdWkHmxHQVwoHFjWeEU6sjNOi7X8dLr1E-gVmDc,1341
  buz/event/dead_letter_queue/dlq_repository.py,sha256=8XsXSfO2OzEq4qfQ_v0E0OExintDYI1g55Qu3PtoxKI,630
  buz/event/event.py,sha256=x1MCBydn3qk3AkvamsAwCG-nfxR9OyP4l1UNXtnhUwU,189
- buz/event/event_bus.py,sha256=DNr1cRLxYcn9qCu4_BKecpQHAx9D_PTxLnWXN2qVhFE,293
+ buz/event/event_bus.py,sha256=QnvZD_bKsv628TZ58s5uLntLJCA9Ngir_YHOwb4_UaU,289
  buz/event/exceptions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/exceptions/event_not_published_exception.py,sha256=gGEiRFGdKIS-VTKg8SN54vSS10WeSkgBhlO2Gpcll_0,215
  buz/event/exceptions/event_restore_exception.py,sha256=dYHp5i1E-VCUYYhOAVYR-eJfZ3CqPpR9gm1bZ1EFXfE,245
@@ -45,10 +46,11 @@ buz/event/exceptions/term_signal_interruption_exception.py,sha256=RkRRF0v_K9Hg48
  buz/event/exceptions/worker_execution_exception.py,sha256=6mgztvXOCG_9VZ_Jptkk72kZtNWQ2CPuQ3TjXEWFE14,123
  buz/event/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=GfJ51noIkMfJ7DOQSUikphHEr6rvfvI2Np1k2mtAai4,13823
- buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=dqQDv7taAmINE9G2geMDExbcvSlntP09_rQ0JRbc4Rw,5507
- buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=yrEU51OBjvLjCfYJFJPxux1bcIhoTVMw1Jf0HJMWbb0,5449
- buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=sB8Cj_yTxqe8M9PT-HR4TcR9Mr39AnkIj3mVObKAe4U,4595
+ buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py,sha256=SyLblUVlwWOaNfZzK7vL6Ee4m-85vZVCH0rjOgqVAww,4913
+ buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=E9Sy6IDZrywowcO9qIOJF5zjFvnE4CncTiZD3VC-554,13793
+ buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=J_9NhImjlotueksFQ5mJ80Uto3BSgCJvOxJ29pzbW-U,5601
+ buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=ECuWXI2Es5W6mEplZ783JCGx6VvvG3VLp8TzobwuH_8,5542
+ buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=ymRSvcYVgbVCPgHN6rMBVBHQ5heCSwCDl6EffyqGVX8,4601
  buz/event/infrastructure/buz_kafka/consume_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/consume_strategy/consume_strategy.py,sha256=RqlXe5W2S6rH3FTr--tcxzFJTAVLb-Dhl7m6qjgNz2M,331
  buz/event/infrastructure/buz_kafka/consume_strategy/kafka_on_fail_strategy.py,sha256=elNeyTubDuhHsLlTtDA1Nqz2hZe12PUcO9kz8upPby8,136
@@ -57,7 +59,7 @@ buz/event/infrastructure/buz_kafka/exceptions/__init__.py,sha256=47DEQpj8HBSa-_T
  buz/event/infrastructure/buz_kafka/exceptions/kafka_event_bus_config_not_valid_exception.py,sha256=VUKZXA2ygjg21P4DADFl_Tace6RwSXia1MRYvJypxbM,135
  buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py,sha256=ULM5I35hmgUAYJobTYyCgMXYYhDvLNhRd5S_7tMUMog,5073
  buz/event/infrastructure/buz_kafka/kafka_event_subscriber_executor.py,sha256=EyG2vsFYErWAyqxdXqSwxx5Zi_y0d6i0h05XavJMnxg,254
- buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=IHossnUlynEFkUvwcTyxHVwGUyDSlfH1t4UX1S09Dic,4846
+ buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=S24FMC4oEilO1kx5q1f-_p2Jl54ATQhINPwHeU_ZyME,4835
  buz/event/infrastructure/buz_kafka/publish_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/publish_strategy/publish_strategy.py,sha256=zIkgMnUU7ueG6QHEubMzdTHOtqdldIbS7k5FDLNmqVk,178
  buz/event/infrastructure/buz_kafka/publish_strategy/topic_per_event_kafka_publish_strategy.py,sha256=aLKj6GyLJNcMbuDA1QBa-RzWKBHEorBuPFkkqo_H60k,405
@@ -66,8 +68,8 @@ buz/event/infrastructure/kombu/allowed_kombu_serializer.py,sha256=LQ6futYsInawTC
  buz/event/infrastructure/kombu/consume_strategy/__init__.py,sha256=6dnAv-bOxoDL31gQD1dErRocdJvkLHTgdqeb4S33eWc,302
  buz/event/infrastructure/kombu/consume_strategy/consume_strategy.py,sha256=Zsv7QVpZXRLYvlV2nRbSdSwT_FgEELLyzUxdT6DyX8Q,179
  buz/event/infrastructure/kombu/consume_strategy/queue_per_subscriber_consume_strategy.py,sha256=Vsa1uC7dwS3jJ-dp_lvrE-hVWnN91-ma8oVqdLuXHMo,786
- buz/event/infrastructure/kombu/kombu_consumer.py,sha256=8mvpQ6ePOrhpJFCU46xqIwX9I_fy5HXN6lbWLN2WnPQ,6440
- buz/event/infrastructure/kombu/kombu_event_bus.py,sha256=OLZXkXmHBSRCu3T3N70ZicZl4TXmFgc5cj4PT2CHDYU,4022
+ buz/event/infrastructure/kombu/kombu_consumer.py,sha256=7EhNo_YZdXNpoVIuSm7Thk-Kv-wh3LtFP256g-IzljA,6483
+ buz/event/infrastructure/kombu/kombu_event_bus.py,sha256=VSLBtamp-YOta4KyqmfXvDurvPiHZSL9QPCozMK3Qyw,4017
  buz/event/infrastructure/kombu/publish_strategy/__init__.py,sha256=96ssn7ydJwLXYoVyrhfGcwCpXr4_5Sl0DbN6UCoeNc8,315
  buz/event/infrastructure/kombu/publish_strategy/fanout_exchange_per_event_publish_strategy.py,sha256=Pw85A1oI-cPtzHCQTr0XHQjb7-u9LVmKR3eBIonHsUU,397
  buz/event/infrastructure/kombu/publish_strategy/publish_strategy.py,sha256=mcpXSRPbIYedt1vsoiBBAzqzR3E6o77ZzF6IOFsVRUw,309
@@ -81,6 +83,8 @@ buz/event/meta_subscriber.py,sha256=ieCOtOD2JTXizyFxisBZ4-d_4MvCmIW4BksstngV8oI,
  buz/event/middleware/__init__.py,sha256=1_33sdvRejCF4mHuKVkbldeJde6Y2jYtSrB5vMs0Rfo,773
  buz/event/middleware/async_consume_middleware.py,sha256=314z7ZyhvQIvi90kEO0t-FlnHSyRjArk3RqKOdDE6bM,459
  buz/event/middleware/async_consume_middleware_chain_resolver.py,sha256=Hw75JAs5pyZVDi7-nD4I1nbUXjwYpHQW9PctafGS4ks,1193
+ buz/event/middleware/async_publish_middleware.py,sha256=JIxbRx7HVf_Q1iEziN_5RKGVJ-Oen_f1c3OL9QLmoxE,358
+ buz/event/middleware/async_publish_middleware_chain_resolver.py,sha256=Hqj8CRZXJD6h9KuJaKl88iToOFN7BijoatoDo66En8w,1016
  buz/event/middleware/base_consume_middleware.py,sha256=9G1jsr_Wm-avsTfWAndi5_tf4WW6dSLwQ3rN0-jc7AE,651
  buz/event/middleware/base_publish_middleware.py,sha256=vtM8oA4LZjbZn4omPy-cIAUxQQwL-_Xb4ScU85DwjMU,531
  buz/event/middleware/consume_middleware.py,sha256=BCcs1LgIc5YZx5nf7nE_bMuiiXxscqBE4LqD-nd4JS0,363
@@ -105,7 +109,7 @@ buz/event/strategies/retry/max_retries_negative_exception.py,sha256=UdM5T4cxRv_a
  buz/event/strategies/retry/reject_callback.py,sha256=TnmUt0AkB2DEQMieec9TtB7IAkRHdFAFepAclbiCRns,316
  buz/event/subscriber.py,sha256=WxppO8PFP5zO-gwLZNg1DKSY_uFdsF8JgWIJa6nTTds,237
  buz/event/sync/__init__.py,sha256=uJmU80PGVNNL2HoRFXp4loQTn1VK8gLo-hMEvgVPpBQ,91
- buz/event/sync/sync_event_bus.py,sha256=4yI1gLbaS7e0DI8a6I9I_a7PJcUUe0cJ_3OP4EJmQ4Q,1407
+ buz/event/sync/sync_event_bus.py,sha256=nXNJqoaWQWV3asYCAIReH2nN5AOjvJH2OFODHOrWM6M,1403
  buz/event/transactional_outbox/__init__.py,sha256=k8ZBWCi12pWKXchHfgW_Raw4sVR8XkBLuPNW9jB9X2k,1381
  buz/event/transactional_outbox/event_to_outbox_record_translator.py,sha256=oSn1iQuW-cZLvlXYIJPnSwm3JYUwGMm9f1pqnlF0cJI,534
  buz/event/transactional_outbox/fqn_to_event_mapper.py,sha256=ujcq6CfYqRJtM8f3SEEltbWN0Ru7NM5JfrbNdh4nvhQ,773
@@ -125,16 +129,18 @@ buz/event/transactional_outbox/outbox_record_validation/outbox_record_size_not_a
  buz/event/transactional_outbox/outbox_record_validation/outbox_record_validation_exception.py,sha256=rwG2f8KTfLc2xEpGXlEkWVoqf3ECUny4n5wcualiuIE,134
  buz/event/transactional_outbox/outbox_record_validation/outbox_record_validator.py,sha256=XGHTT1dH2CJOqhYYnyPJHmZsAuVXuDOeqgJzK7mRidc,328
  buz/event/transactional_outbox/outbox_record_validation/size_outbox_record_validator.py,sha256=f8sQ5IHfO4J8m5l7rS3JYUoBvx0B1EAFMRsJ0HPQKG8,2436
- buz/event/transactional_outbox/outbox_repository.py,sha256=nJ3p8jI0GmAJPLm5es6FcH4EooLM0iOH1fIdwG4_Lx0,509
- buz/event/transactional_outbox/transactional_outbox_event_bus.py,sha256=5Mv4wqbPZIJfewUdYnf3n5Fm0tIyrzLZAGYqblLNR3M,1436
+ buz/event/transactional_outbox/outbox_repository.py,sha256=Sn7aWaq1G6uiKXcV09l9L1eVQ_bPUTqY-OSD12_H2jU,628
+ buz/event/transactional_outbox/transactional_outbox_event_bus.py,sha256=S2VIrKCyZG8vztgBagKOJUhp2oJhbLx6oGVHPBplRZ4,1676
  buz/event/transactional_outbox/transactional_outbox_worker.py,sha256=x6kf-Oc4oYKu9S4MTcCqd3VqPNURScTReYJ3Ahx4rKA,2221
  buz/event/worker.py,sha256=BL9TXB_kyr0Avql9fIcFm3CDNnXPvZB6O6BxVwjtCdA,942
  buz/handler.py,sha256=cZqV1NDPGVZQgJ3YSBDhOQ1sdJGdUopxi57yQ6fbPvc,272
- buz/kafka/__init__.py,sha256=3rLTvjxQsZlTRfy7_XG6ojkVfxStlaS7y87oGvx5RfI,1826
+ buz/kafka/__init__.py,sha256=R3fcyET-SNEAvk_XlBQbHIbQVb63Qiz6lVrif3nDhNU,3435
+ buz/kafka/domain/exceptions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py,sha256=9zDWoh0SbHLRuCvpfIGcvrmcscKsXpbAPIxr5-z-GYg,296
  buz/kafka/domain/exceptions/not_valid_kafka_message_exception.py,sha256=Dn6I_-eGQnOuu5WW24oKGOdKOu4EdM8ByH3DLAbz5SY,57
  buz/kafka/domain/exceptions/topic_already_created_exception.py,sha256=UrisdveZGa2BB0ko4mS7-5fwy8eGsIu409_grtq1r9k,333
  buz/kafka/domain/exceptions/topic_not_found_exception.py,sha256=kLuqGqfsb6YTCe5UCKpMwBm_QAnU9Udfb8bWajPoA8k,201
+ buz/kafka/domain/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/domain/models/auto_create_topic_configuration.py,sha256=naeArywtxwTXyLzguFitBqU8nRkDo4Ttd1DjPVYJY-Q,231
  buz/kafka/domain/models/consumer_initial_offset_position.py,sha256=mhnN7LaRhqnzODV6GFewQ5TbIYwCfVbtWaSTYvbGMU0,111
  buz/kafka/domain/models/create_kafka_topic.py,sha256=Ut6_Xh5finrPlqz2Unw3EP31lR0y8c55zRp8kXxUj7U,224
@@ -144,14 +150,18 @@ buz/kafka/domain/models/kafka_connection_plain_text_credentials.py,sha256=DzhJCl
  buz/kafka/domain/models/kafka_connection_sasl_credentials.py,sha256=SG45bU8EFlV0cUObkW_a0wvfRuZU6HelqQVPVu-EV0o,591
  buz/kafka/domain/models/kafka_consumer_record.py,sha256=2oJvTBAr8jQq4FglsSgtkno29XLmxgC49O6uriKCdqw,230
  buz/kafka/domain/models/kafka_poll_record.py,sha256=Yt55L6rGM_taJ615_YyO1gBJIgpVruD0VG0rgSLXSq4,305
+ buz/kafka/domain/models/kafka_supported_compression_type.py,sha256=ZEY1kPzYQlkPhEg0y2EMdZXUQ_oSHhjbGj9MIQvU09E,141
  buz/kafka/domain/models/kafka_supported_sasl_mechanisms.py,sha256=ASyDaFgseQRcUJA2kubQSdCkG6KhGmpMAzTFj5NwK5w,212
  buz/kafka/domain/models/kafka_supported_security_protocols.py,sha256=ffY2-9sOj4XIkJTSQVkqeOb4KnuqEYXISDarfDN8r9Q,161
- buz/kafka/domain/services/kafka_admin_client.py,sha256=tF0WJW-eCzgskpuhxzXOU5fcTIFqK0fWzMu7yTEzAdo,1118
+ buz/kafka/domain/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ buz/kafka/domain/services/async_kafka_producer.py,sha256=gSq3WwEVux_gp3EKDAMN1WsM027uklB58E-WnKpyhPs,533
+ buz/kafka/domain/services/kafka_admin_client.py,sha256=XE6H-4JWsjygKjdtLtGMX5ELhWkJcpR7ai9CO2kB98Y,1222
  buz/kafka/domain/services/kafka_admin_test_client.py,sha256=91l_vFIo1yhJLQQCC_OmeXZ5F429zP7Hx5g4FNllpfE,1625
- buz/kafka/domain/services/kafka_producer.py,sha256=CTiwGYwuzdJY5aeb2WFbJlyCpZ0YyhzcgKQYyogKzUM,401
+ buz/kafka/domain/services/kafka_producer.py,sha256=8bLTV328orrPHcARzkc6no4vyJzrArVtCsjmSRXDjos,506
  buz/kafka/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/aiokafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=9bzCq18xTMCNN7IKoEygdA8xQ235qIza1rU6OnMnq9o,8725
+ buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=i6h4EeEyH3YdxWrQ-zQF3koni9AurdhKf0Va0K1dqBw,8695
+ buz/kafka/infrastructure/aiokafka/aiokafka_producer.py,sha256=LteHKIHpT6MKplwmwsPYMsd2GWNJCzus65XDHCIdoN8,3823
  buz/kafka/infrastructure/aiokafka/rebalance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/aiokafka/rebalance/kafka_callback_rebalancer.py,sha256=3l7NkTrCt3rBktVIS73cTmCOvv6eFguoCbGMYIUfCFc,1774
  buz/kafka/infrastructure/aiokafka/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -165,13 +175,14 @@ buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_e
  buz/kafka/infrastructure/deserializers/implementations/cdc/not_valid_cdc_message_exception.py,sha256=hgLLwTcC-C2DuJSOWUhmQsrd1bO9I1469869IqfAPOk,414
  buz/kafka/infrastructure/deserializers/implementations/json_byte_deserializer.py,sha256=L4b164-KweiQUwyRONhTMIGnAz48UPk0btLqjGOTNdk,373
  buz/kafka/infrastructure/deserializers/implementations/json_bytes_to_message_deserializer.py,sha256=YwugXkmOudMNtkVfCC4BFe3pFVpbM8rAL9bT88bZMRk,756
+ buz/kafka/infrastructure/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ buz/kafka/infrastructure/interfaces/async_connection_manager.py,sha256=JbaLu5UVV2ZPRLI_FGj1ijX1QekFysPts45G-F-oBkA,217
+ buz/kafka/infrastructure/interfaces/connection_manager.py,sha256=EWnvShJHOg8QYe6a3ma0urjKjmVMDBi7q8T2cv_i_MQ,200
  buz/kafka/infrastructure/kafka_python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/kafka_python/exception/consumer_interrupted_exception.py,sha256=fqhgV7HILdVdv-p1CsOIaaESKY2ZXBtRGYbrVSdPLg0,164
- buz/kafka/infrastructure/kafka_python/factories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py,sha256=rn-ZUuzHZfAMGon_lKl7gQ-gwijW1nYOHlGTCIjfDE4,858
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=KjJciScn-WlnfF79-ZeWZWUzQKZkDiPZ_93oOwmDyiQ,8218
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=g_xGakCMbv9ouzUyemmcrFQqUfKlnMWNnEPE4L9cxOI,2949
- buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=CMkpTkrC10wy574Nc9t-TKvWO-6XCgj8F5EkWlwP0-U,2275
+ buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=7HbwkaoedXbxZ4LcGWytU2q_GTdLm_c1ziyGTBoKgF0,9038
+ buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=wLhnrHzyFJ9ETWNUfd-dmwm_CwZyiImaAP97cEdRgzE,2982
+ buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=DkqqLSSXHBf4SXXf-IZwwLhxWrGE95Jg4MO_3RDsikU,3594
  buz/kafka/infrastructure/kafka_python/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/kafka_python/translators/consumer_initial_offset_position_translator.py,sha256=hJ48_eyMcnbFL_Y5TOiMbGXrQSryuKk9CvP59MdqNOY,620
  buz/kafka/infrastructure/serializers/byte_serializer.py,sha256=T83sLdX9V5Oh1mzjRwHi_1DsTFI7KefFj7kmnz7JVy4,207
@@ -232,7 +243,7 @@ buz/serializer/message_to_json_bytes_serializer.py,sha256=RGZJ64t4t4Pz2FCASZZCv-
  buz/wrapper/__init__.py,sha256=GnRdJFcncn-qp0hzDG9dBHLmTJSbHFVjE_yr-MdW_n4,77
  buz/wrapper/async_to_sync.py,sha256=OfK-vrVUhuN-LLLvekLdMbQYtH0ue5lfbvuasj6ovMI,698
  buz/wrapper/event_loop.py,sha256=pfBJ1g-8A2a3YgW8Gf9Fg0kkewoh3-wgTy2KIFDyfHk,266
- buz-2.13.1rc9.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
- buz-2.13.1rc9.dist-info/METADATA,sha256=JDL25KyxeqMBWzF9LdEzga2tqIDmQBRGqn6ToEeZfyA,1620
- buz-2.13.1rc9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- buz-2.13.1rc9.dist-info/RECORD,,
+ buz-2.14.0.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
+ buz-2.14.0.dist-info/METADATA,sha256=f8tigTI_Ee1HBkPHTPfsYHjkOyuBZwlCX6dTeDpjzCQ,1617
+ buz-2.14.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ buz-2.14.0.dist-info/RECORD,,
@@ -1,20 +0,0 @@
1
- from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
2
- from buz.kafka.infrastructure.kafka_python.kafka_python_producer import KafkaPythonProducer
3
- from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
4
- from buz.kafka.infrastructure.serializers.implementations.json_byte_serializer import JSONByteSerializer
5
-
6
-
7
- class KafkaPythonProducerFactory:
8
- def __init__(
9
- self,
10
- kafka_connection_config: KafkaConnectionConfig,
11
- byte_serializer: ByteSerializer = JSONByteSerializer(),
12
- ):
13
- self._kafka_connection_config = kafka_connection_config
14
- self._byte_serializer = byte_serializer
15
-
16
- def build(self) -> KafkaPythonProducer:
17
- return KafkaPythonProducer(
18
- config=self._kafka_connection_config,
19
- byte_serializer=self._byte_serializer,
20
- )
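
With the factory gone, callers construct the producer directly; a migration sketch (the serializer mirrors the factory's old JSONByteSerializer default):

from buz.kafka import JSONByteSerializer, KafkaConnectionConfig, KafkaPythonProducer


def build_producer(connection_config: KafkaConnectionConfig) -> KafkaPythonProducer:
    # previously: KafkaPythonProducerFactory(connection_config).build()
    return KafkaPythonProducer(
        connection_config=connection_config,  # the old `config` keyword is now `connection_config`
        byte_serializer=JSONByteSerializer(),
    )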
/buz/kafka/{infrastructure/kafka_python/factories → domain/exceptions}/__init__.py RENAMED
{buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/LICENSE RENAMED
{buz-2.13.1rc9.dist-info → buz-2.14.0.dist-info}/WHEEL RENAMED
Files without changes