buz-2.13.1rc8-py3-none-any.whl → buz-2.14.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. buz/event/async_event_bus.py +15 -0
  2. buz/event/event_bus.py +2 -2
  3. buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py +107 -0
  4. buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py +5 -10
  5. buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py +3 -4
  6. buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py +2 -4
  7. buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py +4 -6
  8. buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py +2 -7
  9. buz/event/infrastructure/kombu/kombu_consumer.py +1 -0
  10. buz/event/infrastructure/kombu/kombu_event_bus.py +6 -7
  11. buz/event/middleware/async_publish_middleware.py +13 -0
  12. buz/event/middleware/async_publish_middleware_chain_resolver.py +22 -0
  13. buz/event/sync/sync_event_bus.py +2 -2
  14. buz/event/transactional_outbox/outbox_repository.py +5 -1
  15. buz/event/transactional_outbox/transactional_outbox_event_bus.py +12 -11
  16. buz/kafka/__init__.py +28 -2
  17. buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py +8 -0
  18. buz/kafka/domain/exceptions/topic_not_found_exception.py +6 -0
  19. buz/kafka/domain/models/__init__.py +0 -0
  20. buz/kafka/domain/models/kafka_supported_compression_type.py +8 -0
  21. buz/kafka/domain/services/__init__.py +0 -0
  22. buz/kafka/domain/services/async_kafka_producer.py +21 -0
  23. buz/kafka/domain/services/kafka_admin_client.py +16 -1
  24. buz/kafka/domain/services/kafka_producer.py +3 -1
  25. buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py +12 -9
  26. buz/kafka/infrastructure/aiokafka/aiokafka_producer.py +98 -0
  27. buz/kafka/infrastructure/interfaces/__init__.py +0 -0
  28. buz/kafka/infrastructure/interfaces/async_connection_manager.py +11 -0
  29. buz/kafka/infrastructure/interfaces/connection_manager.py +11 -0
  30. buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py +132 -16
  31. buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py +11 -2
  32. buz/kafka/infrastructure/kafka_python/kafka_python_producer.py +51 -22
  33. {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/METADATA +1 -1
  34. {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/RECORD +37 -24
  35. buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py +0 -20
  36. /buz/kafka/{infrastructure/kafka_python/factories → domain/exceptions}/__init__.py +0 -0
  37. {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/LICENSE +0 -0
  38. {buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/WHEEL +0 -0
buz/kafka/infrastructure/aiokafka/aiokafka_producer.py
@@ -0,0 +1,98 @@
+ from __future__ import annotations
+
+ from ssl import SSLContext
+ from typing import Generic, Optional, TypeVar
+
+ from aiokafka import AIOKafkaProducer as NativeAIOKafkaProducer
+ from aiokafka.helpers import create_ssl_context
+
+ from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+ from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
+ from buz.kafka.domain.models.kafka_supported_security_protocols import KafkaSupportedSecurityProtocols
+ from buz.kafka.domain.services.async_kafka_producer import AsyncKafkaProducer
+ from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
+ from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer
+
+ T = TypeVar("T")
+
+
+ class AIOKafkaProducer(AsyncKafkaProducer, Generic[T]):
+     __DEFAULT_REQUEST_TIMEOUT_MS = 5000
+     __kafka_producer: Optional[NativeAIOKafkaProducer] = None
+
+     def __init__(
+         self,
+         *,
+         connection_config: KafkaConnectionConfig,
+         byte_serializer: ByteSerializer[T],
+         compression_type: Optional[KafkaSupportedCompressionType] = None,
+         retry_backoff_ms: int = 100,
+     ) -> None:
+         self.__connection_config = connection_config
+         self.__byte_serializer = byte_serializer
+         self.__header_serializer = KafkaHeaderSerializer()
+         self.__compression_type = compression_type
+         self.__retry_backoff_ms = retry_backoff_ms
+
+     async def _get_aiokafka_producer(self) -> NativeAIOKafkaProducer:
+         if self.__kafka_producer:
+             return self.__kafka_producer
+
+         ssl_context: Optional[SSLContext] = None
+
+         sasl_mechanism = (
+             self.__connection_config.credentials.sasl_mechanism.value
+             if self.__connection_config.credentials.sasl_mechanism
+             else "PLAIN"
+         )
+
+         if self.__connection_config.credentials.security_protocol == KafkaSupportedSecurityProtocols.SASL_SSL:
+             ssl_context = create_ssl_context()
+
+         self.__kafka_producer = NativeAIOKafkaProducer(
+             client_id=self.__connection_config.client_id,
+             bootstrap_servers=",".join(self.__connection_config.bootstrap_servers),
+             sasl_mechanism=sasl_mechanism,
+             ssl_context=ssl_context,
+             sasl_plain_username=self.__connection_config.credentials.user,
+             sasl_plain_password=self.__connection_config.credentials.password,
+             retry_backoff_ms=self.__retry_backoff_ms,
+             request_timeout_ms=self.__DEFAULT_REQUEST_TIMEOUT_MS,
+             compression_type=self.__compression_type.value if self.__compression_type else None,
+         )
+
+         await self.__kafka_producer.start()
+
+         return self.__kafka_producer
+
+     async def connect(self) -> None:
+         await self._get_aiokafka_producer()
+
+     async def disconnect(self) -> None:
+         if self.__kafka_producer is None:
+             return None
+         await self.__kafka_producer.stop()
+         self.__kafka_producer = None
+
+     async def produce(
+         self,
+         *,
+         topic: str,
+         message: T,
+         partition_key: Optional[str] = None,
+         headers: Optional[dict[str, str]] = None,
+     ) -> None:
+         serialized_headers = self.__header_serializer.serialize(headers) if headers is not None else None
+         kafka_producer = await self._get_aiokafka_producer()
+
+         await kafka_producer.send_and_wait(
+             topic=topic,
+             value=self.__byte_serializer.serialize(message),
+             headers=serialized_headers,
+             key=partition_key.encode("utf-8") if partition_key else None,
+         )
+
+     async def close(self) -> None:
+         if self.__kafka_producer is not None:
+             await self.__kafka_producer.stop()
+             self.__kafka_producer = None
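Note: the new AIOKafkaProducer starts its underlying aiokafka producer lazily, on the first connect() or produce() call, and produce() uses send_and_wait(), so it returns only once the broker has acknowledged the record. A minimal usage sketch follows; the Utf8Serializer helper and the way connection_config is obtained are assumptions for illustration, not part of the package:

    import asyncio

    from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
    from buz.kafka.infrastructure.aiokafka.aiokafka_producer import AIOKafkaProducer
    from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer


    class Utf8Serializer(ByteSerializer[str]):
        # Hypothetical serializer for this sketch; any ByteSerializer[T] works.
        def serialize(self, message: str) -> bytes:
            return message.encode("utf-8")


    async def main(connection_config: KafkaConnectionConfig) -> None:
        producer: AIOKafkaProducer[str] = AIOKafkaProducer(
            connection_config=connection_config,
            byte_serializer=Utf8Serializer(),
        )
        await producer.connect()  # builds and starts the underlying aiokafka producer
        try:
            await producer.produce(topic="user.events", message='{"id": 1}', partition_key="user-1")
        finally:
            await producer.disconnect()  # stops the producer and clears the cached instance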
buz/kafka/infrastructure/interfaces/__init__.py
File without changes
buz/kafka/infrastructure/interfaces/async_connection_manager.py
@@ -0,0 +1,11 @@
+ from abc import ABC, abstractmethod
+
+
+ class AsyncConnectionManager(ABC):
+     @abstractmethod
+     async def connect(self) -> None:
+         pass
+
+     @abstractmethod
+     async def disconnect(self) -> None:
+         pass
buz/kafka/infrastructure/interfaces/connection_manager.py
@@ -0,0 +1,11 @@
+ from abc import ABC, abstractmethod
+
+
+ class ConnectionManager(ABC):
+     @abstractmethod
+     def connect(self) -> None:
+         pass
+
+     @abstractmethod
+     def disconnect(self) -> None:
+         pass
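Both interfaces describe the connect/disconnect lifecycle that the reworked producers and admin client in this release implement. As a sketch of how a caller might lean on the sync variant (the managed() helper is hypothetical, not part of buz):

    from contextlib import contextmanager
    from typing import Iterator

    from buz.kafka.infrastructure.interfaces.connection_manager import ConnectionManager


    @contextmanager
    def managed(manager: ConnectionManager) -> Iterator[ConnectionManager]:
        # Hypothetical helper: pairs every connect() with a guaranteed disconnect().
        manager.connect()
        try:
            yield manager
        finally:
            manager.disconnect()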
buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py
@@ -1,34 +1,48 @@
  from __future__ import annotations

+ from datetime import datetime
+ from logging import Logger
  import re
- from typing import Any, Callable, Sequence
+ from typing import Any, Callable, Optional, Sequence, cast

  from cachetools import TTLCache
- from kafka import KafkaClient
+ from kafka import KafkaClient, KafkaConsumer
  from kafka.admin import KafkaAdminClient as KafkaPythonLibraryAdminClient, NewTopic
  from kafka.errors import TopicAlreadyExistsError
+ from kafka.structs import TopicPartition, OffsetAndTimestamp

+ from buz.kafka.domain.exceptions.not_all_partition_assigned_exception import NotAllPartitionAssignedException
  from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+ from buz.kafka.domain.exceptions.topic_not_found_exception import TopicNotFoundException
+ from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
  from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
  from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
  from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient

+ from buz.kafka.infrastructure.kafka_python.translators.consumer_initial_offset_position_translator import (
+     KafkaPythonConsumerInitialOffsetPositionTranslator,
+ )
+
  INTERNAL_KAFKA_TOPICS = {"__consumer_offsets", "_schema"}
+ TOPIC_CACHE_KEY = "topics"


  class KafkaPythonAdminClient(KafkaAdminClient):
      __PYTHON_KAFKA_DUPLICATED_TOPIC_ERROR_CODE = 36

+     _kafka_admin: Optional[KafkaPythonLibraryAdminClient] = None
+     _kafka_client: Optional[KafkaClient] = None
+
      def __init__(
          self,
          *,
-         config: KafkaConnectionConfig,
+         logger: Logger,
+         connection_config: KafkaConnectionConfig,
          cache_ttl_seconds: int = 0,
      ):
-         self._config = config
-         self._config_in_library_format = self.__get_kafka_config_in_library_format(config)
-         self._kafka_admin = KafkaPythonLibraryAdminClient(**self._config_in_library_format)
-         self._kafka_client = KafkaClient(**self._config_in_library_format)
+         self._logger = logger
+         self.__connection_config = connection_config
+         self._config_in_library_format = self.__get_kafka_config_in_library_format(self.__connection_config)
          self.__ttl_cache: TTLCache[str, Any] = TTLCache(maxsize=1, ttl=cache_ttl_seconds)

      def __get_kafka_config_in_library_format(self, config: KafkaConnectionConfig) -> dict:
@@ -41,6 +55,28 @@ class KafkaPythonAdminClient(KafkaAdminClient):
              "sasl_plain_password": config.credentials.password,
          }

+     def connect(self):
+         self._get_kafka_admin()
+         self._get_kafka_client()
+
+     def disconnect(self):
+         if self._kafka_admin is not None:
+             self._kafka_admin.close()
+             self._kafka_admin = None
+         if self._kafka_client is not None:
+             self._kafka_client.close()
+             self._kafka_client = None
+
+     def _get_kafka_admin(self) -> KafkaPythonLibraryAdminClient:
+         if not self._kafka_admin:
+             self._kafka_admin = KafkaPythonLibraryAdminClient(**self._config_in_library_format)
+         return self._kafka_admin
+
+     def _get_kafka_client(self) -> KafkaClient:
+         if not self._kafka_client:
+             self._kafka_client = KafkaClient(**self._config_in_library_format)
+         return self._kafka_client
+
      def create_topics(
          self,
          *,
@@ -57,7 +93,7 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          ]

          try:
-             self._kafka_admin.create_topics(new_topics=new_topics)
+             self._get_kafka_admin().create_topics(new_topics=new_topics)
          except TopicAlreadyExistsError as error:
              topic_names = self.__get_list_of_kafka_topics_from_topic_already_exists_error(error)
              raise KafkaTopicsAlreadyCreatedException(topic_names=topic_names)
@@ -75,14 +111,13 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          self,
          topic: str,
      ) -> bool:
-         topics = self.get_topics()
-         return topic in topics
+         return topic in self.get_topics()

      def get_topics(
          self,
      ) -> set[str]:
          return self.__resolve_cached_property(
-             "topics", lambda: set(self._kafka_admin.list_topics()) - INTERNAL_KAFKA_TOPICS
+             TOPIC_CACHE_KEY, lambda: set(self._get_kafka_admin().list_topics()) - INTERNAL_KAFKA_TOPICS
          )

      def __resolve_cached_property(self, property_key: str, callback: Callable) -> Any:
@@ -98,24 +133,105 @@ class KafkaPythonAdminClient(KafkaAdminClient):
          *,
          topics: set[str],
      ) -> None:
-         self._kafka_admin.delete_topics(
+         self._get_kafka_admin().delete_topics(
              topics=topics,
          )
+         self.__remove_cache_property(TOPIC_CACHE_KEY)
+
+     def __remove_cache_property(self, property_key: str) -> None:
+         self.__ttl_cache.pop(property_key, None)

      def delete_subscription_groups(
          self,
          *,
          subscription_groups: set[str],
      ) -> None:
-         self._kafka_admin.delete_consumer_groups(
+         self._get_kafka_admin().delete_consumer_groups(
              group_ids=subscription_groups,
          )

      def get_subscription_groups(
          self,
      ) -> set[str]:
-         return set(self._kafka_admin.list_consumer_groups())
+         return set(self._get_kafka_admin().list_consumer_groups())

      def _wait_for_cluster_update(self) -> None:
-         future = self._kafka_client.cluster.request_update()
-         self._kafka_client.poll(future=future)
+         future = self._get_kafka_client().cluster.request_update()
+         self._get_kafka_client().poll(future=future)
+
+     def move_offsets_to_datetime(
+         self,
+         *,
+         consumer_group: str,
+         topic: str,
+         target_datetime: datetime,
+     ) -> None:
+         consumer = KafkaConsumer(
+             group_id=consumer_group,
+             enable_auto_commit=False,
+             auto_offset_reset=KafkaPythonConsumerInitialOffsetPositionTranslator.to_kafka_supported_format(
+                 ConsumerInitialOffsetPosition.BEGINNING
+             ),
+             **self._config_in_library_format,
+         )
+
+         partitions = consumer.partitions_for_topic(topic)
+
+         if partitions is None:
+             raise TopicNotFoundException(topic)
+
+         topic_partitions = [TopicPartition(topic, p) for p in partitions]
+         consumer.subscribe(topics=[topic])
+
+         self.__force_partition_assignment(consumer)
+
+         # We need all the partitions in order to update the offsets
+         if len(consumer.assignment()) != len(topic_partitions):
+             raise NotAllPartitionAssignedException(topic)
+
+         offsets_for_date = self.__get_first_offset_after_date(
+             consumer=consumer,
+             topic_partitions=topic_partitions,
+             target_datetime=target_datetime,
+         )
+
+         end_offsets = consumer.end_offsets(topic_partitions)
+
+         if end_offsets is None or len(end_offsets.keys()) != len(topic_partitions):
+             raise Exception(f'There was an error extracting the end offsets of the topic "{topic}"')
+
+         for topic_partition in topic_partitions:
+             offset_and_timestamp = offsets_for_date.get(topic_partition)
+             if offset_and_timestamp:
+                 self._logger.info(f'moving "{topic_partition}" to the offset "{offset_and_timestamp.offset}"')
+                 consumer.seek(topic_partition, offset_and_timestamp.offset)
+             else:
+                 self._logger.info(
+                     f'moving "{topic_partition}" to the end of the topic because there are no messages later than "{target_datetime}"'
+                 )
+                 consumer.seek(topic_partition, end_offsets[topic_partition])
+
+         consumer.commit()
+         consumer.close()
+
+     def __get_first_offset_after_date(
+         self,
+         *,
+         consumer: KafkaConsumer,
+         topic_partitions: Sequence[TopicPartition],
+         target_datetime: datetime,
+     ) -> dict[TopicPartition, Optional[OffsetAndTimestamp]]:
+         offset_for_times: dict[TopicPartition, Optional[int]] = {}
+         timestamp_ms = int(target_datetime.timestamp() * 1000)
+
+         for topic_partition in topic_partitions:
+             offset_for_times[topic_partition] = timestamp_ms
+
+         return cast(
+             dict[TopicPartition, Optional[OffsetAndTimestamp]],
+             consumer.offsets_for_times(offset_for_times),
+         )
+
+     # We do not commit a new offset here, but we need to poll once to trigger the partition assignment
+     def __force_partition_assignment(self, consumer: KafkaConsumer) -> None:
+         consumer.poll(max_records=1, timeout_ms=0)
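A sketch of how the new move_offsets_to_datetime() might be called (how connection_config is built is left out). The method joins the consumer group with its own temporary consumer, so the group should otherwise be idle; if that consumer is not assigned every partition of the topic, NotAllPartitionAssignedException is raised, and partitions with no messages after the target datetime are moved to their end offset instead:

    from datetime import datetime, timezone
    from logging import getLogger

    from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
    from buz.kafka.infrastructure.kafka_python.kafka_python_admin_client import KafkaPythonAdminClient


    def rewind_to_new_year(connection_config: KafkaConnectionConfig) -> None:
        admin = KafkaPythonAdminClient(logger=getLogger("kafka-admin"), connection_config=connection_config)
        admin.connect()
        try:
            # Move the group to the first offsets at or after midnight UTC, 2024-01-01.
            admin.move_offsets_to_datetime(
                consumer_group="my-service",
                topic="user.events",
                target_datetime=datetime(2024, 1, 1, tzinfo=timezone.utc),
            )
        finally:
            admin.disconnect()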
buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ from logging import Logger
  from typing import Optional

  from kafka import KafkaConsumer, KafkaProducer
@@ -22,8 +23,16 @@ CONSUMER_POLL_TIMEOUT_MS = 1000


  class KafkaPythonAdminTestClient(KafkaPythonAdminClient, KafkaAdminTestClient):
-     def __init__(self, *, config: KafkaConnectionConfig):
-         super().__init__(config=config)
+     def __init__(
+         self,
+         *,
+         logger: Logger,
+         connection_config: KafkaConnectionConfig,
+     ):
+         super().__init__(
+             connection_config=connection_config,
+             logger=logger,
+         )

      def send_message_to_topic(
          self,
buz/kafka/infrastructure/kafka_python/kafka_python_producer.py
@@ -1,10 +1,12 @@
  from __future__ import annotations

- from typing import Generic, Optional, TypeVar
+ from typing import Generic, Optional, TypeVar, cast

  from kafka import KafkaProducer as KafkaPythonLibraryProducer
+ from kafka.producer.future import FutureRecordMetadata

  from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
+ from buz.kafka.domain.models.kafka_supported_compression_type import KafkaSupportedCompressionType
  from buz.kafka.domain.services.kafka_producer import KafkaProducer
  from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer
  from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer
@@ -13,33 +15,55 @@ T = TypeVar("T")


  class KafkaPythonProducer(KafkaProducer, Generic[T]):
+     __kafka_producer: Optional[KafkaPythonLibraryProducer] = None
+     __SEND_TIMEOUT_SECONDS = 5
+
      def __init__(
          self,
          *,
-         config: KafkaConnectionConfig,
+         connection_config: KafkaConnectionConfig,
          byte_serializer: ByteSerializer[T],
          retries: int = 0,
          retry_backoff_ms: int = 100,
+         compression_type: Optional[KafkaSupportedCompressionType] = None,
      ) -> None:
-         self.__config = config
+         self.__connection_config = connection_config
          self.__byte_serializer = byte_serializer
          self.__header_serializer = KafkaHeaderSerializer()
+         self.__retries = retries
+         self.__retry_backoff_ms = retry_backoff_ms
+         self.__compression_type = compression_type

-         sasl_mechanism: Optional[str] = None
+     def _get_kafka_producer(self) -> KafkaPythonLibraryProducer:
+         if self.__kafka_producer is None:
+             sasl_mechanism = (
+                 self.__connection_config.credentials.sasl_mechanism.value
+                 if self.__connection_config.credentials.sasl_mechanism
+                 else None
+             )
+             compression_type = self.__compression_type.value if self.__compression_type else None

-         if self.__config.credentials.sasl_mechanism is not None:
-             sasl_mechanism = self.__config.credentials.sasl_mechanism.value
+             self.__kafka_producer = KafkaPythonLibraryProducer(
+                 client_id=self.__connection_config.client_id,
+                 bootstrap_servers=self.__connection_config.bootstrap_servers,
+                 security_protocol=self.__connection_config.credentials.security_protocol.value,
+                 sasl_mechanism=sasl_mechanism,
+                 sasl_plain_username=self.__connection_config.credentials.user,
+                 sasl_plain_password=self.__connection_config.credentials.password,
+                 retries=self.__retries,
+                 retry_backoff_ms=self.__retry_backoff_ms,
+                 compression_type=compression_type,
+             )

-         self.__kafka_producer = KafkaPythonLibraryProducer(
-             client_id=self.__config.client_id,
-             bootstrap_servers=self.__config.bootstrap_servers,
-             security_protocol=self.__config.credentials.security_protocol.value,
-             sasl_mechanism=sasl_mechanism,
-             sasl_plain_username=self.__config.credentials.user,
-             sasl_plain_password=self.__config.credentials.password,
-             retries=retries,
-             retry_backoff_ms=retry_backoff_ms,
-         )
+         return self.__kafka_producer
+
+     def connect(self):
+         self._get_kafka_producer()
+
+     def disconnect(self) -> None:
+         if self.__kafka_producer is not None:
+             self.__kafka_producer.close()
+             self.__kafka_producer = None

      def produce(
          self,
@@ -50,12 +74,17 @@ class KafkaPythonProducer(KafkaProducer, Generic[T]):
          headers: Optional[dict[str, str]] = None,
      ) -> None:
          serialized_headers = self.__header_serializer.serialize(headers) if headers is not None else None
+         kafka_producer = self._get_kafka_producer()

-         self.__kafka_producer.send(
-             topic=topic,
-             value=self.__byte_serializer.serialize(message),
-             headers=serialized_headers,
-             key=partition_key,
+         message_future = cast(
+             FutureRecordMetadata,
+             kafka_producer.send(
+                 topic=topic,
+                 value=self.__byte_serializer.serialize(message),
+                 headers=serialized_headers,
+                 key=partition_key,
+             ),
          )
+
          # We force the send to complete because the send task is asynchronous, and we want the event to be sent by the time produce returns
-         self.__kafka_producer.flush()
+         message_future.get(self.__SEND_TIMEOUT_SECONDS)
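The sync producer now waits on each record's FutureRecordMetadata (bounded by __SEND_TIMEOUT_SECONDS) instead of flushing the whole producer, so a delivery failure surfaces as an exception from produce() itself. A minimal sketch under the same assumptions as the AIOKafkaProducer example above (hypothetical serializer, connection_config built elsewhere):

    from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
    from buz.kafka.infrastructure.kafka_python.kafka_python_producer import KafkaPythonProducer
    from buz.kafka.infrastructure.serializers.byte_serializer import ByteSerializer


    def publish(connection_config: KafkaConnectionConfig, serializer: ByteSerializer[str]) -> None:
        producer: KafkaPythonProducer[str] = KafkaPythonProducer(
            connection_config=connection_config,
            byte_serializer=serializer,
        )
        producer.connect()  # optional: produce() creates the underlying producer lazily anyway
        try:
            producer.produce(topic="user.events", message='{"id": 1}')  # blocks until acked or timeout
        finally:
            producer.disconnect()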
{buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: buz
- Version: 2.13.1rc8
+ Version: 2.14.0
  Summary: Buz is a set of light, simple and extensible implementations of event, command and query buses.
  License: MIT
  Author: Luis Pintado Lozano
{buz-2.13.1rc8.dist-info → buz-2.14.0.dist-info}/RECORD
@@ -26,6 +26,7 @@ buz/command/synchronous/synced_async/__init__.py,sha256=GXPmTcVC0ouP5bvpLXqcrzmy
  buz/command/synchronous/synced_async/synced_async_command_bus.py,sha256=8tvD1zR8j9AE6AmgkYfP5wDSPNHxr1Nx2X7CC74c3Q0,459
  buz/event/__init__.py,sha256=ey3c3fY85XpcWFlmIlbpanJfxv1BZI42Ia1njAtjcEs,588
  buz/event/async_consumer.py,sha256=k6v_WqQ8A8vWJzO_sMcjU75mroA_Il9D-rE-E-pu_lM,200
+ buz/event/async_event_bus.py,sha256=l627YtPplBprVO0Ccepgt4hkwtMJyI8uaqx6TzCQ9Lw,430
  buz/event/async_subscriber.py,sha256=GNenWsj1CZj1F1nJV0KQ_7L1elYToyT9K8sY7O4Gayk,248
  buz/event/async_worker.py,sha256=OR7g6cYWOWTh9DbfAfWwS6U6bZ1CDzScJHfH52PYj_k,881
  buz/event/base_async_subscriber.py,sha256=QCVSD36lR_FuX_B4R32nOZrGmq_Y24otb_GCrH_fL8k,218
@@ -36,7 +37,7 @@ buz/event/dead_letter_queue/dlq_criteria.py,sha256=hxcV-BMayKTEc5suEfQZhEYkc14H7
  buz/event/dead_letter_queue/dlq_record.py,sha256=wEa9CdWkHmxHQVwoHFjWeEU6sjNOi7X8dLr1E-gVmDc,1341
  buz/event/dead_letter_queue/dlq_repository.py,sha256=8XsXSfO2OzEq4qfQ_v0E0OExintDYI1g55Qu3PtoxKI,630
  buz/event/event.py,sha256=x1MCBydn3qk3AkvamsAwCG-nfxR9OyP4l1UNXtnhUwU,189
- buz/event/event_bus.py,sha256=DNr1cRLxYcn9qCu4_BKecpQHAx9D_PTxLnWXN2qVhFE,293
+ buz/event/event_bus.py,sha256=QnvZD_bKsv628TZ58s5uLntLJCA9Ngir_YHOwb4_UaU,289
  buz/event/exceptions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/exceptions/event_not_published_exception.py,sha256=gGEiRFGdKIS-VTKg8SN54vSS10WeSkgBhlO2Gpcll_0,215
  buz/event/exceptions/event_restore_exception.py,sha256=dYHp5i1E-VCUYYhOAVYR-eJfZ3CqPpR9gm1bZ1EFXfE,245
@@ -45,10 +46,11 @@ buz/event/exceptions/term_signal_interruption_exception.py,sha256=RkRRF0v_K9Hg48
  buz/event/exceptions/worker_execution_exception.py,sha256=6mgztvXOCG_9VZ_Jptkk72kZtNWQ2CPuQ3TjXEWFE14,123
  buz/event/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=GfJ51noIkMfJ7DOQSUikphHEr6rvfvI2Np1k2mtAai4,13823
- buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=dqQDv7taAmINE9G2geMDExbcvSlntP09_rQ0JRbc4Rw,5507
- buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=yrEU51OBjvLjCfYJFJPxux1bcIhoTVMw1Jf0HJMWbb0,5449
- buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=sB8Cj_yTxqe8M9PT-HR4TcR9Mr39AnkIj3mVObKAe4U,4595
+ buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py,sha256=SyLblUVlwWOaNfZzK7vL6Ee4m-85vZVCH0rjOgqVAww,4913
+ buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=E9Sy6IDZrywowcO9qIOJF5zjFvnE4CncTiZD3VC-554,13793
+ buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=J_9NhImjlotueksFQ5mJ80Uto3BSgCJvOxJ29pzbW-U,5601
+ buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=ECuWXI2Es5W6mEplZ783JCGx6VvvG3VLp8TzobwuH_8,5542
+ buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=ymRSvcYVgbVCPgHN6rMBVBHQ5heCSwCDl6EffyqGVX8,4601
  buz/event/infrastructure/buz_kafka/consume_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/consume_strategy/consume_strategy.py,sha256=RqlXe5W2S6rH3FTr--tcxzFJTAVLb-Dhl7m6qjgNz2M,331
  buz/event/infrastructure/buz_kafka/consume_strategy/kafka_on_fail_strategy.py,sha256=elNeyTubDuhHsLlTtDA1Nqz2hZe12PUcO9kz8upPby8,136
@@ -57,7 +59,7 @@ buz/event/infrastructure/buz_kafka/exceptions/__init__.py,sha256=47DEQpj8HBSa-_T
  buz/event/infrastructure/buz_kafka/exceptions/kafka_event_bus_config_not_valid_exception.py,sha256=VUKZXA2ygjg21P4DADFl_Tace6RwSXia1MRYvJypxbM,135
  buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py,sha256=ULM5I35hmgUAYJobTYyCgMXYYhDvLNhRd5S_7tMUMog,5073
  buz/event/infrastructure/buz_kafka/kafka_event_subscriber_executor.py,sha256=EyG2vsFYErWAyqxdXqSwxx5Zi_y0d6i0h05XavJMnxg,254
- buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=gM9hBMgCpS86vg1WunUaNP_-b5rr27hXP6gAH3RQ39M,5090
+ buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=S24FMC4oEilO1kx5q1f-_p2Jl54ATQhINPwHeU_ZyME,4835
  buz/event/infrastructure/buz_kafka/publish_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/event/infrastructure/buz_kafka/publish_strategy/publish_strategy.py,sha256=zIkgMnUU7ueG6QHEubMzdTHOtqdldIbS7k5FDLNmqVk,178
  buz/event/infrastructure/buz_kafka/publish_strategy/topic_per_event_kafka_publish_strategy.py,sha256=aLKj6GyLJNcMbuDA1QBa-RzWKBHEorBuPFkkqo_H60k,405
@@ -66,8 +68,8 @@ buz/event/infrastructure/kombu/allowed_kombu_serializer.py,sha256=LQ6futYsInawTC
  buz/event/infrastructure/kombu/consume_strategy/__init__.py,sha256=6dnAv-bOxoDL31gQD1dErRocdJvkLHTgdqeb4S33eWc,302
  buz/event/infrastructure/kombu/consume_strategy/consume_strategy.py,sha256=Zsv7QVpZXRLYvlV2nRbSdSwT_FgEELLyzUxdT6DyX8Q,179
  buz/event/infrastructure/kombu/consume_strategy/queue_per_subscriber_consume_strategy.py,sha256=Vsa1uC7dwS3jJ-dp_lvrE-hVWnN91-ma8oVqdLuXHMo,786
- buz/event/infrastructure/kombu/kombu_consumer.py,sha256=8mvpQ6ePOrhpJFCU46xqIwX9I_fy5HXN6lbWLN2WnPQ,6440
- buz/event/infrastructure/kombu/kombu_event_bus.py,sha256=OLZXkXmHBSRCu3T3N70ZicZl4TXmFgc5cj4PT2CHDYU,4022
+ buz/event/infrastructure/kombu/kombu_consumer.py,sha256=7EhNo_YZdXNpoVIuSm7Thk-Kv-wh3LtFP256g-IzljA,6483
+ buz/event/infrastructure/kombu/kombu_event_bus.py,sha256=VSLBtamp-YOta4KyqmfXvDurvPiHZSL9QPCozMK3Qyw,4017
  buz/event/infrastructure/kombu/publish_strategy/__init__.py,sha256=96ssn7ydJwLXYoVyrhfGcwCpXr4_5Sl0DbN6UCoeNc8,315
  buz/event/infrastructure/kombu/publish_strategy/fanout_exchange_per_event_publish_strategy.py,sha256=Pw85A1oI-cPtzHCQTr0XHQjb7-u9LVmKR3eBIonHsUU,397
  buz/event/infrastructure/kombu/publish_strategy/publish_strategy.py,sha256=mcpXSRPbIYedt1vsoiBBAzqzR3E6o77ZzF6IOFsVRUw,309
@@ -81,6 +83,8 @@ buz/event/meta_subscriber.py,sha256=ieCOtOD2JTXizyFxisBZ4-d_4MvCmIW4BksstngV8oI,
  buz/event/middleware/__init__.py,sha256=1_33sdvRejCF4mHuKVkbldeJde6Y2jYtSrB5vMs0Rfo,773
  buz/event/middleware/async_consume_middleware.py,sha256=314z7ZyhvQIvi90kEO0t-FlnHSyRjArk3RqKOdDE6bM,459
  buz/event/middleware/async_consume_middleware_chain_resolver.py,sha256=Hw75JAs5pyZVDi7-nD4I1nbUXjwYpHQW9PctafGS4ks,1193
+ buz/event/middleware/async_publish_middleware.py,sha256=JIxbRx7HVf_Q1iEziN_5RKGVJ-Oen_f1c3OL9QLmoxE,358
+ buz/event/middleware/async_publish_middleware_chain_resolver.py,sha256=Hqj8CRZXJD6h9KuJaKl88iToOFN7BijoatoDo66En8w,1016
  buz/event/middleware/base_consume_middleware.py,sha256=9G1jsr_Wm-avsTfWAndi5_tf4WW6dSLwQ3rN0-jc7AE,651
  buz/event/middleware/base_publish_middleware.py,sha256=vtM8oA4LZjbZn4omPy-cIAUxQQwL-_Xb4ScU85DwjMU,531
  buz/event/middleware/consume_middleware.py,sha256=BCcs1LgIc5YZx5nf7nE_bMuiiXxscqBE4LqD-nd4JS0,363
@@ -105,7 +109,7 @@ buz/event/strategies/retry/max_retries_negative_exception.py,sha256=UdM5T4cxRv_a
  buz/event/strategies/retry/reject_callback.py,sha256=TnmUt0AkB2DEQMieec9TtB7IAkRHdFAFepAclbiCRns,316
  buz/event/subscriber.py,sha256=WxppO8PFP5zO-gwLZNg1DKSY_uFdsF8JgWIJa6nTTds,237
  buz/event/sync/__init__.py,sha256=uJmU80PGVNNL2HoRFXp4loQTn1VK8gLo-hMEvgVPpBQ,91
- buz/event/sync/sync_event_bus.py,sha256=4yI1gLbaS7e0DI8a6I9I_a7PJcUUe0cJ_3OP4EJmQ4Q,1407
+ buz/event/sync/sync_event_bus.py,sha256=nXNJqoaWQWV3asYCAIReH2nN5AOjvJH2OFODHOrWM6M,1403
  buz/event/transactional_outbox/__init__.py,sha256=k8ZBWCi12pWKXchHfgW_Raw4sVR8XkBLuPNW9jB9X2k,1381
  buz/event/transactional_outbox/event_to_outbox_record_translator.py,sha256=oSn1iQuW-cZLvlXYIJPnSwm3JYUwGMm9f1pqnlF0cJI,534
  buz/event/transactional_outbox/fqn_to_event_mapper.py,sha256=ujcq6CfYqRJtM8f3SEEltbWN0Ru7NM5JfrbNdh4nvhQ,773
@@ -125,14 +129,18 @@ buz/event/transactional_outbox/outbox_record_validation/outbox_record_size_not_a
  buz/event/transactional_outbox/outbox_record_validation/outbox_record_validation_exception.py,sha256=rwG2f8KTfLc2xEpGXlEkWVoqf3ECUny4n5wcualiuIE,134
  buz/event/transactional_outbox/outbox_record_validation/outbox_record_validator.py,sha256=XGHTT1dH2CJOqhYYnyPJHmZsAuVXuDOeqgJzK7mRidc,328
  buz/event/transactional_outbox/outbox_record_validation/size_outbox_record_validator.py,sha256=f8sQ5IHfO4J8m5l7rS3JYUoBvx0B1EAFMRsJ0HPQKG8,2436
- buz/event/transactional_outbox/outbox_repository.py,sha256=nJ3p8jI0GmAJPLm5es6FcH4EooLM0iOH1fIdwG4_Lx0,509
- buz/event/transactional_outbox/transactional_outbox_event_bus.py,sha256=5Mv4wqbPZIJfewUdYnf3n5Fm0tIyrzLZAGYqblLNR3M,1436
+ buz/event/transactional_outbox/outbox_repository.py,sha256=Sn7aWaq1G6uiKXcV09l9L1eVQ_bPUTqY-OSD12_H2jU,628
+ buz/event/transactional_outbox/transactional_outbox_event_bus.py,sha256=S2VIrKCyZG8vztgBagKOJUhp2oJhbLx6oGVHPBplRZ4,1676
  buz/event/transactional_outbox/transactional_outbox_worker.py,sha256=x6kf-Oc4oYKu9S4MTcCqd3VqPNURScTReYJ3Ahx4rKA,2221
  buz/event/worker.py,sha256=BL9TXB_kyr0Avql9fIcFm3CDNnXPvZB6O6BxVwjtCdA,942
  buz/handler.py,sha256=cZqV1NDPGVZQgJ3YSBDhOQ1sdJGdUopxi57yQ6fbPvc,272
- buz/kafka/__init__.py,sha256=3rLTvjxQsZlTRfy7_XG6ojkVfxStlaS7y87oGvx5RfI,1826
+ buz/kafka/__init__.py,sha256=R3fcyET-SNEAvk_XlBQbHIbQVb63Qiz6lVrif3nDhNU,3435
+ buz/kafka/domain/exceptions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py,sha256=9zDWoh0SbHLRuCvpfIGcvrmcscKsXpbAPIxr5-z-GYg,296
  buz/kafka/domain/exceptions/not_valid_kafka_message_exception.py,sha256=Dn6I_-eGQnOuu5WW24oKGOdKOu4EdM8ByH3DLAbz5SY,57
  buz/kafka/domain/exceptions/topic_already_created_exception.py,sha256=UrisdveZGa2BB0ko4mS7-5fwy8eGsIu409_grtq1r9k,333
+ buz/kafka/domain/exceptions/topic_not_found_exception.py,sha256=kLuqGqfsb6YTCe5UCKpMwBm_QAnU9Udfb8bWajPoA8k,201
+ buz/kafka/domain/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/domain/models/auto_create_topic_configuration.py,sha256=naeArywtxwTXyLzguFitBqU8nRkDo4Ttd1DjPVYJY-Q,231
  buz/kafka/domain/models/consumer_initial_offset_position.py,sha256=mhnN7LaRhqnzODV6GFewQ5TbIYwCfVbtWaSTYvbGMU0,111
  buz/kafka/domain/models/create_kafka_topic.py,sha256=Ut6_Xh5finrPlqz2Unw3EP31lR0y8c55zRp8kXxUj7U,224
@@ -142,14 +150,18 @@ buz/kafka/domain/models/kafka_connection_plain_text_credentials.py,sha256=DzhJCl
  buz/kafka/domain/models/kafka_connection_sasl_credentials.py,sha256=SG45bU8EFlV0cUObkW_a0wvfRuZU6HelqQVPVu-EV0o,591
  buz/kafka/domain/models/kafka_consumer_record.py,sha256=2oJvTBAr8jQq4FglsSgtkno29XLmxgC49O6uriKCdqw,230
  buz/kafka/domain/models/kafka_poll_record.py,sha256=Yt55L6rGM_taJ615_YyO1gBJIgpVruD0VG0rgSLXSq4,305
+ buz/kafka/domain/models/kafka_supported_compression_type.py,sha256=ZEY1kPzYQlkPhEg0y2EMdZXUQ_oSHhjbGj9MIQvU09E,141
  buz/kafka/domain/models/kafka_supported_sasl_mechanisms.py,sha256=ASyDaFgseQRcUJA2kubQSdCkG6KhGmpMAzTFj5NwK5w,212
  buz/kafka/domain/models/kafka_supported_security_protocols.py,sha256=ffY2-9sOj4XIkJTSQVkqeOb4KnuqEYXISDarfDN8r9Q,161
- buz/kafka/domain/services/kafka_admin_client.py,sha256=rQdn_ZrhbC2PsbFKXEyxRjWEZBX0_WE5Mi3aKWAQsjU,708
+ buz/kafka/domain/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ buz/kafka/domain/services/async_kafka_producer.py,sha256=gSq3WwEVux_gp3EKDAMN1WsM027uklB58E-WnKpyhPs,533
+ buz/kafka/domain/services/kafka_admin_client.py,sha256=XE6H-4JWsjygKjdtLtGMX5ELhWkJcpR7ai9CO2kB98Y,1222
  buz/kafka/domain/services/kafka_admin_test_client.py,sha256=91l_vFIo1yhJLQQCC_OmeXZ5F429zP7Hx5g4FNllpfE,1625
- buz/kafka/domain/services/kafka_producer.py,sha256=CTiwGYwuzdJY5aeb2WFbJlyCpZ0YyhzcgKQYyogKzUM,401
+ buz/kafka/domain/services/kafka_producer.py,sha256=8bLTV328orrPHcARzkc6no4vyJzrArVtCsjmSRXDjos,506
  buz/kafka/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/aiokafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=9bzCq18xTMCNN7IKoEygdA8xQ235qIza1rU6OnMnq9o,8725
+ buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=i6h4EeEyH3YdxWrQ-zQF3koni9AurdhKf0Va0K1dqBw,8695
+ buz/kafka/infrastructure/aiokafka/aiokafka_producer.py,sha256=LteHKIHpT6MKplwmwsPYMsd2GWNJCzus65XDHCIdoN8,3823
  buz/kafka/infrastructure/aiokafka/rebalance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/aiokafka/rebalance/kafka_callback_rebalancer.py,sha256=3l7NkTrCt3rBktVIS73cTmCOvv6eFguoCbGMYIUfCFc,1774
  buz/kafka/infrastructure/aiokafka/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -163,13 +175,14 @@ buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_e
  buz/kafka/infrastructure/deserializers/implementations/cdc/not_valid_cdc_message_exception.py,sha256=hgLLwTcC-C2DuJSOWUhmQsrd1bO9I1469869IqfAPOk,414
  buz/kafka/infrastructure/deserializers/implementations/json_byte_deserializer.py,sha256=L4b164-KweiQUwyRONhTMIGnAz48UPk0btLqjGOTNdk,373
  buz/kafka/infrastructure/deserializers/implementations/json_bytes_to_message_deserializer.py,sha256=YwugXkmOudMNtkVfCC4BFe3pFVpbM8rAL9bT88bZMRk,756
+ buz/kafka/infrastructure/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ buz/kafka/infrastructure/interfaces/async_connection_manager.py,sha256=JbaLu5UVV2ZPRLI_FGj1ijX1QekFysPts45G-F-oBkA,217
+ buz/kafka/infrastructure/interfaces/connection_manager.py,sha256=EWnvShJHOg8QYe6a3ma0urjKjmVMDBi7q8T2cv_i_MQ,200
  buz/kafka/infrastructure/kafka_python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/kafka_python/exception/consumer_interrupted_exception.py,sha256=fqhgV7HILdVdv-p1CsOIaaESKY2ZXBtRGYbrVSdPLg0,164
- buz/kafka/infrastructure/kafka_python/factories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py,sha256=rn-ZUuzHZfAMGon_lKl7gQ-gwijW1nYOHlGTCIjfDE4,858
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=mSbRnsX0sh0WXHUTR6obbTvTj_lGwYIWY8cJvY59n0k,4350
- buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=IPwpANcS7nESo9cbLqwJPY7TfnidOshuh3DfF1UPK4Q,2817
- buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=CMkpTkrC10wy574Nc9t-TKvWO-6XCgj8F5EkWlwP0-U,2275
+ buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=7HbwkaoedXbxZ4LcGWytU2q_GTdLm_c1ziyGTBoKgF0,9038
+ buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=wLhnrHzyFJ9ETWNUfd-dmwm_CwZyiImaAP97cEdRgzE,2982
+ buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=DkqqLSSXHBf4SXXf-IZwwLhxWrGE95Jg4MO_3RDsikU,3594
  buz/kafka/infrastructure/kafka_python/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  buz/kafka/infrastructure/kafka_python/translators/consumer_initial_offset_position_translator.py,sha256=hJ48_eyMcnbFL_Y5TOiMbGXrQSryuKk9CvP59MdqNOY,620
  buz/kafka/infrastructure/serializers/byte_serializer.py,sha256=T83sLdX9V5Oh1mzjRwHi_1DsTFI7KefFj7kmnz7JVy4,207
@@ -230,7 +243,7 @@ buz/serializer/message_to_json_bytes_serializer.py,sha256=RGZJ64t4t4Pz2FCASZZCv-
  buz/wrapper/__init__.py,sha256=GnRdJFcncn-qp0hzDG9dBHLmTJSbHFVjE_yr-MdW_n4,77
  buz/wrapper/async_to_sync.py,sha256=OfK-vrVUhuN-LLLvekLdMbQYtH0ue5lfbvuasj6ovMI,698
  buz/wrapper/event_loop.py,sha256=pfBJ1g-8A2a3YgW8Gf9Fg0kkewoh3-wgTy2KIFDyfHk,266
- buz-2.13.1rc8.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
- buz-2.13.1rc8.dist-info/METADATA,sha256=4ptRnjIHu5NspLmq0nUXRlSCplcfjYT5CgrdjHpTp9o,1620
- buz-2.13.1rc8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- buz-2.13.1rc8.dist-info/RECORD,,
+ buz-2.14.0.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
+ buz-2.14.0.dist-info/METADATA,sha256=f8tigTI_Ee1HBkPHTPfsYHjkOyuBZwlCX6dTeDpjzCQ,1617
+ buz-2.14.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ buz-2.14.0.dist-info/RECORD,,