buz-2.13.1rc7-py3-none-any.whl → buz-2.13.1rc9-py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
--- a/buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py
+++ b/buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py
@@ -1,5 +1,5 @@
-from abc import abstractmethod
 import traceback
+from abc import abstractmethod
 from asyncio import Lock, Task, create_task, gather, Semaphore, Event as AsyncIOEvent, sleep
 from datetime import timedelta, datetime
 from itertools import cycle
@@ -28,7 +28,6 @@ from buz.kafka.infrastructure.aiokafka.aiokafka_consumer import AIOKafkaConsumer
 from buz.queue.in_memory.in_memory_multiqueue_repository import InMemoryMultiqueueRepository
 from buz.queue.multiqueue_repository import MultiqueueRepository
 
-
 T = TypeVar("T", bound=Event)
 
 
@@ -87,17 +86,14 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
     async def run(self) -> None:
         start_time = datetime.now()
         await self.__generate_kafka_consumers()
+        self.__initial_coroutines_created_elapsed_time = datetime.now() - start_time
 
         if len(self.__executor_per_consumer_mapper) == 0:
             self._logger.error("There are no valid subscribers to execute, finalizing consumer")
             return
 
-        self.__initial_coroutines_created_elapsed_time = datetime.now() - start_time
-
         start_consumption_time = datetime.now()
-
         worker_errors = await self.__run_worker()
-
         self.__events_processed_elapsed_time = datetime.now() - start_consumption_time
 
         await self.__handle_graceful_stop(worker_errors)
@@ -112,9 +108,9 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         if self.__exceptions_are_thrown(worker_errors):
             consume_events_exception, polling_task_exception = worker_errors
             if consume_events_exception:
-                self._logger.error(consume_events_exception)
+                self._logger.exception(consume_events_exception)
             if polling_task_exception:
-                self._logger.error(polling_task_exception)
+                self._logger.exception(polling_task_exception)
 
             raise WorkerExecutionException("The worker was closed by an unexpected exception")
 
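
Note: switching these calls from Logger.error to Logger.exception attaches the active traceback to the log record, which is the standard-library behaviour inside an except block. A minimal standalone sketch of the difference (the logger name is illustrative, not from buz):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("demo")

    try:
        1 / 0
    except ZeroDivisionError as error:
        logger.error(error)      # logs only "division by zero"
        logger.exception(error)  # logs the message plus the full traceback
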
@@ -227,41 +223,32 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         if len(kafka_poll_records) == 0:
             await sleep(self.__seconds_between_polls_if_there_are_no_new_tasks)
 
-        return
-
     async def __consume_events_task(self) -> None:
         self._logger.info("Initializing consuming task")
-
-        blocked_tasks_iterator = self.generate_blocked_consuming_tasks_iterator()
+        blocked_tasks_iterator = self.__generate_blocked_consuming_tasks_iterator()
 
         async for consuming_task in blocked_tasks_iterator:
             consumer = consuming_task.consumer
             kafka_poll_record = consuming_task.kafka_poll_record
-            executor = self.__executor_per_consumer_mapper[consuming_task.consumer]
 
+            executor = self.__executor_per_consumer_mapper[consumer]
             await executor.consume(kafka_poll_record=kafka_poll_record)
-
             await consumer.commit_poll_record(kafka_poll_record)
 
             self.__events_processed += 1
 
     # This iterator return a blocked task, that will be blocked for other process (like rebalancing), until the next task will be requested
-    async def generate_blocked_consuming_tasks_iterator(self) -> AsyncIterator[ConsumingTask]:
+    async def __generate_blocked_consuming_tasks_iterator(self) -> AsyncIterator[ConsumingTask]:
         consumer_queues_cyclic_iterator = cycle(self.__queue_per_consumer_mapper.items())
         last_consumer, _ = next(consumer_queues_cyclic_iterator)
 
         while not self.__should_stop.is_set():
-            all_queues_are_empty = all(
-                [queue.is_totally_empty() for queue in self.__queue_per_consumer_mapper.values()]
-            )
-
-            if all_queues_are_empty:
+            if await self.__all_queues_are_empty():
                 await sleep(self.__seconds_between_executions_if_there_are_no_tasks_in_the_queue)
                 continue
 
             async with self.__task_execution_mutex:
                 consumer: Optional[AIOKafkaConsumer] = None
-                kafka_poll_record: Optional[KafkaPollRecord] = None
 
                 while consumer != last_consumer:
                     consumer, queue = next(consumer_queues_cyclic_iterator)
@@ -272,7 +259,8 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
                         last_consumer = consumer
                         break
 
-        return
+    async def __all_queues_are_empty(self) -> bool:
+        return all([queue.is_totally_empty() for queue in self.__queue_per_consumer_mapper.values()])
 
     async def __on_partition_revoked(self, consumer: AIOKafkaConsumer, topics_partitions: set[TopicPartition]) -> None:
         async with self.__task_execution_mutex:
--- a/buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py
+++ b/buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py
@@ -14,6 +14,7 @@ from buz.event.middleware.publish_middleware_chain_resolver import PublishMiddle
 from buz.kafka import (
     KafkaPythonProducer,
 )
+from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
@@ -53,18 +54,22 @@ class BuzKafkaEventBus(EventBus):
         topic = self.__publish_strategy.get_topic(event)
 
         if self.__auto_create_topic_configuration is not None and self.__is_topic_created(topic) is False:
-            self.__get_kafka_admin_client().create_topics(
-                topics=[
-                    CreateKafkaTopic(
-                        name=topic,
-                        partitions=self.__auto_create_topic_configuration.partitions,
-                        replication_factor=self.__auto_create_topic_configuration.replication_factor,
-                        configs=self.__auto_create_topic_configuration.configs,
-                    )
-                ]
-            )
-            self.__logger.info(f"Created missing topic: {topic}")
-            self.__topics_checked[topic] = True
+            try:
+                self.__logger.info(f"Creating missing topic: {topic}..")
+                self.__get_kafka_admin_client().create_topics(
+                    topics=[
+                        CreateKafkaTopic(
+                            name=topic,
+                            partitions=self.__auto_create_topic_configuration.partitions,
+                            replication_factor=self.__auto_create_topic_configuration.replication_factor,
+                            configs=self.__auto_create_topic_configuration.configs,
+                        )
+                    ]
+                )
+                self.__logger.info(f"Created missing topic: {topic}")
+                self.__topics_checked[topic] = True
+            except KafkaTopicsAlreadyCreatedException:
+                pass
 
         headers = self.__get_event_headers(event)
         self.__producer.produce(
--- a/buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py
+++ b/buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py
@@ -67,11 +67,6 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
             self.__logger.error(
                 f'The message "{str(kafka_poll_record.value)}" is not valid, it will be consumed but not processed'
             )
-        except Exception as exception:
-            if self.__on_fail_strategy == KafkaOnFailStrategy.CONSUME_ON_FAIL:
-                self.__logger.error(f"Error consuming event: {exception}")
-                return
-            raise exception
 
     def __execution_callback(self, subscriber: Subscriber, message: KafkaConsumerRecord[Event]) -> None:
         self.__consume_middleware_chain_resolver.resolve(
--- /dev/null
+++ b/buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+
+class NotAllPartitionAssignedException(Exception):
+    def __init__(self, topic_name: str) -> None:
+        super().__init__(
+            f'Not all the partition were assigned for the topic "{topic_name}", please disconnect the rest of subscribers'
+        )
--- /dev/null
+++ b/buz/kafka/domain/exceptions/topic_not_found_exception.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+
+class TopicNotFoundException(Exception):
+    def __init__(self, topic_name: str) -> None:
+        super().__init__(f'The topic "{topic_name}", has not been found')
--- a/buz/kafka/domain/services/kafka_admin_client.py
+++ b/buz/kafka/domain/services/kafka_admin_client.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from abc import abstractmethod, ABC
+from datetime import datetime
 from typing import Sequence
 
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
@@ -37,3 +38,16 @@ class KafkaAdminClient(ABC):
         self,
     ) -> set[str]:
         pass
+
+    # This function moves the following offset from the provided date
+    # if there are no messages with a date greater than the provided offset
+    # the offset will be moved to the end
+    @abstractmethod
+    def move_offsets_to_datetime(
+        self,
+        *,
+        consumer_group: str,
+        topic: str,
+        target_datetime: datetime,
+    ) -> None:
+        pass
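
Note: a usage sketch for the new method against any concrete KafkaAdminClient implementation (the group, topic and date below are illustrative, not from the package). As NotAllPartitionAssignedException above implies, every other consumer in the group must be disconnected so a single consumer can be assigned all partitions:

    from datetime import datetime, timezone

    # admin_client: any concrete KafkaAdminClient implementation
    admin_client.move_offsets_to_datetime(
        consumer_group="my-consumer-group",
        topic="my-topic",
        target_datetime=datetime(2024, 1, 1, tzinfo=timezone.utc),
    )
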
--- a/buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py
+++ b/buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py
@@ -123,7 +123,7 @@ class AIOKafkaConsumer:
         try:
             self.__logger.info(f"Creating missing topics: {non_created_topics}...")
             kafka_admin_client.create_topics(topics=topics_to_create)
-            self.__logger.info(f"Created missing topics: {non_created_topics}...")
+            self.__logger.info(f"Created missing topics: {non_created_topics}")
         except KafkaTopicsAlreadyCreatedException:
             # there is a possibility to have a race condition between the check and the creation
             # but it does not matters, the important part is that the topic is created
--- a/buz/kafka/infrastructure/cdc/cdc_message.py
+++ b/buz/kafka/infrastructure/cdc/cdc_message.py
@@ -3,9 +3,11 @@ from dataclasses import dataclass
 
 @dataclass(frozen=True)
 class CDCPayload:
+    DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
+
     payload: str  # json encoded
     event_id: str  # uuid
-    created_at: str  # date and hour ISO 8601
+    created_at: str
     event_fqn: str
 
     def validate(self) -> None:
--- a/buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py
+++ b/buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py
@@ -1,16 +1,17 @@
 from __future__ import annotations
 
+from datetime import datetime
 from typing import TypeVar, Type, Generic
 
 import orjson
 from dacite import from_dict
 
-from buz.kafka.infrastructure.deserializers.implementations.cdc.not_valid_cdc_message_exception import (
-    NotValidCDCMessageException,
-)
 from buz.event import Event
 from buz.kafka.infrastructure.cdc.cdc_message import CDCMessage, CDCPayload
 from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
+from buz.kafka.infrastructure.deserializers.implementations.cdc.not_valid_cdc_message_exception import (
+    NotValidCDCMessageException,
+)
 
 T = TypeVar("T", bound=Event)
 
@@ -27,12 +28,16 @@ class CDCRecordBytesToEventDeserializer(BytesToMessageDeserializer[Event], Gener
             cdc_message = self.__get_outbox_record_as_dict(decoded_string)
             return self.__event_class.restore(
                 id=cdc_message.payload.event_id,
-                created_at=cdc_message.payload.created_at,
+                created_at=self.__get_created_at_in_event_format(cdc_message.payload.created_at),
                 **orjson.loads(cdc_message.payload.payload),
             )
         except Exception as exception:
            raise NotValidCDCMessageException(decoded_string, exception) from exception
 
+    def __get_created_at_in_event_format(self, cdc_payload_created_at: str) -> str:
+        created_at_datetime = datetime.strptime(cdc_payload_created_at, CDCPayload.DATE_TIME_FORMAT)
+        return created_at_datetime.strftime(Event.DATE_TIME_FORMAT)
+
     def __get_outbox_record_as_dict(self, decoded_string: str) -> CDCMessage:
         decoded_record: dict = orjson.loads(decoded_string)
 
--- a/buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py
+++ b/buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py
@@ -1,19 +1,30 @@
 from __future__ import annotations
 
+from datetime import datetime
+from logging import Logger
 import re
-from typing import Any, Callable, Sequence
+from typing import Any, Callable, Optional, Sequence, cast
 
 from cachetools import TTLCache
-from kafka import KafkaClient
+from kafka import KafkaClient, KafkaConsumer
 from kafka.admin import KafkaAdminClient as KafkaPythonLibraryAdminClient, NewTopic
 from kafka.errors import TopicAlreadyExistsError
+from kafka.structs import TopicPartition, OffsetAndTimestamp
 
+from buz.kafka.domain.exceptions.not_all_partition_assigned_exception import NotAllPartitionAssignedException
 from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
+from buz.kafka.domain.exceptions.topic_not_found_exception import TopicNotFoundException
+from buz.kafka.domain.models.consumer_initial_offset_position import ConsumerInitialOffsetPosition
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
 from buz.kafka.domain.models.kafka_connection_config import KafkaConnectionConfig
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
 
+from buz.kafka.infrastructure.kafka_python.translators.consumer_initial_offset_position_translator import (
+    KafkaPythonConsumerInitialOffsetPositionTranslator,
+)
+
 INTERNAL_KAFKA_TOPICS = {"__consumer_offsets", "_schema"}
+TOPIC_CACHE_KEY = "topics"
 
 
 class KafkaPythonAdminClient(KafkaAdminClient):
@@ -22,9 +33,11 @@ class KafkaPythonAdminClient(KafkaAdminClient):
     def __init__(
         self,
         *,
+        logger: Logger,
         config: KafkaConnectionConfig,
         cache_ttl_seconds: int = 0,
     ):
+        self._logger = logger
         self._config = config
         self._config_in_library_format = self.__get_kafka_config_in_library_format(config)
         self._kafka_admin = KafkaPythonLibraryAdminClient(**self._config_in_library_format)
@@ -75,14 +88,13 @@ class KafkaPythonAdminClient(KafkaAdminClient):
         self,
         topic: str,
     ) -> bool:
-        topics = self.get_topics()
-        return topic in topics
+        return topic in self.get_topics()
 
     def get_topics(
         self,
     ) -> set[str]:
         return self.__resolve_cached_property(
-            "topics", lambda: set(self._kafka_admin.list_topics()) - INTERNAL_KAFKA_TOPICS
+            TOPIC_CACHE_KEY, lambda: set(self._kafka_admin.list_topics()) - INTERNAL_KAFKA_TOPICS
         )
 
     def __resolve_cached_property(self, property_key: str, callback: Callable) -> Any:
@@ -101,6 +113,10 @@ class KafkaPythonAdminClient(KafkaAdminClient):
         self._kafka_admin.delete_topics(
             topics=topics,
         )
+        self.__remove_cache_property(TOPIC_CACHE_KEY)
+
+    def __remove_cache_property(self, property_key: str) -> None:
+        self.__ttl_cache.pop(property_key, None)
 
     def delete_subscription_groups(
         self,
@@ -119,3 +135,80 @@ class KafkaPythonAdminClient(KafkaAdminClient):
     def _wait_for_cluster_update(self) -> None:
         future = self._kafka_client.cluster.request_update()
         self._kafka_client.poll(future=future)
+
+    def move_offsets_to_datetime(
+        self,
+        *,
+        consumer_group: str,
+        topic: str,
+        target_datetime: datetime,
+    ) -> None:
+        consumer = KafkaConsumer(
+            group_id=consumer_group,
+            enable_auto_commit=False,
+            auto_offset_reset=KafkaPythonConsumerInitialOffsetPositionTranslator.to_kafka_supported_format(
+                ConsumerInitialOffsetPosition.BEGINNING
+            ),
+            **self._config_in_library_format,
+        )
+
+        partitions = consumer.partitions_for_topic(topic)
+
+        if partitions is None:
+            raise TopicNotFoundException(topic)
+
+        topic_partitions = [TopicPartition(topic, p) for p in partitions]
+        consumer.subscribe(topics=[topic])
+
+        self.__force_partition_assignment(consumer)
+
+        # We need all the partitions in order to update the offsets
+        if len(consumer.assignment()) != len(topic_partitions):
+            raise NotAllPartitionAssignedException(topic)
+
+        offsets_for_date = self.__get_first_offset_after_date(
+            consumer=consumer,
+            topic_partitions=topic_partitions,
+            target_datetime=target_datetime,
+        )
+
+        end_offsets = consumer.end_offsets(topic_partitions)
+
+        if end_offsets is None or len(end_offsets.keys()) != len(topic_partitions):
+            raise Exception(f'There was an error extracting the end offsets of the topic "{topic}"')
+
+        for topic_partition in topic_partitions:
+            offset_and_timestamp = offsets_for_date.get(topic_partition)
+            if offset_and_timestamp:
+                self._logger.info(f'moving "{topic_partition}" to the offset "{offset_and_timestamp.offset}"')
+                consumer.seek(topic_partition, offset_and_timestamp.offset)
+            else:
+                self._logger.info(
+                    f'moving "{topic_partition}" to the end of the topic because there are no messages later than "{target_datetime}"'
+                )
+                consumer.seek(topic_partition, end_offsets[topic_partition])
+
+        consumer.commit()
+        consumer.close()
+
+    def __get_first_offset_after_date(
+        self,
+        *,
+        consumer: KafkaConsumer,
+        topic_partitions: Sequence[TopicPartition],
+        target_datetime: datetime,
+    ) -> dict[TopicPartition, Optional[OffsetAndTimestamp]]:
+        offset_for_times: dict[TopicPartition, Optional[int]] = {}
+        timestamp_ms = int(target_datetime.timestamp() * 1000)
+
+        for topic_partition in topic_partitions:
+            offset_for_times[topic_partition] = timestamp_ms
+
+        return cast(
+            dict[TopicPartition, Optional[OffsetAndTimestamp]],
+            consumer.offsets_for_times(offset_for_times),
+        )
+
+    # We are not to commit the new offset, but we need to execute a polling in order to start the partition assignment
+    def __force_partition_assignment(self, consumer: KafkaConsumer) -> None:
+        consumer.poll(max_records=1, timeout_ms=0)
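
Note: the end_offsets fallback exists because kafka-python's offsets_for_times maps a partition to None when it holds no message at or after the requested timestamp. A minimal standalone sketch of that contract (broker address and topic are illustrative):

    from datetime import datetime, timezone

    from kafka import KafkaConsumer
    from kafka.structs import TopicPartition

    consumer = KafkaConsumer(bootstrap_servers="localhost:9092", enable_auto_commit=False)
    partition = TopicPartition("my-topic", 0)
    timestamp_ms = int(datetime(2024, 1, 1, tzinfo=timezone.utc).timestamp() * 1000)

    # Maps the partition to an OffsetAndTimestamp for the first message at or
    # after timestamp_ms, or to None when every message is older -- the case
    # move_offsets_to_datetime handles by seeking to the end offset instead.
    print(consumer.offsets_for_times({partition: timestamp_ms}))
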
--- a/buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py
+++ b/buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from logging import Logger
 from typing import Optional
 
 from kafka import KafkaConsumer, KafkaProducer
@@ -22,8 +23,16 @@ CONSUMER_POLL_TIMEOUT_MS = 1000
 
 
 class KafkaPythonAdminTestClient(KafkaPythonAdminClient, KafkaAdminTestClient):
-    def __init__(self, *, config: KafkaConnectionConfig):
-        super().__init__(config=config)
+    def __init__(
+        self,
+        *,
+        logger: Logger,
+        config: KafkaConnectionConfig,
+    ):
+        super().__init__(
+            config=config,
+            logger=logger,
+        )
 
     def send_message_to_topic(
         self,
--- a/buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py
+++ b/buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from dataclasses import asdict
+from datetime import datetime
 
 from buz.event import Event
 from buz.kafka.infrastructure.cdc.cdc_message import CDCMessage, CDCPayload
@@ -16,13 +17,17 @@ class CDCRecordBytesToEventSerializer(ByteSerializer):
         cdc_message: CDCMessage = CDCMessage(
             payload=CDCPayload(
                 event_id=data.id,
-                created_at=data.created_at,
+                created_at=self.__adapt_created_to_cdc_format(data.created_at),
                 event_fqn=data.fqn(),
                 payload=self.__serialize_payload(data),
             )
         )
         return self.__json_serializer.serialize(asdict(cdc_message))
 
+    def __adapt_created_to_cdc_format(self, created_at: str) -> str:
+        created_at_datetime = datetime.strptime(created_at, Event.DATE_TIME_FORMAT)
+        return created_at_datetime.strftime(CDCPayload.DATE_TIME_FORMAT)
+
     def __serialize_payload(self, event: Event) -> str:
         # Remove id and created at, because Transactional outbox is not adding them
         payload = asdict(event)
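
Note: the serializer and deserializer above convert created_at between the event-side and CDC-side string formats with strptime/strftime. A standalone sketch of the round trip; CDCPayload.DATE_TIME_FORMAT is taken from this diff, while the event-side format below is a stand-in because Event.DATE_TIME_FORMAT is not shown here:

    from datetime import datetime

    CDC_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"   # CDCPayload.DATE_TIME_FORMAT in this diff
    EVENT_FORMAT = "%Y-%m-%d %H:%M:%S.%f"  # stand-in for Event.DATE_TIME_FORMAT

    created_at = "2024-01-01 12:30:00.000123"
    as_cdc = datetime.strptime(created_at, EVENT_FORMAT).strftime(CDC_FORMAT)
    assert as_cdc == "2024-01-01T12:30:00.000123Z"
    assert datetime.strptime(as_cdc, CDC_FORMAT).strftime(EVENT_FORMAT) == created_at
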
--- a/buz/queue/in_memory/in_memory_multiqueue_repository.py
+++ b/buz/queue/in_memory/in_memory_multiqueue_repository.py
@@ -1,37 +1,20 @@
-from threading import Lock
 from queue import Queue, Empty
 from typing import Optional, TypeVar, cast
 
 from buz.queue.multiqueue_repository import MultiqueueRepository
 
-
 K = TypeVar("K")
 R = TypeVar("R")
 
 
-def self_mutex(method):
-    def call(self, *args, **kwargs):
-        lock: Lock = self._get_method_lock()  # type: ignore
-        with lock:
-            return method(self, *args, **kwargs)
-
-    return call
-
-
 class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):
     def __init__(self):
         self.__queues = cast(dict[K, Queue[R]], {})
-        self.__mutex = Lock()
         self.__last_key_index = 0
 
-    def _get_method_lock(self) -> Lock:
-        return self.__mutex
-
-    @self_mutex
     def clear(self, key: K) -> None:
         self.__queues.pop(key, None)
 
-    @self_mutex
     def push(self, key: K, record: R) -> None:
         if key not in self.__queues:
             self.__add_key(key)
@@ -41,7 +24,6 @@ class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):
     def __add_key(self, key: K) -> None:
         self.__queues[key] = Queue[R]()
 
-    @self_mutex
     def pop(self) -> Optional[R]:
         if not self.__queues:
             return None
@@ -65,10 +47,8 @@ class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):
 
         return None
 
-    @self_mutex
     def get_total_size(self) -> int:
         return sum([queue.qsize() for queue in self.__queues.values()])
 
-    @self_mutex
     def is_totally_empty(self) -> bool:
         return all([queue.empty() for queue in self.__queues.values()])
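
Note: dropping the self_mutex decorator leaves the repository unsynchronized, which is consistent with it now being driven from a single asyncio event loop rather than multiple threads, though the diff does not state the rationale. A usage sketch of the surviving API:

    from buz.queue.in_memory.in_memory_multiqueue_repository import InMemoryMultiqueueRepository

    repo: InMemoryMultiqueueRepository[str, int] = InMemoryMultiqueueRepository()
    repo.push("consumer-a", 1)
    repo.push("consumer-b", 2)
    assert repo.get_total_size() == 2

    # pop() services the per-key queues in turn until every queue is drained.
    while (record := repo.pop()) is not None:
        print(record)

    assert repo.is_totally_empty()
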
--- a/buz-2.13.1rc7.dist-info/METADATA
+++ b/buz-2.13.1rc9.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: buz
-Version: 2.13.1rc7
+Version: 2.13.1rc9
 Summary: Buz is a set of light, simple and extensible implementations of event, command and query buses.
 License: MIT
 Author: Luis Pintado Lozano
--- a/buz-2.13.1rc7.dist-info/RECORD
+++ b/buz-2.13.1rc9.dist-info/RECORD
@@ -45,10 +45,10 @@ buz/event/exceptions/term_signal_interruption_exception.py,sha256=RkRRF0v_K9Hg48
 buz/event/exceptions/worker_execution_exception.py,sha256=6mgztvXOCG_9VZ_Jptkk72kZtNWQ2CPuQ3TjXEWFE14,123
 buz/event/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=6V54oD8dpdLyrBel98qlvt8ZMOCnukxfL-7aRreDaPI,13915
+buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=GfJ51noIkMfJ7DOQSUikphHEr6rvfvI2Np1k2mtAai4,13823
 buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=dqQDv7taAmINE9G2geMDExbcvSlntP09_rQ0JRbc4Rw,5507
 buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=yrEU51OBjvLjCfYJFJPxux1bcIhoTVMw1Jf0HJMWbb0,5449
-buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=f94fmS4AVfb3LQsp49e-4Cqzj00IqxHDzuUvDbN4u2s,4258
+buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=sB8Cj_yTxqe8M9PT-HR4TcR9Mr39AnkIj3mVObKAe4U,4595
 buz/event/infrastructure/buz_kafka/consume_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/consume_strategy/consume_strategy.py,sha256=RqlXe5W2S6rH3FTr--tcxzFJTAVLb-Dhl7m6qjgNz2M,331
 buz/event/infrastructure/buz_kafka/consume_strategy/kafka_on_fail_strategy.py,sha256=elNeyTubDuhHsLlTtDA1Nqz2hZe12PUcO9kz8upPby8,136
@@ -57,7 +57,7 @@ buz/event/infrastructure/buz_kafka/exceptions/__init__.py,sha256=47DEQpj8HBSa-_T
 buz/event/infrastructure/buz_kafka/exceptions/kafka_event_bus_config_not_valid_exception.py,sha256=VUKZXA2ygjg21P4DADFl_Tace6RwSXia1MRYvJypxbM,135
 buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py,sha256=ULM5I35hmgUAYJobTYyCgMXYYhDvLNhRd5S_7tMUMog,5073
 buz/event/infrastructure/buz_kafka/kafka_event_subscriber_executor.py,sha256=EyG2vsFYErWAyqxdXqSwxx5Zi_y0d6i0h05XavJMnxg,254
-buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=gM9hBMgCpS86vg1WunUaNP_-b5rr27hXP6gAH3RQ39M,5090
+buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=IHossnUlynEFkUvwcTyxHVwGUyDSlfH1t4UX1S09Dic,4846
 buz/event/infrastructure/buz_kafka/publish_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/publish_strategy/publish_strategy.py,sha256=zIkgMnUU7ueG6QHEubMzdTHOtqdldIbS7k5FDLNmqVk,178
 buz/event/infrastructure/buz_kafka/publish_strategy/topic_per_event_kafka_publish_strategy.py,sha256=aLKj6GyLJNcMbuDA1QBa-RzWKBHEorBuPFkkqo_H60k,405
@@ -131,8 +131,10 @@ buz/event/transactional_outbox/transactional_outbox_worker.py,sha256=x6kf-Oc4oYK
 buz/event/worker.py,sha256=BL9TXB_kyr0Avql9fIcFm3CDNnXPvZB6O6BxVwjtCdA,942
 buz/handler.py,sha256=cZqV1NDPGVZQgJ3YSBDhOQ1sdJGdUopxi57yQ6fbPvc,272
 buz/kafka/__init__.py,sha256=3rLTvjxQsZlTRfy7_XG6ojkVfxStlaS7y87oGvx5RfI,1826
+buz/kafka/domain/exceptions/not_all_partition_assigned_exception.py,sha256=9zDWoh0SbHLRuCvpfIGcvrmcscKsXpbAPIxr5-z-GYg,296
 buz/kafka/domain/exceptions/not_valid_kafka_message_exception.py,sha256=Dn6I_-eGQnOuu5WW24oKGOdKOu4EdM8ByH3DLAbz5SY,57
 buz/kafka/domain/exceptions/topic_already_created_exception.py,sha256=UrisdveZGa2BB0ko4mS7-5fwy8eGsIu409_grtq1r9k,333
+buz/kafka/domain/exceptions/topic_not_found_exception.py,sha256=kLuqGqfsb6YTCe5UCKpMwBm_QAnU9Udfb8bWajPoA8k,201
 buz/kafka/domain/models/auto_create_topic_configuration.py,sha256=naeArywtxwTXyLzguFitBqU8nRkDo4Ttd1DjPVYJY-Q,231
 buz/kafka/domain/models/consumer_initial_offset_position.py,sha256=mhnN7LaRhqnzODV6GFewQ5TbIYwCfVbtWaSTYvbGMU0,111
 buz/kafka/domain/models/create_kafka_topic.py,sha256=Ut6_Xh5finrPlqz2Unw3EP31lR0y8c55zRp8kXxUj7U,224
@@ -144,22 +146,22 @@ buz/kafka/domain/models/kafka_consumer_record.py,sha256=2oJvTBAr8jQq4FglsSgtkno2
 buz/kafka/domain/models/kafka_poll_record.py,sha256=Yt55L6rGM_taJ615_YyO1gBJIgpVruD0VG0rgSLXSq4,305
 buz/kafka/domain/models/kafka_supported_sasl_mechanisms.py,sha256=ASyDaFgseQRcUJA2kubQSdCkG6KhGmpMAzTFj5NwK5w,212
 buz/kafka/domain/models/kafka_supported_security_protocols.py,sha256=ffY2-9sOj4XIkJTSQVkqeOb4KnuqEYXISDarfDN8r9Q,161
-buz/kafka/domain/services/kafka_admin_client.py,sha256=rQdn_ZrhbC2PsbFKXEyxRjWEZBX0_WE5Mi3aKWAQsjU,708
+buz/kafka/domain/services/kafka_admin_client.py,sha256=tF0WJW-eCzgskpuhxzXOU5fcTIFqK0fWzMu7yTEzAdo,1118
 buz/kafka/domain/services/kafka_admin_test_client.py,sha256=91l_vFIo1yhJLQQCC_OmeXZ5F429zP7Hx5g4FNllpfE,1625
 buz/kafka/domain/services/kafka_producer.py,sha256=CTiwGYwuzdJY5aeb2WFbJlyCpZ0YyhzcgKQYyogKzUM,401
 buz/kafka/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=ringqMhgtVzStx_k8Y2os1nlOIKmAx5XwQroTqMQbmg,8728
+buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=9bzCq18xTMCNN7IKoEygdA8xQ235qIza1rU6OnMnq9o,8725
 buz/kafka/infrastructure/aiokafka/rebalance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/rebalance/kafka_callback_rebalancer.py,sha256=3l7NkTrCt3rBktVIS73cTmCOvv6eFguoCbGMYIUfCFc,1774
 buz/kafka/infrastructure/aiokafka/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/translators/consumer_initial_offset_position_translator.py,sha256=WmxkQfoXeTy9mIJtGGhM0eDKeQxhcJczeVAGCbtonVI,617
-buz/kafka/infrastructure/cdc/cdc_message.py,sha256=Db_em56VmC0t1ny0yXUWx5g0MHUrN8Jz5ewic1kJZrE,1058
+buz/kafka/infrastructure/cdc/cdc_message.py,sha256=zLWUbQ2-fLsh_fei-sF8oQse2w30z25JnaJGZDq5f0E,1080
 buz/kafka/infrastructure/deserializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/deserializers/byte_deserializer.py,sha256=4fc6t-zvcFx6F5eoyEixH2uN0cM6aB0YRGwowIzz1RA,211
 buz/kafka/infrastructure/deserializers/bytes_to_message_deserializer.py,sha256=r40yq67DIElPi6ClmElbtR3VGrG2grNwgwuflXWOh20,345
 buz/kafka/infrastructure/deserializers/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=XvRyoXA2WeD-Ui-jk6O6AheR5toKFjuroJiZvgs4kQs,1679
+buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=JSXHYNdikMnFf0mSEeaWfsxzcYZphTdfR732-RrCQW0,2002
 buz/kafka/infrastructure/deserializers/implementations/cdc/not_valid_cdc_message_exception.py,sha256=hgLLwTcC-C2DuJSOWUhmQsrd1bO9I1469869IqfAPOk,414
 buz/kafka/infrastructure/deserializers/implementations/json_byte_deserializer.py,sha256=L4b164-KweiQUwyRONhTMIGnAz48UPk0btLqjGOTNdk,373
 buz/kafka/infrastructure/deserializers/implementations/json_bytes_to_message_deserializer.py,sha256=YwugXkmOudMNtkVfCC4BFe3pFVpbM8rAL9bT88bZMRk,756
@@ -167,13 +169,13 @@ buz/kafka/infrastructure/kafka_python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
 buz/kafka/infrastructure/kafka_python/exception/consumer_interrupted_exception.py,sha256=fqhgV7HILdVdv-p1CsOIaaESKY2ZXBtRGYbrVSdPLg0,164
 buz/kafka/infrastructure/kafka_python/factories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/kafka_python/factories/kafka_python_producer_factory.py,sha256=rn-ZUuzHZfAMGon_lKl7gQ-gwijW1nYOHlGTCIjfDE4,858
-buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=mSbRnsX0sh0WXHUTR6obbTvTj_lGwYIWY8cJvY59n0k,4350
-buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=IPwpANcS7nESo9cbLqwJPY7TfnidOshuh3DfF1UPK4Q,2817
+buz/kafka/infrastructure/kafka_python/kafka_python_admin_client.py,sha256=KjJciScn-WlnfF79-ZeWZWUzQKZkDiPZ_93oOwmDyiQ,8218
+buz/kafka/infrastructure/kafka_python/kafka_python_admin_test_client.py,sha256=g_xGakCMbv9ouzUyemmcrFQqUfKlnMWNnEPE4L9cxOI,2949
 buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=CMkpTkrC10wy574Nc9t-TKvWO-6XCgj8F5EkWlwP0-U,2275
 buz/kafka/infrastructure/kafka_python/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/kafka_python/translators/consumer_initial_offset_position_translator.py,sha256=hJ48_eyMcnbFL_Y5TOiMbGXrQSryuKk9CvP59MdqNOY,620
 buz/kafka/infrastructure/serializers/byte_serializer.py,sha256=T83sLdX9V5Oh1mzjRwHi_1DsTFI7KefFj7kmnz7JVy4,207
-buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py,sha256=sqSTswtGrMfh2dFP9f8PLLkwcMVYadClRGNjzkozU3g,1189
+buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py,sha256=rWIHE91COwrmo61GV9SyIE6CODOPzpYrdaKbMfPCSJc,1482
 buz/kafka/infrastructure/serializers/implementations/json_byte_serializer.py,sha256=KGkTQE7lq8VB048zCew_IlYgoWPozkmERYKg0t4DjOA,1510
 buz/kafka/infrastructure/serializers/kafka_header_serializer.py,sha256=ws9xr5lsJF6J-uVIplPym7vboo00KtXHfLJf8JjG0lo,649
 buz/locator/__init__.py,sha256=my8qfHL5htIT9RFFjzV4zGIPVW72tu4SMQbKKqBeSKo,293
@@ -221,7 +223,7 @@ buz/query/synchronous/self_process/self_process_query_bus.py,sha256=pKGJxXBWtqU4
 buz/query/synchronous/synced_async/__init__.py,sha256=TdFmIBeFIpl3Tvmh_FJpJMXJdPdfRxOstVqnPUi23mo,125
 buz/query/synchronous/synced_async/synced_async_query_bus.py,sha256=WxXHeEl1Pnh5Yiui8oMJKIOdzhQBGq1yAnAEk_gniRg,470
 buz/queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/queue/in_memory/in_memory_multiqueue_repository.py,sha256=6Oen6khk65sziqlrcyh8tlsTfctlj9tACv3h1dLgP4s,1939
+buz/queue/in_memory/in_memory_multiqueue_repository.py,sha256=2jFSeVVbliOV-6m7AhjSkTT2stgFuHmfvNfrPTyyTzs,1523
 buz/queue/in_memory/in_memory_queue_repository.py,sha256=hUPuQRmbrzQtR5gv4XYCM9aFQHX0U3Q3lt0yi0MIPYk,505
 buz/queue/multiqueue_repository.py,sha256=Wc4OE_jDB4mLyyhWKnizXskvgYm2glxvGW2ez1oa9Zs,531
 buz/queue/queue_repository.py,sha256=W3_lkosFu71XoiqRTT7An4kVjJQ3w0fZmFlaAfHeraM,385
@@ -230,7 +232,7 @@ buz/serializer/message_to_json_bytes_serializer.py,sha256=RGZJ64t4t4Pz2FCASZZCv-
 buz/wrapper/__init__.py,sha256=GnRdJFcncn-qp0hzDG9dBHLmTJSbHFVjE_yr-MdW_n4,77
 buz/wrapper/async_to_sync.py,sha256=OfK-vrVUhuN-LLLvekLdMbQYtH0ue5lfbvuasj6ovMI,698
 buz/wrapper/event_loop.py,sha256=pfBJ1g-8A2a3YgW8Gf9Fg0kkewoh3-wgTy2KIFDyfHk,266
-buz-2.13.1rc7.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
-buz-2.13.1rc7.dist-info/METADATA,sha256=P_PoCYhT33nqbij8OAWpKZ0bE7cBL6ensd1sMRfzdoQ,1620
-buz-2.13.1rc7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-buz-2.13.1rc7.dist-info/RECORD,,
+buz-2.13.1rc9.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
+buz-2.13.1rc9.dist-info/METADATA,sha256=JDL25KyxeqMBWzF9LdEzga2tqIDmQBRGqn6ToEeZfyA,1620
+buz-2.13.1rc9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+buz-2.13.1rc9.dist-info/RECORD,,