buz 2.13.1rc7-py3-none-any.whl → 2.13.1rc8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py +10 -22
- buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py +17 -12
- buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py +1 -1
- buz/kafka/infrastructure/cdc/cdc_message.py +3 -1
- buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py +9 -4
- buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py +6 -1
- buz/queue/in_memory/in_memory_multiqueue_repository.py +0 -20
- {buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/METADATA +1 -1
- {buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/RECORD +11 -11
- {buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/LICENSE +0 -0
- {buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/WHEEL +0 -0
buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py
CHANGED

@@ -1,5 +1,5 @@
-from abc import abstractmethod
 import traceback
+from abc import abstractmethod
 from asyncio import Lock, Task, create_task, gather, Semaphore, Event as AsyncIOEvent, sleep
 from datetime import timedelta, datetime
 from itertools import cycle
@@ -28,7 +28,6 @@ from buz.kafka.infrastructure.aiokafka.aiokafka_consumer import AIOKafkaConsumer
 from buz.queue.in_memory.in_memory_multiqueue_repository import InMemoryMultiqueueRepository
 from buz.queue.multiqueue_repository import MultiqueueRepository

-
 T = TypeVar("T", bound=Event)


@@ -87,17 +86,14 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
     async def run(self) -> None:
         start_time = datetime.now()
         await self.__generate_kafka_consumers()
+        self.__initial_coroutines_created_elapsed_time = datetime.now() - start_time

         if len(self.__executor_per_consumer_mapper) == 0:
             self._logger.error("There are no valid subscribers to execute, finalizing consumer")
             return

-        self.__initial_coroutines_created_elapsed_time = datetime.now() - start_time
-
         start_consumption_time = datetime.now()
-
         worker_errors = await self.__run_worker()
-
         self.__events_processed_elapsed_time = datetime.now() - start_consumption_time

         await self.__handle_graceful_stop(worker_errors)
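
This hunk moves the setup-time measurement directly after the consumers are generated, so the elapsed time is captured even when the early return for missing subscribers fires. A minimal sketch of the pattern, with sleep standing in for the consumer setup:

from datetime import datetime
from time import sleep

start_time = datetime.now()
sleep(0.05)  # stands in for the consumer-setup work
setup_elapsed = datetime.now() - start_time  # a timedelta, recorded before any early return
print(setup_elapsed.total_seconds())
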
@@ -112,9 +108,9 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         if self.__exceptions_are_thrown(worker_errors):
             consume_events_exception, polling_task_exception = worker_errors
             if consume_events_exception:
-                self._logger.
+                self._logger.exception(consume_events_exception)
             if polling_task_exception:
-                self._logger.
+                self._logger.exception(polling_task_exception)

             raise WorkerExecutionException("The worker was closed by an unexpected exception")

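
The two truncated logging calls on the removed side are completed with Logger.exception, which logs at ERROR level and, when called inside an except block, appends the active traceback. A minimal stdlib illustration:

import logging

logging.basicConfig()
logger = logging.getLogger("worker")

try:
    1 / 0
except ZeroDivisionError as error:
    # Logs the message at ERROR level together with the traceback of the
    # exception currently being handled.
    logger.exception(error)

Note that the traceback is only attached when there is an exception being handled at call time; with a stored exception object, only its message is logged.
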
@@ -227,41 +223,32 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         if len(kafka_poll_records) == 0:
             await sleep(self.__seconds_between_polls_if_there_are_no_new_tasks)

-            return
-
     async def __consume_events_task(self) -> None:
         self._logger.info("Initializing consuming task")
-
-        blocked_tasks_iterator = self.generate_blocked_consuming_tasks_iterator()
+        blocked_tasks_iterator = self.__generate_blocked_consuming_tasks_iterator()

         async for consuming_task in blocked_tasks_iterator:
             consumer = consuming_task.consumer
             kafka_poll_record = consuming_task.kafka_poll_record
-            executor = self.__executor_per_consumer_mapper[consuming_task.consumer]

+            executor = self.__executor_per_consumer_mapper[consumer]
             await executor.consume(kafka_poll_record=kafka_poll_record)
-
             await consumer.commit_poll_record(kafka_poll_record)

             self.__events_processed += 1

     # This iterator return a blocked task, that will be blocked for other process (like rebalancing), until the next task will be requested
-    async def
+    async def __generate_blocked_consuming_tasks_iterator(self) -> AsyncIterator[ConsumingTask]:
         consumer_queues_cyclic_iterator = cycle(self.__queue_per_consumer_mapper.items())
         last_consumer, _ = next(consumer_queues_cyclic_iterator)

         while not self.__should_stop.is_set():
-            all_queues_are_empty = all(
-                [queue.is_totally_empty() for queue in self.__queue_per_consumer_mapper.values()]
-            )
-
-            if all_queues_are_empty:
+            if await self.__all_queues_are_empty():
                 await sleep(self.__seconds_between_executions_if_there_are_no_tasks_in_the_queue)
                 continue

             async with self.__task_execution_mutex:
                 consumer: Optional[AIOKafkaConsumer] = None
-                kafka_poll_record: Optional[KafkaPollRecord] = None

                 while consumer != last_consumer:
                     consumer, queue = next(consumer_queues_cyclic_iterator)
@@ -272,7 +259,8 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
                         last_consumer = consumer
                         break

-
+    async def __all_queues_are_empty(self) -> bool:
+        return all([queue.is_totally_empty() for queue in self.__queue_per_consumer_mapper.values()])

     async def __on_partition_revoked(self, consumer: AIOKafkaConsumer, topics_partitions: set[TopicPartition]) -> None:
         async with self.__task_execution_mutex:
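
The renamed private iterator keeps the same shape: walk the per-consumer queues in a fixed rotation with itertools.cycle and back off when every queue is empty. A self-contained sketch of that round-robin pattern; the names and timings here are illustrative, not the package's API:

import asyncio
from itertools import cycle
from queue import Queue

async def drain_round_robin(queues: dict[str, Queue], stop: asyncio.Event) -> None:
    cyclic = cycle(queues.items())  # fixed rotation over the per-consumer queues
    while not stop.is_set():
        if all(queue.empty() for queue in queues.values()):
            await asyncio.sleep(0.1)  # nothing queued anywhere: back off, do not spin
            continue
        key, queue = next(cyclic)
        if not queue.empty():
            print(key, queue.get_nowait())

async def main() -> None:
    queues = {"orders": Queue(), "payments": Queue()}
    queues["orders"].put("event-1")
    queues["payments"].put("event-2")
    stop = asyncio.Event()
    asyncio.get_running_loop().call_later(0.5, stop.set)
    await drain_round_robin(queues, stop)

asyncio.run(main())
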
buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py
CHANGED

@@ -14,6 +14,7 @@ from buz.event.middleware.publish_middleware_chain_resolver import PublishMiddlewareChainResolver
 from buz.kafka import (
     KafkaPythonProducer,
 )
+from buz.kafka.domain.exceptions.topic_already_created_exception import KafkaTopicsAlreadyCreatedException
 from buz.kafka.domain.models.auto_create_topic_configuration import AutoCreateTopicConfiguration
 from buz.kafka.domain.models.create_kafka_topic import CreateKafkaTopic
 from buz.kafka.domain.services.kafka_admin_client import KafkaAdminClient
@@ -53,18 +54,22 @@ class BuzKafkaEventBus(EventBus):
         topic = self.__publish_strategy.get_topic(event)

         if self.__auto_create_topic_configuration is not None and self.__is_topic_created(topic) is False:
-
-
-
-
-
-
-
-
-
-
-
-
+            try:
+                self.__logger.info(f"Creating missing topic: {topic}..")
+                self.__get_kafka_admin_client().create_topics(
+                    topics=[
+                        CreateKafkaTopic(
+                            name=topic,
+                            partitions=self.__auto_create_topic_configuration.partitions,
+                            replication_factor=self.__auto_create_topic_configuration.replication_factor,
+                            configs=self.__auto_create_topic_configuration.configs,
+                        )
+                    ]
+                )
+                self.__logger.info(f"Created missing topic: {topic}")
+                self.__topics_checked[topic] = True
+            except KafkaTopicsAlreadyCreatedException:
+                pass

         headers = self.__get_event_headers(event)
         self.__producer.produce(
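
The new try/except tolerates the race in which another producer creates the same topic between the existence check and the create call, mirroring the comment in aiokafka_consumer.py below. A self-contained sketch of the pattern; the admin client here is a hypothetical stand-in, not buz's KafkaAdminClient:

class TopicAlreadyCreatedError(Exception):
    pass

class FakeAdminClient:
    """Stand-in admin client: create_topic fails if the topic already exists."""

    def __init__(self) -> None:
        self._topics: set[str] = set()

    def create_topic(self, name: str) -> None:
        if name in self._topics:
            raise TopicAlreadyCreatedError(name)
        self._topics.add(name)

def ensure_topic(admin: FakeAdminClient, name: str, checked: dict[str, bool]) -> None:
    try:
        admin.create_topic(name)
        checked[name] = True  # remember so later publishes skip the admin call
    except TopicAlreadyCreatedError:
        # Someone else created the topic first; it exists either way.
        pass

admin = FakeAdminClient()
checked: dict[str, bool] = {}
ensure_topic(admin, "user-events", checked)
ensure_topic(admin, "user-events", checked)  # second call takes the except path
print(checked)
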
buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py
CHANGED

@@ -123,7 +123,7 @@ class AIOKafkaConsumer:
         try:
             self.__logger.info(f"Creating missing topics: {non_created_topics}...")
             kafka_admin_client.create_topics(topics=topics_to_create)
-            self.__logger.info(f"Created missing topics: {non_created_topics}
+            self.__logger.info(f"Created missing topics: {non_created_topics}")
         except KafkaTopicsAlreadyCreatedException:
             # there is a possibility to have a race condition between the check and the creation
             # but it does not matters, the important part is that the topic is created
buz/kafka/infrastructure/cdc/cdc_message.py
CHANGED

@@ -3,9 +3,11 @@ from dataclasses import dataclass

 @dataclass(frozen=True)
 class CDCPayload:
+    DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
+
     payload: str  # json encoded
     event_id: str  # uuid
-    created_at: str
+    created_at: str
     event_fqn: str

     def validate(self) -> None:
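
The new DATE_TIME_FORMAT constant describes an ISO-8601-style timestamp with microseconds and a trailing Z. Note that strptime matches the Z as a literal character rather than a UTC offset, so the parsed datetime is naive. A quick demonstration:

from datetime import datetime

DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"  # as added to CDCPayload

stamp = datetime(2024, 5, 17, 9, 30, 0, 123456).strftime(DATE_TIME_FORMAT)
print(stamp)                                       # 2024-05-17T09:30:00.123456Z
print(datetime.strptime(stamp, DATE_TIME_FORMAT))  # round-trips to the same naive datetime
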
buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py
CHANGED

@@ -1,16 +1,17 @@
 from __future__ import annotations

+from datetime import datetime
 from typing import TypeVar, Type, Generic

 import orjson
 from dacite import from_dict

-from buz.kafka.infrastructure.deserializers.implementations.cdc.not_valid_cdc_message_exception import (
-    NotValidCDCMessageException,
-)
 from buz.event import Event
 from buz.kafka.infrastructure.cdc.cdc_message import CDCMessage, CDCPayload
 from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
+from buz.kafka.infrastructure.deserializers.implementations.cdc.not_valid_cdc_message_exception import (
+    NotValidCDCMessageException,
+)

 T = TypeVar("T", bound=Event)

@@ -27,12 +28,16 @@ class CDCRecordBytesToEventDeserializer(BytesToMessageDeserializer[Event], Generic[T]):
             cdc_message = self.__get_outbox_record_as_dict(decoded_string)
             return self.__event_class.restore(
                 id=cdc_message.payload.event_id,
-                created_at=cdc_message.payload.created_at,
+                created_at=self.__get_created_at_in_event_format(cdc_message.payload.created_at),
                 **orjson.loads(cdc_message.payload.payload),
             )
         except Exception as exception:
             raise NotValidCDCMessageException(decoded_string, exception) from exception

+    def __get_created_at_in_event_format(self, cdc_payload_created_at: str) -> str:
+        created_at_datetime = datetime.strptime(cdc_payload_created_at, CDCPayload.DATE_TIME_FORMAT)
+        return created_at_datetime.strftime(Event.DATE_TIME_FORMAT)
+
     def __get_outbox_record_as_dict(self, decoded_string: str) -> CDCMessage:
         decoded_record: dict = orjson.loads(decoded_string)

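
The deserializer now reparses the CDC timestamp and reformats it into the event's own format; the serializer in the next file does the inverse. A sketch of that round trip, where the value of EVENT_FORMAT is a hypothetical stand-in since this diff does not show Event.DATE_TIME_FORMAT:

from datetime import datetime

CDC_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"   # CDCPayload.DATE_TIME_FORMAT, per this diff
EVENT_FORMAT = "%Y-%m-%d %H:%M:%S.%f"  # hypothetical stand-in for Event.DATE_TIME_FORMAT

def cdc_to_event(created_at: str) -> str:
    # strptime/strftime conversion, as the new private helpers do
    return datetime.strptime(created_at, CDC_FORMAT).strftime(EVENT_FORMAT)

def event_to_cdc(created_at: str) -> str:
    return datetime.strptime(created_at, EVENT_FORMAT).strftime(CDC_FORMAT)

cdc_stamp = "2024-05-17T09:30:00.123456Z"
event_stamp = cdc_to_event(cdc_stamp)
print(event_stamp)                             # 2024-05-17 09:30:00.123456
print(event_to_cdc(event_stamp) == cdc_stamp)  # True: the conversion round-trips
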
buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py
CHANGED

@@ -1,6 +1,7 @@
 from __future__ import annotations

 from dataclasses import asdict
+from datetime import datetime

 from buz.event import Event
 from buz.kafka.infrastructure.cdc.cdc_message import CDCMessage, CDCPayload
@@ -16,13 +17,17 @@ class CDCRecordBytesToEventSerializer(ByteSerializer):
         cdc_message: CDCMessage = CDCMessage(
             payload=CDCPayload(
                 event_id=data.id,
-                created_at=data.created_at,
+                created_at=self.__adapt_created_to_cdc_format(data.created_at),
                 event_fqn=data.fqn(),
                 payload=self.__serialize_payload(data),
             )
         )
         return self.__json_serializer.serialize(asdict(cdc_message))

+    def __adapt_created_to_cdc_format(self, created_at: str) -> str:
+        created_at_datetime = datetime.strptime(created_at, Event.DATE_TIME_FORMAT)
+        return created_at_datetime.strftime(CDCPayload.DATE_TIME_FORMAT)
+
     def __serialize_payload(self, event: Event) -> str:
         # Remove id and created at, because Transactional outbox is not adding them
         payload = asdict(event)
buz/queue/in_memory/in_memory_multiqueue_repository.py
CHANGED

@@ -1,37 +1,20 @@
-from threading import Lock
 from queue import Queue, Empty
 from typing import Optional, TypeVar, cast

 from buz.queue.multiqueue_repository import MultiqueueRepository

-
 K = TypeVar("K")
 R = TypeVar("R")


-def self_mutex(method):
-    def call(self, *args, **kwargs):
-        lock: Lock = self._get_method_lock()  # type: ignore
-        with lock:
-            return method(self, *args, **kwargs)
-
-    return call
-
-
 class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):
     def __init__(self):
         self.__queues = cast(dict[K, Queue[R]], {})
-        self.__mutex = Lock()
         self.__last_key_index = 0

-    def _get_method_lock(self) -> Lock:
-        return self.__mutex
-
-    @self_mutex
     def clear(self, key: K) -> None:
         self.__queues.pop(key, None)

-    @self_mutex
     def push(self, key: K, record: R) -> None:
         if key not in self.__queues:
             self.__add_key(key)
@@ -41,7 +24,6 @@ class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):
     def __add_key(self, key: K) -> None:
         self.__queues[key] = Queue[R]()

-    @self_mutex
     def pop(self) -> Optional[R]:
         if not self.__queues:
             return None
@@ -65,10 +47,8 @@ class InMemoryMultiqueueRepository(MultiqueueRepository[K, R]):

         return None

-    @self_mutex
     def get_total_size(self) -> int:
         return sum([queue.qsize() for queue in self.__queues.values()])

-    @self_mutex
     def is_totally_empty(self) -> bool:
         return all([queue.empty() for queue in self.__queues.values()])
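
This file drops the self_mutex decorator and its per-instance threading.Lock entirely; the async consumer earlier in the diff guards shared state with its own asyncio mutex, which suggests the thread-level locking had become redundant, though the diff itself does not state the motivation. For reference, the removed pattern in isolation:

from threading import Lock

def self_mutex(method):
    """Serialize calls to the decorated method through a lock the instance provides."""
    def call(self, *args, **kwargs):
        with self._get_method_lock():
            return method(self, *args, **kwargs)
    return call

class Counter:
    def __init__(self) -> None:
        self.__mutex = Lock()
        self.value = 0

    def _get_method_lock(self) -> Lock:
        return self.__mutex

    @self_mutex
    def increment(self) -> None:
        self.value += 1

counter = Counter()
counter.increment()
print(counter.value)  # 1
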
{buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/RECORD
CHANGED

@@ -45,10 +45,10 @@ buz/event/exceptions/term_signal_interruption_exception.py,sha256=RkRRF0v_K9Hg48
 buz/event/exceptions/worker_execution_exception.py,sha256=6mgztvXOCG_9VZ_Jptkk72kZtNWQ2CPuQ3TjXEWFE14,123
 buz/event/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=
+buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=GfJ51noIkMfJ7DOQSUikphHEr6rvfvI2Np1k2mtAai4,13823
 buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=dqQDv7taAmINE9G2geMDExbcvSlntP09_rQ0JRbc4Rw,5507
 buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=yrEU51OBjvLjCfYJFJPxux1bcIhoTVMw1Jf0HJMWbb0,5449
-buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=
+buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=sB8Cj_yTxqe8M9PT-HR4TcR9Mr39AnkIj3mVObKAe4U,4595
 buz/event/infrastructure/buz_kafka/consume_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/consume_strategy/consume_strategy.py,sha256=RqlXe5W2S6rH3FTr--tcxzFJTAVLb-Dhl7m6qjgNz2M,331
 buz/event/infrastructure/buz_kafka/consume_strategy/kafka_on_fail_strategy.py,sha256=elNeyTubDuhHsLlTtDA1Nqz2hZe12PUcO9kz8upPby8,136
@@ -149,17 +149,17 @@ buz/kafka/domain/services/kafka_admin_test_client.py,sha256=91l_vFIo1yhJLQQCC_Om
 buz/kafka/domain/services/kafka_producer.py,sha256=CTiwGYwuzdJY5aeb2WFbJlyCpZ0YyhzcgKQYyogKzUM,401
 buz/kafka/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=
+buz/kafka/infrastructure/aiokafka/aiokafka_consumer.py,sha256=9bzCq18xTMCNN7IKoEygdA8xQ235qIza1rU6OnMnq9o,8725
 buz/kafka/infrastructure/aiokafka/rebalance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/rebalance/kafka_callback_rebalancer.py,sha256=3l7NkTrCt3rBktVIS73cTmCOvv6eFguoCbGMYIUfCFc,1774
 buz/kafka/infrastructure/aiokafka/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/aiokafka/translators/consumer_initial_offset_position_translator.py,sha256=WmxkQfoXeTy9mIJtGGhM0eDKeQxhcJczeVAGCbtonVI,617
-buz/kafka/infrastructure/cdc/cdc_message.py,sha256=
+buz/kafka/infrastructure/cdc/cdc_message.py,sha256=zLWUbQ2-fLsh_fei-sF8oQse2w30z25JnaJGZDq5f0E,1080
 buz/kafka/infrastructure/deserializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/deserializers/byte_deserializer.py,sha256=4fc6t-zvcFx6F5eoyEixH2uN0cM6aB0YRGwowIzz1RA,211
 buz/kafka/infrastructure/deserializers/bytes_to_message_deserializer.py,sha256=r40yq67DIElPi6ClmElbtR3VGrG2grNwgwuflXWOh20,345
 buz/kafka/infrastructure/deserializers/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=
+buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=JSXHYNdikMnFf0mSEeaWfsxzcYZphTdfR732-RrCQW0,2002
 buz/kafka/infrastructure/deserializers/implementations/cdc/not_valid_cdc_message_exception.py,sha256=hgLLwTcC-C2DuJSOWUhmQsrd1bO9I1469869IqfAPOk,414
 buz/kafka/infrastructure/deserializers/implementations/json_byte_deserializer.py,sha256=L4b164-KweiQUwyRONhTMIGnAz48UPk0btLqjGOTNdk,373
 buz/kafka/infrastructure/deserializers/implementations/json_bytes_to_message_deserializer.py,sha256=YwugXkmOudMNtkVfCC4BFe3pFVpbM8rAL9bT88bZMRk,756
@@ -173,7 +173,7 @@ buz/kafka/infrastructure/kafka_python/kafka_python_producer.py,sha256=CMkpTkrC10
 buz/kafka/infrastructure/kafka_python/translators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/kafka_python/translators/consumer_initial_offset_position_translator.py,sha256=hJ48_eyMcnbFL_Y5TOiMbGXrQSryuKk9CvP59MdqNOY,620
 buz/kafka/infrastructure/serializers/byte_serializer.py,sha256=T83sLdX9V5Oh1mzjRwHi_1DsTFI7KefFj7kmnz7JVy4,207
-buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py,sha256=
+buz/kafka/infrastructure/serializers/implementations/cdc_record_bytes_to_event_serializer.py,sha256=rWIHE91COwrmo61GV9SyIE6CODOPzpYrdaKbMfPCSJc,1482
 buz/kafka/infrastructure/serializers/implementations/json_byte_serializer.py,sha256=KGkTQE7lq8VB048zCew_IlYgoWPozkmERYKg0t4DjOA,1510
 buz/kafka/infrastructure/serializers/kafka_header_serializer.py,sha256=ws9xr5lsJF6J-uVIplPym7vboo00KtXHfLJf8JjG0lo,649
 buz/locator/__init__.py,sha256=my8qfHL5htIT9RFFjzV4zGIPVW72tu4SMQbKKqBeSKo,293
@@ -221,7 +221,7 @@ buz/query/synchronous/self_process/self_process_query_bus.py,sha256=pKGJxXBWtqU4
 buz/query/synchronous/synced_async/__init__.py,sha256=TdFmIBeFIpl3Tvmh_FJpJMXJdPdfRxOstVqnPUi23mo,125
 buz/query/synchronous/synced_async/synced_async_query_bus.py,sha256=WxXHeEl1Pnh5Yiui8oMJKIOdzhQBGq1yAnAEk_gniRg,470
 buz/queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/queue/in_memory/in_memory_multiqueue_repository.py,sha256=
+buz/queue/in_memory/in_memory_multiqueue_repository.py,sha256=2jFSeVVbliOV-6m7AhjSkTT2stgFuHmfvNfrPTyyTzs,1523
 buz/queue/in_memory/in_memory_queue_repository.py,sha256=hUPuQRmbrzQtR5gv4XYCM9aFQHX0U3Q3lt0yi0MIPYk,505
 buz/queue/multiqueue_repository.py,sha256=Wc4OE_jDB4mLyyhWKnizXskvgYm2glxvGW2ez1oa9Zs,531
 buz/queue/queue_repository.py,sha256=W3_lkosFu71XoiqRTT7An4kVjJQ3w0fZmFlaAfHeraM,385
@@ -230,7 +230,7 @@ buz/serializer/message_to_json_bytes_serializer.py,sha256=RGZJ64t4t4Pz2FCASZZCv-
 buz/wrapper/__init__.py,sha256=GnRdJFcncn-qp0hzDG9dBHLmTJSbHFVjE_yr-MdW_n4,77
 buz/wrapper/async_to_sync.py,sha256=OfK-vrVUhuN-LLLvekLdMbQYtH0ue5lfbvuasj6ovMI,698
 buz/wrapper/event_loop.py,sha256=pfBJ1g-8A2a3YgW8Gf9Fg0kkewoh3-wgTy2KIFDyfHk,266
-buz-2.13.
-buz-2.13.
-buz-2.13.
-buz-2.13.
+buz-2.13.1rc8.dist-info/LICENSE,sha256=Jytu2S-2SPEgsB0y6BF-_LUxIWY7402fl0JSh36TLZE,1062
+buz-2.13.1rc8.dist-info/METADATA,sha256=4ptRnjIHu5NspLmq0nUXRlSCplcfjYT5CgrdjHpTp9o,1620
+buz-2.13.1rc8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+buz-2.13.1rc8.dist-info/RECORD,,
{buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/LICENSE
File without changes

{buz-2.13.1rc7.dist-info → buz-2.13.1rc8.dist-info}/WHEEL
File without changes