buz 2.17.0rc7__py3-none-any.whl → 2.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (19)
  1. buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py +3 -0
  2. buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py +19 -2
  3. buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py +19 -2
  4. buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py +27 -5
  5. buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py +28 -5
  6. buz/event/infrastructure/buz_kafka/models/cdc_process_context.py +9 -0
  7. buz/event/infrastructure/kombu/kombu_consumer.py +6 -1
  8. buz/event/infrastructure/models/execution_context.py +2 -0
  9. buz/event/infrastructure/models/process_context.py +6 -0
  10. buz/event/sync/sync_event_bus.py +3 -1
  11. buz/kafka/infrastructure/cdc/cdc_payload.py +3 -0
  12. buz/kafka/infrastructure/deserializers/implementations/cdc/{not_valid_cdc_message_exception.py → cannot_decode_cdc_message_exception.py} +1 -1
  13. buz/kafka/infrastructure/deserializers/implementations/cdc/cannot_restore_event_from_cdc_payload_exception.py +11 -0
  14. buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_cdc_payload_deserializer.py +35 -0
  15. buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py +10 -17
  16. {buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/METADATA +1 -1
  17. {buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/RECORD +19 -15
  18. {buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/LICENSE +0 -0
  19. {buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/WHEEL +0 -0

buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py

@@ -66,6 +66,7 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         heartbeat_interval_ms: Optional[int] = None,
         wait_for_connection_to_cluster_ms: Optional[int] = None,
         worker_instance_id: Optional[str] = None,
+        milliseconds_between_retries: int = 5000,
     ):
         self.__connection_config = connection_config
         self.__consume_strategy = consume_strategy
@@ -106,6 +107,7 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
         )
         self.__wait_for_connection_to_cluster_ms: Optional[int] = wait_for_connection_to_cluster_ms
         self.__worker_instance_id: Optional[str] = worker_instance_id
+        self.__milliseconds_between_retries: int = milliseconds_between_retries
         self.__polling_tasks_semaphore = Semaphore(self.__max_number_of_concurrent_polling_tasks)
         self.__consumer_and_partition_mutex: dict[str, Lock] = defaultdict(Lock)
         self.__is_worked_initialized = False
@@ -167,6 +169,7 @@ class BaseBuzAIOKafkaAsyncConsumer(AsyncConsumer):
             f" - Seconds between polls if there are no new tasks: {self.__seconds_between_polls_if_there_are_no_new_tasks}\n"
             f" - Max number of concurrent polling tasks: {self.__max_number_of_concurrent_polling_tasks}\n"
             f" - Wait for connection to cluster ms: {self.__wait_for_connection_to_cluster_ms}\n"
+            f" - Milliseconds between retries: {self.__milliseconds_between_retries}ms ({self.__milliseconds_between_retries / 1000.0}s)\n"
             f" - Health check port: {self.__health_check_port}\n"
             f" - Number of subscribers: {len(self.__subscribers)}",
         )
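
Note: this new knob replaces the executors' old seconds_between_retries float (default 5), as the executor hunks further down show; the value is divided by 1000 before sleeping. A minimal configuration sketch, in which every argument except the new one is elided and assumed unchanged from 2.17:

    consumer = BuzAIOKafkaAsyncConsumer(
        connection_config=connection_config,
        # ... remaining 2.17-era arguments unchanged ...
        milliseconds_between_retries=2500,  # the executor sleeps 2.5 s between retry attempts
    )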

buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py

@@ -2,6 +2,12 @@ from logging import Logger
 from typing import Optional, Sequence, Type, TypeVar

 from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_cdc_payload_deserializer import (
+    CDCRecordBytesToCDCPayloadDeserializer,
+)
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_event_deserializer import (
+    CDCRecordBytesToEventDeserializer,
+)

 from buz.event import Event
 from buz.event.async_subscriber import AsyncSubscriber
@@ -57,6 +63,7 @@ class BuzAIOKafkaAsyncConsumer(BaseBuzAIOKafkaAsyncConsumer):
         heartbeat_interval_ms: Optional[int] = None,
         health_check_port: Optional[int] = None,
         wait_for_connection_to_cluster_ms: Optional[int] = None,
+        milliseconds_between_retries: int = 5000,
     ):
         super().__init__(
             connection_config=connection_config,
@@ -79,12 +86,14 @@ class BuzAIOKafkaAsyncConsumer(BaseBuzAIOKafkaAsyncConsumer):
             health_check_port=health_check_port,
             wait_for_connection_to_cluster_ms=wait_for_connection_to_cluster_ms,
             worker_instance_id=worker_instance_id,
+            milliseconds_between_retries=milliseconds_between_retries,
         )
         self.__on_fail_strategy = on_fail_strategy
         self.__consume_middlewares = consume_middlewares
         self.__consume_retrier = consume_retrier
         self.__reject_callback = reject_callback
         self._deserializers_per_subscriber = deserializers_per_subscriber
+        self.__milliseconds_between_retries = milliseconds_between_retries

     async def _create_kafka_consumer_executor(
         self,
@@ -102,13 +111,21 @@ class BuzAIOKafkaAsyncConsumer(BaseBuzAIOKafkaAsyncConsumer):
             event_class=subscriber.handles()  # type: ignore
         )

+        cdc_payload_deserializer = (
+            CDCRecordBytesToCDCPayloadDeserializer()
+            if isinstance(byte_deserializer, CDCRecordBytesToEventDeserializer)
+            else None
+        )
+
        return KafkaEventAsyncSubscriberExecutor(
+            subscriber=subscriber,
            logger=self._logger,
+            consume_middlewares=self.__consume_middlewares,
+            milliseconds_between_retries=self.__milliseconds_between_retries,
            byte_deserializer=byte_deserializer,
            header_deserializer=KafkaHeaderSerializer(),
            on_fail_strategy=self.__on_fail_strategy,
-            subscriber=subscriber,
-            consume_middlewares=self.__consume_middlewares,
            consume_retrier=self.__consume_retrier,
            reject_callback=self.__reject_callback,
+            cdc_payload_deserializer=cdc_payload_deserializer,
        )
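
Note: a CDC payload deserializer is attached only when the subscriber's byte deserializer is a CDCRecordBytesToEventDeserializer, so plain subscribers skip the extra envelope decoding entirely. A hypothetical per-subscriber wiring (the subscriber and event names are invented, and the JSON deserializer's constructor is assumed to mirror the CDC one):

    deserializers_per_subscriber = {
        # CDC subscriber: the executor also decodes the CDC envelope and
        # attaches a CDCProcessContext to the ExecutionContext.
        user_projection_subscriber: CDCRecordBytesToEventDeserializer(event_class=UserRegistered),
        # Plain subscriber: cdc_payload_deserializer stays None and the
        # ExecutionContext carries an empty ProcessContext.
        order_report_subscriber: JSONBytesToMessageDeserializer(event_class=OrderPlaced),
    }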

buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py

@@ -2,6 +2,12 @@ from logging import Logger
 from typing import Optional, Sequence, Type, TypeVar

 from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_cdc_payload_deserializer import (
+    CDCRecordBytesToCDCPayloadDeserializer,
+)
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_event_deserializer import (
+    CDCRecordBytesToEventDeserializer,
+)

 from buz.event import Event, Subscriber
 from buz.event.infrastructure.buz_kafka.base_buz_aiokafka_async_consumer import (
@@ -58,6 +64,7 @@ class BuzAIOKafkaMultiThreadedConsumer(BaseBuzAIOKafkaAsyncConsumer):
         heartbeat_interval_ms: Optional[int] = None,
         health_check_port: Optional[int] = None,
         wait_for_connection_to_cluster_ms: Optional[int] = None,
+        milliseconds_between_retries: int = 5000,
     ):
         super().__init__(
             connection_config=connection_config,
@@ -80,12 +87,14 @@ class BuzAIOKafkaMultiThreadedConsumer(BaseBuzAIOKafkaAsyncConsumer):
             heartbeat_interval_ms=heartbeat_interval_ms,
             wait_for_connection_to_cluster_ms=wait_for_connection_to_cluster_ms,
             worker_instance_id=worker_instance_id,
+            milliseconds_between_retries=milliseconds_between_retries,
         )
         self.__on_fail_strategy = on_fail_strategy
         self.__consume_middlewares = consume_middlewares
         self.__consume_retrier = consume_retrier
         self.__reject_callback = reject_callback
         self._deserializers_per_subscriber = deserializers_per_subscriber
+        self.__milliseconds_between_retries = milliseconds_between_retries

     async def _create_kafka_consumer_executor(
         self,
@@ -103,13 +112,21 @@ class BuzAIOKafkaMultiThreadedConsumer(BaseBuzAIOKafkaAsyncConsumer):
             event_class=subscriber.handles()  # type: ignore
         )

+        cdc_payload_deserializer = (
+            CDCRecordBytesToCDCPayloadDeserializer()
+            if isinstance(byte_deserializer, CDCRecordBytesToEventDeserializer)
+            else None
+        )
+
        return KafkaEventSyncSubscriberExecutor(
+            subscriber=subscriber,
            logger=self._logger,
+            consume_middlewares=self.__consume_middlewares,
+            milliseconds_between_retries=self.__milliseconds_between_retries,
            byte_deserializer=byte_deserializer,
            header_deserializer=KafkaHeaderSerializer(),
            on_fail_strategy=self.__on_fail_strategy,
-            subscriber=subscriber,
-            consume_middlewares=self.__consume_middlewares,
            consume_retrier=self.__consume_retrier,
            reject_callback=self.__reject_callback,
+            cdc_payload_deserializer=cdc_payload_deserializer,
        )

buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py

@@ -2,12 +2,16 @@ from asyncio import sleep
 from logging import Logger
 from typing import Optional, Sequence, cast

+from buz.event.infrastructure.models.process_context import ProcessContext
+
 from buz.event import Event
 from buz.event.async_subscriber import AsyncSubscriber
 from buz.event.infrastructure.buz_kafka.consume_strategy.kafka_on_fail_strategy import KafkaOnFailStrategy
 from buz.event.infrastructure.buz_kafka.exceptions.max_consumer_retry_exception import MaxConsumerRetryException
 from buz.event.infrastructure.buz_kafka.exceptions.retry_exception import ConsumerRetryException
 from buz.event.infrastructure.buz_kafka.kafka_event_subscriber_executor import KafkaEventSubscriberExecutor
+from buz.event.infrastructure.buz_kafka.models.kafka_delivery_context import KafkaDeliveryContext
+from buz.event.infrastructure.buz_kafka.models.cdc_process_context import CDCProcessContext
 from buz.event.infrastructure.models.execution_context import ExecutionContext
 from buz.event.middleware.async_consume_middleware import AsyncConsumeMiddleware
 from buz.event.middleware.async_consume_middleware_chain_resolver import AsyncConsumeMiddlewareChainResolver
@@ -16,8 +20,13 @@ from buz.event.strategies.retry.reject_callback import RejectCallback
 from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException
 from buz.kafka.domain.models.kafka_consumer_record import KafkaConsumerRecord
 from buz.kafka.domain.models.kafka_poll_record import KafkaPollRecord
-from buz.event.infrastructure.buz_kafka.models.kafka_delivery_context import KafkaDeliveryContext
 from buz.kafka.infrastructure.deserializers.byte_deserializer import ByteDeserializer
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cannot_decode_cdc_message_exception import (
+    CannotDecodeCDCMessageException,
+)
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_cdc_payload_deserializer import (
+    CDCRecordBytesToCDCPayloadDeserializer,
+)
 from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer


@@ -28,22 +37,25 @@ class KafkaEventAsyncSubscriberExecutor(KafkaEventSubscriberExecutor):
         subscriber: AsyncSubscriber,
         logger: Logger,
         consume_middlewares: Optional[Sequence[AsyncConsumeMiddleware]] = None,
-        seconds_between_retries: float = 5,
+        milliseconds_between_retries: int = 5000,
         byte_deserializer: ByteDeserializer[Event],
         header_deserializer: KafkaHeaderSerializer,
         on_fail_strategy: KafkaOnFailStrategy,
         consume_retrier: Optional[ConsumeRetrier] = None,
         reject_callback: Optional[RejectCallback] = None,
+        cdc_payload_deserializer: Optional[CDCRecordBytesToCDCPayloadDeserializer] = None,
     ):
         self.__subscriber = subscriber
         self.__logger = logger
         self.__consume_middleware_chain_resolver = AsyncConsumeMiddlewareChainResolver(consume_middlewares or [])
-        self.__seconds_between_retires = seconds_between_retries
+        self.__milliseconds_between_retries = milliseconds_between_retries
+        self.__seconds_between_retries = milliseconds_between_retries / 1000.0
         self.__on_fail_strategy = on_fail_strategy
         self.__consume_retrier = consume_retrier
         self.__reject_callback = reject_callback
         self.__byte_deserializer = byte_deserializer
         self.__header_deserializer = header_deserializer
+        self.__cdc_payload_deserializer = cdc_payload_deserializer

     async def consume(
         self,
@@ -57,6 +69,15 @@ class KafkaEventAsyncSubscriberExecutor(KafkaEventSubscriberExecutor):
             kafka_record_value = cast(bytes, kafka_poll_record.value)

             deserialized_value = self.__byte_deserializer.deserialize(kafka_record_value)
+            process_context = ProcessContext()
+            if self.__cdc_payload_deserializer is not None:
+                try:
+                    deserialized_cdc_payload = self.__cdc_payload_deserializer.deserialize(kafka_record_value)
+                    process_context = CDCProcessContext(
+                        captured_at_ms=deserialized_cdc_payload.captured_at_ms,
+                    )
+                except CannotDecodeCDCMessageException as e:
+                    self.__logger.error(e.message)

             self.__logger.info(
                 f"consuming the event '{deserialized_value.id}' by the subscriber '{self.__subscriber.fqn()}', "
@@ -75,7 +96,8 @@ class KafkaEventAsyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                         consumer_group=self.__subscriber.fqn(),
                         partition=kafka_poll_record.partition,
                         timestamp=kafka_poll_record.timestamp,
-                    )
+                    ),
+                    process_context=process_context,
                 ),
             )
         except NotValidKafkaMessageException:
@@ -137,7 +159,7 @@ class KafkaEventAsyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                     ),
                     exc_info=exception,
                 )
-                await sleep(self.__seconds_between_retires)
+                await sleep(self.__seconds_between_retries)
                 continue

             self.__logger.exception(exception)
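
Note: the net effect on what middlewares and subscribers receive: for CDC subscribers the executor decodes the raw record a second time as a CDC envelope; on success the ExecutionContext carries a CDCProcessContext, and on CannotDecodeCDCMessageException it logs the error and falls back to an empty ProcessContext rather than failing the message. Roughly (the timestamp is illustrative):

    # CDC envelope decoded successfully:
    execution_context.process_context  # -> CDCProcessContext(captured_at_ms=1712318400000)

    # Non-CDC subscriber, or an envelope that failed to decode (error logged):
    execution_context.process_context  # -> ProcessContext()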

buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py

@@ -1,13 +1,17 @@
+import time
 from asyncio import get_running_loop
 from logging import Logger
-import time
 from typing import Optional, Sequence, cast
+
+from buz.event.infrastructure.models.process_context import ProcessContext
+
 from buz.event import Event
 from buz.event.infrastructure.buz_kafka.consume_strategy.kafka_on_fail_strategy import KafkaOnFailStrategy
 from buz.event.infrastructure.buz_kafka.exceptions.max_consumer_retry_exception import MaxConsumerRetryException
 from buz.event.infrastructure.buz_kafka.exceptions.retry_exception import ConsumerRetryException
 from buz.event.infrastructure.buz_kafka.kafka_event_subscriber_executor import KafkaEventSubscriberExecutor
 from buz.event.infrastructure.buz_kafka.models.kafka_delivery_context import KafkaDeliveryContext
+from buz.event.infrastructure.buz_kafka.models.cdc_process_context import CDCProcessContext
 from buz.event.infrastructure.models.execution_context import ExecutionContext
 from buz.event.middleware.consume_middleware import ConsumeMiddleware
 from buz.event.middleware.consume_middleware_chain_resolver import ConsumeMiddlewareChainResolver
@@ -18,6 +22,12 @@ from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException
 from buz.kafka.domain.models.kafka_consumer_record import KafkaConsumerRecord
 from buz.kafka.domain.models.kafka_poll_record import KafkaPollRecord
 from buz.kafka.infrastructure.deserializers.byte_deserializer import ByteDeserializer
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cannot_decode_cdc_message_exception import (
+    CannotDecodeCDCMessageException,
+)
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_cdc_payload_deserializer import (
+    CDCRecordBytesToCDCPayloadDeserializer,
+)
 from buz.kafka.infrastructure.serializers.kafka_header_serializer import KafkaHeaderSerializer


@@ -28,22 +38,25 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
         subscriber: Subscriber,
         logger: Logger,
         consume_middlewares: Optional[Sequence[ConsumeMiddleware]] = None,
-        seconds_between_retries: float = 5,
+        milliseconds_between_retries: int = 5000,
         byte_deserializer: ByteDeserializer[Event],
         header_deserializer: KafkaHeaderSerializer,
         on_fail_strategy: KafkaOnFailStrategy,
         consume_retrier: Optional[ConsumeRetrier] = None,
         reject_callback: Optional[RejectCallback] = None,
+        cdc_payload_deserializer: Optional[CDCRecordBytesToCDCPayloadDeserializer] = None,
     ):
         self.__subscriber = subscriber
         self.__logger = logger
         self.__consume_middleware_chain_resolver = ConsumeMiddlewareChainResolver(consume_middlewares or [])
-        self.__seconds_between_retires = seconds_between_retries
+        self.__milliseconds_between_retries = milliseconds_between_retries
+        self.__seconds_between_retries = milliseconds_between_retries / 1000.0
         self.__on_fail_strategy = on_fail_strategy
         self.__consume_retrier = consume_retrier
         self.__reject_callback = reject_callback
         self.__byte_deserializer = byte_deserializer
         self.__header_deserializer = header_deserializer
+        self.__cdc_payload_deserializer = cdc_payload_deserializer

     async def consume(
         self,
@@ -57,6 +70,15 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
             kafka_record_value = cast(bytes, kafka_poll_record.value)

             deserialized_value = self.__byte_deserializer.deserialize(kafka_record_value)
+            process_context = ProcessContext()
+            if self.__cdc_payload_deserializer is not None:
+                try:
+                    deserialized_cdc_payload = self.__cdc_payload_deserializer.deserialize(kafka_record_value)
+                    process_context = CDCProcessContext(
+                        captured_at_ms=deserialized_cdc_payload.captured_at_ms,
+                    )
+                except CannotDecodeCDCMessageException as e:
+                    self.__logger.error(e.message)

             self.__logger.info(
                 f"consuming the event '{deserialized_value.id}' by the subscriber '{self.__subscriber.fqn()}', "
@@ -77,7 +99,8 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                         consumer_group=self.__subscriber.fqn(),
                         partition=kafka_poll_record.partition,
                         timestamp=kafka_poll_record.timestamp,
-                    )
+                    ),
+                    process_context=process_context,
                 ),
             ),
         )
@@ -139,7 +162,7 @@ class KafkaEventSyncSubscriberExecutor(KafkaEventSubscriberExecutor):
                     ),
                     exc_info=exception,
                 )
-                time.sleep(self.__seconds_between_retires)
+                time.sleep(self.__seconds_between_retries)
                 continue

             self.__logger.exception(exception)

buz/event/infrastructure/buz_kafka/models/cdc_process_context.py (new file)

@@ -0,0 +1,9 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from buz.event.infrastructure.models.process_context import ProcessContext
+
+
+@dataclass(frozen=True)
+class CDCProcessContext(ProcessContext):
+    captured_at_ms: Optional[int] = None
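
Note: captured_at_ms makes capture-to-consume latency observable downstream. A sketch of a helper that could run inside a consume middleware; the function itself is illustrative and not part of buz:

    import time

    from buz.event.infrastructure.buz_kafka.models.cdc_process_context import CDCProcessContext
    from buz.event.infrastructure.models.execution_context import ExecutionContext


    def log_cdc_lag(execution_context: ExecutionContext) -> None:
        # Only CDC-sourced events carry a capture timestamp.
        process_context = execution_context.process_context
        if isinstance(process_context, CDCProcessContext) and process_context.captured_at_ms is not None:
            lag_ms = int(time.time() * 1000) - process_context.captured_at_ms
            print(f"capture-to-consume lag: {lag_ms} ms")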

buz/event/infrastructure/kombu/kombu_consumer.py

@@ -2,6 +2,8 @@ import asyncio
 from logging import Logger
 from typing import Optional, Callable, cast

+from buz.event.infrastructure.models.process_context import ProcessContext
+
 from buz.event.infrastructure.kombu.models.kombu_delivery_context import KombuDeliveryContext
 from buz.event.infrastructure.models.execution_context import ExecutionContext
 from kombu import Connection, Queue, Consumer as MessageConsumer, Message
@@ -112,7 +114,10 @@ class KombuConsumer(ConsumerMixin, Consumer):
         # The problem here is that the chain resolver works with syncsubscribers, an asyncsubscriber would require of a async function,
         # but we are using run-until-complete to run the async function, so we are not really using the async function, we are just running it as a sync function, so we can cast the asyncsubscriber to a subscriber
         subscriber = cast(Subscriber, meta_subscriber)
-        execution_context = ExecutionContext(delivery_context=KombuDeliveryContext())
+        execution_context = ExecutionContext(
+            delivery_context=KombuDeliveryContext(),
+            process_context=ProcessContext(),
+        )
         self.__consume_middleware_chain_resolver.resolve(
             event=event,
             subscriber=subscriber,

buz/event/infrastructure/models/execution_context.py

@@ -1,8 +1,10 @@
 from dataclasses import dataclass

 from buz.event.infrastructure.models.delivery_context import DeliveryContext
+from buz.event.infrastructure.models.process_context import ProcessContext


 @dataclass(frozen=True)
 class ExecutionContext:
     delivery_context: DeliveryContext
+    process_context: ProcessContext
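
Note: process_context is a second required field with no default, so code outside the package that constructs ExecutionContext directly must pass one after upgrading. A sketch, using the Kombu delivery context from the hunk above:

    execution_context = ExecutionContext(
        delivery_context=KombuDeliveryContext(),
        process_context=ProcessContext(),  # new required argument in 2.19.0
    )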

buz/event/infrastructure/models/process_context.py (new file)

@@ -0,0 +1,6 @@
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True)
+class ProcessContext:
+    pass
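
Note: ProcessContext is deliberately an empty frozen dataclass acting as a base type; CDCProcessContext is the only subclass shipped in this release, but application code could define its own. A hypothetical example, not part of buz:

    from dataclasses import dataclass
    from typing import Optional

    from buz.event.infrastructure.models.process_context import ProcessContext


    @dataclass(frozen=True)
    class TracingProcessContext(ProcessContext):  # hypothetical subclass
        trace_id: Optional[str] = None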

buz/event/sync/sync_event_bus.py

@@ -1,5 +1,7 @@
 from typing import Optional, Iterable

+from buz.event.infrastructure.models.process_context import ProcessContext
+
 from buz.event import Event, EventBus, Subscriber
 from buz.event.infrastructure.models.execution_context import ExecutionContext
 from buz.event.middleware import (
@@ -28,7 +30,7 @@ class SyncEventBus(EventBus):

     def __perform_publish(self, event: Event) -> None:
         subscribers = self.__locator.get(event)
-        execution_context = ExecutionContext(delivery_context=SyncDeliveryContext())
+        execution_context = ExecutionContext(delivery_context=SyncDeliveryContext(), process_context=ProcessContext())
         for subscriber in subscribers:
             self.__consume_middleware_chain_resolver.resolve(
                 event=event,

buz/kafka/infrastructure/cdc/cdc_payload.py

@@ -11,6 +11,7 @@ class CDCPayload:
     created_at: str
     event_fqn: str
     metadata: Optional[str] = None  # json encoded
+    captured_at_ms: Optional[int] = None

     def validate(self) -> None:
         if not isinstance(self.payload, str):
@@ -23,6 +24,8 @@ class CDCPayload:
             raise ValueError("The event_fqn value is not a valid value")
         if self.metadata is not None and not isinstance(self.metadata, str):
             raise ValueError("The metadata value is not a valid value")
+        if self.captured_at_ms is not None and not isinstance(self.captured_at_ms, int):
+            raise ValueError("The captured_at_ms value is not a valid value")

     def __post_init__(self) -> None:
         self.validate()
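
Note: captured_at_ms is optional, so CDC rows produced before the column existed still deserialize, while a wrongly typed value fails fast in __post_init__. A construction sketch; all field values are invented, and created_at is assumed to match the class's DATE_TIME_FORMAT, which is defined outside this hunk:

    cdc_payload = CDCPayload(
        event_id="0c6a...",                # invented id
        payload='{"user_id": "42"}',       # json-encoded event attributes
        created_at="2024-04-05 12:00:00",  # assumed to match CDCPayload.DATE_TIME_FORMAT
        event_fqn="acme.user_registered",
        metadata=None,
        captured_at_ms=1712318400000,      # None is also accepted
    )
    # captured_at_ms="1712318400000" (a string) would raise ValueError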

buz/kafka/infrastructure/deserializers/implementations/cdc/{not_valid_cdc_message_exception.py → cannot_decode_cdc_message_exception.py}

@@ -1,7 +1,7 @@
 from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException


-class NotValidCDCMessageException(NotValidKafkaMessageException):
+class CannotDecodeCDCMessageException(NotValidKafkaMessageException):
     def __init__(self, message: str, exception: Exception) -> None:
         self.exception = exception
         self.message = message

buz/kafka/infrastructure/deserializers/implementations/cdc/cannot_restore_event_from_cdc_payload_exception.py (new file)

@@ -0,0 +1,11 @@
+from buz.kafka.domain.exceptions.not_valid_kafka_message_exception import NotValidKafkaMessageException
+from buz.kafka.infrastructure.cdc.cdc_payload import CDCPayload
+
+
+class CannotRestoreEventFromCDCPayloadException(NotValidKafkaMessageException):
+    def __init__(self, cdc_payload: CDCPayload, exception: Exception) -> None:
+        self.exception = exception
+        self.cdc_payload = cdc_payload
+        super().__init__(
+            f'Couldn\'t build a message from CDC payload "{self.cdc_payload}". Raised from "{str(exception)}"'
+        )

buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_cdc_payload_deserializer.py (new file)

@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+import orjson
+from cachetools import LRUCache
+from dacite import from_dict
+
+from buz.kafka.infrastructure.cdc.cdc_message import CDCPayload
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cannot_decode_cdc_message_exception import (
+    CannotDecodeCDCMessageException,
+)
+
+
+class CDCRecordBytesToCDCPayloadDeserializer:
+    __STRING_ENCODING = "utf-8"
+    __cached_deserialization: LRUCache[str, CDCPayload] = LRUCache(maxsize=128)
+
+    def deserialize(self, data: bytes) -> CDCPayload:
+        decoded_string = data.decode(self.__STRING_ENCODING)
+        if decoded_string not in CDCRecordBytesToCDCPayloadDeserializer.__cached_deserialization:
+            try:
+                CDCRecordBytesToCDCPayloadDeserializer.__cached_deserialization[
+                    decoded_string
+                ] = self.__get_cdc_payload_from_string(decoded_string)
+            except Exception as exception:
+                raise CannotDecodeCDCMessageException(decoded_string, exception) from exception
+
+        return CDCRecordBytesToCDCPayloadDeserializer.__cached_deserialization[decoded_string]
+
+    def __get_cdc_payload_from_string(self, decoded_string: str) -> CDCPayload:
+        decoded_record: dict = orjson.loads(decoded_string)
+        payload = decoded_record.get("payload")
+        if not isinstance(payload, dict):
+            raise ValueError("The provided payload value is not valid")
+
+        return from_dict(CDCPayload, payload)
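
Note: the deserializer memoizes by the decoded record string in a class-level LRUCache (shared across all instances, capped at 128 entries), so the executor's second decode of the same record bytes — once for the event, once for the process context — costs one orjson parse, not two. A usage sketch with an invented record in the shape the parser expects, a top-level "payload" object whose keys mirror CDCPayload's fields:

    deserializer = CDCRecordBytesToCDCPayloadDeserializer()
    record_bytes = (
        b'{"payload": {"event_id": "0c6a...", "payload": "{\\"user_id\\": \\"42\\"}",'
        b' "created_at": "2024-04-05 12:00:00", "event_fqn": "acme.user_registered",'
        b' "captured_at_ms": 1712318400000}}'
    )
    cdc_payload = deserializer.deserialize(record_bytes)  # parsed via orjson + dacite, then cached
    cdc_payload = deserializer.deserialize(record_bytes)  # served from the LRU cache

Any parse failure surfaces as CannotDecodeCDCMessageException, carrying the decoded string and the original exception.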

buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py

@@ -4,13 +4,15 @@ from datetime import datetime
 from typing import Optional, TypeVar, Type, Generic

 import orjson
-from dacite import from_dict

 from buz.event import Event
 from buz.kafka.infrastructure.cdc.cdc_message import CDCPayload
 from buz.kafka.infrastructure.deserializers.bytes_to_message_deserializer import BytesToMessageDeserializer
-from buz.kafka.infrastructure.deserializers.implementations.cdc.not_valid_cdc_message_exception import (
-    NotValidCDCMessageException,
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cannot_restore_event_from_cdc_payload_exception import (
+    CannotRestoreEventFromCDCPayloadException,
+)
+from buz.kafka.infrastructure.deserializers.implementations.cdc.cdc_record_bytes_to_cdc_payload_deserializer import (
+    CDCRecordBytesToCDCPayloadDeserializer,
 )

 T = TypeVar("T", bound=Event)
@@ -21,34 +23,25 @@ class CDCRecordBytesToEventDeserializer(BytesToMessageDeserializer[Event], Generic[T]):

     def __init__(self, event_class: Type[T]) -> None:
         self.__event_class = event_class
+        self.__cdc_record_bytes_to_cdc_payload_deserializer = CDCRecordBytesToCDCPayloadDeserializer()

     def deserialize(self, data: bytes) -> T:
-        decoded_string = data.decode(self.__STRING_ENCODING)
+        cdc_payload = self.__cdc_record_bytes_to_cdc_payload_deserializer.deserialize(data)
         try:
-            cdc_payload = self.__get_outbox_record_as_dict(decoded_string)
+            payload_dict = orjson.loads(cdc_payload.payload)
             return self.__event_class.restore(
                 id=cdc_payload.event_id,
                 created_at=self.__get_created_at_in_event_format(cdc_payload.created_at),
                 metadata=self.__deserialize_metadata(cdc_payload.metadata),
-                **orjson.loads(cdc_payload.payload),
+                **payload_dict,
             )
         except Exception as exception:
-            raise NotValidCDCMessageException(decoded_string, exception) from exception
+            raise CannotRestoreEventFromCDCPayloadException(cdc_payload, exception) from exception

     def __get_created_at_in_event_format(self, cdc_payload_created_at: str) -> str:
         created_at_datetime = datetime.strptime(cdc_payload_created_at, CDCPayload.DATE_TIME_FORMAT)
         return created_at_datetime.strftime(Event.DATE_TIME_FORMAT)

-    def __get_outbox_record_as_dict(self, decoded_string: str) -> CDCPayload:
-        decoded_record: dict = orjson.loads(decoded_string)
-
-        payload = decoded_record.get("payload")
-
-        if not isinstance(payload, dict):
-            raise ValueError("The provided payload value is not valid")
-
-        return from_dict(CDCPayload, payload)
-
     def __deserialize_metadata(self, metadata: Optional[str]) -> dict:
         if metadata is None:
             return {}
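
Note: where 2.17 raised a single NotValidCDCMessageException, callers can now distinguish an unreadable envelope from a readable envelope whose event cannot be rebuilt. Both exceptions still subclass NotValidKafkaMessageException, so existing broad handlers keep working. A sketch (UserRegistered and raw_bytes are invented):

    deserializer = CDCRecordBytesToEventDeserializer(event_class=UserRegistered)
    try:
        event = deserializer.deserialize(raw_bytes)
    except CannotDecodeCDCMessageException:
        ...  # the bytes were not a valid CDC envelope
    except CannotRestoreEventFromCDCPayloadException as error:
        ...  # envelope decoded; error.cdc_payload holds the payload that failed to restore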

{buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: buz
-Version: 2.17.0rc7
+Version: 2.19.0
 Summary: Buz is a set of light, simple and extensible implementations of event, command and query buses.
 License: MIT
 Author: Luis Pintado Lozano

{buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/RECORD

@@ -47,9 +47,9 @@ buz/event/exceptions/worker_execution_exception.py,sha256=6mgztvXOCG_9VZ_Jptkk72
 buz/event/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/async_buz_kafka_event_bus.py,sha256=SyLblUVlwWOaNfZzK7vL6Ee4m-85vZVCH0rjOgqVAww,4913
-buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=guVuZUQjxR-1ECuHjJ7Z7DTtIs2E7BSmazuP73dxgcY,21013
-buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=DRe3u69LD7Yt9WjA_hK_PRznM08_Mz4hxC_4poppjck,6446
-buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=-pJVJq3b2SFmPT7SNmdPhqN2o64Hsjwds-shQ-Y7ytg,6389
+buz/event/infrastructure/buz_kafka/base_buz_aiokafka_async_consumer.py,sha256=7ZhaKaFXBpD3HVkuQMpAJvY8lfy7__1wxftLIwCmnMQ,21284
+buz/event/infrastructure/buz_kafka/buz_aiokafka_async_consumer.py,sha256=GmmuAZboDkrpNOLF8cE_F0t4I7ZnMiGsiGw4SYIvKGc,7303
+buz/event/infrastructure/buz_kafka/buz_aiokafka_multi_threaded_consumer.py,sha256=ZRLRoBRomqrXAiePSMn4gePF59AWPn6VQpQui1UVnyM,7246
 buz/event/infrastructure/buz_kafka/buz_kafka_event_bus.py,sha256=ymRSvcYVgbVCPgHN6rMBVBHQ5heCSwCDl6EffyqGVX8,4601
 buz/event/infrastructure/buz_kafka/consume_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/consume_strategy/consume_strategy.py,sha256=RqlXe5W2S6rH3FTr--tcxzFJTAVLb-Dhl7m6qjgNz2M,331
@@ -59,9 +59,10 @@ buz/event/infrastructure/buz_kafka/exceptions/__init__.py,sha256=47DEQpj8HBSa-_T
 buz/event/infrastructure/buz_kafka/exceptions/kafka_event_bus_config_not_valid_exception.py,sha256=VUKZXA2ygjg21P4DADFl_Tace6RwSXia1MRYvJypxbM,135
 buz/event/infrastructure/buz_kafka/exceptions/max_consumer_retry_exception.py,sha256=5O33uUC8FLILY1C13tQwkfsLSXrmbe0vMaUfBmOuXdU,264
 buz/event/infrastructure/buz_kafka/exceptions/retry_exception.py,sha256=Fq9kvI3DpFsGD3x2icmQ1fYIsuKZAFqI3tCibAuEtSQ,441
-buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py,sha256=zQg2QJFT5-NUea5uP0RiWzmJHPqs_oJlcbDRDzstSeU,7536
+buz/event/infrastructure/buz_kafka/kafka_event_async_subscriber_executor.py,sha256=JeKEoudfYNdUzuf8YOXHq-DY6wWHbUglE8W9aoETNp4,8833
 buz/event/infrastructure/buz_kafka/kafka_event_subscriber_executor.py,sha256=EyG2vsFYErWAyqxdXqSwxx5Zi_y0d6i0h05XavJMnxg,254
-buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=i3Gr173p91UZ4N-giQkZHVikCntu6A2ihVlqkUsReOc,7636
+buz/event/infrastructure/buz_kafka/kafka_event_sync_subscriber_executor.py,sha256=UEiQabnIuFSyds_g7JPQHzWIWa_Jha7fEtlMnKaKLEY,8938
+buz/event/infrastructure/buz_kafka/models/cdc_process_context.py,sha256=4843xU1x42XdF8iwocawrBuz2uVt9bOEeDTG5ghxmik,246
 buz/event/infrastructure/buz_kafka/models/kafka_delivery_context.py,sha256=Kvi1Pq9EvR_UQ6e-DbvB2l3m7rTvq2k4UmEZuHUg-qU,259
 buz/event/infrastructure/buz_kafka/publish_strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/buz_kafka/publish_strategy/publish_strategy.py,sha256=zIkgMnUU7ueG6QHEubMzdTHOtqdldIbS7k5FDLNmqVk,178
@@ -71,7 +72,7 @@ buz/event/infrastructure/kombu/allowed_kombu_serializer.py,sha256=LQ6futYsInawTC
 buz/event/infrastructure/kombu/consume_strategy/__init__.py,sha256=6dnAv-bOxoDL31gQD1dErRocdJvkLHTgdqeb4S33eWc,302
 buz/event/infrastructure/kombu/consume_strategy/consume_strategy.py,sha256=Zsv7QVpZXRLYvlV2nRbSdSwT_FgEELLyzUxdT6DyX8Q,179
 buz/event/infrastructure/kombu/consume_strategy/queue_per_subscriber_consume_strategy.py,sha256=Vsa1uC7dwS3jJ-dp_lvrE-hVWnN91-ma8oVqdLuXHMo,786
-buz/event/infrastructure/kombu/kombu_consumer.py,sha256=hGpPRUOVlu9zba_2XYsIhYXdXkja5j_3w-oh-cgovFM,8307
+buz/event/infrastructure/kombu/kombu_consumer.py,sha256=NNpelD_GB83sLG4z-VSVuIvbPcSVsl_zhghT3Xbexs4,8476
 buz/event/infrastructure/kombu/kombu_event_bus.py,sha256=VSLBtamp-YOta4KyqmfXvDurvPiHZSL9QPCozMK3Qyw,4017
 buz/event/infrastructure/kombu/models/kombu_delivery_context.py,sha256=oj6IBEj19fUs0U1HwZll_uIEORABiyrr6Z_WotGJexs,191
 buz/event/infrastructure/kombu/publish_strategy/__init__.py,sha256=96ssn7ydJwLXYoVyrhfGcwCpXr4_5Sl0DbN6UCoeNc8,315
@@ -83,7 +84,8 @@ buz/event/infrastructure/kombu/retry_strategy/simple_publish_retry_policy.py,sha
 buz/event/infrastructure/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/infrastructure/models/consuming_task.py,sha256=GJvn6fGTN5ZQJaOuQCX17JP7SInIGvTLTk7DLoqnLQ4,302
 buz/event/infrastructure/models/delivery_context.py,sha256=D6_wppbYEkfoBgDaPeUaQPWFUMvZiHn-4QaAxDmWUZo,92
-buz/event/infrastructure/models/execution_context.py,sha256=tohrJMSHWA5U7WByGE47LSjteAN8_IMyHoXtjyrHJMM,200
+buz/event/infrastructure/models/execution_context.py,sha256=NzCIWQNaSX_MU_MA2SSfQOvEVrzBGCtPzknU7C5R4Yg,311
+buz/event/infrastructure/models/process_context.py,sha256=rPdirnYF_G_3Rdqj2us24GFYEbJzdvhRFK8uqGCWDeE,91
 buz/event/infrastructure/queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/meta_base_subscriber.py,sha256=cqB-iDtN4iuAVBZs2UwrrrAVB2t-u1VvzZw1aSClkwg,817
 buz/event/meta_subscriber.py,sha256=yH2_2OGqionoC86a3xc4K9kewiuNJs5UtiXdRAMViNM,451
@@ -119,7 +121,7 @@ buz/event/subscriber.py,sha256=gY43QIaNCXm8vcNbtkgWJs_I8F7oMkGaC3HiawHPi70,346
 buz/event/sync/__init__.py,sha256=uJmU80PGVNNL2HoRFXp4loQTn1VK8gLo-hMEvgVPpBQ,91
 buz/event/sync/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/event/sync/models/sync_delivery_context.py,sha256=LHjrS6gV-19NEKwtAXVmefjPd-Dsp_Ym8RZb84T3lm8,190
-buz/event/sync/sync_event_bus.py,sha256=LTJHNKy8LrbygO343AA4Zt_hmgTP9uY6TLdjKs8LuHM,1821
+buz/event/sync/sync_event_bus.py,sha256=NUtuHRsSW4f2T44qGKy4Y4r8VukSwva4fsD0TwfisX8,1931
 buz/event/transactional_outbox/__init__.py,sha256=k8ZBWCi12pWKXchHfgW_Raw4sVR8XkBLuPNW9jB9X2k,1381
 buz/event/transactional_outbox/event_to_outbox_record_translator.py,sha256=d20JOeKIrCcpPEV66TzWiQmYqoyZGyL7J1ys0dUfHFs,615
 buz/event/transactional_outbox/fqn_to_event_mapper.py,sha256=ujcq6CfYqRJtM8f3SEEltbWN0Ru7NM5JfrbNdh4nvhQ,773
@@ -179,14 +181,16 @@ buz/kafka/infrastructure/aiokafka/translators/__init__.py,sha256=47DEQpj8HBSa-_T
 buz/kafka/infrastructure/aiokafka/translators/consumer_initial_offset_position_translator.py,sha256=WmxkQfoXeTy9mIJtGGhM0eDKeQxhcJczeVAGCbtonVI,617
 buz/kafka/infrastructure/cdc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/cdc/cdc_message.py,sha256=Cpv2nA19SG3HZGBLH-wIJbuokzTTZKT_HNFm1VwGSk4,585
-buz/kafka/infrastructure/cdc/cdc_payload.py,sha256=_kC80bX0HBMPwjeJy5HpPe1EmHVmVEe2Slut7Ucrpfk,1021
+buz/kafka/infrastructure/cdc/cdc_payload.py,sha256=Dm5SNhXnNvIg1v-QtcSfzXGALIJDgn2_0o0NSXyNgHs,1229
 buz/kafka/infrastructure/cdc/cdc_schema.py,sha256=mCG32kqJ045nThViM8zUjUzdIf0VtASIlqxCHA5SsGQ,1153
 buz/kafka/infrastructure/deserializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 buz/kafka/infrastructure/deserializers/byte_deserializer.py,sha256=4fc6t-zvcFx6F5eoyEixH2uN0cM6aB0YRGwowIzz1RA,211
 buz/kafka/infrastructure/deserializers/bytes_to_message_deserializer.py,sha256=r40yq67DIElPi6ClmElbtR3VGrG2grNwgwuflXWOh20,345
 buz/kafka/infrastructure/deserializers/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=zEP7mfJfZXapG7xyJj_BvGR-GtTOZEUYweHgvNRzEYE,2160
-buz/kafka/infrastructure/deserializers/implementations/cdc/not_valid_cdc_message_exception.py,sha256=hgLLwTcC-C2DuJSOWUhmQsrd1bO9I1469869IqfAPOk,414
+buz/kafka/infrastructure/deserializers/implementations/cdc/cannot_decode_cdc_message_exception.py,sha256=kbtc02mPNYZ6ojXgR6WOzpZK4t0o7Gh2TR9rtovOgjQ,418
+buz/kafka/infrastructure/deserializers/implementations/cdc/cannot_restore_event_from_cdc_payload_exception.py,sha256=9DSi9cJluiYJoDbFDqpuhQOye_ec4VX98nVoCaxsLcc,550
+buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_cdc_payload_deserializer.py,sha256=o2bm-BzeQY2WztwgL1wmx2cP0SOfTKeSPqQXakIZZuU,1487
+buz/kafka/infrastructure/deserializers/implementations/cdc/cdc_record_bytes_to_event_deserializer.py,sha256=k3aT5b7VTZtGQdyWy2cIEND-I_EsW7U6DJvE87vOvLw,2085
 buz/kafka/infrastructure/deserializers/implementations/json_byte_deserializer.py,sha256=L4b164-KweiQUwyRONhTMIGnAz48UPk0btLqjGOTNdk,373
 buz/kafka/infrastructure/deserializers/implementations/json_bytes_to_message_deserializer.py,sha256=YwugXkmOudMNtkVfCC4BFe3pFVpbM8rAL9bT88bZMRk,756
 buz/kafka/infrastructure/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -258,7 +262,7 @@ buz/serializer/message_to_json_bytes_serializer.py,sha256=RGZJ64t4t4Pz2FCASZZCv-
 buz/wrapper/__init__.py,sha256=GnRdJFcncn-qp0hzDG9dBHLmTJSbHFVjE_yr-MdW_n4,77
 buz/wrapper/async_to_sync.py,sha256=OfK-vrVUhuN-LLLvekLdMbQYtH0ue5lfbvuasj6ovMI,698
 buz/wrapper/event_loop.py,sha256=pfBJ1g-8A2a3YgW8Gf9Fg0kkewoh3-wgTy2KIFDyfHk,266
-buz-2.17.0rc7.dist-info/LICENSE,sha256=jcLgcIIVaBqaZNwe0kzGWSU99YgwMcI0IGv142wkYSM,1062
-buz-2.17.0rc7.dist-info/METADATA,sha256=tD8AIOQCaOpePJRAC_5jkpTshFpvkkLjoALYUaFbJ7Q,12583
-buz-2.17.0rc7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-buz-2.17.0rc7.dist-info/RECORD,,
+buz-2.19.0.dist-info/LICENSE,sha256=jcLgcIIVaBqaZNwe0kzGWSU99YgwMcI0IGv142wkYSM,1062
+buz-2.19.0.dist-info/METADATA,sha256=WP5d5tDJHfipqBV92aBMidSml18fol-PT4bHHorjDLY,12580
+buz-2.19.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+buz-2.19.0.dist-info/RECORD,,

{buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/LICENSE
File renamed without changes.

{buz-2.17.0rc7.dist-info → buz-2.19.0.dist-info}/WHEEL
File renamed without changes.