cledar-sdk 2.0.1__py3-none-any.whl → 2.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. cledar/__init__.py +0 -0
  2. cledar/kafka/README.md +239 -0
  3. cledar/kafka/__init__.py +40 -0
  4. cledar/kafka/clients/base.py +98 -0
  5. cledar/kafka/clients/consumer.py +110 -0
  6. cledar/kafka/clients/producer.py +80 -0
  7. cledar/kafka/config/schemas.py +178 -0
  8. cledar/kafka/exceptions.py +22 -0
  9. cledar/kafka/handlers/dead_letter.py +82 -0
  10. cledar/kafka/handlers/parser.py +49 -0
  11. cledar/kafka/logger.py +3 -0
  12. cledar/kafka/models/input.py +13 -0
  13. cledar/kafka/models/message.py +10 -0
  14. cledar/kafka/models/output.py +8 -0
  15. cledar/kafka/tests/.env.test.kafka +3 -0
  16. cledar/kafka/tests/README.md +216 -0
  17. cledar/kafka/tests/conftest.py +104 -0
  18. cledar/kafka/tests/integration/__init__.py +1 -0
  19. cledar/kafka/tests/integration/conftest.py +78 -0
  20. cledar/kafka/tests/integration/helpers.py +47 -0
  21. cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
  22. cledar/kafka/tests/integration/test_integration.py +394 -0
  23. cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
  24. cledar/kafka/tests/integration/test_producer_integration.py +217 -0
  25. cledar/kafka/tests/unit/__init__.py +1 -0
  26. cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
  27. cledar/kafka/tests/unit/test_config_validation.py +609 -0
  28. cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
  29. cledar/kafka/tests/unit/test_error_handling.py +674 -0
  30. cledar/kafka/tests/unit/test_input_parser.py +310 -0
  31. cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
  32. cledar/kafka/tests/unit/test_utils.py +25 -0
  33. cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
  34. cledar/kafka/utils/callbacks.py +19 -0
  35. cledar/kafka/utils/messages.py +28 -0
  36. cledar/kafka/utils/topics.py +2 -0
  37. cledar/kserve/README.md +352 -0
  38. cledar/kserve/__init__.py +3 -0
  39. cledar/kserve/tests/__init__.py +0 -0
  40. cledar/kserve/tests/test_utils.py +64 -0
  41. cledar/kserve/utils.py +27 -0
  42. cledar/logging/README.md +53 -0
  43. cledar/logging/__init__.py +3 -0
  44. cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
  45. cledar/logging/universal_plaintext_formatter.py +94 -0
  46. cledar/monitoring/README.md +71 -0
  47. cledar/monitoring/__init__.py +3 -0
  48. cledar/monitoring/monitoring_server.py +112 -0
  49. cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
  50. cledar/monitoring/tests/test_monitoring_server.py +59 -0
  51. cledar/nonce/README.md +99 -0
  52. cledar/nonce/__init__.py +3 -0
  53. cledar/nonce/nonce_service.py +36 -0
  54. cledar/nonce/tests/__init__.py +0 -0
  55. cledar/nonce/tests/test_nonce_service.py +136 -0
  56. cledar/redis/README.md +536 -0
  57. cledar/redis/__init__.py +15 -0
  58. cledar/redis/async_example.py +111 -0
  59. cledar/redis/example.py +37 -0
  60. cledar/redis/exceptions.py +22 -0
  61. cledar/redis/logger.py +3 -0
  62. cledar/redis/model.py +10 -0
  63. cledar/redis/redis.py +525 -0
  64. cledar/redis/redis_config_store.py +252 -0
  65. cledar/redis/tests/test_async_integration_redis.py +158 -0
  66. cledar/redis/tests/test_async_redis_service.py +380 -0
  67. cledar/redis/tests/test_integration_redis.py +119 -0
  68. cledar/redis/tests/test_redis_service.py +319 -0
  69. cledar/storage/README.md +529 -0
  70. cledar/storage/__init__.py +4 -0
  71. cledar/storage/constants.py +3 -0
  72. cledar/storage/exceptions.py +50 -0
  73. cledar/storage/models.py +19 -0
  74. cledar/storage/object_storage.py +955 -0
  75. cledar/storage/tests/conftest.py +18 -0
  76. cledar/storage/tests/test_abfs.py +164 -0
  77. cledar/storage/tests/test_integration_filesystem.py +359 -0
  78. cledar/storage/tests/test_integration_s3.py +453 -0
  79. cledar/storage/tests/test_local.py +384 -0
  80. cledar/storage/tests/test_s3.py +521 -0
  81. {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/METADATA +1 -1
  82. cledar_sdk-2.0.3.dist-info/RECORD +84 -0
  83. cledar_sdk-2.0.1.dist-info/RECORD +0 -4
  84. {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/WHEEL +0 -0
  85. {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/licenses/LICENSE +0 -0
cledar/__init__.py ADDED
File without changes
cledar/kafka/README.md ADDED
@@ -0,0 +1,239 @@
+ # Kafka Service
+
+ ## Purpose
+
+ The `cledar.kafka` package provides typed, testable wrappers around Kafka producer and consumer clients (Confluent Kafka), together with configuration schemas, message models, parsing and dead-letter handling utilities. It is designed for clarity, reliability, and easy testing (unit and integration).
+
+ ### Key Features
+
+ - **Typed Producer/Consumer**: Simple OO wrappers for Confluent Kafka
+ - **Pydantic Configs**: Validated, frozen dataclasses for producer/consumer configuration
+ - **Dead Letter Handling**: Helper to route failed messages to DLQ topics
+ - **Message Models**: Structured input/output models
+ - **Parsing Utilities**: Safe message parsing to typed payloads
+ - **Testability**: Comprehensive unit tests and Docker-based integration tests using testcontainers
+
+ ## Installation
+
+ This package is part of the Cledar SDK. Install it using:
+
+ ```bash
+ # Install with uv (recommended)
+ uv sync --all-groups
+
+ # Or with pip
+ pip install -e .
+ ```
+
+ ## Usage Examples
+
+ ### Producer: send messages
+
+ ```python
+ import time
+ from cledar.kafka.clients.producer import KafkaProducer
+ from cledar.kafka.config.schemas import KafkaProducerConfig
+
+ producer = KafkaProducer(
+     KafkaProducerConfig(
+         kafka_servers="localhost:9092",  # or ["host1:9092", "host2:9092"]
+         kafka_group_id="example-producer",
+         kafka_topic_prefix="my-prefix.",  # optional
+         kafka_block_buffer_time_sec=1,
+     )
+ )
+
+ producer.connect()
+
+ producer.send(
+     topic="example-topic",  # final Kafka topic will include prefix
+     key="msg-1",
+     value='{"id":"1","message":"hello","timestamp": %f}' % time.time(),
+ )
+
+ # Optionally check connection status
+ assert producer.is_alive()
+
+ producer.shutdown()
+ ```
+
+ ### Consumer: subscribe and consume
+
+ ```python
+ from cledar.kafka.clients.consumer import KafkaConsumer
+ from cledar.kafka.config.schemas import KafkaConsumerConfig
+
+ consumer = KafkaConsumer(
+     KafkaConsumerConfig(
+         kafka_servers="localhost:9092",
+         kafka_group_id="example-consumer",
+         kafka_offset="earliest",
+         kafka_topic_prefix="my-prefix.",  # optional
+         kafka_block_consumer_time_sec=1,
+     )
+ )
+
+ consumer.connect()
+ consumer.subscribe(["example-topic"])  # subscribes to prefixed topic
+
+ msg = consumer.consume_next()  # returns KafkaMessage | None
+ if msg is not None:
+     print(msg.topic, msg.key, msg.value)
+
+ assert consumer.is_alive()
+ consumer.shutdown()
+ ```
+
+ ### Dead Letter Handling
+
+ ```python
+ from cledar.kafka.handlers.dead_letter import DeadLetterHandler
+ from cledar.kafka.models.output import FailedMessageData
+
+ # Assume you already have a connected producer and a consumed message
+ handler = DeadLetterHandler(producer, dlq_topic="errors-topic")
+
+ failure_details = [
+     FailedMessageData(
+         raised_at="2024-01-01T00:00:00Z",
+         exception_message="Processing failed",
+         exception_trace="Traceback...",
+         failure_reason="validation_error",
+     )
+ ]
+
+ handler.handle(message, failure_details)
+ ```
+
+ ### Parsing to Typed Payloads
+
+ ```python
+ from pydantic import BaseModel
+ from cledar.kafka.handlers.parser import InputParser
+
+ class Payload(BaseModel):
+     id: str
+     message: str
+
+ parser = InputParser(Payload)
+ parsed = parser.parse_message(message)  # -> ParsedMessage[Payload]
+ print(parsed.payload.id, parsed.payload.message)
+ ```
+
+ ## Project Structure
+
+ ```
+ cledar/kafka/
+ ├── clients/
+ │   ├── base.py          # BaseKafkaClient (shared logic)
+ │   ├── consumer.py      # KafkaConsumer wrapper
+ │   └── producer.py      # KafkaProducer wrapper
+ ├── config/
+ │   └── schemas.py       # Pydantic frozen dataclass configs
+ ├── handlers/
+ │   ├── dead_letter.py   # DeadLetterHandler
+ │   └── parser.py        # InputParser and related utilities
+ ├── models/
+ │   ├── input.py         # Input model definitions
+ │   ├── message.py       # KafkaMessage, etc.
+ │   └── output.py        # FailedMessageData, etc.
+ ├── utils/
+ │   ├── callbacks.py     # Delivery callbacks
+ │   ├── messages.py      # Message utilities (e.g., extract_id_from_value)
+ │   └── topics.py        # Topic utilities/helpers
+ ├── logger.py            # Module logger
+ └── tests/
+     ├── README.md        # Tests documentation (how to run)
+     ├── conftest.py      # Test-wide teardown (thread cleanup)
+     ├── unit/            # Unit tests (176)
+     └── integration/     # Integration tests (41) with helpers & shared fixtures
+ ```
+
+ ## Running Linters
+
+ Common commands from repo root:
+
+ ```bash
+ # Format (ruff)
+ uv run ruff format .
+
+ # Type-check (mypy)
+ uv run mypy kafka/
+
+ # Optional: pylint
+ uv run pylint kafka/
+ ```
+
+ ## Running Tests
+
+ See `kafka/tests/README.md` for full details. Quick start:
+
+ ```bash
+ # Unit tests
+ PYTHONPATH=. uv run pytest kafka/tests/unit/ -v
+
+ # Integration tests (requires Docker running)
+ PYTHONPATH=. uv run pytest kafka/tests/integration/ -v
+ ```
+
+ - Integration tests use `testcontainers` with Kafka image `confluentinc/cp-kafka:7.4.0`.
+ - Shared fixtures live in `kafka/tests/integration/conftest.py`.
+ - Helpers (e.g., `consume_until`) live in `kafka/tests/integration/helpers.py`.
+ - Test-wide teardown in `kafka/tests/conftest.py` ensures background threads do not block process exit.
+
+ ## API Overview
+
+ ### Configs (pydantic dataclasses)
+
+ ```python
+ from cledar.kafka.config.schemas import KafkaProducerConfig, KafkaConsumerConfig
+ ```
+
+ - Validated, frozen configs; construct with required `kafka_servers` and `kafka_group_id`.
+ - Optional fields include `kafka_topic_prefix`, timeouts, and intervals.
+
+ ### Producer
+
+ ```python
+ from cledar.kafka.clients.producer import KafkaProducer
+ ```
+
+ - `connect()` / `shutdown()`
+ - `send(topic: str, value: str | None, key: str | None, headers: list[tuple[str, bytes]] | None = None)`
+ - `check_connection()` / `is_alive()`
+
+ ### Consumer
+
+ ```python
+ from cledar.kafka.clients.consumer import KafkaConsumer
+ ```
+
+ - `connect()` / `shutdown()`
+ - `subscribe(topics: list[str])`
+ - `consume_next() -> KafkaMessage | None`
+ - `commit(message: KafkaMessage) -> None`
+ - `check_connection()` / `is_alive()`
+
+ ### Errors
+
+ ```python
+ from cledar.kafka.exceptions import (
+     KafkaConnectionError,
+     KafkaProducerNotConnectedError,
+     KafkaConsumerNotConnectedError,
+ )
+ ```
+
+ ## Notes
+
+ - Always run tests with `PYTHONPATH=.` from the repository root to ensure imports resolve.
+ - Integration tests require Docker and will pull testcontainers images on first run.
+ - Topics are automatically prefixed with `kafka_topic_prefix` if set in configs.
+
+ ## License
+
+ See the main repository LICENSE file.
+
+ ## Support
+
+ For issues, questions, or contributions, please refer to the repository contribution guidelines.
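The README demonstrates each building block in isolation. Below is a minimal sketch of how one worker might combine them, assuming only the APIs shown above; the `Payload` model and topic names are illustrative, and the premise that `InputParser.parse_message` raises the exported `IncorrectMessageValueError` on bad input is an assumption, not something stated in this README.

```python
from pydantic import BaseModel

from cledar.kafka import (
    DeadLetterHandler,
    FailedMessageData,
    IncorrectMessageValueError,
    InputParser,
    KafkaConsumer,
    KafkaConsumerConfig,
    KafkaProducer,
    KafkaProducerConfig,
)


class Payload(BaseModel):  # illustrative payload model
    id: str
    message: str


producer = KafkaProducer(
    KafkaProducerConfig(kafka_servers="localhost:9092", kafka_group_id="worker")
)
consumer = KafkaConsumer(
    KafkaConsumerConfig(
        kafka_servers="localhost:9092",
        kafka_group_id="worker",
        kafka_offset="earliest",
    )
)
producer.connect()
consumer.connect()
consumer.subscribe(["example-topic"])

parser = InputParser(Payload)
dead_letters = DeadLetterHandler(producer, dlq_topic="errors-topic")

msg = consumer.consume_next()
if msg is not None:
    try:
        parsed = parser.parse_message(msg)  # -> ParsedMessage[Payload]
        print(parsed.payload.id, parsed.payload.message)
        consumer.commit(msg)
    except IncorrectMessageValueError as exc:  # assumed failure path
        dead_letters.handle(
            msg,
            [
                FailedMessageData(
                    raised_at="2024-01-01T00:00:00Z",
                    exception_message=str(exc),
                    exception_trace="Traceback...",
                    failure_reason="validation_error",
                )
            ],
        )
```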
cledar/kafka/__init__.py ADDED
@@ -0,0 +1,40 @@
+ from .clients.base import BaseKafkaClient
+ from .clients.consumer import KafkaConsumer
+ from .clients.producer import KafkaProducer
+ from .config.schemas import (
+     KafkaConsumerConfig,
+     KafkaProducerConfig,
+     KafkaSaslMechanism,
+     KafkaSecurityProtocol,
+ )
+ from .exceptions import (
+     KafkaConnectionError,
+     KafkaConsumerError,
+     KafkaConsumerNotConnectedError,
+     KafkaProducerNotConnectedError,
+ )
+ from .handlers.dead_letter import DeadLetterHandler
+ from .handlers.parser import IncorrectMessageValueError, InputParser
+ from .models.input import InputKafkaMessage
+ from .models.message import KafkaMessage
+ from .models.output import FailedMessageData
+
+ __all__ = [
+     "KafkaConsumer",
+     "KafkaProducer",
+     "BaseKafkaClient",
+     "DeadLetterHandler",
+     "InputParser",
+     "IncorrectMessageValueError",
+     "InputKafkaMessage",
+     "FailedMessageData",
+     "KafkaMessage",
+     "KafkaProducerConfig",
+     "KafkaConsumerConfig",
+     "KafkaSecurityProtocol",
+     "KafkaSaslMechanism",
+     "KafkaConnectionError",
+     "KafkaConsumerNotConnectedError",
+     "KafkaProducerNotConnectedError",
+     "KafkaConsumerError",
+ ]
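Since `__init__.py` re-exports these names, the public API can presumably be imported from the package root as well as from the submodules used in the README. A small sketch of that equivalence:

```python
# Sketch: the root-level names are the same objects as the submodule ones,
# because __init__.py imports them from .clients.* and .config.*.
from cledar.kafka import KafkaProducer, KafkaProducerConfig
from cledar.kafka.clients.producer import KafkaProducer as SubmoduleKafkaProducer
from cledar.kafka.config.schemas import KafkaProducerConfig as SubmoduleKafkaProducerConfig

assert KafkaProducer is SubmoduleKafkaProducer
assert KafkaProducerConfig is SubmoduleKafkaProducerConfig
```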
cledar/kafka/clients/base.py ADDED
@@ -0,0 +1,98 @@
+ import threading
+
+ from confluent_kafka import Consumer, KafkaException, Producer
+ from pydantic import ConfigDict
+ from pydantic.dataclasses import dataclass
+
+ from ..config.schemas import KafkaConsumerConfig, KafkaProducerConfig
+ from ..exceptions import (
+     KafkaConnectionError,
+     KafkaConsumerNotConnectedError,
+     KafkaProducerNotConnectedError,
+ )
+ from ..logger import logger
+
+
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class BaseKafkaClient:
+     config: KafkaProducerConfig | KafkaConsumerConfig
+     client: Producer | Consumer | None = None
+     connection_check_thread: threading.Thread | None = None
+
+     def __post_init__(self) -> None:
+         # Create instance-level stop event
+         self._stop_event = threading.Event()
+         logger.info(
+             f"Initializing {self.__class__.__name__}.", extra={"config": self.config}
+         )
+
+     def start_connection_check_thread(self) -> None:
+         if self.connection_check_thread is None:
+             self.connection_check_thread = threading.Thread(
+                 target=self._monitor_connection
+             )
+             self.connection_check_thread.start()
+             logger.info(
+                 f"Started {self.__class__.__name__} connection check thread.",
+                 extra={"interval": self.config.kafka_connection_check_interval_sec},
+             )
+
+     def _monitor_connection(self) -> None:
+         while not self._stop_event.wait(
+             self.config.kafka_connection_check_interval_sec
+         ):
+             try:
+                 self.check_connection()
+                 logger.info(f"{self.__class__.__name__} connection status: Connected.")
+             except KafkaConnectionError:
+                 logger.exception(f"{self.__class__.__name__} connection check failed.")
+
+     def is_alive(self) -> bool:
+         try:
+             self.check_connection()
+             return True
+         except (
+             KafkaProducerNotConnectedError,
+             KafkaConsumerNotConnectedError,
+             KafkaConnectionError,
+         ):
+             return False
+
+     def check_connection(self) -> None:
+         """
+         when the broker is not available (or the address is wrong)
+         the 'connection refused' error is not caught
+         https://github.com/confluentinc/confluent-kafka-python/issues/941
+         the below is far-from-perfect workaround handling that
+         """
+         if self.client is None:
+             logger.error(
+                 f"{self.__class__.__name__} is not connected. Call 'connect' first.",
+             )
+             raise (
+                 KafkaProducerNotConnectedError
+                 if isinstance(self.config, KafkaProducerConfig)
+                 else KafkaConsumerNotConnectedError
+             )
+         try:
+             self.client.list_topics(
+                 timeout=self.config.kafka_connection_check_timeout_sec
+             )
+         except KafkaException as exception:
+             logger.exception("Failed to connect to Kafka servers.")
+             raise KafkaConnectionError from exception
+
+     def shutdown(self) -> None:
+         logger.info("Closing %s...", self.__class__.__name__)
+         self._stop_event.set()
+         if self.connection_check_thread is not None:
+             self.connection_check_thread.join()
+             logger.info("Stopped connection check thread.")
+         if isinstance(self.client, Producer):
+             self.client.flush(-1)
+             logger.info("%s flushed.", self.__class__.__name__)
+         elif isinstance(self.client, Consumer):
+             self.client.close()
+         # Clear the client reference to indicate shutdown
+         self.client = None
+         logger.info("%s closed.", self.__class__.__name__)
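`BaseKafkaClient` ties liveness to the `client` attribute and a `list_topics()` probe: `check_connection()` raises a not-connected error while `client` is `None`, and `shutdown()` resets `client` to `None` after stopping the monitor thread. A rough sketch of the resulting lifecycle, assuming a broker is reachable at the configured (placeholder) address:

```python
# Sketch of the lifecycle implied by check_connection()/is_alive()/shutdown().
from cledar.kafka import KafkaProducer, KafkaProducerConfig

producer = KafkaProducer(
    KafkaProducerConfig(kafka_servers="localhost:9092", kafka_group_id="demo")
)

# Before connect(): client is None, so check_connection() raises
# KafkaProducerNotConnectedError and is_alive() reports False.
assert not producer.is_alive()

producer.connect()  # builds the Producer, probes it via list_topics(),
                    # then starts the background connection-check thread
assert producer.is_alive()

producer.shutdown()  # stops the monitor thread, flushes, and clears client
assert not producer.is_alive()
```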
cledar/kafka/clients/consumer.py ADDED
@@ -0,0 +1,110 @@
+ from confluent_kafka import Consumer, KafkaException
+ from pydantic import ConfigDict
+ from pydantic.dataclasses import dataclass
+
+ from ..config.schemas import KafkaConsumerConfig
+ from ..exceptions import (
+     KafkaConsumerError,
+     KafkaConsumerNotConnectedError,
+ )
+ from ..logger import logger
+ from ..models.message import KafkaMessage
+ from ..utils.messages import consumer_not_connected_msg, extract_id_from_value
+ from ..utils.topics import build_topic
+ from .base import BaseKafkaClient
+
+
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class KafkaConsumer(BaseKafkaClient):
+     config: KafkaConsumerConfig
+     client: Consumer | None = None
+
+     def connect(self) -> None:
+         self.client = Consumer(self.config.to_kafka_config())
+         self.check_connection()
+         logger.info(
+             "Connected KafkaConsumer to Kafka servers.",
+             extra={"kafka_servers": self.config.kafka_servers},
+         )
+         self.start_connection_check_thread()
+
+     def subscribe(self, topics: list[str]) -> None:
+         if self.client is None:
+             logger.error(
+                 consumer_not_connected_msg,
+                 extra={"topics": topics},
+             )
+             raise KafkaConsumerNotConnectedError
+
+         topics = [
+             build_topic(topic_name=topic, prefix=self.config.kafka_topic_prefix)
+             for topic in topics
+         ]
+
+         try:
+             logger.info(
+                 "Subscribing to topics.",
+                 extra={"topics": topics},
+             )
+             self.client.subscribe(topics)
+
+         except KafkaException as exception:
+             logger.exception(
+                 "Failed to subscribe to topics.",
+                 extra={"topics": topics},
+             )
+             raise exception
+
+     def consume_next(self) -> KafkaMessage | None:
+         if self.client is None:
+             logger.error(consumer_not_connected_msg)
+             raise KafkaConsumerNotConnectedError
+
+         try:
+             msg = self.client.poll(self.config.kafka_block_consumer_time_sec)
+
+             if msg is None:
+                 return None
+
+             if msg.error():
+                 logger.error(
+                     "Consumer error.",
+                     extra={"error": msg.error()},
+                 )
+                 raise KafkaConsumerError(msg.error())
+
+             logger.debug(
+                 "Received message.",
+                 extra={
+                     "topic": msg.topic(),
+                     "msg_id": extract_id_from_value(msg.value().decode("utf-8")),
+                     "key": msg.key(),
+                 },
+             )
+             return KafkaMessage(
+                 topic=msg.topic(),
+                 value=msg.value().decode("utf-8") if msg.value() else None,
+                 key=msg.key().decode("utf-8") if msg.key() else None,
+                 offset=msg.offset(),
+                 partition=msg.partition(),
+             )
+
+         except KafkaException as exception:
+             logger.exception("Failed to consume message.")
+             raise exception
+
+     def commit(self, message: KafkaMessage) -> None:
+         if self.client is None:
+             logger.error(consumer_not_connected_msg)
+             raise KafkaConsumerNotConnectedError
+
+         try:
+             self.client.commit(asynchronous=True)
+             logger.debug(
+                 "Commit requested.",
+                 extra={"offset": message.offset, "partition": message.partition},
+             )
+
+         except KafkaException as exception:
+             logger.exception("Failed to commit offsets.")
+             raise exception
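`consume_next()` returns at most one message per call (or `None` when the poll window of `kafka_block_consumer_time_sec` elapses), and `commit()` delegates to `Consumer.commit(asynchronous=True)`, so it asynchronously commits the consumer's current offsets rather than only the single message passed in. A minimal polling loop built on the methods above (broker address, group id, and topic are placeholders):

```python
# Sketch: a simple consume/commit loop over KafkaConsumer.
from cledar.kafka import KafkaConsumer, KafkaConsumerConfig

consumer = KafkaConsumer(
    KafkaConsumerConfig(
        kafka_servers="localhost:9092",
        kafka_group_id="example-consumer",
        kafka_offset="earliest",
    )
)
consumer.connect()
consumer.subscribe(["example-topic"])

try:
    while True:
        msg = consumer.consume_next()  # blocks up to kafka_block_consumer_time_sec
        if msg is None:
            continue  # nothing arrived within the poll window
        print(msg.topic, msg.partition, msg.offset, msg.key, msg.value)
        consumer.commit(msg)  # async commit of the consumer's current offsets
finally:
    consumer.shutdown()
```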
cledar/kafka/clients/producer.py ADDED
@@ -0,0 +1,80 @@
+ from confluent_kafka import KafkaException, Producer
+ from pydantic import ConfigDict
+ from pydantic.dataclasses import dataclass
+
+ from ..config.schemas import KafkaProducerConfig
+ from ..exceptions import KafkaProducerNotConnectedError
+ from ..logger import logger
+ from ..utils.callbacks import delivery_callback
+ from ..utils.messages import extract_id_from_value
+ from ..utils.topics import build_topic
+ from .base import BaseKafkaClient
+
+
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class KafkaProducer(BaseKafkaClient):
+     config: KafkaProducerConfig
+     client: Producer | None = None
+
+     def connect(self) -> None:
+         self.client = Producer(self.config.to_kafka_config())
+         self.check_connection()
+         logger.info(
+             "Connected Producer to Kafka servers.",
+             extra={"kafka_servers": self.config.kafka_servers},
+         )
+         self.start_connection_check_thread()
+
+     def send(
+         self,
+         topic: str,
+         value: str | None,
+         key: str | None,
+         headers: list[tuple[str, bytes]] | None = None,
+     ) -> None:
+         if self.client is None:
+             logger.error(
+                 "KafkaProducer is not connected. Call 'connect' first.",
+                 extra={
+                     "topic": topic,
+                     "msg_id": extract_id_from_value(value),
+                     "key": key,
+                 },
+             )
+             raise KafkaProducerNotConnectedError
+
+         topic = build_topic(topic_name=topic, prefix=self.config.kafka_topic_prefix)
+
+         try:
+             logger.debug(
+                 "Sending message to topic.",
+                 extra={
+                     "topic": topic,
+                     "msg_id": extract_id_from_value(value),
+                     "key": key,
+                     "headers": headers,
+                 },
+             )
+             self.client.produce(
+                 topic=topic,
+                 value=value,
+                 key=key,
+                 headers=headers,
+                 callback=delivery_callback,
+             )
+             self.client.poll(0)
+
+         except BufferError:
+             logger.warning("Buffer full, waiting for free space on the queue")
+             self.client.poll(self.config.kafka_block_buffer_time_sec)
+             self.client.produce(
+                 topic=topic,
+                 value=value,
+                 key=key,
+                 headers=headers,
+                 callback=delivery_callback,
+             )
+
+         except KafkaException as exception:
+             logger.exception("Failed to send message.")
+             raise exception
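`send()` already absorbs local-queue backpressure: on `BufferError` it polls for `kafka_block_buffer_time_sec` and retries `produce()` once, and `BaseKafkaClient.shutdown()` ends with `flush(-1)`, which waits for outstanding deliveries. A short sketch of a bulk send that relies on this behaviour (broker address, topic, key format, and header are placeholders):

```python
# Sketch: bulk sending through KafkaProducer.send().
from cledar.kafka import KafkaProducer, KafkaProducerConfig

producer = KafkaProducer(
    KafkaProducerConfig(
        kafka_servers="localhost:9092",
        kafka_group_id="bulk-producer",
        kafka_block_buffer_time_sec=1,  # back-off used when the local buffer is full
    )
)
producer.connect()

for i in range(10_000):
    producer.send(
        topic="example-topic",
        key=f"msg-{i}",
        value='{"id":"%d","message":"hello"}' % i,
        headers=[("source", b"bulk-loader")],  # optional, typed as list[tuple[str, bytes]]
    )

producer.shutdown()  # flushes pending messages before closing
```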