cledar-sdk 2.0.3__py3-none-any.whl → 2.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. cledar/__init__.py +1 -0
  2. cledar/kafka/__init__.py +2 -0
  3. cledar/kafka/clients/base.py +24 -5
  4. cledar/kafka/clients/consumer.py +28 -0
  5. cledar/kafka/clients/producer.py +17 -0
  6. cledar/kafka/config/schemas.py +91 -7
  7. cledar/kafka/exceptions.py +7 -12
  8. cledar/kafka/handlers/dead_letter.py +26 -20
  9. cledar/kafka/handlers/parser.py +36 -2
  10. cledar/kafka/logger.py +2 -0
  11. cledar/kafka/models/input.py +4 -0
  12. cledar/kafka/models/message.py +4 -0
  13. cledar/kafka/models/output.py +4 -0
  14. cledar/kafka/utils/callbacks.py +9 -0
  15. cledar/kafka/utils/messages.py +11 -0
  16. cledar/kafka/utils/topics.py +13 -0
  17. cledar/kserve/__init__.py +2 -0
  18. cledar/kserve/utils.py +3 -0
  19. cledar/logging/__init__.py +2 -0
  20. cledar/logging/universal_plaintext_formatter.py +17 -12
  21. cledar/monitoring/__init__.py +2 -0
  22. cledar/monitoring/monitoring_server.py +45 -1
  23. cledar/nonce/__init__.py +2 -0
  24. cledar/nonce/nonce_service.py +30 -4
  25. cledar/redis/__init__.py +2 -0
  26. cledar/redis/async_example.py +4 -3
  27. cledar/redis/example.py +30 -0
  28. cledar/redis/exceptions.py +3 -0
  29. cledar/redis/logger.py +2 -0
  30. cledar/redis/model.py +4 -0
  31. cledar/redis/redis.py +296 -18
  32. cledar/redis/redis_config_store.py +81 -0
  33. cledar/storage/__init__.py +2 -0
  34. cledar/storage/constants.py +2 -0
  35. cledar/storage/exceptions.py +29 -0
  36. cledar/storage/models.py +22 -0
  37. cledar/storage/object_storage.py +342 -23
  38. {cledar_sdk-2.0.3.dist-info → cledar_sdk-2.1.1.dist-info}/METADATA +1 -1
  39. {cledar_sdk-2.0.3.dist-info → cledar_sdk-2.1.1.dist-info}/RECORD +41 -41
  40. {cledar_sdk-2.0.3.dist-info → cledar_sdk-2.1.1.dist-info}/WHEEL +0 -0
  41. {cledar_sdk-2.0.3.dist-info → cledar_sdk-2.1.1.dist-info}/licenses/LICENSE +0 -0
cledar/__init__.py CHANGED
@@ -0,0 +1 @@
+ """Cledar Python SDK for data platform services and integrations."""
cledar/kafka/__init__.py CHANGED
@@ -1,3 +1,5 @@
+ """Kafka client and utilities for Cledar SDK."""
+
  from .clients.base import BaseKafkaClient
  from .clients.consumer import KafkaConsumer
  from .clients.producer import KafkaProducer
cledar/kafka/clients/base.py CHANGED
@@ -1,3 +1,5 @@
+ """Base Kafka client module."""
+
  import threading

  from confluent_kafka import Consumer, KafkaException, Producer
@@ -15,11 +17,18 @@ from ..logger import logger

  @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
  class BaseKafkaClient:
+ """Base class for Kafka clients.
+
+ This class provides common functionality for both producers and consumers,
+ such as connection monitoring and shutdown handling.
+ """
+
  config: KafkaProducerConfig | KafkaConsumerConfig
  client: Producer | Consumer | None = None
  connection_check_thread: threading.Thread | None = None

  def __post_init__(self) -> None:
+ """Initialize the client with instance-level events."""
  # Create instance-level stop event
  self._stop_event = threading.Event()
  logger.info(
@@ -27,6 +36,7 @@ class BaseKafkaClient:
  )

  def start_connection_check_thread(self) -> None:
+ """Start a background thread to monitor the Kafka connection."""
  if self.connection_check_thread is None:
  self.connection_check_thread = threading.Thread(
  target=self._monitor_connection
@@ -48,6 +58,12 @@ class BaseKafkaClient:
  logger.exception(f"{self.__class__.__name__} connection check failed.")

  def is_alive(self) -> bool:
+ """Check if the client is currently connected and alive.
+
+ Returns:
+ bool: True if connected, False otherwise.
+
+ """
  try:
  self.check_connection()
  return True
@@ -59,11 +75,13 @@ class BaseKafkaClient:
  return False

  def check_connection(self) -> None:
- """
- when the broker is not available (or the address is wrong)
- the 'connection refused' error is not caught
- https://github.com/confluentinc/confluent-kafka-python/issues/941
- the below is far-from-perfect workaround handling that
+ """Check the connection to Kafka servers.
+
+ When the broker is not available (or the address is wrong),
+ the 'connection refused' error is not caught.
+ Ref: https://github.com/confluentinc/confluent-kafka-python/issues/941
+ The below is far-from-perfect workaround handling that.
+
  """
  if self.client is None:
  logger.error(
@@ -83,6 +101,7 @@ class BaseKafkaClient:
  raise KafkaConnectionError from exception

  def shutdown(self) -> None:
+ """Shutdown the client and stop connection monitoring."""
  logger.info("Closing %s...", self.__class__.__name__)
  self._stop_event.set()
  if self.connection_check_thread is not None:
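Note: the new docstrings spell out the base client's connection contract: check_connection raises KafkaConnectionError on failure, while is_alive wraps it and returns a bool, which suits health probes. A minimal sketch of that contract, where `client` stands for any connected KafkaProducer or KafkaConsumer (both inherit BaseKafkaClient):

from cledar.kafka.exceptions import KafkaConnectionError

def assert_kafka_reachable(client) -> None:
    # check_connection() raises KafkaConnectionError when the broker is unreachable.
    try:
        client.check_connection()
    except KafkaConnectionError as exc:
        raise RuntimeError("Kafka brokers are unreachable") from exc

def kafka_liveness(client) -> bool:
    # is_alive() performs the same check but returns False instead of raising.
    return client.is_alive()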
cledar/kafka/clients/consumer.py CHANGED
@@ -1,3 +1,5 @@
+ """Kafka consumer client module."""
+
  from confluent_kafka import Consumer, KafkaException
  from pydantic import ConfigDict
  from pydantic.dataclasses import dataclass
@@ -16,10 +18,17 @@ from .base import BaseKafkaClient

  @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
  class KafkaConsumer(BaseKafkaClient):
+ """Kafka consumer client.
+
+ This class provides methods to connect to Kafka, subscribe to topics,
+ and consume messages.
+ """
+
  config: KafkaConsumerConfig
  client: Consumer | None = None

  def connect(self) -> None:
+ """Connect to Kafka servers and start connection monitoring."""
  self.client = Consumer(self.config.to_kafka_config())
  self.check_connection()
  logger.info(
@@ -29,6 +38,12 @@ class KafkaConsumer(BaseKafkaClient):
  self.start_connection_check_thread()

  def subscribe(self, topics: list[str]) -> None:
+ """Subscribe to a list of topics.
+
+ Args:
+ topics: A list of topic names to subscribe to.
+
+ """
  if self.client is None:
  logger.error(
  consumer_not_connected_msg,
@@ -56,6 +71,13 @@ class KafkaConsumer(BaseKafkaClient):
  raise exception

  def consume_next(self) -> KafkaMessage | None:
+ """Consume the next message from subscribed topics.
+
+ Returns:
+ KafkaMessage | None: The consumed message or None if no message is
+ available.
+
+ """
  if self.client is None:
  logger.error(consumer_not_connected_msg)
  raise KafkaConsumerNotConnectedError
@@ -94,6 +116,12 @@ class KafkaConsumer(BaseKafkaClient):
  raise exception

  def commit(self, message: KafkaMessage) -> None:
+ """Commit offsets for the current message.
+
+ Args:
+ message: The message for which to commit offsets.
+
+ """
  if self.client is None:
  logger.error(consumer_not_connected_msg)
  raise KafkaConsumerNotConnectedError
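Note: read together, these docstrings describe the consumer lifecycle: connect, subscribe, poll with consume_next, then commit explicitly. A minimal usage sketch under stated assumptions — the KafkaConsumerConfig keyword arguments are illustrative, since only kafka_servers and kafka_offset are visible in this diff and other required fields may exist:

from cledar.kafka import KafkaConsumer
from cledar.kafka.config.schemas import KafkaConsumerConfig

# Hypothetical config values; only kafka_servers and kafka_offset appear in this diff.
config = KafkaConsumerConfig(kafka_servers="localhost:9092", kafka_offset="earliest")
consumer = KafkaConsumer(config=config)
consumer.connect()                 # also starts the background connection-check thread
consumer.subscribe(["orders"])

message = consumer.consume_next()  # returns None when no message is available
if message is not None:
    print(message.topic, message.key, message.value)
    consumer.commit(message)       # offsets are committed manually (enable.auto.commit is False)
consumer.shutdown()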
cledar/kafka/clients/producer.py CHANGED
@@ -1,3 +1,5 @@
+ """Kafka producer client module."""
+
  from confluent_kafka import KafkaException, Producer
  from pydantic import ConfigDict
  from pydantic.dataclasses import dataclass
@@ -13,10 +15,16 @@ from .base import BaseKafkaClient

  @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
  class KafkaProducer(BaseKafkaClient):
+ """Kafka producer client.
+
+ This class provides methods to connect to Kafka and send messages.
+ """
+
  config: KafkaProducerConfig
  client: Producer | None = None

  def connect(self) -> None:
+ """Connect to Kafka servers and start connection monitoring."""
  self.client = Producer(self.config.to_kafka_config())
  self.check_connection()
  logger.info(
@@ -32,6 +40,15 @@ class KafkaProducer(BaseKafkaClient):
  key: str | None,
  headers: list[tuple[str, bytes]] | None = None,
  ) -> None:
+ """Send a message to a Kafka topic.
+
+ Args:
+ topic: The name of the topic to send the message to.
+ value: The message value.
+ key: The message key.
+ headers: Optional list of message headers.
+
+ """
  if self.client is None:
  logger.error(
  "KafkaProducer is not connected. Call 'connect' first.",
cledar/kafka/config/schemas.py CHANGED
@@ -1,10 +1,12 @@
- from enum import Enum
+ """Configuration schemas for Kafka clients."""
+
+ from enum import StrEnum

  from pydantic import field_validator
  from pydantic.dataclasses import dataclass


- class KafkaSecurityProtocol(str, Enum):
+ class KafkaSecurityProtocol(StrEnum):
  """Supported Kafka security protocols."""

  PLAINTEXT = "PLAINTEXT"
@@ -13,7 +15,7 @@ class KafkaSecurityProtocol(str, Enum):
  SASL_SSL = "SASL_SSL"


- class KafkaSaslMechanism(str, Enum):
+ class KafkaSaslMechanism(StrEnum):
  """Supported Kafka SASL mechanisms."""

  PLAIN = "PLAIN"
@@ -23,7 +25,18 @@ class KafkaSaslMechanism(str, Enum):


  def _validate_kafka_servers(v: list[str] | str) -> list[str] | str:
- """Validate kafka_servers is not empty."""
+ """Validate kafka_servers is not empty.
+
+ Args:
+ v: List of Kafka broker addresses or a comma-separated string.
+
+ Returns:
+ list[str] | str: The validated value.
+
+ Raises:
+ ValueError: If the value is empty.
+
+ """
  if isinstance(v, str) and v.strip() == "":
  raise ValueError("kafka_servers cannot be empty")
  if isinstance(v, list) and len(v) == 0:
@@ -32,7 +45,18 @@ def _validate_kafka_servers(v: list[str] | str) -> list[str] | str:


  def _validate_non_negative(v: int) -> int:
- """Validate that timeout values are non-negative."""
+ """Validate that timeout values are non-negative.
+
+ Args:
+ v: Timeout value to validate.
+
+ Returns:
+ int: The validated value.
+
+ Raises:
+ ValueError: If the value is negative.
+
+ """
  if v < 0:
  raise ValueError("timeout values must be non-negative")
  return v
@@ -52,6 +76,7 @@ class KafkaProducerConfig:
  kafka_partitioner: Partitioning strategy for messages
  compression_type: Compression type for messages (gzip, snappy, lz4, zstd,
  or None)
+
  """

  kafka_servers: list[str] | str
@@ -70,6 +95,15 @@ class KafkaProducerConfig:
  @field_validator("kafka_servers")
  @classmethod
  def validate_kafka_servers(cls, v: list[str] | str) -> list[str] | str:
+ """Validate kafka_servers field.
+
+ Args:
+ v: List of Kafka broker addresses or a comma-separated string.
+
+ Returns:
+ list[str] | str: The validated value.
+
+ """
  return _validate_kafka_servers(v)

  @field_validator(
@@ -79,10 +113,24 @@ class KafkaProducerConfig:
  )
  @classmethod
  def validate_positive_timeouts(cls, v: int) -> int:
+ """Validate positive timeout values.
+
+ Args:
+ v: Timeout value to validate.
+
+ Returns:
+ int: The validated value.
+
+ """
  return _validate_non_negative(v)

  def to_kafka_config(self) -> dict[str, list[str] | str | None]:
- """Build Kafka producer configuration dictionary."""
+ """Build Kafka producer configuration dictionary.
+
+ Returns:
+ dict[str, list[str] | str | None]: Kafka client configuration.
+
+ """
  config = {
  "bootstrap.servers": self.kafka_servers,
  "client.id": self.kafka_group_id,
@@ -117,6 +165,7 @@ class KafkaConsumerConfig:
  kafka_connection_check_timeout_sec: Timeout for connection health checks
  kafka_auto_commit_interval_ms: Interval for automatic offset commits
  kafka_connection_check_interval_sec: Interval between connection checks
+
  """

  kafka_servers: list[str] | str
@@ -135,11 +184,32 @@ class KafkaConsumerConfig:
  @field_validator("kafka_servers")
  @classmethod
  def validate_kafka_servers(cls, v: list[str] | str) -> list[str] | str:
+ """Validate kafka_servers field.
+
+ Args:
+ v: List of Kafka broker addresses or a comma-separated string.
+
+ Returns:
+ list[str] | str: The validated value.
+
+ """
  return _validate_kafka_servers(v)

  @field_validator("kafka_offset")
  @classmethod
  def validate_kafka_offset(cls, v: str) -> str:
+ """Validate kafka_offset field.
+
+ Args:
+ v: Offset value to validate.
+
+ Returns:
+ str: The validated offset value.
+
+ Raises:
+ ValueError: If the value is empty.
+
+ """
  if v.strip() == "":
  raise ValueError("kafka_offset cannot be empty")
  return v
@@ -152,10 +222,24 @@ class KafkaConsumerConfig:
  )
  @classmethod
  def validate_positive_timeouts(cls, v: int) -> int:
+ """Validate positive timeout values.
+
+ Args:
+ v: Timeout value to validate.
+
+ Returns:
+ int: The validated value.
+
+ """
  return _validate_non_negative(v)

  def to_kafka_config(self) -> dict[str, int | list[str] | str]:
- """Build Kafka consumer configuration dictionary."""
+ """Build Kafka consumer configuration dictionary.
+
+ Returns:
+ dict[str, int | list[str] | str]: Kafka client configuration.
+
+ """
  config = {
  "bootstrap.servers": self.kafka_servers,
  "enable.auto.commit": False,
cledar/kafka/exceptions.py CHANGED
@@ -1,22 +1,17 @@
+ """Kafka-related exceptions for the Cledar SDK."""
+
+
  class KafkaProducerNotConnectedError(Exception):
- """
- Custom exception for KafkaProducer to indicate it is not connected.
- """
+ """Custom exception for KafkaProducer to indicate it is not connected."""


  class KafkaConsumerNotConnectedError(Exception):
- """
- Custom exception for KafkaConsumer to indicate it is not connected.
- """
+ """Custom exception for KafkaConsumer to indicate it is not connected."""


  class KafkaConnectionError(Exception):
- """
- Custom exception to indicate connection failures.
- """
+ """Custom exception to indicate connection failures."""


  class KafkaConsumerError(Exception):
- """
- Custom exception for KafkaConsumer to indicate errors.
- """
+ """Custom exception for KafkaConsumer to indicate errors."""
cledar/kafka/handlers/dead_letter.py CHANGED
@@ -1,3 +1,5 @@
+ """Dead letter handler for Kafka messages."""
+
  import json

  from ..clients.producer import KafkaProducer
@@ -7,16 +9,15 @@ from ..models.output import FailedMessageData


  class DeadLetterHandler:
- """
- A Handler for handling failed messages and sending them to a DLQ topic.
- """
+ """A handler for handling failed messages and sending them to a DLQ topic."""

  def __init__(self, producer: KafkaProducer, dlq_topic: str) -> None:
- """
- Initialize DeadLetterHandler with a Kafka producer and DLQ topic.
+ """Initialize DeadLetterHandler with a Kafka producer and DLQ topic.
+
+ Args:
+ producer: KafkaProducer instance.
+ dlq_topic: The name of the DLQ Kafka topic.

- :param producer: KafkaProducer instance.
- :param dlq_topic: The name of the DLQ Kafka topic.
  """
  self.producer: KafkaProducer = producer
  self.dlq_topic: str = dlq_topic
@@ -26,11 +27,12 @@ class DeadLetterHandler:
  message: KafkaMessage,
  failures_details: list[FailedMessageData] | None,
  ) -> None:
- """
- Handles a failed message by sending it to the DLQ topic.
+ """Handle a failed message by sending it to the DLQ topic.
+
+ Args:
+ message: The original Kafka message.
+ failures_details: A list of FailedMessageData.

- :param message: The original Kafka message.
- :param failures_details: A list of FailedMessageData.
  """
  logger.info("Handling message for DLQ.")

@@ -43,11 +45,14 @@ class DeadLetterHandler:
  self,
  failures_details: list[FailedMessageData] | None,
  ) -> list[tuple[str, bytes]]:
- """
- Builds Kafka headers containing exception details.
+ """Build Kafka headers containing exception details.
+
+ Args:
+ failures_details: A list of FailedMessageData.
+
+ Returns:
+ list[tuple[str, bytes]]: A list of Kafka headers.

- :param failures_details: A list of FailedMessageData.
- :return: A list of Kafka headers.
  """
  headers: list[tuple[str, bytes]] = []

@@ -65,12 +70,13 @@ class DeadLetterHandler:
  key: str | None,
  headers: list[tuple[str, bytes]],
  ) -> None:
- """
- Sends a DLQ message to the Kafka DLQ topic with headers.
+ """Send a DLQ message to the Kafka DLQ topic with headers.
+
+ Args:
+ message: The DLQ message payload.
+ key: The original Kafka message key.
+ headers: Kafka headers containing exception details.

- :param message: The DLQ message payload.
- :param key: The original Kafka message key.
- :param headers: Kafka headers containing exception details.
  """
  self.producer.send(
  topic=self.dlq_topic, value=message, key=key, headers=headers
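Note: the handler's public surface is handle(message, failures_details), which serializes the failure details into Kafka headers and republishes the payload to the DLQ topic. A hedged sketch — the FailedMessageData values are illustrative, and `producer` and `failed_message` stand for a connected KafkaProducer and a KafkaMessage obtained from consume_next():

from cledar.kafka.handlers.dead_letter import DeadLetterHandler
from cledar.kafka.models.output import FailedMessageData

dlq = DeadLetterHandler(producer=producer, dlq_topic="orders.dlq")
failure = FailedMessageData(
    raised_at="2024-01-01T00:00:00Z",          # illustrative timestamp
    exception_message="schema validation failed",
    exception_trace=None,
)
dlq.handle(message=failed_message, failures_details=[failure])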
cledar/kafka/handlers/parser.py CHANGED
@@ -1,3 +1,5 @@
+ """Kafka message parser module."""
+
  import json

  from pydantic import BaseModel
@@ -9,15 +11,25 @@ from ..models.message import KafkaMessage


  class IncorrectMessageValueError(Exception):
- """
- Message needs to have `value` field present in order to be parsed.
+ """Message needs to have `value` field present in order to be parsed.

  This is unless `model` is set to `None`.
  """


  class InputParser[Payload: BaseModel]:
+ """Parser for Kafka messages into Pydantic models.
+
+ Generic class for parsing Kafka messages.
+ """
+
  def __init__(self, model: type[Payload]) -> None:
+ """Initialize InputParser with a Pydantic model.
+
+ Args:
+ model: The Pydantic model to validate messages against.
+
+ """
  self.model: type[Payload] = model

  def parse_json(self, json_str: str) -> Payload:
@@ -25,6 +37,16 @@ class InputParser[Payload: BaseModel]:

  Invalid JSON should raise IncorrectMessageValueError, while schema
  validation errors should bubble up as ValidationError.
+
+ Args:
+ json_str: The JSON string to parse.
+
+ Returns:
+ Payload: The validated Pydantic model instance.
+
+ Raises:
+ IncorrectMessageValueError: If the JSON is invalid.
+
  """
  try:
  data = json.loads(json_str)
@@ -34,6 +56,18 @@ class InputParser[Payload: BaseModel]:
  return self.model.model_validate(data)

  def parse_message(self, message: KafkaMessage) -> InputKafkaMessage[Payload]:
+ """Parse a Kafka message into an InputKafkaMessage with a validated payload.
+
+ Args:
+ message: The Kafka message to parse.
+
+ Returns:
+ InputKafkaMessage[Payload]: The parsed message with payload.
+
+ Raises:
+ IncorrectMessageValueError: If message value is missing but required.
+
+ """
  if message.value is None and self.model is not None:
  raise IncorrectMessageValueError
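Note: the docstrings pin down the error contract: malformed JSON raises IncorrectMessageValueError, while a payload that parses but fails schema validation raises pydantic's ValidationError. A small sketch with a hypothetical payload model (OrderEvent is not part of the SDK):

from pydantic import BaseModel, ValidationError
from cledar.kafka.handlers.parser import IncorrectMessageValueError, InputParser

class OrderEvent(BaseModel):
    id: str
    amount: float

parser = InputParser(OrderEvent)
event = parser.parse_json('{"id": "o-1", "amount": 9.99}')  # -> OrderEvent(id="o-1", amount=9.99)

try:
    parser.parse_json("not valid json")
except IncorrectMessageValueError:
    pass  # malformed JSON

try:
    parser.parse_json('{"id": "o-1"}')
except ValidationError:
    pass  # valid JSON, but the required "amount" field is missing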
cledar/kafka/logger.py CHANGED
@@ -1,3 +1,5 @@
+ """Logger configuration for the Kafka module."""
+
  import logging

  logger = logging.getLogger("kafka_service")
cledar/kafka/models/input.py CHANGED
@@ -1,3 +1,5 @@
+ """Input Kafka message model."""
+
  import dataclasses
  from typing import TypeVar

@@ -10,4 +12,6 @@ Payload = TypeVar("Payload", bound=BaseModel)

  @dataclasses.dataclass
  class InputKafkaMessage[Payload](KafkaMessage):
+ """Kafka message with a parsed and validated Pydantic payload."""
+
  payload: Payload
cledar/kafka/models/message.py CHANGED
@@ -1,8 +1,12 @@
+ """Kafka message data class."""
+
  from pydantic.dataclasses import dataclass


  @dataclass
  class KafkaMessage:
+ """Base Kafka message representation."""
+
  topic: str
  value: str | None
  key: str | None
cledar/kafka/models/output.py CHANGED
@@ -1,7 +1,11 @@
+ """Output models for Kafka module."""
+
  import pydantic


  class FailedMessageData(pydantic.BaseModel):
+ """Data structure for recording message processing failures."""
+
  raised_at: str
  exception_message: str | None
  exception_trace: str | None
cledar/kafka/utils/callbacks.py CHANGED
@@ -1,9 +1,18 @@
+ """Kafka callback utilities."""
+
  from confluent_kafka import KafkaError, Message

  from ..logger import logger


  def delivery_callback(error: KafkaError, msg: Message) -> None:
+ """Handle message delivery reports from Kafka producer.
+
+ Args:
+ error: The delivery error (if any).
+ msg: The message that was delivered (or failed).
+
+ """
  try:
  if msg is None:
  logger.warning("Callback received a None message.")
cledar/kafka/utils/messages.py CHANGED
@@ -1,3 +1,5 @@
+ """Kafka message utilities."""
+
  import json

  from ..logger import logger
@@ -7,6 +9,15 @@ _ID_FIELD_KEY = "id"


  def extract_id_from_value(value: str | None) -> str:
+ """Extract an ID field from a JSON-formatted message value.
+
+ Args:
+ value: The message value string.
+
+ Returns:
+ str: The extracted ID or a placeholder if not found or invalid.
+
+ """
  msg_id: str = _UNKNOWN_ID_PLACEHOLDER
  if value is None:
  return msg_id
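Note: per the new docstring, extract_id_from_value looks up the "id" key in a JSON value and falls back to an unknown-ID placeholder (its exact string is not shown in this diff) when the value is missing or not valid JSON:

from cledar.kafka.utils.messages import extract_id_from_value

extract_id_from_value('{"id": "msg-42", "payload": {}}')  # -> "msg-42"
extract_id_from_value("not json")                          # -> the unknown-ID placeholder
extract_id_from_value(None)                                # -> the unknown-ID placeholder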
cledar/kafka/utils/topics.py CHANGED
@@ -1,2 +1,15 @@
+ """Kafka topic utilities."""
+
+
  def build_topic(topic_name: str, prefix: str | None) -> str:
+ """Build a topic name by optionally prepending a prefix.
+
+ Args:
+ topic_name: The base topic name.
+ prefix: An optional prefix to prepend.
+
+ Returns:
+ str: The full topic name.
+
+ """
  return prefix + topic_name if prefix else topic_name
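Note: the behavior is fully visible in the one-line body; for instance:

from cledar.kafka.utils.topics import build_topic

build_topic("orders", "staging.")  # -> "staging.orders"
build_topic("orders", None)        # -> "orders"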
cledar/kserve/__init__.py CHANGED
@@ -1,3 +1,5 @@
+ """KServe integration utilities."""
+
  from .utils import get_input_topic

  __all__ = ["get_input_topic"]
cledar/kserve/utils.py CHANGED
@@ -1,3 +1,5 @@
+ """Utilities for KServe integration and CloudEvents processing."""
+
  CE_SOURCE_HEADER = "ce-source"


@@ -18,6 +20,7 @@ def get_input_topic(headers: dict[str, str]) -> str | None:
  >>> headers = {"ce-source": "kafka://cluster#my-topic"}
  >>> get_input_topic(headers)
  'my-topic'
+
  """
  source = headers.get(CE_SOURCE_HEADER)
  if not source or "#" not in source:
cledar/logging/__init__.py CHANGED
@@ -1,3 +1,5 @@
+ """Logging module with custom formatters for structured logging."""
+
  from .universal_plaintext_formatter import UniversalPlaintextFormatter

  __all__ = ["UniversalPlaintextFormatter"]