typedkafka 0.3.1__py3-none-any.whl
- typedkafka/__init__.py +53 -0
- typedkafka/admin.py +336 -0
- typedkafka/aio.py +328 -0
- typedkafka/config.py +405 -0
- typedkafka/consumer.py +415 -0
- typedkafka/exceptions.py +130 -0
- typedkafka/producer.py +492 -0
- typedkafka/retry.py +154 -0
- typedkafka/serializers.py +293 -0
- typedkafka/testing.py +523 -0
- typedkafka-0.3.1.dist-info/METADATA +263 -0
- typedkafka-0.3.1.dist-info/RECORD +14 -0
- typedkafka-0.3.1.dist-info/WHEEL +4 -0
- typedkafka-0.3.1.dist-info/licenses/LICENSE +21 -0
typedkafka/consumer.py
ADDED
@@ -0,0 +1,415 @@

```python
"""
Kafka Consumer with comprehensive documentation and full type safety.

This module provides a well-documented, type-hinted wrapper around confluent-kafka's Consumer.
"""

import json
from collections.abc import Iterator
from typing import Any, Callable, Optional

try:
    from confluent_kafka import Consumer as ConfluentConsumer
    from confluent_kafka import KafkaError as ConfluentKafkaError
    from confluent_kafka import Message
except ImportError:
    ConfluentConsumer = None  # type: ignore[assignment,misc]
    ConfluentKafkaError = None  # type: ignore[assignment,misc]
    Message = None  # type: ignore[assignment,misc]

from typedkafka.exceptions import ConsumerError, SerializationError


class KafkaMessage:
    """
    A Kafka message with convenient access methods.

    Wraps confluent-kafka's Message with better documentation and helper methods.

    Attributes:
        topic: The topic this message came from
        partition: The partition number
        offset: The message offset
        key: The message key as bytes (None if no key)
        value: The message value as bytes
        timestamp: Message timestamp (type, value) tuple
        headers: Message headers as list of (key, value) tuples
    """

    def __init__(self, message: Any):
        """
        Initialize from a confluent-kafka Message.

        Args:
            message: A confluent_kafka.Message object
        """
        self._message = message
        self.topic = message.topic()
        self.partition = message.partition()
        self.offset = message.offset()
        self.key = message.key()
        self.value = message.value()
        self.timestamp_type, self.timestamp = message.timestamp()
        self.headers = message.headers() or []

    def value_as_string(self, encoding: str = "utf-8") -> str:
        """
        Decode the message value as a string (UTF-8 by default).

        Args:
            encoding: Character encoding to use (default: utf-8)

        Returns:
            Decoded string value

        Raises:
            SerializationError: If decoding fails

        Examples:
            >>> msg = consumer.poll()
            >>> text = msg.value_as_string()
            >>> print(f"Received: {text}")
        """
        try:
            return self.value.decode(encoding)  # type: ignore[no-any-return]
        except (UnicodeDecodeError, AttributeError) as e:
            raise SerializationError(
                f"Failed to decode message value as {encoding} string: {e}",
                value=self.value,
                original_error=e,
            ) from e

    def value_as_json(self) -> Any:
        """
        Deserialize the message value as JSON.

        Returns:
            Parsed JSON object (dict, list, str, int, etc.)

        Raises:
            SerializationError: If JSON parsing fails

        Examples:
            >>> msg = consumer.poll()
            >>> data = msg.value_as_json()
            >>> print(f"User ID: {data['user_id']}")
        """
        try:
            return json.loads(self.value.decode("utf-8"))
        except (json.JSONDecodeError, UnicodeDecodeError, AttributeError) as e:
            raise SerializationError(
                f"Failed to deserialize message value as JSON: {e}",
                value=self.value,
                original_error=e,
            ) from e

    def key_as_string(self, encoding: str = "utf-8") -> Optional[str]:
        """
        Decode the message key as a string (UTF-8 by default).

        Args:
            encoding: Character encoding to use (default: utf-8)

        Returns:
            Decoded string key, or None if no key

        Raises:
            SerializationError: If decoding fails

        Examples:
            >>> msg = consumer.poll()
            >>> if msg.key_as_string():
            ...     print(f"Key: {msg.key_as_string()}")
        """
        if self.key is None:
            return None
        try:
            return self.key.decode(encoding)  # type: ignore[no-any-return]
        except (UnicodeDecodeError, AttributeError) as e:
            raise SerializationError(
                f"Failed to decode message key as {encoding} string: {e}",
                value=self.key,
                original_error=e,
            ) from e

    def __repr__(self) -> str:
        """Return string representation of the message."""
        return (
            f"KafkaMessage(topic={self.topic!r}, partition={self.partition}, "
            f"offset={self.offset}, key={self.key!r})"
        )
```
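Taken together, these helpers keep raw-byte handling out of application code. A minimal sketch of how they combine, assuming an already-subscribed `consumer` and a JSON payload; both are hypothetical here:

```python
from typedkafka.exceptions import SerializationError

# Hypothetical: `consumer` is an already-subscribed KafkaConsumer.
msg = consumer.poll(timeout=1.0)
if msg is not None:
    print(msg)                       # __repr__: KafkaMessage(topic=..., partition=..., offset=..., key=...)
    key = msg.key_as_string()        # None when the message was produced without a key
    try:
        event = msg.value_as_json()  # dict/list parsed from the UTF-8 JSON payload
    except SerializationError:
        event = msg.value_as_string()  # fall back to raw text for non-JSON payloads
    print(key, event)
```

The listing continues with the consumer class itself.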
```python
class KafkaConsumer:
    """
    A well-documented Kafka consumer with full type hints.

    This class wraps confluent-kafka's Consumer with:
    - Comprehensive docstrings on every method
    - Full type hints for IDE autocomplete
    - Better error messages
    - Convenient message deserialization methods
    - Context manager support for automatic cleanup
    - Iterator protocol for easy message consumption

    Basic Usage:
        >>> consumer = KafkaConsumer({
        ...     "bootstrap.servers": "localhost:9092",
        ...     "group.id": "my-group",
        ...     "auto.offset.reset": "earliest"
        ... })
        >>> consumer.subscribe(["my-topic"])
        >>> for msg in consumer:
        ...     print(f"Received: {msg.value_as_string()}")

    With Context Manager:
        >>> with KafkaConsumer(config) as consumer:
        ...     consumer.subscribe(["topic"])
        ...     for msg in consumer:
        ...         process(msg)

    Attributes:
        config: The configuration dictionary used to initialize the consumer
    """

    def __init__(self, config: dict[str, Any]):
        """
        Initialize a Kafka consumer with the given configuration.

        Args:
            config: Configuration dictionary for the consumer. Common options:
                - bootstrap.servers (str): Comma-separated list of broker addresses
                - group.id (str): Consumer group ID (required for subscribe())
                - client.id (str): An identifier for this client
                - auto.offset.reset (str): What to do when there's no initial offset.
                  "earliest" = start from beginning, "latest" = start from end
                - enable.auto.commit (bool): Automatically commit offsets (default: True)
                - auto.commit.interval.ms (int): Frequency of offset commits in milliseconds
                - max.poll.interval.ms (int): Max time between polls before being kicked from group
                - session.timeout.ms (int): Timeout for detecting consumer failures

        Raises:
            ImportError: If confluent-kafka is not installed
            ConsumerError: If the consumer cannot be initialized

        Examples:
            >>> # Basic consumer
            >>> consumer = KafkaConsumer({
            ...     "bootstrap.servers": "localhost:9092",
            ...     "group.id": "my-consumer-group",
            ...     "auto.offset.reset": "earliest"
            ... })

            >>> # Consumer with manual offset management
            >>> consumer = KafkaConsumer({
            ...     "bootstrap.servers": "localhost:9092",
            ...     "group.id": "my-group",
            ...     "enable.auto.commit": False
            ... })
        """
        if ConfluentConsumer is None:
            raise ImportError(
                "confluent-kafka is required. Install with: pip install confluent-kafka"
            )

        self.config = config
        self.poll_timeout: float = 1.0
        try:
            self._consumer = ConfluentConsumer(config)
        except Exception as e:
            raise ConsumerError(
                f"Failed to initialize Kafka consumer: {e}",
                original_error=e,
            ) from e

    def subscribe(
        self,
        topics: list[str],
        on_assign: Optional[Callable[[Any, Any], None]] = None,
        on_revoke: Optional[Callable[[Any, Any], None]] = None,
        on_lost: Optional[Callable[[Any, Any], None]] = None,
    ) -> None:
        """
        Subscribe to one or more topics.

        Args:
            topics: List of topic names to subscribe to
            on_assign: Callback invoked when partitions are assigned.
                Signature: callback(consumer, partitions)
            on_revoke: Callback invoked when partitions are revoked.
                Signature: callback(consumer, partitions)
            on_lost: Callback invoked when partitions are lost (unclean).
                Signature: callback(consumer, partitions)

        Raises:
            ConsumerError: If subscription fails

        Examples:
            >>> # Subscribe to a single topic
            >>> consumer.subscribe(["my-topic"])

            >>> # Subscribe to multiple topics
            >>> consumer.subscribe(["orders", "payments", "shipments"])

            >>> # Subscribe with rebalance callbacks
            >>> def on_assign(consumer, partitions):
            ...     print(f"Assigned: {partitions}")
            >>> def on_revoke(consumer, partitions):
            ...     print(f"Revoked: {partitions}")
            >>> consumer.subscribe(["my-topic"], on_assign=on_assign, on_revoke=on_revoke)
        """
        try:
            kwargs: dict[str, Any] = {}
            if on_assign is not None:
                kwargs["on_assign"] = on_assign
            if on_revoke is not None:
                kwargs["on_revoke"] = on_revoke
            if on_lost is not None:
                kwargs["on_lost"] = on_lost
            self._consumer.subscribe(topics, **kwargs)
        except Exception as e:
            raise ConsumerError(
                f"Failed to subscribe to topics {topics}: {e}",
                original_error=e,
            ) from e
```
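A common use for the rebalance hooks is flushing work before partitions move to another group member. The sketch below commits synchronously from `on_revoke`; the group name, topic, and the commit-on-revoke policy are this example's assumptions, not library behavior:

```python
from typedkafka.consumer import KafkaConsumer
from typedkafka.exceptions import ConsumerError

consumer = KafkaConsumer({
    "bootstrap.servers": "localhost:9092",
    "group.id": "order-workers",    # hypothetical group
    "enable.auto.commit": False,
})

def on_revoke(inner_consumer, partitions):
    # The callback receives the underlying confluent-kafka consumer;
    # commit through the wrapper so offsets are durable before reassignment.
    try:
        consumer.commit(asynchronous=False)
    except ConsumerError:
        pass  # nothing consumed yet, or the commit raced the rebalance

consumer.subscribe(["orders"], on_revoke=on_revoke)
```

The listing resumes with `poll()` and `commit()`.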
```python
    def poll(self, timeout: float = 1.0) -> Optional[KafkaMessage]:
        """
        Poll for a single message.

        Args:
            timeout: Maximum time to wait for a message in seconds (default: 1.0)

        Returns:
            KafkaMessage if a message was received, None if timeout expired

        Raises:
            ConsumerError: If an error occurs during polling

        Examples:
            >>> # Poll with default 1 second timeout
            >>> msg = consumer.poll()
            >>> if msg:
            ...     print(f"Received: {msg.value_as_string()}")

            >>> # Poll with longer timeout
            >>> msg = consumer.poll(timeout=5.0)

            >>> # Poll in a loop
            >>> while True:
            ...     msg = consumer.poll(timeout=1.0)
            ...     if msg:
            ...         process(msg)
            ...         consumer.commit(msg)
        """
        try:
            raw_msg = self._consumer.poll(timeout=timeout)
            if raw_msg is None:
                return None
            if raw_msg.error():
                raise ConsumerError(f"Consumer error: {raw_msg.error()}")
            return KafkaMessage(raw_msg)
        except ConsumerError:
            raise
        except Exception as e:
            raise ConsumerError(
                f"Error while polling: {e}",
                original_error=e,
            ) from e

    def commit(self, message: Optional[KafkaMessage] = None, asynchronous: bool = True) -> None:
        """
        Commit offsets to Kafka.

        Args:
            message: Specific message to commit. If None, commits all consumed messages.
            asynchronous: If True, commit asynchronously (default). If False, wait for confirmation.

        Raises:
            ConsumerError: If commit fails

        Examples:
            >>> # Commit after processing each message
            >>> msg = consumer.poll()
            >>> if msg:
            ...     process(msg)
            ...     consumer.commit(msg)

            >>> # Commit all consumed messages
            >>> consumer.commit()

            >>> # Synchronous commit (wait for confirmation)
            >>> consumer.commit(msg, asynchronous=False)
        """
        try:
            if message:
                self._consumer.commit(message=message._message, asynchronous=asynchronous)  # type: ignore[call-overload]
            else:
                self._consumer.commit(asynchronous=asynchronous)  # type: ignore[call-overload]
        except Exception as e:
            raise ConsumerError(
                f"Failed to commit offsets: {e}",
                original_error=e,
            ) from e
```
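With `enable.auto.commit` disabled, `poll()` plus a synchronous `commit()` gives the standard at-least-once loop: an offset is committed only after processing succeeds, so a crash replays the message instead of dropping it. A sketch, with `handle()` standing in for hypothetical application logic:

```python
from typedkafka.consumer import KafkaConsumer

# At-least-once processing sketch; handle() is a hypothetical callback.
consumer = KafkaConsumer({
    "bootstrap.servers": "localhost:9092",
    "group.id": "payments",
    "enable.auto.commit": False,
})
consumer.subscribe(["payments"])
try:
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue                              # timeout expired, poll again
        handle(msg)                               # process before committing
        consumer.commit(msg, asynchronous=False)  # durable commit after success
finally:
    consumer.close()
```

Cleanup and iteration round out the class below.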
```python
    def close(self) -> None:
        """
        Close the consumer and leave the consumer group.

        It's recommended to use the consumer as a context manager instead of calling
        this method directly.

        Examples:
            >>> consumer = KafkaConsumer(config)
            >>> try:
            ...     consumer.subscribe(["topic"])
            ...     for msg in consumer:
            ...         process(msg)
            ... finally:
            ...     consumer.close()

            >>> # Better: use context manager
            >>> with KafkaConsumer(config) as consumer:
            ...     consumer.subscribe(["topic"])
            ...     for msg in consumer:
            ...         process(msg)
        """
        try:
            self._consumer.close()
        except Exception as e:
            raise ConsumerError(
                f"Failed to close consumer: {e}",
                original_error=e,
            ) from e

    def __iter__(self) -> Iterator[KafkaMessage]:
        """
        Iterate over messages indefinitely.

        Uses the configured poll_timeout (default 1.0s). Configure via the
        poll_timeout attribute.

        Yields:
            KafkaMessage objects as they arrive

        Examples:
            >>> for msg in consumer:
            ...     print(f"Received: {msg.value_as_string()}")
            ...     consumer.commit(msg)

            >>> # With custom poll timeout
            >>> consumer.poll_timeout = 5.0
            >>> for msg in consumer:
            ...     process(msg)
        """
        while True:
            msg = self.poll(timeout=self.poll_timeout)
            if msg:
                yield msg

    def __enter__(self) -> "KafkaConsumer":
        """Enter context manager."""
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Exit context manager and cleanup resources."""
        self.close()
```
typedkafka/exceptions.py
ADDED
@@ -0,0 +1,130 @@

```python
"""Exception classes for typedkafka with clear, actionable error messages."""

from typing import Optional


class KafkaError(Exception):
    """
    Base exception for all Kafka-related errors.

    All typedkafka exceptions inherit from this base class, making it easy
    to catch all Kafka-related errors with a single except clause.

    Examples:
        >>> try:
        ...     producer.send("topic", "message")
        ... except KafkaError as e:
        ...     logger.error(f"Kafka operation failed: {e}")
    """

    pass


class ProducerError(KafkaError):
    """
    Raised when a Producer operation fails.

    This exception is raised when message production fails, such as:
    - Message serialization errors
    - Network connectivity issues
    - Broker unavailability
    - Invalid topic names
    - Queue full errors

    Attributes:
        message: Human-readable error description
        original_error: The underlying error from confluent-kafka (if any)

    Examples:
        >>> try:
        ...     producer.send("invalid-topic!", {"key": "value"})
        ... except ProducerError as e:
        ...     logger.error(f"Failed to produce message: {e}")
        ...     # Handle retry logic or dead-letter queue
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        """
        Initialize a ProducerError.

        Args:
            message: Human-readable error description
            original_error: The underlying exception that caused this error
        """
        super().__init__(message)
        self.original_error = original_error


class ConsumerError(KafkaError):
    """
    Raised when a Consumer operation fails.

    This exception is raised when message consumption fails, such as:
    - Message deserialization errors
    - Consumer group coordination failures
    - Offset commit errors
    - Network connectivity issues
    - Invalid consumer configuration

    Attributes:
        message: Human-readable error description
        original_error: The underlying error from confluent-kafka (if any)

    Examples:
        >>> try:
        ...     for message in consumer:
        ...         process(message)
        ... except ConsumerError as e:
        ...     logger.error(f"Consumer error: {e}")
        ...     # Handle reconnection or alerting
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        """
        Initialize a ConsumerError.

        Args:
            message: Human-readable error description
            original_error: The underlying exception that caused this error
        """
        super().__init__(message)
        self.original_error = original_error


class SerializationError(KafkaError):
    """
    Raised when message serialization or deserialization fails.

    This occurs when:
    - JSON encoding/decoding fails
    - Avro schema validation fails
    - Custom serializer raises an exception
    - Message format is invalid

    Attributes:
        message: Human-readable error description
        value: The value that failed to serialize/deserialize
        original_error: The underlying error (if any)

    Examples:
        >>> try:
        ...     producer.send_json("topic", non_serializable_object)
        ... except SerializationError as e:
        ...     logger.error(f"Failed to serialize message: {e}")
        ...     # Log the problematic data for debugging
    """

    def __init__(
        self, message: str, value: object = None, original_error: Optional[Exception] = None
    ):
        """
        Initialize a SerializationError.

        Args:
            message: Human-readable error description
            value: The value that failed to serialize/deserialize
            original_error: The underlying exception that caused this error
        """
        super().__init__(message)
        self.value = value
        self.original_error = original_error
```