typedkafka 0.3.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- typedkafka/__init__.py +53 -0
- typedkafka/admin.py +336 -0
- typedkafka/aio.py +328 -0
- typedkafka/config.py +405 -0
- typedkafka/consumer.py +415 -0
- typedkafka/exceptions.py +130 -0
- typedkafka/producer.py +492 -0
- typedkafka/retry.py +154 -0
- typedkafka/serializers.py +293 -0
- typedkafka/testing.py +523 -0
- typedkafka-0.3.1.dist-info/METADATA +263 -0
- typedkafka-0.3.1.dist-info/RECORD +14 -0
- typedkafka-0.3.1.dist-info/WHEEL +4 -0
- typedkafka-0.3.1.dist-info/licenses/LICENSE +21 -0
typedkafka/__init__.py
ADDED
@@ -0,0 +1,53 @@
"""
typedkafka - A well-documented, fully type-hinted Kafka client for Python.

Built on confluent-kafka with comprehensive docstrings, full type hints,
and a modern Pythonic API.
"""

# Testing utilities in separate namespace
from typedkafka import testing
from typedkafka.admin import AdminError, KafkaAdmin, TopicConfig
from typedkafka.config import ConsumerConfig, ProducerConfig
from typedkafka.consumer import KafkaConsumer
from typedkafka.exceptions import (
    ConsumerError,
    KafkaError,
    ProducerError,
    SerializationError,
)
from typedkafka.producer import KafkaProducer, TransactionContext
from typedkafka.retry import RetryPolicy, retry
from typedkafka.serializers import (
    Deserializer,
    JsonDeserializer,
    JsonSerializer,
    Serializer,
    StringDeserializer,
    StringSerializer,
)

__version__ = "0.3.1"
__all__ = [
    "KafkaProducer",
    "KafkaConsumer",
    "KafkaAdmin",
    "ProducerConfig",
    "ConsumerConfig",
    "TopicConfig",
    "KafkaError",
    "ProducerError",
    "ConsumerError",
    "SerializationError",
    "AdminError",
    "TransactionContext",
    "retry",
    "RetryPolicy",
    "Serializer",
    "Deserializer",
    "JsonSerializer",
    "JsonDeserializer",
    "StringSerializer",
    "StringDeserializer",
    "testing",
]
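
The package re-exports its entire public API at the top level, so everything listed in __all__ can be imported directly from typedkafka. A minimal sketch of that import surface (nothing here beyond the names exported above; the package must be installed):

# Sketch only: the public API is re-exported at the package root.
import typedkafka
from typedkafka import KafkaAdmin, KafkaConsumer, KafkaProducer, JsonSerializer, RetryPolicy

print(typedkafka.__version__)  # "0.3.1" for this release
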
typedkafka/admin.py
ADDED
@@ -0,0 +1,336 @@
"""
Kafka Admin Client with comprehensive documentation and full type safety.

Provides a well-documented wrapper for managing Kafka topics, configurations,
and cluster operations.
"""

from typing import Any, Optional

try:
    from confluent_kafka.admin import AdminClient as ConfluentAdminClient
    from confluent_kafka.admin import NewTopic
except ImportError:
    ConfluentAdminClient = None  # type: ignore[assignment,misc]
    NewTopic = None  # type: ignore[assignment,misc]

from typedkafka.exceptions import KafkaError


class AdminError(KafkaError):
    """Raised when an admin operation fails."""

    pass


class TopicConfig:
    """
    Configuration for creating a new Kafka topic.

    Examples:
        >>> config = (TopicConfig("my-topic")
        ...     .partitions(3)
        ...     .replication_factor(2)
        ...     .config("retention.ms", "86400000"))  # 1 day retention
    """

    def __init__(self, name: str):
        """
        Initialize topic configuration.

        Args:
            name: Topic name
        """
        self.name = name
        self._num_partitions = 1
        self._replication_factor = 1
        self._config: dict[str, str] = {}

    def partitions(self, count: int) -> "TopicConfig":
        """
        Set number of partitions.

        Args:
            count: Number of partitions (must be >= 1)

        Returns:
            Self for method chaining

        Examples:
            >>> config = TopicConfig("my-topic").partitions(10)
        """
        self._num_partitions = count
        return self

    def replication_factor(self, factor: int) -> "TopicConfig":
        """
        Set replication factor.

        Args:
            factor: Replication factor (typically 2 or 3)

        Returns:
            Self for method chaining

        Examples:
            >>> config = TopicConfig("my-topic").replication_factor(3)
        """
        self._replication_factor = factor
        return self

    def config(self, key: str, value: str) -> "TopicConfig":
        """
        Set a topic configuration parameter.

        Args:
            key: Configuration key (e.g., "retention.ms", "compression.type")
            value: Configuration value

        Returns:
            Self for method chaining

        Examples:
            >>> config = (TopicConfig("logs")
            ...     .config("retention.ms", "604800000")  # 7 days
            ...     .config("compression.type", "gzip"))
        """
        self._config[key] = value
        return self


class KafkaAdmin:
    """
    A well-documented Kafka admin client with full type hints.

    Provides methods for managing topics, configurations, and cluster operations
    with comprehensive documentation and better error messages.

    Examples:
        >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
        >>>
        >>> # Create a topic
        >>> admin.create_topic("events", num_partitions=3, replication_factor=2)
        >>>
        >>> # List all topics
        >>> topics = admin.list_topics()
        >>> print(topics)
        >>>
        >>> # Delete a topic
        >>> admin.delete_topic("old-topic")

    Attributes:
        config: The configuration dictionary used to initialize the admin client
    """

    def __init__(self, config: dict[str, Any]):
        """
        Initialize a Kafka admin client.

        Args:
            config: Configuration dictionary. Required option:
                - bootstrap.servers (str): Comma-separated broker addresses

        Raises:
            AdminError: If the admin client cannot be initialized

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>>
            >>> # With multiple brokers
            >>> admin = KafkaAdmin({
            ...     "bootstrap.servers": "broker1:9092,broker2:9092,broker3:9092"
            ... })
        """
        if ConfluentAdminClient is None:
            raise ImportError(
                "confluent-kafka is required. Install with: pip install confluent-kafka"
            )

        self.config = config
        try:
            self._admin = ConfluentAdminClient(config)
        except Exception as e:
            raise AdminError(f"Failed to initialize Kafka admin client: {e}") from e

    def create_topic(
        self,
        topic: str,
        num_partitions: int = 1,
        replication_factor: int = 1,
        config: Optional[dict[str, str]] = None,
        timeout: float = 30.0,
    ) -> None:
        """
        Create a new Kafka topic.

        Args:
            topic: Topic name to create
            num_partitions: Number of partitions (default: 1)
            replication_factor: Replication factor (default: 1, recommended: 2-3)
            config: Optional topic configuration dict
            timeout: Operation timeout in seconds (default: 30.0)

        Raises:
            AdminError: If topic creation fails

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>>
            >>> # Simple topic creation
            >>> admin.create_topic("my-topic")
            >>>
            >>> # Topic with multiple partitions and replication
            >>> admin.create_topic("events", num_partitions=10, replication_factor=3)
            >>>
            >>> # Topic with custom configuration
            >>> admin.create_topic(
            ...     "logs",
            ...     num_partitions=5,
            ...     config={"retention.ms": "604800000"}  # 7 days
            ... )
        """
        try:
            new_topic = NewTopic(
                topic,
                num_partitions=num_partitions,
                replication_factor=replication_factor,
                config=config or {},
            )

            futures = self._admin.create_topics([new_topic], operation_timeout=timeout)

            # Wait for operation to complete
            for topic_name, future in futures.items():
                try:
                    future.result()  # Block until complete
                except Exception as e:
                    raise AdminError(f"Failed to create topic '{topic_name}': {e}") from e

        except AdminError:
            raise
        except Exception as e:
            raise AdminError(f"Failed to create topic '{topic}': {e}") from e

    def delete_topic(self, topic: str, timeout: float = 30.0) -> None:
        """
        Delete a Kafka topic.

        Args:
            topic: Topic name to delete
            timeout: Operation timeout in seconds (default: 30.0)

        Raises:
            AdminError: If topic deletion fails

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>> admin.delete_topic("old-topic")
        """
        try:
            futures = self._admin.delete_topics([topic], operation_timeout=timeout)

            for topic_name, future in futures.items():
                try:
                    future.result()
                except Exception as e:
                    raise AdminError(f"Failed to delete topic '{topic_name}': {e}") from e

        except AdminError:
            raise
        except Exception as e:
            raise AdminError(f"Failed to delete topic '{topic}': {e}") from e

    def list_topics(self, timeout: float = 10.0) -> list[str]:
        """
        List all topics in the Kafka cluster.

        Args:
            timeout: Request timeout in seconds (default: 10.0)

        Returns:
            List of topic names

        Raises:
            AdminError: If listing topics fails

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>> topics = admin.list_topics()
            >>> for topic in topics:
            ...     print(f"Topic: {topic}")
        """
        try:
            metadata = self._admin.list_topics(timeout=timeout)
            return list(metadata.topics.keys())
        except Exception as e:
            raise AdminError(f"Failed to list topics: {e}") from e

    def topic_exists(self, topic: str, timeout: float = 10.0) -> bool:
        """
        Check if a topic exists.

        Args:
            topic: Topic name to check
            timeout: Request timeout in seconds (default: 10.0)

        Returns:
            True if topic exists, False otherwise

        Raises:
            AdminError: If the check fails

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>> if admin.topic_exists("my-topic"):
            ...     print("Topic exists!")
            ... else:
            ...     admin.create_topic("my-topic")
        """
        try:
            topics = self.list_topics(timeout=timeout)
            return topic in topics
        except Exception as e:
            raise AdminError(f"Failed to check if topic exists: {e}") from e

    def describe_topic(self, topic: str, timeout: float = 10.0) -> dict[str, Any]:
        """
        Get detailed information about a topic.

        Args:
            topic: Topic name
            timeout: Request timeout in seconds (default: 10.0)

        Returns:
            Dict containing topic metadata (partitions, replication, etc.)

        Raises:
            AdminError: If describing the topic fails

        Examples:
            >>> admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})
            >>> info = admin.describe_topic("my-topic")
            >>> print(f"Partitions: {len(info['partitions'])}")
        """
        try:
            metadata = self._admin.list_topics(topic=topic, timeout=timeout)
            topic_metadata = metadata.topics.get(topic)

            if topic_metadata is None:
                raise AdminError(f"Topic '{topic}' not found")

            return {
                "topic": topic,
                "partitions": [
                    {
                        "id": p.id,
                        "leader": p.leader,
                        "replicas": p.replicas,
                        "isrs": p.isrs,
                    }
                    for p in topic_metadata.partitions.values()
                ],
            }
        except AdminError:
            raise
        except Exception as e:
            raise AdminError(f"Failed to describe topic '{topic}': {e}") from e
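
Taken together, the docstrings above describe a small but complete topic-management workflow. A usage sketch that sticks to the documented methods (the broker address is a placeholder, and every failure surfaces as AdminError):

# Sketch of a typical admin workflow using only the methods shown above.
from typedkafka import AdminError, KafkaAdmin

admin = KafkaAdmin({"bootstrap.servers": "localhost:9092"})

try:
    if not admin.topic_exists("events"):
        # 7-day retention, matching the docstring example values
        admin.create_topic(
            "events",
            num_partitions=3,
            replication_factor=1,
            config={"retention.ms": "604800000"},
        )

    info = admin.describe_topic("events")
    print(f"events has {len(info['partitions'])} partition(s)")
    print("all topics:", admin.list_topics())
except AdminError as e:
    print(f"admin operation failed: {e}")

Note that TopicConfig provides a fluent builder for partitions, replication factor, and topic-level settings, but the create_topic signature shown in this version takes explicit keyword arguments rather than a TopicConfig instance.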