typedkafka-0.3.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
typedkafka/config.py ADDED
@@ -0,0 +1,405 @@
+"""
+Type-safe configuration builders for Kafka producer and consumer.
+
+Provides fluent API for building Kafka configurations with IDE autocomplete
+and validation, preventing common configuration errors.
+"""
+
+from typing import Any, Literal, Union
+
+_VALID_ACKS = {"0", "1", "all", 0, 1, -1}
+_VALID_COMPRESSIONS = {"none", "gzip", "snappy", "lz4", "zstd"}
+_VALID_OFFSET_RESETS = {"earliest", "latest", "none"}
+
+
+class ProducerConfig:
+    """
+    Type-safe builder for Kafka producer configuration.
+
+    Provides a fluent API with full type hints and validation for common
+    producer configuration options.
+
+    Examples:
+        >>> config = (ProducerConfig()
+        ...     .bootstrap_servers("localhost:9092")
+        ...     .compression("gzip")
+        ...     .acks("all")
+        ...     .build())
+        >>>
+        >>> from typedkafka import KafkaProducer
+        >>> producer = KafkaProducer(config)
+
+        >>> # With multiple brokers
+        >>> config = (ProducerConfig()
+        ...     .bootstrap_servers("broker1:9092,broker2:9092,broker3:9092")
+        ...     .client_id("my-application")
+        ...     .build())
+    """
+
+    def __init__(self) -> None:
+        """Initialize an empty producer configuration."""
+        self._config: dict[str, Any] = {}
+
+    def bootstrap_servers(self, servers: str) -> "ProducerConfig":
+        """
+        Set the Kafka broker addresses.
+
+        Args:
+            servers: Comma-separated list of broker addresses.
+                Example: "localhost:9092" or "broker1:9092,broker2:9092"
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ProducerConfig().bootstrap_servers("localhost:9092")
+            >>> config = ProducerConfig().bootstrap_servers("b1:9092,b2:9092,b3:9092")
+        """
+        self._config["bootstrap.servers"] = servers
+        return self
+
+    def client_id(self, client_id: str) -> "ProducerConfig":
+        """
+        Set the client ID for this producer.
+
+        Args:
+            client_id: Client identifier string
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["client.id"] = client_id
+        return self
+
+    def acks(self, acks: Union[Literal["0", "1", "all"], int]) -> "ProducerConfig":
+        """
+        Set the number of acknowledgments required.
+
+        Args:
+            acks: Acknowledgment level:
+                - "0" or 0: No acknowledgment (fire and forget)
+                - "1" or 1: Leader acknowledgment only
+                - "all" or -1: All in-sync replicas must acknowledge
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ProducerConfig().acks("all")  # Maximum durability
+            >>> config = ProducerConfig().acks("1")  # Leader only
+            >>> config = ProducerConfig().acks("0")  # No acknowledgment
+        """
+        if acks not in _VALID_ACKS:
+            raise ValueError(
+                f"Invalid acks value: {acks!r}. Must be one of: '0', '1', 'all', 0, 1, -1"
+            )
+        self._config["acks"] = acks
+        return self
+
+    def compression(
+        self, compression_type: Literal["none", "gzip", "snappy", "lz4", "zstd"]
+    ) -> "ProducerConfig":
+        """
+        Set the compression codec.
+
+        Args:
+            compression_type: Compression algorithm to use
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ProducerConfig().compression("gzip")
+            >>> config = ProducerConfig().compression("zstd")  # Best compression
+        """
+        if compression_type not in _VALID_COMPRESSIONS:
+            raise ValueError(
+                f"Invalid compression type: {compression_type!r}. "
+                f"Must be one of: {', '.join(sorted(_VALID_COMPRESSIONS))}"
+            )
+        self._config["compression.type"] = compression_type
+        return self
+
+    def max_in_flight_requests(self, count: int) -> "ProducerConfig":
+        """
+        Set maximum number of unacknowledged requests.
+
+        Args:
+            count: Max in-flight requests per connection (1-5 recommended)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["max.in.flight.requests.per.connection"] = count
+        return self
+
+    def linger_ms(self, milliseconds: int) -> "ProducerConfig":
+        """
+        Set time to wait before sending a batch.
+
+        Args:
+            milliseconds: Time to wait for more messages before sending
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> # Wait up to 10ms to batch messages
+            >>> config = ProducerConfig().linger_ms(10)
+        """
+        if milliseconds < 0:
+            raise ValueError(f"linger_ms must be non-negative, got {milliseconds}")
+        self._config["linger.ms"] = milliseconds
+        return self
+
+    def batch_size(self, bytes_size: int) -> "ProducerConfig":
+        """
+        Set maximum batch size in bytes.
+
+        Args:
+            bytes_size: Maximum batch size (default: 16384)
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ProducerConfig().batch_size(32768)  # 32KB batches
+        """
+        if bytes_size < 0:
+            raise ValueError(f"batch_size must be non-negative, got {bytes_size}")
+        self._config["batch.size"] = bytes_size
+        return self
+
+    def retries(self, count: int) -> "ProducerConfig":
+        """
+        Set number of retries for failed sends.
+
+        Args:
+            count: Number of retries (default: 2147483647 for infinite)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["retries"] = count
+        return self
+
+    def set(self, key: str, value: Any) -> "ProducerConfig":
+        """
+        Set a custom configuration parameter.
+
+        Use this for advanced configurations not covered by type-safe methods.
+
+        Args:
+            key: Configuration key
+            value: Configuration value
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ProducerConfig().set("queue.buffering.max.messages", 100000)
+        """
+        self._config[key] = value
+        return self
+
+    def build(self) -> dict[str, Any]:
+        """
+        Build and return the configuration dictionary.
+
+        Returns:
+            Configuration dict ready for KafkaProducer
+
+        Examples:
+            >>> config = (ProducerConfig()
+            ...     .bootstrap_servers("localhost:9092")
+            ...     .acks("all")
+            ...     .build())
+            >>> from typedkafka import KafkaProducer
+            >>> producer = KafkaProducer(config)
+        """
+        return self._config.copy()
+
+
+class ConsumerConfig:
+    """
+    Type-safe builder for Kafka consumer configuration.
+
+    Provides a fluent API with full type hints and validation for common
+    consumer configuration options.
+
+    Examples:
+        >>> config = (ConsumerConfig()
+        ...     .bootstrap_servers("localhost:9092")
+        ...     .group_id("my-consumer-group")
+        ...     .auto_offset_reset("earliest")
+        ...     .build())
+        >>>
+        >>> from typedkafka import KafkaConsumer
+        >>> consumer = KafkaConsumer(config)
+    """
+
+    def __init__(self) -> None:
+        """Initialize an empty consumer configuration."""
+        self._config: dict[str, Any] = {}
+
+    def bootstrap_servers(self, servers: str) -> "ConsumerConfig":
+        """
+        Set the Kafka broker addresses.
+
+        Args:
+            servers: Comma-separated list of broker addresses
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["bootstrap.servers"] = servers
+        return self
+
+    def group_id(self, group_id: str) -> "ConsumerConfig":
+        """
+        Set the consumer group ID (required for subscribe()).
+
+        Args:
+            group_id: Consumer group identifier
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> config = ConsumerConfig().group_id("my-application-consumers")
+        """
+        self._config["group.id"] = group_id
+        return self
+
+    def client_id(self, client_id: str) -> "ConsumerConfig":
+        """
+        Set the client ID for this consumer.
+
+        Args:
+            client_id: Client identifier string
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["client.id"] = client_id
+        return self
+
+    def auto_offset_reset(self, reset: Literal["earliest", "latest", "none"]) -> "ConsumerConfig":
+        """
+        Set behavior when no initial offset exists.
+
+        Args:
+            reset: Offset reset behavior:
+                - "earliest": Start from the beginning
+                - "latest": Start from the end (skip existing messages)
+                - "none": Throw error if no offset exists
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> # Process all messages from the beginning
+            >>> config = ConsumerConfig().auto_offset_reset("earliest")
+            >>>
+            >>> # Only process new messages
+            >>> config = ConsumerConfig().auto_offset_reset("latest")
+        """
+        if reset not in _VALID_OFFSET_RESETS:
+            raise ValueError(
+                f"Invalid auto_offset_reset value: {reset!r}. "
+                f"Must be one of: 'earliest', 'latest', 'none'"
+            )
+        self._config["auto.offset.reset"] = reset
+        return self
+
+    def enable_auto_commit(self, enabled: bool = True) -> "ConsumerConfig":
+        """
+        Enable or disable automatic offset commits.
+
+        Args:
+            enabled: True to auto-commit, False for manual commits
+
+        Returns:
+            Self for method chaining
+
+        Examples:
+            >>> # Manual offset management
+            >>> config = ConsumerConfig().enable_auto_commit(False)
+        """
+        self._config["enable.auto.commit"] = enabled
+        return self
+
+    def auto_commit_interval_ms(self, milliseconds: int) -> "ConsumerConfig":
+        """
+        Set frequency of automatic offset commits.
+
+        Args:
+            milliseconds: Commit interval (default: 5000)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["auto.commit.interval.ms"] = milliseconds
+        return self
+
+    def session_timeout_ms(self, milliseconds: int) -> "ConsumerConfig":
+        """
+        Set consumer session timeout.
+
+        Args:
+            milliseconds: Session timeout (default: 10000)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["session.timeout.ms"] = milliseconds
+        return self
+
+    def max_poll_interval_ms(self, milliseconds: int) -> "ConsumerConfig":
+        """
+        Set maximum time between polls.
+
+        Args:
+            milliseconds: Max poll interval (default: 300000)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["max.poll.interval.ms"] = milliseconds
+        return self
+
+    def max_poll_records(self, count: int) -> "ConsumerConfig":
+        """
+        Set maximum records returned in a single poll.
+
+        Args:
+            count: Max records per poll
+
+        Returns:
+            Self for method chaining
+        """
+        self._config["max.poll.records"] = count
+        return self
+
+    def set(self, key: str, value: Any) -> "ConsumerConfig":
+        """
+        Set a custom configuration parameter.
+
+        Args:
+            key: Configuration key
+            value: Configuration value
+
+        Returns:
+            Self for method chaining
+        """
+        self._config[key] = value
+        return self
+
+    def build(self) -> dict[str, Any]:
+        """
+        Build and return the configuration dictionary.
+
+        Returns:
+            Configuration dict ready for KafkaConsumer
+        """
+        return self._config.copy()
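
The two builders above cover the common producer and consumer settings. The snippet below is a minimal usage sketch assembled only from the doctest examples in this file; the broker address, client ID, and group ID are placeholders, and constructing KafkaProducer/KafkaConsumer is assumed to require a reachable broker.

from typedkafka import KafkaConsumer, KafkaProducer
from typedkafka.config import ConsumerConfig, ProducerConfig

# Build a validated producer configuration, then hand the dict to the client.
producer_config = (
    ProducerConfig()
    .bootstrap_servers("localhost:9092")  # placeholder broker address
    .client_id("example-producer")        # placeholder client ID
    .acks("all")                          # wait for all in-sync replicas
    .compression("gzip")
    .linger_ms(10)                        # batch messages for up to 10 ms
    .build()
)
producer = KafkaProducer(producer_config)

# Build a matching consumer configuration with manual offset commits.
consumer_config = (
    ConsumerConfig()
    .bootstrap_servers("localhost:9092")
    .group_id("example-consumer-group")   # required for subscribe()
    .auto_offset_reset("earliest")        # read from the beginning if no offset exists
    .enable_auto_commit(False)
    .build()
)
consumer = KafkaConsumer(consumer_config)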