chub-dev 0.1.0 → 0.1.2-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -0
- package/bin/chub-mcp +2 -0
- package/dist/airtable/docs/database/javascript/DOC.md +1437 -0
- package/dist/airtable/docs/database/python/DOC.md +1735 -0
- package/dist/amplitude/docs/analytics/javascript/DOC.md +1282 -0
- package/dist/amplitude/docs/analytics/python/DOC.md +1199 -0
- package/dist/anthropic/docs/claude-api/javascript/DOC.md +503 -0
- package/dist/anthropic/docs/claude-api/python/DOC.md +389 -0
- package/dist/asana/docs/tasks/DOC.md +1396 -0
- package/dist/assemblyai/docs/transcription/DOC.md +1043 -0
- package/dist/atlassian/docs/confluence/javascript/DOC.md +1347 -0
- package/dist/atlassian/docs/confluence/python/DOC.md +1604 -0
- package/dist/auth0/docs/identity/javascript/DOC.md +968 -0
- package/dist/auth0/docs/identity/python/DOC.md +1199 -0
- package/dist/aws/docs/s3/javascript/DOC.md +1773 -0
- package/dist/aws/docs/s3/python/DOC.md +1807 -0
- package/dist/binance/docs/trading/javascript/DOC.md +1315 -0
- package/dist/binance/docs/trading/python/DOC.md +1454 -0
- package/dist/braintree/docs/gateway/javascript/DOC.md +1278 -0
- package/dist/braintree/docs/gateway/python/DOC.md +1179 -0
- package/dist/chromadb/docs/embeddings-db/javascript/DOC.md +1263 -0
- package/dist/chromadb/docs/embeddings-db/python/DOC.md +1707 -0
- package/dist/clerk/docs/auth/javascript/DOC.md +1220 -0
- package/dist/clerk/docs/auth/python/DOC.md +274 -0
- package/dist/cloudflare/docs/workers/javascript/DOC.md +918 -0
- package/dist/cloudflare/docs/workers/python/DOC.md +994 -0
- package/dist/cockroachdb/docs/distributed-db/DOC.md +1500 -0
- package/dist/cohere/docs/llm/DOC.md +1335 -0
- package/dist/datadog/docs/monitoring/javascript/DOC.md +1740 -0
- package/dist/datadog/docs/monitoring/python/DOC.md +1815 -0
- package/dist/deepgram/docs/speech/javascript/DOC.md +885 -0
- package/dist/deepgram/docs/speech/python/DOC.md +685 -0
- package/dist/deepl/docs/translation/javascript/DOC.md +887 -0
- package/dist/deepl/docs/translation/python/DOC.md +944 -0
- package/dist/deepseek/docs/llm/DOC.md +1220 -0
- package/dist/directus/docs/headless-cms/javascript/DOC.md +1128 -0
- package/dist/directus/docs/headless-cms/python/DOC.md +1276 -0
- package/dist/discord/docs/bot/javascript/DOC.md +1090 -0
- package/dist/discord/docs/bot/python/DOC.md +1130 -0
- package/dist/elasticsearch/docs/search/DOC.md +1634 -0
- package/dist/elevenlabs/docs/text-to-speech/javascript/DOC.md +336 -0
- package/dist/elevenlabs/docs/text-to-speech/python/DOC.md +552 -0
- package/dist/firebase/docs/auth/DOC.md +1015 -0
- package/dist/gemini/docs/genai/javascript/DOC.md +691 -0
- package/dist/gemini/docs/genai/python/DOC.md +555 -0
- package/dist/github/docs/octokit/DOC.md +1560 -0
- package/dist/google/docs/bigquery/javascript/DOC.md +1688 -0
- package/dist/google/docs/bigquery/python/DOC.md +1503 -0
- package/dist/hubspot/docs/crm/javascript/DOC.md +1805 -0
- package/dist/hubspot/docs/crm/python/DOC.md +2033 -0
- package/dist/huggingface/docs/transformers/DOC.md +948 -0
- package/dist/intercom/docs/messaging/javascript/DOC.md +1844 -0
- package/dist/intercom/docs/messaging/python/DOC.md +1797 -0
- package/dist/jira/docs/issues/javascript/DOC.md +1420 -0
- package/dist/jira/docs/issues/python/DOC.md +1492 -0
- package/dist/kafka/docs/streaming/javascript/DOC.md +1671 -0
- package/dist/kafka/docs/streaming/python/DOC.md +1464 -0
- package/dist/landingai-ade/docs/api/DOC.md +620 -0
- package/dist/landingai-ade/docs/sdk/python/DOC.md +489 -0
- package/dist/landingai-ade/docs/sdk/typescript/DOC.md +542 -0
- package/dist/landingai-ade/skills/SKILL.md +489 -0
- package/dist/launchdarkly/docs/feature-flags/javascript/DOC.md +1191 -0
- package/dist/launchdarkly/docs/feature-flags/python/DOC.md +1671 -0
- package/dist/linear/docs/tracker/DOC.md +1554 -0
- package/dist/livekit/docs/realtime/javascript/DOC.md +303 -0
- package/dist/livekit/docs/realtime/python/DOC.md +163 -0
- package/dist/mailchimp/docs/marketing/DOC.md +1420 -0
- package/dist/meilisearch/docs/search/DOC.md +1241 -0
- package/dist/microsoft/docs/onedrive/javascript/DOC.md +1421 -0
- package/dist/microsoft/docs/onedrive/python/DOC.md +1549 -0
- package/dist/mongodb/docs/atlas/DOC.md +2041 -0
- package/dist/notion/docs/workspace-api/javascript/DOC.md +1435 -0
- package/dist/notion/docs/workspace-api/python/DOC.md +1400 -0
- package/dist/okta/docs/identity/javascript/DOC.md +1171 -0
- package/dist/okta/docs/identity/python/DOC.md +1401 -0
- package/dist/openai/docs/chat/javascript/DOC.md +407 -0
- package/dist/openai/docs/chat/python/DOC.md +568 -0
- package/dist/paypal/docs/checkout/DOC.md +278 -0
- package/dist/pinecone/docs/sdk/javascript/DOC.md +984 -0
- package/dist/pinecone/docs/sdk/python/DOC.md +1395 -0
- package/dist/plaid/docs/banking/javascript/DOC.md +1163 -0
- package/dist/plaid/docs/banking/python/DOC.md +1203 -0
- package/dist/playwright-community/skills/login-flows/SKILL.md +108 -0
- package/dist/postmark/docs/transactional-email/DOC.md +1168 -0
- package/dist/prisma/docs/orm/javascript/DOC.md +1419 -0
- package/dist/prisma/docs/orm/python/DOC.md +1317 -0
- package/dist/qdrant/docs/vector-search/javascript/DOC.md +1221 -0
- package/dist/qdrant/docs/vector-search/python/DOC.md +1653 -0
- package/dist/rabbitmq/docs/message-queue/javascript/DOC.md +1193 -0
- package/dist/rabbitmq/docs/message-queue/python/DOC.md +1243 -0
- package/dist/razorpay/docs/payments/javascript/DOC.md +1219 -0
- package/dist/razorpay/docs/payments/python/DOC.md +1330 -0
- package/dist/redis/docs/key-value/javascript/DOC.md +1851 -0
- package/dist/redis/docs/key-value/python/DOC.md +2054 -0
- package/dist/registry.json +2817 -0
- package/dist/replicate/docs/model-hosting/DOC.md +1318 -0
- package/dist/resend/docs/email/DOC.md +1271 -0
- package/dist/salesforce/docs/crm/javascript/DOC.md +1241 -0
- package/dist/salesforce/docs/crm/python/DOC.md +1183 -0
- package/dist/search-index.json +1 -0
- package/dist/sendgrid/docs/email-api/javascript/DOC.md +371 -0
- package/dist/sendgrid/docs/email-api/python/DOC.md +656 -0
- package/dist/sentry/docs/error-tracking/javascript/DOC.md +1073 -0
- package/dist/sentry/docs/error-tracking/python/DOC.md +1309 -0
- package/dist/shopify/docs/storefront/DOC.md +457 -0
- package/dist/slack/docs/workspace/javascript/DOC.md +933 -0
- package/dist/slack/docs/workspace/python/DOC.md +271 -0
- package/dist/square/docs/payments/javascript/DOC.md +1855 -0
- package/dist/square/docs/payments/python/DOC.md +1728 -0
- package/dist/stripe/docs/api/DOC.md +1727 -0
- package/dist/stripe/docs/payments/DOC.md +1726 -0
- package/dist/stytch/docs/auth/javascript/DOC.md +1813 -0
- package/dist/stytch/docs/auth/python/DOC.md +1962 -0
- package/dist/supabase/docs/client/DOC.md +1606 -0
- package/dist/twilio/docs/messaging/python/DOC.md +469 -0
- package/dist/twilio/docs/messaging/typescript/DOC.md +946 -0
- package/dist/vercel/docs/platform/DOC.md +1940 -0
- package/dist/weaviate/docs/vector-db/javascript/DOC.md +1268 -0
- package/dist/weaviate/docs/vector-db/python/DOC.md +1388 -0
- package/dist/zendesk/docs/support/javascript/DOC.md +2150 -0
- package/dist/zendesk/docs/support/python/DOC.md +2297 -0
- package/package.json +22 -6
- package/skills/get-api-docs/SKILL.md +84 -0
- package/src/commands/annotate.js +83 -0
- package/src/commands/build.js +12 -1
- package/src/commands/feedback.js +150 -0
- package/src/commands/get.js +83 -42
- package/src/commands/search.js +7 -0
- package/src/index.js +43 -17
- package/src/lib/analytics.js +90 -0
- package/src/lib/annotations.js +57 -0
- package/src/lib/bm25.js +170 -0
- package/src/lib/cache.js +69 -6
- package/src/lib/config.js +8 -3
- package/src/lib/identity.js +99 -0
- package/src/lib/registry.js +103 -20
- package/src/lib/telemetry.js +86 -0
- package/src/mcp/server.js +177 -0
- package/src/mcp/tools.js +251 -0
package/dist/kafka/docs/streaming/python/DOC.md

@@ -0,0 +1,1464 @@
---
name: streaming
description: "kafka-python - Apache Kafka client for Python streaming and messaging"
metadata:
  languages: "python"
  versions: "2.0.2"
  updated-on: "2026-03-02"
  source: maintainer
  tags: "kafka,streaming,messaging,queue,events"
---

# kafka-python - Apache Kafka Client for Python

## Golden Rule

**ALWAYS use `kafka-python` version 2.0.2 or later.**

```bash
pip install kafka-python
```

**DO NOT use:**
- `pykafka` (different library)
- `confluent-kafka` (different library, built on the librdkafka C client)
- Outdated versions below 2.0.0

kafka-python is a pure Python client for Apache Kafka with support for producers, consumers, and admin operations.
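
To confirm that the right client is installed (and not `pykafka` or `confluent-kafka`), a quick runtime check is enough. This is a minimal sketch that only assumes `kafka-python` exposes `kafka.__version__`:

```python
# Sanity check: the import name is `kafka`, and the installed version should be >= 2.0.2
import kafka

print(kafka.__version__)  # e.g. '2.0.2'
assert tuple(int(x) for x in kafka.__version__.split('.')[:3]) >= (2, 0, 2)
```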

## Installation

### Basic Installation

```bash
pip install kafka-python
```

### With UV

```bash
uv pip install kafka-python
```

### Adding to pyproject.toml

```toml
[project]
dependencies = [
    "kafka-python>=2.0.2"
]
```

### With Optional Compression Support

```bash
pip install kafka-python[snappy]
pip install kafka-python[lz4]
pip install kafka-python[zstd]
```

Or all compression codecs:

```bash
pip install kafka-python[snappy,lz4,zstd]
```

### Environment Variables

Create a `.env` file:

```bash
KAFKA_BROKERS=localhost:9092
KAFKA_USERNAME=your-username
KAFKA_PASSWORD=your-password
KAFKA_TOPIC=my-topic
KAFKA_GROUP_ID=my-group
```

Load environment variables in your application:

```python
import os
from dotenv import load_dotenv

load_dotenv()

brokers = os.getenv('KAFKA_BROKERS', 'localhost:9092').split(',')
topic = os.getenv('KAFKA_TOPIC', 'my-topic')
group_id = os.getenv('KAFKA_GROUP_ID', 'my-group')
```

## Initialization

### Basic Producer

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')
```

### Basic Consumer

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer('my-topic',
                         bootstrap_servers='localhost:9092',
                         group_id='my-group')
```

### With Environment Variables

```python
import os
from kafka import KafkaProducer, KafkaConsumer

producer = KafkaProducer(
    bootstrap_servers=os.getenv('KAFKA_BROKERS', 'localhost:9092').split(',')
)

consumer = KafkaConsumer(
    os.getenv('KAFKA_TOPIC', 'my-topic'),
    bootstrap_servers=os.getenv('KAFKA_BROKERS', 'localhost:9092').split(','),
    group_id=os.getenv('KAFKA_GROUP_ID', 'my-group')
)
```

### With Multiple Brokers

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['kafka1:9092', 'kafka2:9092', 'kafka3:9092']
)
```

## Producer

### Creating a Producer with Configuration

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    client_id='my-producer',
    acks='all',
    retries=5,
    max_in_flight_requests_per_connection=5
)
```

### Producer with JSON Serialization

```python
import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    value_serializer=lambda v: json.dumps(v).encode('utf-8')
)
```

### Producer with Key and Value Serialization

```python
import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    key_serializer=lambda k: k.encode('utf-8'),
    value_serializer=lambda v: json.dumps(v).encode('utf-8')
)
```

### Producer with Compression

```python
from kafka import KafkaProducer

# GZIP compression
producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    compression_type='gzip'
)

# Snappy compression
producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    compression_type='snappy'
)

# LZ4 compression
producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    compression_type='lz4'
)

# ZSTD compression
producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    compression_type='zstd'
)
```

### Producer Configuration Options

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    client_id='my-app',
    acks='all',  # 0, 1, or 'all'
    compression_type='gzip',  # None, 'gzip', 'snappy', 'lz4', 'zstd'
    retries=5,  # Number of retries
    batch_size=16384,  # Batch size in bytes
    linger_ms=10,  # Wait time before sending
    buffer_memory=33554432,  # Total memory for buffering
    max_block_ms=60000,  # Max blocking time for send
    max_request_size=1048576,  # Max request size
    request_timeout_ms=30000,  # Request timeout
    connections_max_idle_ms=540000,  # Close idle connections after
    retry_backoff_ms=100,  # Retry backoff time
    max_in_flight_requests_per_connection=5,  # Max concurrent requests
    enable_idempotence=False,  # Idempotent producer
    metadata_max_age_ms=300000  # Metadata refresh interval
)
```

### Sending Messages - Basic

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

# Send raw bytes
future = producer.send('my-topic', b'Hello Kafka')

# Wait for send to complete
record_metadata = future.get(timeout=10)
print(f'Sent to partition {record_metadata.partition} at offset {record_metadata.offset}')
```

### Sending Messages with Key

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

future = producer.send('my-topic', key=b'user-123', value=b'User logged in')
record_metadata = future.get(timeout=10)
```

### Sending Messages with Partition

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

# Send to specific partition
future = producer.send('my-topic', value=b'Hello', partition=0)
record_metadata = future.get(timeout=10)
```

### Sending Messages with Timestamp

```python
import time
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

timestamp_ms = int(time.time() * 1000)
future = producer.send('my-topic', value=b'Hello', timestamp_ms=timestamp_ms)
record_metadata = future.get(timeout=10)
```

### Sending Messages with Headers

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

headers = [
    ('correlation-id', b'12345'),
    ('user-id', b'user-123')
]

future = producer.send('my-topic', value=b'Hello', headers=headers)
record_metadata = future.get(timeout=10)
```

### Sending JSON Messages

```python
import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='localhost:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8')
)

data = {
    'user_id': 'user-123',
    'action': 'login',
    'timestamp': 1234567890
}

future = producer.send('user-events', data)
record_metadata = future.get(timeout=10)
```

### Sending with Key and Value Serialization

```python
import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='localhost:9092',
    key_serializer=lambda k: k.encode('utf-8'),
    value_serializer=lambda v: json.dumps(v).encode('utf-8')
)

future = producer.send('my-topic', key='user-123', value={'action': 'login'})
record_metadata = future.get(timeout=10)
```

### Async Send with Callback

```python
from kafka import KafkaProducer
from kafka.errors import KafkaError

producer = KafkaProducer(bootstrap_servers='localhost:9092')

def on_send_success(record_metadata):
    print(f'Sent to partition {record_metadata.partition} at offset {record_metadata.offset}')

def on_send_error(excp):
    print(f'Error: {excp}')

future = producer.send('my-topic', b'Hello Kafka')
future.add_callback(on_send_success)
future.add_errback(on_send_error)

# Continue without blocking
# Use flush() to wait for all messages
producer.flush()
```

### Flush Messages

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

producer.send('my-topic', b'Message 1')
producer.send('my-topic', b'Message 2')
producer.send('my-topic', b'Message 3')

# Wait for all messages to be sent
producer.flush()
```

### Close Producer

```python
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')

producer.send('my-topic', b'Hello')
producer.flush()
producer.close()
```

### Error Handling

```python
from kafka import KafkaProducer
from kafka.errors import KafkaError, KafkaTimeoutError

producer = KafkaProducer(bootstrap_servers='localhost:9092')

try:
    future = producer.send('my-topic', b'Hello Kafka')
    record_metadata = future.get(timeout=10)
    print(f'Success: partition={record_metadata.partition}, offset={record_metadata.offset}')
except KafkaTimeoutError:
    print('Request timed out')
except KafkaError as e:
    print(f'Kafka error: {e}')
except Exception as e:
    print(f'Unexpected error: {e}')
finally:
    producer.close()
```

## Consumer

### Creating a Consumer

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)
```

### Consumer with Multiple Topics

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'topic-a', 'topic-b', 'topic-c',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)
```

### Consumer with Pattern Subscription

```python
import re
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

# Subscribe to topics matching pattern
consumer.subscribe(pattern='^awesome.*')
```

### Consumer Configuration Options

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers=['localhost:9092'],
    client_id='my-consumer',
    group_id='my-group',
    auto_offset_reset='earliest',  # 'earliest' or 'latest'
    enable_auto_commit=True,  # Auto commit offsets
    auto_commit_interval_ms=5000,  # Auto commit interval
    max_poll_records=500,  # Max records per poll
    max_poll_interval_ms=300000,  # Max poll interval
    session_timeout_ms=10000,  # Session timeout
    heartbeat_interval_ms=3000,  # Heartbeat interval
    fetch_min_bytes=1,  # Min fetch bytes
    fetch_max_bytes=52428800,  # Max fetch bytes
    fetch_max_wait_ms=500,  # Max fetch wait time
    max_partition_fetch_bytes=1048576,  # Max bytes per partition
    request_timeout_ms=305000,  # Request timeout
    connections_max_idle_ms=540000,  # Close idle connections
    consumer_timeout_ms=None  # Stop iteration timeout
)
```

### Consumer with JSON Deserialization

```python
import json
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    value_deserializer=lambda m: json.loads(m.decode('utf-8'))
)
```

### Consumer with Key and Value Deserialization

```python
import json
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    key_deserializer=lambda k: k.decode('utf-8') if k else None,
    value_deserializer=lambda v: json.loads(v.decode('utf-8'))
)
```

### Consuming Messages - Basic Iteration

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

for message in consumer:
    print(f'Topic: {message.topic}')
    print(f'Partition: {message.partition}')
    print(f'Offset: {message.offset}')
    print(f'Key: {message.key}')
    print(f'Value: {message.value}')
    print(f'Timestamp: {message.timestamp}')
    print(f'Headers: {message.headers}')
```

### Consuming from Beginning

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    auto_offset_reset='earliest',
    enable_auto_commit=False
)

for message in consumer:
    print(message.value)
```

### Consuming with Timeout

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    consumer_timeout_ms=1000  # Stop iteration after 1 second of no messages
)

for message in consumer:
    print(message.value)

print('No more messages')
```

### Consuming JSON Messages

```python
import json
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'user-events',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    value_deserializer=lambda m: json.loads(m.decode('utf-8'))
)

for message in consumer:
    data = message.value
    print(f"User: {data['user_id']}, Action: {data['action']}")
```

### Consuming with Headers

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

for message in consumer:
    # Headers is a list of tuples: [(key, value), ...]
    headers_dict = {k: v.decode('utf-8') for k, v in message.headers} if message.headers else {}

    correlation_id = headers_dict.get('correlation-id')
    user_id = headers_dict.get('user-id')

    print(f'Correlation ID: {correlation_id}')
    print(f'User ID: {user_id}')
    print(f'Message: {message.value}')
```

### Batch Consumption with poll()

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

while True:
    messages = consumer.poll(timeout_ms=1000, max_records=100)

    for topic_partition, records in messages.items():
        print(f'Received {len(records)} messages from {topic_partition}')

        for record in records:
            print(f'Offset: {record.offset}, Value: {record.value}')

    # Commit offsets after processing
    consumer.commit()
```

### Manual Partition Assignment

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(bootstrap_servers='localhost:9092')

# Manually assign partitions
partition = TopicPartition('my-topic', 0)
consumer.assign([partition])

for message in consumer:
    print(message.value)
```

### Manual Offset Management

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

for message in consumer:
    process_message(message)

    # Manual commit
    consumer.commit()
```

### Commit Specific Offsets

```python
from kafka import KafkaConsumer, TopicPartition, OffsetAndMetadata

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

for message in consumer:
    process_message(message)

    # Commit specific offset
    tp = TopicPartition(message.topic, message.partition)
    offsets = {tp: OffsetAndMetadata(message.offset + 1, None)}
    consumer.commit(offsets=offsets)
```

### Async Commit

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

def on_commit_complete(offsets, response):
    if isinstance(response, Exception):
        print(f'Commit failed: {response}')
    else:
        print(f'Commit successful: {offsets}')

for message in consumer:
    process_message(message)
    consumer.commit_async(callback=on_commit_complete)
```

### Seek to Offset

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

# Seek to specific offset
partition = TopicPartition('my-topic', 0)
consumer.seek(partition, 100)

for message in consumer:
    print(f'Offset: {message.offset}')
```

### Seek to Beginning

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

# Seek all assigned partitions to beginning
consumer.seek_to_beginning()

for message in consumer:
    print(message.value)
```

### Seek to End

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

# Seek all assigned partitions to end
consumer.seek_to_end()

for message in consumer:
    print(message.value)
```

### Get Offset Information

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

partition = TopicPartition('my-topic', 0)

# Get current position
position = consumer.position(partition)
print(f'Current position: {position}')

# Get committed offset
committed = consumer.committed(partition)
print(f'Committed offset: {committed}')

# Get beginning offset
beginning = consumer.beginning_offsets([partition])
print(f'Beginning offset: {beginning}')

# Get end offset
end = consumer.end_offsets([partition])
print(f'End offset: {end}')
```
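
The offsets above are enough to estimate consumer lag: for each partition, lag is the end offset minus the committed offset. The sketch below combines `partitions_for_topic()`, `end_offsets()`, and `committed()` for that purpose; the topic and group names are the same placeholders used throughout this document.

```python
from kafka import KafkaConsumer, TopicPartition

# Sketch: per-partition lag for a consumer group (topic/group names are placeholders).
consumer = KafkaConsumer(
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

partitions = [TopicPartition('my-topic', p)
              for p in consumer.partitions_for_topic('my-topic')]
end_offsets = consumer.end_offsets(partitions)

for tp in partitions:
    committed = consumer.committed(tp)  # None if the group has no committed offset yet
    lag = end_offsets[tp] - (committed or 0)
    print(f'{tp}: committed={committed}, end={end_offsets[tp]}, lag={lag}')

consumer.close()
```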

### Pause and Resume

```python
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

partition = TopicPartition('my-topic', 0)

# Pause consumption
consumer.pause(partition)

# Check paused partitions
paused = consumer.paused()
print(f'Paused partitions: {paused}')

# Resume consumption
consumer.resume(partition)
```
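
`pause()` and `resume()` are typically used for backpressure: keep calling `poll()` so the consumer stays in the group, but stop fetching while a local backlog drains. A rough sketch of that pattern follows; the backlog list and threshold are purely illustrative stand-ins for real processing state.

```python
from kafka import KafkaConsumer

# Sketch: pause fetching when a local backlog grows too large, resume once it drains.
consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

backlog = []
MAX_BACKLOG = 1000  # illustrative threshold

while True:
    for _, batch in consumer.poll(timeout_ms=500).items():
        backlog.extend(batch)

    if len(backlog) >= MAX_BACKLOG and not consumer.paused():
        consumer.pause(*consumer.assignment())  # stop fetching, keep the group alive

    # stand-in for real processing: work through part of the backlog
    del backlog[:100]

    if len(backlog) < MAX_BACKLOG and consumer.paused():
        consumer.resume(*consumer.paused())
```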

### Get Topic and Partition Information

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(bootstrap_servers='localhost:9092')

# List all topics
topics = consumer.topics()
print(f'Topics: {topics}')

# Get partitions for a topic
partitions = consumer.partitions_for_topic('my-topic')
print(f'Partitions: {partitions}')
```

### Close Consumer

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group'
)

# Process some messages
for message in consumer:
    if should_stop:
        break

# Close consumer
consumer.close()
```

### Close with Auto Commit

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    group_id='my-group',
    enable_auto_commit=False
)

try:
    for message in consumer:
        process_message(message)
finally:
    # Close and commit offsets
    consumer.close(autocommit=True)
```

## Admin Operations

### Creating Admin Client

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(
    bootstrap_servers='localhost:9092',
    client_id='admin-client'
)
```

### List Topics

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

topics = admin.list_topics()
print(f'Topics: {topics}')
```

### Create Topics

```python
from kafka import KafkaAdminClient
from kafka.admin import NewTopic

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

topic = NewTopic(
    name='my-new-topic',
    num_partitions=3,
    replication_factor=1
)

admin.create_topics([topic])
```

### Create Topics with Configuration

```python
from kafka import KafkaAdminClient
from kafka.admin import NewTopic

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

topic = NewTopic(
    name='my-new-topic',
    num_partitions=3,
    replication_factor=1,
    topic_configs={
        'retention.ms': '86400000',
        'cleanup.policy': 'delete'
    }
)

admin.create_topics([topic])
```

### Delete Topics

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

admin.delete_topics(['topic-to-delete'])
```

### Describe Topics

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

metadata = admin.describe_topics(['my-topic'])
print(metadata)
```

### Create Partitions

```python
from kafka import KafkaAdminClient
from kafka.admin import NewPartitions

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

topic_partitions = {
    'my-topic': NewPartitions(total_count=5)
}

admin.create_partitions(topic_partitions)
```

### Describe Consumer Groups

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

groups = admin.describe_consumer_groups(['my-group'])
print(groups)
```

### List Consumer Groups

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

groups = admin.list_consumer_groups()
print(groups)
```

### List Consumer Group Offsets

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

offsets = admin.list_consumer_group_offsets('my-group')
print(offsets)
```

### Delete Consumer Groups

```python
from kafka import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

admin.delete_consumer_groups(['my-group'])
```

### Describe Configs

```python
from kafka import KafkaAdminClient
from kafka.admin import ConfigResource, ConfigResourceType

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

resource = ConfigResource(ConfigResourceType.TOPIC, 'my-topic')
configs = admin.describe_configs([resource])
print(configs)
```

### Alter Configs

```python
from kafka import KafkaAdminClient
from kafka.admin import ConfigResource, ConfigResourceType

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')

resource = ConfigResource(
    ConfigResourceType.TOPIC,
    'my-topic',
    configs={'retention.ms': '604800000'}
)

admin.alter_configs([resource])
```

## Security and Authentication

### SSL Configuration

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9093',
    security_protocol='SSL',
    ssl_check_hostname=True,
    ssl_cafile='/path/to/ca-cert',
    ssl_certfile='/path/to/client-cert',
    ssl_keyfile='/path/to/client-key'
)
```

### SSL Consumer

```python
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='kafka:9093',
    group_id='my-group',
    security_protocol='SSL',
    ssl_check_hostname=True,
    ssl_cafile='/path/to/ca-cert',
    ssl_certfile='/path/to/client-cert',
    ssl_keyfile='/path/to/client-key'
)
```

### SASL PLAIN Authentication

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9092',
    security_protocol='SASL_PLAINTEXT',
    sasl_mechanism='PLAIN',
    sasl_plain_username='username',
    sasl_plain_password='password'
)
```

### SASL SCRAM-SHA-256

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9092',
    security_protocol='SASL_SSL',
    sasl_mechanism='SCRAM-SHA-256',
    sasl_plain_username='username',
    sasl_plain_password='password',
    ssl_cafile='/path/to/ca-cert'
)
```

### SASL SCRAM-SHA-512

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9092',
    security_protocol='SASL_SSL',
    sasl_mechanism='SCRAM-SHA-512',
    sasl_plain_username='username',
    sasl_plain_password='password',
    ssl_cafile='/path/to/ca-cert'
)
```

### SSL with SASL

```python
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9093',
    security_protocol='SASL_SSL',
    sasl_mechanism='PLAIN',
    sasl_plain_username='username',
    sasl_plain_password='password',
    ssl_check_hostname=True,
    ssl_cafile='/path/to/ca-cert'
)
```
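
The credentials in these examples are hardcoded for brevity; in practice they would usually come from the environment variables defined in the `.env` example earlier. A small sketch of that wiring (the CA certificate path is a placeholder):

```python
import os
from kafka import KafkaProducer

# Sketch: SASL credentials from the environment (matches the .env keys shown above).
producer = KafkaProducer(
    bootstrap_servers=os.getenv('KAFKA_BROKERS', 'localhost:9092').split(','),
    security_protocol='SASL_SSL',
    sasl_mechanism='SCRAM-SHA-256',
    sasl_plain_username=os.environ['KAFKA_USERNAME'],
    sasl_plain_password=os.environ['KAFKA_PASSWORD'],
    ssl_cafile='/path/to/ca-cert'  # placeholder path
)
```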

## Complete Examples

### Complete Producer Example

```python
import json
import time
from kafka import KafkaProducer
from kafka.errors import KafkaError

def create_producer():
    return KafkaProducer(
        bootstrap_servers=['localhost:9092'],
        client_id='order-producer',
        acks='all',
        retries=5,
        key_serializer=lambda k: k.encode('utf-8'),
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        compression_type='gzip'
    )

def send_order(producer, order):
    try:
        future = producer.send(
            'orders',
            key=order['order_id'],
            value=order,
            headers=[
                ('correlation-id', str(time.time()).encode('utf-8')),
                ('source', b'order-service')
            ]
        )

        record_metadata = future.get(timeout=10)
        print(f"Order {order['order_id']} sent successfully")
        print(f"  Partition: {record_metadata.partition}")
        print(f"  Offset: {record_metadata.offset}")
        return True

    except KafkaError as e:
        print(f"Failed to send order {order['order_id']}: {e}")
        return False

def main():
    producer = create_producer()

    try:
        orders = [
            {'order_id': 'order-001', 'amount': 99.99, 'customer': 'customer-123'},
            {'order_id': 'order-002', 'amount': 149.99, 'customer': 'customer-456'},
            {'order_id': 'order-003', 'amount': 79.99, 'customer': 'customer-789'}
        ]

        for order in orders:
            send_order(producer, order)
            time.sleep(0.1)

        # Wait for all messages to be sent
        producer.flush()
        print("All orders sent successfully")

    except Exception as e:
        print(f"Error: {e}")
    finally:
        producer.close()

if __name__ == '__main__':
    main()
```

### Complete Consumer Example

```python
import json
import signal
import sys
from kafka import KafkaConsumer
from kafka.errors import KafkaError

class OrderConsumer:
    def __init__(self):
        self.consumer = KafkaConsumer(
            'orders',
            bootstrap_servers=['localhost:9092'],
            group_id='order-processor',
            client_id='order-consumer',
            auto_offset_reset='earliest',
            enable_auto_commit=False,
            max_poll_records=100,
            key_deserializer=lambda k: k.decode('utf-8') if k else None,
            value_deserializer=lambda v: json.loads(v.decode('utf-8'))
        )
        self.running = True

        # Handle graceful shutdown
        signal.signal(signal.SIGINT, self.shutdown)
        signal.signal(signal.SIGTERM, self.shutdown)

    def shutdown(self, signum, frame):
        print("\nShutting down consumer...")
        self.running = False

    def process_order(self, order):
        print(f"Processing order {order['order_id']}")
        print(f"  Customer: {order['customer']}")
        print(f"  Amount: ${order['amount']}")
        # Process the order here
        return True

    def run(self):
        print("Starting order consumer...")

        try:
            while self.running:
                messages = self.consumer.poll(timeout_ms=1000, max_records=10)

                for topic_partition, records in messages.items():
                    for message in records:
                        try:
                            # Get headers
                            headers = {k: v.decode('utf-8')
                                       for k, v in message.headers} if message.headers else {}

                            correlation_id = headers.get('correlation-id', 'unknown')

                            print(f"\nReceived message (correlation-id: {correlation_id})")
                            print(f"  Partition: {message.partition}")
                            print(f"  Offset: {message.offset}")

                            # Process the order
                            if self.process_order(message.value):
                                print(f"  Order {message.value['order_id']} processed successfully")

                        except Exception as e:
                            print(f"Error processing message: {e}")

                # Commit after processing batch
                self.consumer.commit()

        except KafkaError as e:
            print(f"Kafka error: {e}")
        finally:
            self.consumer.close()
            print("Consumer closed")

if __name__ == '__main__':
    consumer = OrderConsumer()
    consumer.run()
```

### Complete Admin Example

```python
from kafka import KafkaAdminClient
from kafka.admin import NewTopic, ConfigResource, ConfigResourceType
from kafka.errors import TopicAlreadyExistsError, KafkaError

def create_admin_client():
    return KafkaAdminClient(
        bootstrap_servers=['localhost:9092'],
        client_id='admin-client'
    )

def create_topic(admin, topic_name, num_partitions=3, replication_factor=1):
    topic = NewTopic(
        name=topic_name,
        num_partitions=num_partitions,
        replication_factor=replication_factor,
        topic_configs={
            'retention.ms': '86400000',  # 1 day
            'cleanup.policy': 'delete',
            'compression.type': 'gzip'
        }
    )

    try:
        admin.create_topics([topic], validate_only=False)
        print(f"Topic '{topic_name}' created successfully")
        return True
    except TopicAlreadyExistsError:
        print(f"Topic '{topic_name}' already exists")
        return False
    except KafkaError as e:
        print(f"Failed to create topic '{topic_name}': {e}")
        return False

def list_topics(admin):
    topics = admin.list_topics()
    print(f"Available topics: {topics}")
    return topics

def describe_topic(admin, topic_name):
    try:
        metadata = admin.describe_topics([topic_name])
        print(f"\nTopic '{topic_name}' details:")
        print(f"  Metadata: {metadata}")
        return metadata
    except KafkaError as e:
        print(f"Failed to describe topic '{topic_name}': {e}")
        return None

def delete_topic(admin, topic_name):
    try:
        admin.delete_topics([topic_name])
        print(f"Topic '{topic_name}' deleted successfully")
        return True
    except KafkaError as e:
        print(f"Failed to delete topic '{topic_name}': {e}")
        return False

def list_consumer_groups(admin):
    try:
        groups = admin.list_consumer_groups()
        print(f"\nConsumer groups:")
        for group in groups:
            print(f"  - {group}")
        return groups
    except KafkaError as e:
        print(f"Failed to list consumer groups: {e}")
        return []

def describe_consumer_group(admin, group_id):
    try:
        groups = admin.describe_consumer_groups([group_id])
        print(f"\nConsumer group '{group_id}' details:")
        for group in groups:
            print(f"  {group}")
        return groups
    except KafkaError as e:
        print(f"Failed to describe consumer group '{group_id}': {e}")
        return None

def main():
    admin = create_admin_client()

    try:
        # List existing topics
        print("=== Listing Topics ===")
        list_topics(admin)

        # Create new topic
        print("\n=== Creating Topic ===")
        create_topic(admin, 'orders', num_partitions=3, replication_factor=1)

        # Describe topic
        print("\n=== Describing Topic ===")
        describe_topic(admin, 'orders')

        # List consumer groups
        print("\n=== Listing Consumer Groups ===")
        list_consumer_groups(admin)

    except Exception as e:
        print(f"Error: {e}")
    finally:
        admin.close()

if __name__ == '__main__':
    main()
```

### Error Handling Example

```python
import json
import time
from kafka import KafkaProducer, KafkaConsumer
from kafka.errors import (
    KafkaError,
    KafkaTimeoutError,
    NoBrokersAvailable,
    MessageSizeTooLargeError,
    UnknownTopicOrPartitionError
)

def resilient_producer():
    max_retries = 3
    retry_count = 0

    while retry_count < max_retries:
        try:
            producer = KafkaProducer(
                bootstrap_servers=['localhost:9092'],
                value_serializer=lambda v: json.dumps(v).encode('utf-8'),
                acks='all',
                retries=5,
                max_in_flight_requests_per_connection=1
            )

            data = {'message': 'Hello Kafka', 'timestamp': time.time()}

            future = producer.send('my-topic', data)
            record_metadata = future.get(timeout=10)

            print(f"Message sent successfully to partition {record_metadata.partition}")
            producer.close()
            return True

        except NoBrokersAvailable:
            print("No brokers available. Retrying...")
            retry_count += 1
            time.sleep(2)

        except KafkaTimeoutError:
            print("Request timed out. Retrying...")
            retry_count += 1
            time.sleep(2)

        except MessageSizeTooLargeError:
            print("Message too large. Cannot retry.")
            return False

        except UnknownTopicOrPartitionError:
            print("Topic does not exist. Cannot retry.")
            return False

        except KafkaError as e:
            print(f"Kafka error: {e}")
            retry_count += 1
            time.sleep(2)

    print("Max retries exceeded")
    return False

if __name__ == '__main__':
    resilient_producer()
```