cledar-sdk 2.0.2__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cledar/__init__.py +1 -0
- cledar/kafka/README.md +239 -0
- cledar/kafka/__init__.py +42 -0
- cledar/kafka/clients/base.py +117 -0
- cledar/kafka/clients/consumer.py +138 -0
- cledar/kafka/clients/producer.py +97 -0
- cledar/kafka/config/schemas.py +262 -0
- cledar/kafka/exceptions.py +17 -0
- cledar/kafka/handlers/dead_letter.py +88 -0
- cledar/kafka/handlers/parser.py +83 -0
- cledar/kafka/logger.py +5 -0
- cledar/kafka/models/input.py +17 -0
- cledar/kafka/models/message.py +14 -0
- cledar/kafka/models/output.py +12 -0
- cledar/kafka/tests/.env.test.kafka +3 -0
- cledar/kafka/tests/README.md +216 -0
- cledar/kafka/tests/conftest.py +104 -0
- cledar/kafka/tests/integration/__init__.py +1 -0
- cledar/kafka/tests/integration/conftest.py +78 -0
- cledar/kafka/tests/integration/helpers.py +47 -0
- cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
- cledar/kafka/tests/integration/test_integration.py +394 -0
- cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
- cledar/kafka/tests/integration/test_producer_integration.py +217 -0
- cledar/kafka/tests/unit/__init__.py +1 -0
- cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
- cledar/kafka/tests/unit/test_config_validation.py +609 -0
- cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
- cledar/kafka/tests/unit/test_error_handling.py +674 -0
- cledar/kafka/tests/unit/test_input_parser.py +310 -0
- cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
- cledar/kafka/tests/unit/test_utils.py +25 -0
- cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
- cledar/kafka/utils/callbacks.py +28 -0
- cledar/kafka/utils/messages.py +39 -0
- cledar/kafka/utils/topics.py +15 -0
- cledar/kserve/README.md +352 -0
- cledar/kserve/__init__.py +5 -0
- cledar/kserve/tests/__init__.py +0 -0
- cledar/kserve/tests/test_utils.py +64 -0
- cledar/kserve/utils.py +30 -0
- cledar/logging/README.md +53 -0
- cledar/logging/__init__.py +5 -0
- cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
- cledar/logging/universal_plaintext_formatter.py +99 -0
- cledar/monitoring/README.md +71 -0
- cledar/monitoring/__init__.py +5 -0
- cledar/monitoring/monitoring_server.py +156 -0
- cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
- cledar/monitoring/tests/test_monitoring_server.py +59 -0
- cledar/nonce/README.md +99 -0
- cledar/nonce/__init__.py +5 -0
- cledar/nonce/nonce_service.py +62 -0
- cledar/nonce/tests/__init__.py +0 -0
- cledar/nonce/tests/test_nonce_service.py +136 -0
- cledar/redis/README.md +536 -0
- cledar/redis/__init__.py +17 -0
- cledar/redis/async_example.py +112 -0
- cledar/redis/example.py +67 -0
- cledar/redis/exceptions.py +25 -0
- cledar/redis/logger.py +5 -0
- cledar/redis/model.py +14 -0
- cledar/redis/redis.py +764 -0
- cledar/redis/redis_config_store.py +333 -0
- cledar/redis/tests/test_async_integration_redis.py +158 -0
- cledar/redis/tests/test_async_redis_service.py +380 -0
- cledar/redis/tests/test_integration_redis.py +119 -0
- cledar/redis/tests/test_redis_service.py +319 -0
- cledar/storage/README.md +529 -0
- cledar/storage/__init__.py +6 -0
- cledar/storage/constants.py +5 -0
- cledar/storage/exceptions.py +79 -0
- cledar/storage/models.py +41 -0
- cledar/storage/object_storage.py +1274 -0
- cledar/storage/tests/conftest.py +18 -0
- cledar/storage/tests/test_abfs.py +164 -0
- cledar/storage/tests/test_integration_filesystem.py +359 -0
- cledar/storage/tests/test_integration_s3.py +453 -0
- cledar/storage/tests/test_local.py +384 -0
- cledar/storage/tests/test_s3.py +521 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/METADATA +1 -1
- cledar_sdk-2.1.0.dist-info/RECORD +84 -0
- cledar_sdk-2.0.2.dist-info/RECORD +0 -4
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/WHEEL +0 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
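
The bulk of this release is the new cledar.kafka package listed above (clients, config schemas, handlers, models, and tests). As a quick orientation before the diffs, the following is a minimal usage sketch assembled from the integration tests shown below; the broker address, topic name, and group ids are illustrative placeholders, the omitted config fields are assumed to have defaults, and only calls that appear in those tests are used.

    from cledar.kafka.clients.consumer import KafkaConsumer
    from cledar.kafka.clients.producer import KafkaProducer
    from cledar.kafka.config.schemas import KafkaConsumerConfig, KafkaProducerConfig

    # Illustrative values; a real deployment supplies its own servers, group ids, and prefix.
    producer = KafkaProducer(
        KafkaProducerConfig(
            kafka_servers="localhost:9092",
            kafka_group_id="example-producer",
            kafka_topic_prefix="example.",
        )
    )
    producer.connect()
    producer.send(topic="events", value='{"id": "1"}', key="1")

    consumer = KafkaConsumer(
        KafkaConsumerConfig(
            kafka_servers="localhost:9092",
            kafka_group_id="example-consumer",
            kafka_offset="earliest",
            kafka_topic_prefix="example.",
        )
    )
    consumer.connect()
    consumer.subscribe(["events"])
    message = consumer.consume_next()  # may be None if nothing has arrived yet
    if message is not None:
        consumer.commit(message)

    producer.shutdown()
    consumer.shutdown()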

cledar/kafka/tests/integration/test_producer_consumer_interaction.py

@@ -0,0 +1,388 @@
+"""
+Integration tests for producer-consumer interaction patterns,
+message ordering, error handling, and complex scenarios using real Kafka.
+"""
+
+import json
+import time
+from collections.abc import Generator
+
+import pytest
+
+from cledar.kafka.clients.consumer import KafkaConsumer
+from cledar.kafka.clients.producer import KafkaProducer
+from cledar.kafka.config.schemas import KafkaConsumerConfig, KafkaProducerConfig
+from cledar.kafka.handlers.dead_letter import DeadLetterHandler
+from cledar.kafka.handlers.parser import InputParser
+from cledar.kafka.models.output import FailedMessageData
+from cledar.kafka.tests.integration.helpers import E2EData, consume_until
+
+
+class InteractionTestData(E2EData):
+    def to_json(self) -> str: # keep existing API usage
+        return super().to_json()
+
+
+@pytest.fixture
+def producer_config(kafka_bootstrap_servers: str) -> KafkaProducerConfig:
+    """Create producer configuration for integration tests."""
+    return KafkaProducerConfig(
+        kafka_servers=kafka_bootstrap_servers,
+        kafka_group_id="integration-test-producer",
+        kafka_topic_prefix="integration-test.",
+        kafka_block_buffer_time_sec=1,
+        kafka_connection_check_timeout_sec=5,
+        kafka_connection_check_interval_sec=10,
+    )
+
+
+@pytest.fixture
+def consumer_config(kafka_bootstrap_servers: str) -> KafkaConsumerConfig:
+    """Create consumer configuration for integration tests."""
+    return KafkaConsumerConfig(
+        kafka_servers=kafka_bootstrap_servers,
+        kafka_group_id="integration-test-consumer",
+        kafka_offset="earliest",
+        kafka_topic_prefix="integration-test.",
+        kafka_block_consumer_time_sec=1,
+        kafka_connection_check_timeout_sec=5,
+        kafka_auto_commit_interval_ms=1000,
+        kafka_connection_check_interval_sec=10,
+    )
+
+
+@pytest.fixture
+def producer(
+    producer_config: KafkaProducerConfig,
+) -> Generator[KafkaProducer, None, None]:
+    """Create and connect a Kafka producer."""
+    producer = KafkaProducer(producer_config)
+    producer.connect()
+    yield producer
+    producer.shutdown()
+
+
+@pytest.fixture
+def consumer(
+    consumer_config: KafkaConsumerConfig,
+) -> Generator[KafkaConsumer, None, None]:
+    """Create and connect a Kafka consumer."""
+    consumer = KafkaConsumer(consumer_config)
+    consumer.connect()
+    yield consumer
+    consumer.shutdown()
+
+
+def test_producer_consumer_basic_interaction(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test basic producer-consumer interaction with real Kafka."""
+    topic = "test-basic-interaction"
+
+    # Send a message first to create the topic
+    test_data = InteractionTestData(
+        id="test-1", message="Hello Kafka!", timestamp=time.time()
+    )
+    producer.send(topic=topic, value=test_data.to_json(), key="test-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume the message
+    message = consumer.consume_next()
+    assert message is not None
+    assert message.topic == f"integration-test.{topic}"
+    assert message.key == "test-key"
+    assert message.value == test_data.to_json()
+
+
+def test_producer_consumer_multiple_messages(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test producer-consumer interaction with multiple messages."""
+    topic = "test-multiple-messages"
+
+    # Send multiple messages first to create the topic
+    messages = []
+    for i in range(5):
+        test_data = InteractionTestData(
+            id=f"test-{i}", message=f"Message {i}", timestamp=time.time()
+        )
+        messages.append(test_data)
+        producer.send(topic=topic, value=test_data.to_json(), key=f"key-{i}")
+
+    # Wait for topic to be created and messages to be sent
+    time.sleep(3)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume all messages
+    received_messages = consume_until(consumer, expected_count=5, timeout_seconds=8)
+
+    assert len(received_messages) == 5
+
+    # Verify message content
+    for i, message in enumerate(received_messages):
+        assert message.topic == f"integration-test.{topic}"
+        assert message.key == f"key-{i}"
+        assert message.value == messages[i].to_json()
+
+
+def test_producer_consumer_with_parser(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test producer-consumer interaction with message parsing."""
+    topic = "test-parser-interaction"
+
+    # Create parser
+    parser = InputParser(InteractionTestData)
+
+    # Send a message first to create the topic
+    test_data = InteractionTestData(
+        id="test-parse", message="Parsed message", timestamp=time.time()
+    )
+    producer.send(topic=topic, value=test_data.to_json(), key="parse-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume and parse the message
+    message = consumer.consume_next()
+    assert message is not None
+
+    parsed_message = parser.parse_message(message)
+    assert parsed_message.payload.id == "test-parse"
+    assert parsed_message.payload.message == "Parsed message"
+
+
+def test_producer_consumer_error_handling(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test error handling in producer-consumer interaction."""
+    topic = "test-error-handling"
+
+    # Send a message with invalid JSON first to create the topic
+    invalid_json = (
+        '{"id": "test", "message": "invalid json", "timestamp": "not-a-number"}'
+    )
+    producer.send(topic=topic, value=invalid_json, key="error-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume the message
+    message = consumer.consume_next()
+    assert message is not None
+    assert message.value == invalid_json
+
+
+def test_producer_consumer_with_dead_letter_handler(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test producer-consumer interaction with dead letter handler."""
+    topic = "test-dlq-interaction"
+    dlq_topic = "test-dlq-topic"
+
+    # Create dead letter handler
+    dlq_handler = DeadLetterHandler(producer, dlq_topic)
+
+    # Send a message first to create the topic
+    test_data = InteractionTestData(
+        id="test-dlq", message="DLQ test message", timestamp=time.time()
+    )
+    producer.send(topic=topic, value=test_data.to_json(), key="dlq-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume the message
+    message = consumer.consume_next()
+    assert message is not None
+
+    # Simulate processing failure and send to DLQ
+    failure_details = [
+        FailedMessageData(
+            raised_at="2024-01-01T00:00:00Z",
+            exception_message="Test processing error",
+            exception_trace="Traceback...",
+            failure_reason="Test failure",
+        )
+    ]
+
+    dlq_handler.handle(message, failure_details)
+
+    # Wait for DLQ message to be sent
+    time.sleep(1)
+
+    # Subscribe to DLQ topic and verify message was sent there
+    consumer.subscribe([dlq_topic])
+    dlq_message = consumer.consume_next()
+
+    assert dlq_message is not None
+    assert dlq_message.topic == f"integration-test.{dlq_topic}"
+    assert dlq_message.value == test_data.to_json()
+
+
+def test_producer_consumer_message_ordering(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test that messages are consumed in the order they were sent."""
+    topic = "test-message-ordering"
+
+    # Send messages with sequential IDs first to create the topic
+    messages = []
+    for i in range(10):
+        test_data = InteractionTestData(
+            id=f"order-{i:03d}", message=f"Ordered message {i}", timestamp=time.time()
+        )
+        messages.append(test_data)
+        producer.send(topic=topic, value=test_data.to_json(), key=f"order-key-{i}")
+
+    # Wait for topic to be created and messages to be sent
+    time.sleep(3)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume messages and verify order
+    received_messages = consume_until(consumer, expected_count=10, timeout_seconds=10)
+
+    assert len(received_messages) == 10
+
+    # Verify messages are in order (by checking the ID in the JSON)
+    for i, message in enumerate(received_messages):
+        parsed_data = json.loads(message.value or "{}")
+        assert parsed_data["id"] == f"order-{i:03d}"
+
+
+def test_producer_consumer_with_headers(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test producer-consumer interaction with message headers."""
+    topic = "test-headers-interaction"
+
+    # Send a message with headers first to create the topic
+    test_data = InteractionTestData(
+        id="test-headers", message="Message with headers", timestamp=time.time()
+    )
+    headers = [
+        ("custom-header", b"custom-value"),
+        ("another-header", b"another-value"),
+    ]
+
+    producer.send(
+        topic=topic,
+        value=test_data.to_json(),
+        key="headers-key",
+        headers=headers,
+    )
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume the message
+    message = consumer.consume_next()
+    assert message is not None
+    assert message.value == test_data.to_json()
+
+    # Validate received value; header visibility varies by consumer implementation.
+
+
+def test_producer_consumer_commit_behavior(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test consumer commit behavior."""
+    topic = "test-commit-behavior"
+
+    # Send a message first to create the topic
+    test_data = InteractionTestData(
+        id="test-commit", message="Commit test message", timestamp=time.time()
+    )
+    producer.send(topic=topic, value=test_data.to_json(), key="commit-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Consume the message
+    message = consumer.consume_next()
+    assert message is not None
+
+    # Commit the message
+    consumer.commit(message)
+
+    # Verify commit was successful (no exception raised)
+    assert True # If we get here, commit succeeded
+
+
+def test_producer_consumer_connection_recovery(
+    producer: KafkaProducer, consumer: KafkaConsumer
+) -> None:
+    """Test connection recovery behavior."""
+    topic = "test-connection-recovery"
+
+    # Send a message first to create the topic
+    test_data = InteractionTestData(
+        id="test-recovery", message="Recovery test message", timestamp=time.time()
+    )
+    producer.send(topic=topic, value=test_data.to_json(), key="recovery-key")
+
+    # Wait for topic to be created and message to be sent
+    time.sleep(2)
+
+    # Subscribe consumer to topic
+    consumer.subscribe([topic])
+
+    # Wait for subscription to take effect
+    time.sleep(1)
+
+    # Verify both producer and consumer are still alive
+    assert producer.is_alive()
+    assert consumer.is_alive()
+
+    # Consume the message to verify everything is working
+    message = consumer.consume_next()
+    assert message is not None
+    assert message.value == test_data.to_json()
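
The interaction tests above rely on a consume_until helper imported from cledar/kafka/tests/integration/helpers.py, whose body is not included in this excerpt. A plausible sketch, inferred only from how the tests call it (a consumer, an expected_count, a timeout_seconds, returning the messages collected), might look like this:

    import time

    def consume_until(consumer, expected_count, timeout_seconds):
        """Poll consume_next() until expected_count messages arrive or the timeout elapses."""
        received = []
        deadline = time.monotonic() + timeout_seconds
        while len(received) < expected_count and time.monotonic() < deadline:
            message = consumer.consume_next()
            if message is not None:
                received.append(message)
        return received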

cledar/kafka/tests/integration/test_producer_integration.py

@@ -0,0 +1,217 @@
+"""
+Integration tests for Kafka producer using real Kafka instance.
+"""
+
+import time
+from collections.abc import Generator
+
+import pytest
+
+from cledar.kafka.clients.producer import KafkaProducer
+from cledar.kafka.config.schemas import KafkaProducerConfig
+from cledar.kafka.exceptions import KafkaProducerNotConnectedError
+
+
+@pytest.fixture
+def producer_config(kafka_bootstrap_servers: str) -> KafkaProducerConfig:
+    """Create producer configuration for integration tests."""
+    return KafkaProducerConfig(
+        kafka_servers=kafka_bootstrap_servers,
+        kafka_group_id="integration-test-producer",
+        kafka_topic_prefix="integration-test.",
+        kafka_block_buffer_time_sec=1,
+        kafka_connection_check_timeout_sec=5,
+        kafka_connection_check_interval_sec=10,
+    )
+
+
+@pytest.fixture
+def producer(
+    producer_config: KafkaProducerConfig,
+) -> Generator[KafkaProducer, None, None]:
+    """Create and connect a Kafka producer."""
+    producer = KafkaProducer(producer_config)
+    producer.connect()
+    yield producer
+    producer.shutdown()
+
+
+def test_producer_connect_and_send(producer: KafkaProducer) -> None:
+    """Test producer connection and basic message sending."""
+    topic = "test-producer-basic"
+    test_value = '{"id": "test-1", "message": "Hello Kafka!"}'
+    test_key = "test-key"
+
+    # Send message
+    producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for message to be sent
+    time.sleep(1)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_send_multiple_messages(producer: KafkaProducer) -> None:
+    """Test sending multiple messages."""
+    topic = "test-producer-multiple"
+
+    # Send multiple messages
+    for i in range(5):
+        test_value = f'{{"id": "test-{i}", "message": "Message {i}"}}'
+        test_key = f"key-{i}"
+        producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for messages to be sent
+    time.sleep(2)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_send_with_headers(producer: KafkaProducer) -> None:
+    """Test sending messages with headers."""
+    topic = "test-producer-headers"
+    test_value = '{"id": "test-headers", "message": "Message with headers"}'
+    test_key = "headers-key"
+    headers = [
+        ("custom-header", b"custom-value"),
+        ("another-header", b"another-value"),
+    ]
+
+    # Send message with headers
+    producer.send(topic=topic, value=test_value, key=test_key, headers=headers)
+
+    # Wait for message to be sent
+    time.sleep(1)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_send_large_message(producer: KafkaProducer) -> None:
+    """Test sending large messages."""
+    topic = "test-producer-large"
+
+    # Create a large message
+    large_data = "x" * 10000
+    test_value = f'{{"id": "test-large", "data": "{large_data}"}}'
+    test_key = "large-key"
+
+    # Send large message
+    producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for message to be sent
+    time.sleep(2)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_send_with_special_characters(producer: KafkaProducer) -> None:
+    """Test sending messages with special characters."""
+    topic = "test-producer-special-chars"
+    test_value = '{"id": "test-special", "message": "Special chars: @#$%^&*()"}'
+    test_key = "special-key-with-chars: @#$%^&*()"
+
+    # Send message with special characters
+    producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for message to be sent
+    time.sleep(1)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_send_with_unicode(producer: KafkaProducer) -> None:
+    """Test sending messages with unicode characters."""
+    topic = "test-producer-unicode"
+    test_value = '{"id": "test-unicode", "message": "Unicode: 测试名称"}'
+    test_key = "unicode-key-测试"
+
+    # Send message with unicode
+    producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for message to be sent
+    time.sleep(1)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_connection_check(producer: KafkaProducer) -> None:
+    """Test producer connection checking."""
+    # Verify producer is connected
+    assert producer.is_alive()
+
+    # Check connection explicitly
+    producer.check_connection()
+
+    # Should not raise any exception
+    assert True
+
+
+def test_producer_not_connected_error() -> None:
+    """Test producer error when not connected."""
+    config = KafkaProducerConfig(
+        kafka_servers="localhost:9092",
+        kafka_group_id="test-group",
+    )
+
+    producer = KafkaProducer(config)
+
+    # Should raise error when trying to send without connecting
+    with pytest.raises(KafkaProducerNotConnectedError):
+        producer.send(topic="test", value="test", key="key")
+
+
+def test_producer_shutdown(producer_config: KafkaProducerConfig) -> None:
+    """Test producer shutdown."""
+    producer = KafkaProducer(producer_config)
+    producer.connect()
+
+    # Verify producer is connected
+    assert producer.is_alive()
+
+    # Shutdown producer
+    producer.shutdown()
+
+    # Wait a moment for shutdown to complete
+    time.sleep(0.5)
+
+    # Producer should be disconnected after shutdown
+    assert not producer.is_alive()
+
+
+def test_producer_buffer_handling(producer: KafkaProducer) -> None:
+    """Test producer buffer handling with rapid message sending."""
+    topic = "test-producer-buffer"
+
+    # Send many messages rapidly
+    for i in range(100):
+        test_value = f'{{"id": "buffer-test-{i}", "message": "Buffer test {i}"}}'
+        test_key = f"buffer-key-{i}"
+        producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for messages to be processed
+    time.sleep(3)
+
+    # Verify producer is still connected
+    assert producer.is_alive()
+
+
+def test_producer_topic_prefix(producer: KafkaProducer) -> None:
+    """Test producer topic prefix functionality."""
+    topic = "test-prefix"
+    test_value = '{"id": "test-prefix", "message": "Prefix test"}'
+    test_key = "prefix-key"
+
+    # Send message (should use prefix from config)
+    producer.send(topic=topic, value=test_value, key=test_key)
+
+    # Wait for message to be sent
+    time.sleep(1)
+
+    # Verify producer is still connected
+    assert producer.is_alive()

cledar/kafka/tests/unit/__init__.py

@@ -0,0 +1 @@
+"""Unit tests for kafka_service."""