cledar-sdk 2.0.2__py3-none-any.whl → 2.0.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Files changed (85)
  1. cledar/__init__.py +0 -0
  2. cledar/kafka/README.md +239 -0
  3. cledar/kafka/__init__.py +40 -0
  4. cledar/kafka/clients/base.py +98 -0
  5. cledar/kafka/clients/consumer.py +110 -0
  6. cledar/kafka/clients/producer.py +80 -0
  7. cledar/kafka/config/schemas.py +178 -0
  8. cledar/kafka/exceptions.py +22 -0
  9. cledar/kafka/handlers/dead_letter.py +82 -0
  10. cledar/kafka/handlers/parser.py +49 -0
  11. cledar/kafka/logger.py +3 -0
  12. cledar/kafka/models/input.py +13 -0
  13. cledar/kafka/models/message.py +10 -0
  14. cledar/kafka/models/output.py +8 -0
  15. cledar/kafka/tests/.env.test.kafka +3 -0
  16. cledar/kafka/tests/README.md +216 -0
  17. cledar/kafka/tests/conftest.py +104 -0
  18. cledar/kafka/tests/integration/__init__.py +1 -0
  19. cledar/kafka/tests/integration/conftest.py +78 -0
  20. cledar/kafka/tests/integration/helpers.py +47 -0
  21. cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
  22. cledar/kafka/tests/integration/test_integration.py +394 -0
  23. cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
  24. cledar/kafka/tests/integration/test_producer_integration.py +217 -0
  25. cledar/kafka/tests/unit/__init__.py +1 -0
  26. cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
  27. cledar/kafka/tests/unit/test_config_validation.py +609 -0
  28. cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
  29. cledar/kafka/tests/unit/test_error_handling.py +674 -0
  30. cledar/kafka/tests/unit/test_input_parser.py +310 -0
  31. cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
  32. cledar/kafka/tests/unit/test_utils.py +25 -0
  33. cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
  34. cledar/kafka/utils/callbacks.py +19 -0
  35. cledar/kafka/utils/messages.py +28 -0
  36. cledar/kafka/utils/topics.py +2 -0
  37. cledar/kserve/README.md +352 -0
  38. cledar/kserve/__init__.py +3 -0
  39. cledar/kserve/tests/__init__.py +0 -0
  40. cledar/kserve/tests/test_utils.py +64 -0
  41. cledar/kserve/utils.py +27 -0
  42. cledar/logging/README.md +53 -0
  43. cledar/logging/__init__.py +3 -0
  44. cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
  45. cledar/logging/universal_plaintext_formatter.py +94 -0
  46. cledar/monitoring/README.md +71 -0
  47. cledar/monitoring/__init__.py +3 -0
  48. cledar/monitoring/monitoring_server.py +112 -0
  49. cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
  50. cledar/monitoring/tests/test_monitoring_server.py +59 -0
  51. cledar/nonce/README.md +99 -0
  52. cledar/nonce/__init__.py +3 -0
  53. cledar/nonce/nonce_service.py +36 -0
  54. cledar/nonce/tests/__init__.py +0 -0
  55. cledar/nonce/tests/test_nonce_service.py +136 -0
  56. cledar/redis/README.md +536 -0
  57. cledar/redis/__init__.py +15 -0
  58. cledar/redis/async_example.py +111 -0
  59. cledar/redis/example.py +37 -0
  60. cledar/redis/exceptions.py +22 -0
  61. cledar/redis/logger.py +3 -0
  62. cledar/redis/model.py +10 -0
  63. cledar/redis/redis.py +525 -0
  64. cledar/redis/redis_config_store.py +252 -0
  65. cledar/redis/tests/test_async_integration_redis.py +158 -0
  66. cledar/redis/tests/test_async_redis_service.py +380 -0
  67. cledar/redis/tests/test_integration_redis.py +119 -0
  68. cledar/redis/tests/test_redis_service.py +319 -0
  69. cledar/storage/README.md +529 -0
  70. cledar/storage/__init__.py +4 -0
  71. cledar/storage/constants.py +3 -0
  72. cledar/storage/exceptions.py +50 -0
  73. cledar/storage/models.py +19 -0
  74. cledar/storage/object_storage.py +955 -0
  75. cledar/storage/tests/conftest.py +18 -0
  76. cledar/storage/tests/test_abfs.py +164 -0
  77. cledar/storage/tests/test_integration_filesystem.py +359 -0
  78. cledar/storage/tests/test_integration_s3.py +453 -0
  79. cledar/storage/tests/test_local.py +384 -0
  80. cledar/storage/tests/test_s3.py +521 -0
  81. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/METADATA +1 -1
  82. cledar_sdk-2.0.3.dist-info/RECORD +84 -0
  83. cledar_sdk-2.0.2.dist-info/RECORD +0 -4
  84. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/WHEEL +0 -0
  85. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/licenses/LICENSE +0 -0
cledar/kafka/tests/integration/test_integration.py
@@ -0,0 +1,394 @@
+ """
+ End-to-end integration tests using a real Kafka instance via testcontainers.
+ These tests require Docker to be running and may take longer to execute.
+ """
+
+ import json
+ import time
+
+ from cledar.kafka import (
+     DeadLetterHandler,
+     FailedMessageData,
+     InputParser,
+     KafkaConsumer,
+     KafkaConsumerConfig,
+     KafkaProducer,
+     KafkaProducerConfig,
+ )
+ from cledar.kafka.tests.integration.helpers import E2EData, consume_until
+
+
+ class IntegrationTestData(E2EData):
+     """Alias over shared E2EData for local readability."""
+
+
+ # consume_until is provided by helpers.py.
+
+
+ # Common fixtures are provided by cledar/kafka/tests/integration/conftest.py.
+
+
+ def test_end_to_end_message_flow(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test complete end-to-end message flow."""
+     topic = "test-e2e-flow"
+
+     # Send multiple messages first to create the topic
+     messages = []
+     for i in range(10):
+         test_data = IntegrationTestData(
+             id=f"e2e-{i}", message=f"End-to-end message {i}", timestamp=time.time()
+         )
+         messages.append(test_data)
+         producer.send(
+             topic=topic, value=test_data.model_dump_json(), key=f"e2e-key-{i}"
+         )
+
+     # Wait for topic to be created and messages to be sent
+     time.sleep(3)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Consume all messages
+     received_messages = []
+     for _ in range(10):
+         message = consumer.consume_next()
+         if message:
+             received_messages.append(message)
+
+     assert len(received_messages) == 10
+
+     # Verify message content and order
+     for i, message in enumerate(received_messages):
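+         # Consumed records come back with an "integration-test." topic prefix;
+         # the shared test config in conftest.py presumably adds it for isolation.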
+         assert message.topic == f"integration-test.{topic}"
+         assert message.key == f"e2e-key-{i}"
+
+         # Parse and verify message content
+         parsed_data = json.loads(message.value or "{}")
+         assert parsed_data["id"] == f"e2e-{i}"
+         assert parsed_data["message"] == f"End-to-end message {i}"
+
+
+ def test_end_to_end_with_parser(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end flow with message parsing."""
+     topic = "test-e2e-parser"
+
+     # Create parser
+     parser = InputParser(IntegrationTestData)
+
+     # Send messages first to create the topic
+     test_data = IntegrationTestData(
+         id="parser-test", message="Parser test message", timestamp=time.time()
+     )
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="parser-key")
+
+     # Wait for topic to be created and message to be sent
+     time.sleep(2)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Consume and parse the message
+     message = consumer.consume_next()
+     assert message is not None
+
+     parsed_message = parser.parse_message(message)
+     assert parsed_message.payload.id == "parser-test"
+     assert parsed_message.payload.message == "Parser test message"
+
+
+ def test_end_to_end_with_dead_letter_queue(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end flow with dead letter queue."""
+     topic = "test-e2e-dlq"
+     dlq_topic = "test-e2e-dlq-topic"
+
+     # Create dead letter handler
+     dlq_handler = DeadLetterHandler(producer, dlq_topic)
+
+     # Send a message first to create the topic
+     test_data = IntegrationTestData(
+         id="dlq-test", message="DLQ test message", timestamp=time.time()
+     )
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="dlq-key")
+
+     # Wait for topic to be created and message to be sent
+     time.sleep(2)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Consume the message
+     message = consumer.consume_next()
+     assert message is not None
+
+     # Simulate processing failure and send to DLQ
+     failure_details = [
+         FailedMessageData(
+             raised_at="2024-01-01T00:00:00Z",
+             exception_message="Test processing error",
+             exception_trace="Traceback...",
+             failure_reason="Test failure",
+         )
+     ]
+
+     dlq_handler.handle(message, failure_details)
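+     # handle() republishes the original message value to the DLQ topic, as the
+     # assertions below verify; how the failure details are attached (e.g. as
+     # headers) is up to DeadLetterHandler.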
+
+     # Wait for DLQ message to be sent
+     time.sleep(1)
+
+     # Subscribe to DLQ topic and verify message was sent there
+     consumer.subscribe([dlq_topic])
+     dlq_message = consumer.consume_next()
+
+     assert dlq_message is not None
+     assert dlq_message.topic == f"integration-test.{dlq_topic}"
+     assert dlq_message.value == test_data.model_dump_json()
+
+
+ def test_end_to_end_error_recovery(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end error recovery scenarios."""
+     topic = "test-e2e-recovery"
+
+     # Send messages with various error scenarios first to create the topic
+     error_scenarios = [
+         ('{"id": "valid", "message": "Valid message"}', "valid-key"),
+         ('{"id": "invalid-json", "message": "Invalid JSON', "invalid-key"),
+         ('{"id": "empty", "message": ""}', "empty-key"),
+         ('{"id": "special", "message": "Special chars: @#$%^&*()"}', "special-key"),
+     ]
+
+     for value, key in error_scenarios:
+         producer.send(topic=topic, value=value, key=key)
+
+     # Wait for topic to be created and messages to be sent
+     time.sleep(3)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Consume messages and verify they were all received
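+     # (consume_next() hands records back raw, without decoding them, so even
+     # the truncated-JSON payload should arrive intact; parse failures would
+     # only surface later, in InputParser)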
+     received_count = 0
+     for _ in range(4):
+         message = consumer.consume_next()
+         if message:
+             received_count += 1
+
+     assert received_count == 4
+
+
+ def test_end_to_end_concurrent_producers_consumers(
+     producer_config: KafkaProducerConfig, consumer_config: KafkaConsumerConfig
+ ) -> None:
+     """Test end-to-end flow with multiple producers and consumers."""
+     topic = "test-e2e-concurrent"
+
+     # Create multiple producers
+     producers = []
+     for _i in range(3):
+         producer = KafkaProducer(producer_config)
+         producer.connect()
+         producers.append(producer)
+
+     # Create topic before consumers subscribe
+     bootstrap_producer = producers[0]
+     topic_init_data = IntegrationTestData(
+         id="concurrent-init", message="init", timestamp=time.time()
+     )
+     bootstrap_producer.send(
+         topic=topic, value=topic_init_data.model_dump_json(), key="init-key"
+     )
+     time.sleep(2)
+
+     # Create multiple consumers after topic exists
+     consumers = []
+     for _i in range(2):
+         consumer = KafkaConsumer(consumer_config)
+         consumer.connect()
+         consumer.subscribe([topic])
+         consumers.append(consumer)
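+     # Both consumers are built from the same consumer_config; assuming they
+     # share a group.id, partitions are split between them and each message
+     # below is counted exactly once across the group.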
+
+     # Wait for subscription to take effect
+     time.sleep(1.5)
+
+     # Send messages from multiple producers
+     for i in range(10):
+         producer_idx = i % 3
+         test_data = IntegrationTestData(
+             id=f"concurrent-{i}",
+             message=f"Concurrent message {i}",
+             timestamp=time.time(),
+         )
+         producers[producer_idx].send(
+             topic=topic,
+             value=test_data.model_dump_json(),
+             key=f"concurrent-key-{i}",
+         )
+
+     # Wait for messages to be sent
+     time.sleep(2)
+
+     # Consume messages from multiple consumers with a shared deadline
+     total_received = 0
+     start = time.time()
+     deadline = start + 10
+     while total_received < 10 and time.time() < deadline:
+         for c in consumers:
+             msg = c.consume_next()
+             if msg is not None:
+                 total_received += 1
+
+     # Should have received all 10 messages across consumers
+     assert total_received == 10
+
+     # Cleanup
+     for producer in producers:
+         producer.shutdown()
+     for consumer in consumers:
+         consumer.shutdown()
+
+
+ def test_end_to_end_message_ordering(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end message ordering guarantees."""
+     topic = "test-e2e-ordering"
+
+     # Send a message first to create the topic
+     test_data = IntegrationTestData(
+         id="order-000", message="Ordered message 0", timestamp=time.time()
+     )
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="order-key-0")
+
+     # Wait for topic to be created
+     time.sleep(2)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Send messages with sequential IDs
+     messages = []
+     for i in range(20):
+         test_data = IntegrationTestData(
+             id=f"order-{i:03d}",
+             message=f"Ordered message {i}",
+             timestamp=time.time(),
+         )
+         messages.append(test_data)
+         producer.send(
+             topic=topic,
+             value=test_data.model_dump_json(),
+             key=f"order-key-{i}",
+         )
+
+     # Wait for messages to be sent
+     time.sleep(3)
+
+     # Consume messages and verify order
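+     # (expected_count=21: the one topic-creation message plus the 20 ordered
+     # messages; strict global ordering holds only if the auto-created topic
+     # has a single partition, since Kafka orders messages per partition)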
+     received_messages = consume_until(consumer, expected_count=21, timeout_seconds=12)
+
+     assert len(received_messages) == 21
+
+     # Verify messages are in order (by checking the ID in the JSON)
+     # Skip the first message (topic creation) and verify the rest
+     for i, message in enumerate(received_messages[1:], 1):  # Start from index 1
+         parsed_data = json.loads(message.value or "{}")
+         assert parsed_data["id"] == f"order-{i - 1:03d}"  # Adjust for 0-based indexing
+
+
+ def test_end_to_end_connection_monitoring(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end connection monitoring."""
+     topic = "test-e2e-monitoring"
+
+     # Send a message first to create the topic
+     test_data = IntegrationTestData(
+         id="monitoring-test", message="Monitoring test", timestamp=time.time()
+     )
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="monitoring-key")
+
+     # Wait for topic to be created
+     time.sleep(2)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Send another message for the consumer to receive
+     producer.send(
+         topic=topic, value=test_data.model_dump_json(), key="monitoring-key-2"
+     )
+
+     # Wait for message to be sent
+     time.sleep(2)
+
+     # Verify both producer and consumer are still alive
+     assert producer.is_alive()
+     assert consumer.is_alive()
+
+     # Consume the message to verify everything is working
+     messages = consume_until(consumer, expected_count=1, timeout_seconds=6)
+     assert len(messages) == 1
+     assert messages[0].value == test_data.model_dump_json()
+
+
+ def test_end_to_end_large_message_handling(
+     producer: KafkaProducer, consumer: KafkaConsumer
+ ) -> None:
+     """Test end-to-end handling of large messages."""
+     topic = "test-e2e-large"
+
+     # Create a large message
+     large_data = "x" * 50000  # 50KB message
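+     # 50KB is well below Kafka's default ~1MB message.max.bytes, so this
+     # exercises a large payload without any broker-side tuning.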
+     test_data = IntegrationTestData(
+         id="large-test", message=large_data, timestamp=time.time()
+     )
+
+     # Send large message first to create the topic
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="large-key")
+
+     # Wait for topic to be created
+     time.sleep(3)
+
+     # Subscribe consumer to topic
+     consumer.subscribe([topic])
+
+     # Wait for subscription to take effect
+     time.sleep(1)
+
+     # Send another large message for the consumer to receive
+     producer.send(topic=topic, value=test_data.model_dump_json(), key="large-key-2")
+
+     # Wait for message to be sent
+     time.sleep(3)
+
+     # Consume the message
+     messages = consume_until(consumer, expected_count=1, timeout_seconds=10)
+     assert len(messages) == 1
+     assert messages[0].value == test_data.model_dump_json()
+     assert len(messages[0].value) > 50000