cledar-sdk 2.0.2__py3-none-any.whl → 2.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. cledar/__init__.py +0 -0
  2. cledar/kafka/README.md +239 -0
  3. cledar/kafka/__init__.py +40 -0
  4. cledar/kafka/clients/base.py +98 -0
  5. cledar/kafka/clients/consumer.py +110 -0
  6. cledar/kafka/clients/producer.py +80 -0
  7. cledar/kafka/config/schemas.py +178 -0
  8. cledar/kafka/exceptions.py +22 -0
  9. cledar/kafka/handlers/dead_letter.py +82 -0
  10. cledar/kafka/handlers/parser.py +49 -0
  11. cledar/kafka/logger.py +3 -0
  12. cledar/kafka/models/input.py +13 -0
  13. cledar/kafka/models/message.py +10 -0
  14. cledar/kafka/models/output.py +8 -0
  15. cledar/kafka/tests/.env.test.kafka +3 -0
  16. cledar/kafka/tests/README.md +216 -0
  17. cledar/kafka/tests/conftest.py +104 -0
  18. cledar/kafka/tests/integration/__init__.py +1 -0
  19. cledar/kafka/tests/integration/conftest.py +78 -0
  20. cledar/kafka/tests/integration/helpers.py +47 -0
  21. cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
  22. cledar/kafka/tests/integration/test_integration.py +394 -0
  23. cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
  24. cledar/kafka/tests/integration/test_producer_integration.py +217 -0
  25. cledar/kafka/tests/unit/__init__.py +1 -0
  26. cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
  27. cledar/kafka/tests/unit/test_config_validation.py +609 -0
  28. cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
  29. cledar/kafka/tests/unit/test_error_handling.py +674 -0
  30. cledar/kafka/tests/unit/test_input_parser.py +310 -0
  31. cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
  32. cledar/kafka/tests/unit/test_utils.py +25 -0
  33. cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
  34. cledar/kafka/utils/callbacks.py +19 -0
  35. cledar/kafka/utils/messages.py +28 -0
  36. cledar/kafka/utils/topics.py +2 -0
  37. cledar/kserve/README.md +352 -0
  38. cledar/kserve/__init__.py +3 -0
  39. cledar/kserve/tests/__init__.py +0 -0
  40. cledar/kserve/tests/test_utils.py +64 -0
  41. cledar/kserve/utils.py +27 -0
  42. cledar/logging/README.md +53 -0
  43. cledar/logging/__init__.py +3 -0
  44. cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
  45. cledar/logging/universal_plaintext_formatter.py +94 -0
  46. cledar/monitoring/README.md +71 -0
  47. cledar/monitoring/__init__.py +3 -0
  48. cledar/monitoring/monitoring_server.py +112 -0
  49. cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
  50. cledar/monitoring/tests/test_monitoring_server.py +59 -0
  51. cledar/nonce/README.md +99 -0
  52. cledar/nonce/__init__.py +3 -0
  53. cledar/nonce/nonce_service.py +36 -0
  54. cledar/nonce/tests/__init__.py +0 -0
  55. cledar/nonce/tests/test_nonce_service.py +136 -0
  56. cledar/redis/README.md +536 -0
  57. cledar/redis/__init__.py +15 -0
  58. cledar/redis/async_example.py +111 -0
  59. cledar/redis/example.py +37 -0
  60. cledar/redis/exceptions.py +22 -0
  61. cledar/redis/logger.py +3 -0
  62. cledar/redis/model.py +10 -0
  63. cledar/redis/redis.py +525 -0
  64. cledar/redis/redis_config_store.py +252 -0
  65. cledar/redis/tests/test_async_integration_redis.py +158 -0
  66. cledar/redis/tests/test_async_redis_service.py +380 -0
  67. cledar/redis/tests/test_integration_redis.py +119 -0
  68. cledar/redis/tests/test_redis_service.py +319 -0
  69. cledar/storage/README.md +529 -0
  70. cledar/storage/__init__.py +4 -0
  71. cledar/storage/constants.py +3 -0
  72. cledar/storage/exceptions.py +50 -0
  73. cledar/storage/models.py +19 -0
  74. cledar/storage/object_storage.py +955 -0
  75. cledar/storage/tests/conftest.py +18 -0
  76. cledar/storage/tests/test_abfs.py +164 -0
  77. cledar/storage/tests/test_integration_filesystem.py +359 -0
  78. cledar/storage/tests/test_integration_s3.py +453 -0
  79. cledar/storage/tests/test_local.py +384 -0
  80. cledar/storage/tests/test_s3.py +521 -0
  81. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/METADATA +1 -1
  82. cledar_sdk-2.0.3.dist-info/RECORD +84 -0
  83. cledar_sdk-2.0.2.dist-info/RECORD +0 -4
  84. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/WHEEL +0 -0
  85. {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.0.3.dist-info}/licenses/LICENSE +0 -0
cledar/kafka/tests/unit/test_dead_letter_handler.py
@@ -0,0 +1,443 @@
+ """
+ Comprehensive tests for DeadLetterHandler covering message handling,
+ header building, error scenarios, and edge cases.
+ """
+
+ import json
+ from unittest.mock import MagicMock, patch
+
+ import pytest
+
+ from cledar.kafka.handlers.dead_letter import DeadLetterHandler
+ from cledar.kafka.models.message import KafkaMessage
+ from cledar.kafka.models.output import FailedMessageData
+
+
+ @pytest.fixture
+ def mock_producer() -> MagicMock:
+     """Create a mock KafkaProducer for testing."""
+     return MagicMock()
+
+
+ @pytest.fixture
+ def dlq_topic() -> str:
+     """DLQ topic name for testing."""
+     return "test-dlq-topic"
+
+
+ @pytest.fixture
+ def dead_letter_handler(mock_producer: MagicMock, dlq_topic: str) -> DeadLetterHandler:
+     """Create a DeadLetterHandler instance for testing."""
+     return DeadLetterHandler(producer=mock_producer, dlq_topic=dlq_topic)
+
+
+ @pytest.fixture
+ def sample_message() -> KafkaMessage:
+     """Create a sample KafkaMessage for testing."""
+     return KafkaMessage(
+         topic="test-topic",
+         value='{"id": "123", "data": "test"}',
+         key="test-key",
+         offset=100,
+         partition=0,
+     )
+
+
+ @pytest.fixture
+ def sample_failure_details() -> list[FailedMessageData]:
+     """Create sample failure details for testing."""
+     return [
+         FailedMessageData(
+             raised_at="2024-01-01T00:00:00Z",
+             exception_message="Test exception",
+             exception_trace="Traceback...",
+             failure_reason="Processing failed",
+         ),
+         FailedMessageData(
+             raised_at="2024-01-01T00:01:00Z",
+             exception_message="Another exception",
+             exception_trace="Another traceback...",
+             failure_reason="Retry failed",
+         ),
+     ]
+
+
+ def test_init(mock_producer: MagicMock, dlq_topic: str) -> None:
+     """Test DeadLetterHandler initialization."""
+     handler = DeadLetterHandler(producer=mock_producer, dlq_topic=dlq_topic)
+
+     assert handler.producer == mock_producer
+     assert handler.dlq_topic == dlq_topic
+
+
+ @patch("cledar.kafka.handlers.dead_letter.logger")
+ def test_handle_with_failure_details(
+     mock_logger: MagicMock,
+     dead_letter_handler: DeadLetterHandler,
+     sample_message: KafkaMessage,
+     sample_failure_details: list[FailedMessageData],
+ ) -> None:
+     """Test handling a message with failure details."""
+     dead_letter_handler.handle(sample_message, sample_failure_details)
+
+     # Verify producer.send was called
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+
+     # Verify logging calls
+     assert (
+         mock_logger.info.call_count == 3
+     )  # "Handling message", "DLQ message built", and "Message sent"
+
+     # Verify the send call arguments
+     call_args = producer_send.call_args
+     assert call_args[1]["topic"] == dead_letter_handler.dlq_topic
+     assert call_args[1]["value"] == sample_message.value
+     assert call_args[1]["key"] == sample_message.key
+     assert "headers" in call_args[1]
+
+
+ @patch("cledar.kafka.handlers.dead_letter.logger")
+ def test_handle_without_failure_details(
+     mock_logger: MagicMock,
+     dead_letter_handler: DeadLetterHandler,
+     sample_message: KafkaMessage,
+ ) -> None:
+     """Test handling a message without failure details."""
+     dead_letter_handler.handle(sample_message, None)
+
+     # Verify producer.send was called
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+
+     # Verify logging calls
+     assert mock_logger.info.call_count == 3
+
+     # Verify the send call arguments
+     call_args = producer_send.call_args
+     assert call_args[1]["topic"] == dead_letter_handler.dlq_topic
+     assert call_args[1]["value"] == sample_message.value
+     assert call_args[1]["key"] == sample_message.key
+     assert call_args[1]["headers"] == []
+
+
+ @patch("cledar.kafka.handlers.dead_letter.logger")
+ def test_handle_with_empty_failure_details(
+     mock_logger: MagicMock,
+     dead_letter_handler: DeadLetterHandler,
+     sample_message: KafkaMessage,
+ ) -> None:
+     """Test handling a message with empty failure details list."""
+     dead_letter_handler.handle(sample_message, [])
+
+     # Verify producer.send was called
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+
+     # Verify the send call arguments
+     call_args = producer_send.call_args
+     assert call_args[1]["headers"] == []
+
+
+ def test_build_headers_with_failure_details(
+     dead_letter_handler: DeadLetterHandler,
+     sample_failure_details: list[FailedMessageData],
+ ) -> None:
+     """Test building headers with failure details."""
+     headers = dead_letter_handler._build_headers(sample_failure_details)
+
+     assert len(headers) == 1
+     assert headers[0][0] == "failures_details"
+
+     # Verify the JSON content
+     failures_json = headers[0][1].decode("utf-8")
+     failures_data = json.loads(failures_json)
+
+     assert len(failures_data) == 2
+     assert failures_data[0]["exception_message"] == "Test exception"
+     assert failures_data[1]["exception_message"] == "Another exception"
+
+
+ def test_build_headers_without_failure_details(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test building headers without failure details."""
+     headers = dead_letter_handler._build_headers(None)
+
+     assert headers == []
+
+
+ def test_build_headers_with_empty_failure_details(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test building headers with empty failure details list."""
+     headers = dead_letter_handler._build_headers([])
+
+     assert headers == []
+
+
+ def test_build_headers_json_serialization(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test that failure details are properly JSON serialized."""
+     failure_details = [
+         FailedMessageData(
+             raised_at="2024-01-01T00:00:00Z",
+             exception_message="Test with special chars: \n\t\"'",
+             exception_trace="Traceback with unicode: 你好",
+             failure_reason="Reason with symbols: @#$%",
+         )
+     ]
+
+     headers = dead_letter_handler._build_headers(failure_details)
+
+     # Should not raise any exception during JSON serialization
+     failures_json = headers[0][1].decode("utf-8")
+     failures_data = json.loads(failures_json)
+
+     assert failures_data[0]["exception_message"] == "Test with special chars: \n\t\"'"
+     assert failures_data[0]["exception_trace"] == "Traceback with unicode: 你好"
+     assert failures_data[0]["failure_reason"] == "Reason with symbols: @#$%"
+
+
+ @patch("cledar.kafka.handlers.dead_letter.logger")
+ def test_send_message(
+     mock_logger: MagicMock,
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test sending a DLQ message."""
+     message_value = '{"test": "data"}'
+     message_key = "test-key"
+     headers = [("test-header", b"test-value")]
+
+     dead_letter_handler._send_message(message_value, message_key, headers)
+
+     # Verify producer.send was called
+     # mypy: MagicMock doesn't expose precise attributes; rely on runtime assertion
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     producer_send.assert_called_once_with(
+         topic=dead_letter_handler.dlq_topic,
+         value=message_value,
+         key=message_key,
+         headers=headers,
+     )
+
+     # Verify logging
+     mock_logger.info.assert_called_once()
+     log_message = mock_logger.info.call_args[0][0]
+     assert "Message sent to DLQ topic successfully" in log_message
+
+
+ @patch("cledar.kafka.handlers.dead_letter.logger")
+ def test_send_message_with_none_values(
+     mock_logger: MagicMock,
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test sending a DLQ message with None values."""
+     dead_letter_handler._send_message(None, None, [])
+
+     # Verify producer.send was called with None values
+     # mypy: MagicMock doesn't expose precise attributes; rely on runtime assertion
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     producer_send.assert_called_once_with(
+         topic=dead_letter_handler.dlq_topic,
+         value=None,
+         key=None,
+         headers=[],
+     )
+
+
+ def test_handle_message_with_none_value(dead_letter_handler: DeadLetterHandler) -> None:
+     """Test handling a message with None value."""
+     message = KafkaMessage(
+         topic="test-topic",
+         value=None,
+         key="test-key",
+         offset=100,
+         partition=0,
+     )
+
+     dead_letter_handler.handle(message, None)
+
+     # Should still send the message
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+     call_args = producer_send.call_args
+     assert call_args[1]["value"] is None
+
+
+ def test_handle_message_with_empty_string_value(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test handling a message with empty string value."""
+     message = KafkaMessage(
+         topic="test-topic",
+         value="",
+         key="test-key",
+         offset=100,
+         partition=0,
+     )
+
+     dead_letter_handler.handle(message, None)
+
+     # Should still send the message
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+     call_args = producer_send.call_args
+     assert call_args[1]["value"] == ""
+
+
+ def test_handle_message_with_large_value(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test handling a message with large value."""
+     large_value = "x" * 10000  # 10KB string
+     message = KafkaMessage(
+         topic="test-topic",
+         value=large_value,
+         key="test-key",
+         offset=100,
+         partition=0,
+     )
+
+     dead_letter_handler.handle(message, None)
+
+     # Should still send the message
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     assert producer_send.call_count == 1
+     call_args = producer_send.call_args
+     assert call_args[1]["value"] == large_value
+
+
+ def test_handle_message_with_special_characters_in_key(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test handling a message with special characters in key."""
+     special_key = "key-with-special-chars: @#$%^&*()"
+     message = KafkaMessage(
+         topic="test-topic",
+         value='{"test": "data"}',
+         key=special_key,
+         offset=100,
+         partition=0,
+     )
+
+     dead_letter_handler.handle(message, None)
+
+     # Should still send the message
+     from typing import Any, cast
+
+     producer_send = cast(Any, dead_letter_handler.producer).send
+     producer_send.assert_called_once()
+     call_args = producer_send.call_args
+     assert call_args[1]["key"] == special_key
+
+
+ def test_multiple_failure_details_serialization(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test serialization of multiple failure details."""
+     failure_details = [
+         FailedMessageData(
+             raised_at="2024-01-01T00:00:00Z",
+             exception_message="First failure",
+             exception_trace="First trace",
+             failure_reason="First reason",
+         ),
+         FailedMessageData(
+             raised_at="2024-01-01T00:01:00Z",
+             exception_message="Second failure",
+             exception_trace="Second trace",
+             failure_reason="Second reason",
+         ),
+         FailedMessageData(
+             raised_at="2024-01-01T00:02:00Z",
+             exception_message="Third failure",
+             exception_trace="Third trace",
+             failure_reason="Third reason",
+         ),
+     ]
+
+     headers = dead_letter_handler._build_headers(failure_details)
+
+     failures_json = headers[0][1].decode("utf-8")
+     failures_data = json.loads(failures_json)
+
+     assert len(failures_data) == 3
+     assert failures_data[0]["exception_message"] == "First failure"
+     assert failures_data[1]["exception_message"] == "Second failure"
+     assert failures_data[2]["exception_message"] == "Third failure"
+
+
+ def test_failure_details_with_none_values(
+     dead_letter_handler: DeadLetterHandler,
+ ) -> None:
+     """Test failure details with None values."""
+     failure_details = [
+         FailedMessageData(
+             raised_at="2024-01-01T00:00:00Z",
+             exception_message=None,
+             exception_trace=None,
+             failure_reason=None,
+         )
+     ]
+
+     headers = dead_letter_handler._build_headers(failure_details)
+
+     failures_json = headers[0][1].decode("utf-8")
+     failures_data = json.loads(failures_json)
+
+     assert failures_data[0]["exception_message"] is None
+     assert failures_data[0]["exception_trace"] is None
+     assert failures_data[0]["failure_reason"] is None
+
+
+ def test_dlq_topic_configuration(mock_producer: MagicMock) -> None:
+     """Test that DLQ topic is properly configured."""
+     custom_dlq_topic = "custom-dlq-topic"
+     handler = DeadLetterHandler(producer=mock_producer, dlq_topic=custom_dlq_topic)
+
+     message = KafkaMessage(
+         topic="test-topic",
+         value='{"test": "data"}',
+         key="test-key",
+         offset=100,
+         partition=0,
+     )
+
+     handler.handle(message, None)
+
+     from typing import Any, cast
+
+     producer_send = cast(Any, handler.producer).send
+     call_args = producer_send.call_args
+     assert call_args[1]["topic"] == custom_dlq_topic
+
+
+ def test_producer_dependency_injection(dlq_topic: str) -> None:
+     """Test that producer dependency is properly injected."""
+     mock_producer1 = MagicMock()
+     mock_producer2 = MagicMock()
+
+     handler1 = DeadLetterHandler(producer=mock_producer1, dlq_topic=dlq_topic)
+     handler2 = DeadLetterHandler(producer=mock_producer2, dlq_topic=dlq_topic)
+
+     assert handler1.producer == mock_producer1
+     assert handler2.producer == mock_producer2
+     assert handler1.producer != handler2.producer
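
For context, the tests above pin down the observable contract of the new DeadLetterHandler: it forwards the failed message's value and key unchanged to producer.send() on the configured DLQ topic, attaches at most one "failures_details" header carrying the JSON-encoded failure records (and no headers when there are none), and emits three info-level log lines per handled message. A minimal sketch of a handler that satisfies this contract follows. It is inferred from the tests only; the shipped cledar/kafka/handlers/dead_letter.py is not reproduced in this excerpt, and the logger import path, log wording, and the serialization call on FailedMessageData are assumptions.

# Sketch only, reconstructed from the tests above; not the shipped implementation.
import json

from cledar.kafka.models.message import KafkaMessage
from cledar.kafka.models.output import FailedMessageData
from cledar.kafka.logger import logger  # assumed import; the tests only require a module-level `logger`


class DeadLetterHandler:
    def __init__(self, producer, dlq_topic: str) -> None:
        self.producer = producer
        self.dlq_topic = dlq_topic

    def handle(
        self,
        message: KafkaMessage,
        failure_details: list[FailedMessageData] | None,
    ) -> None:
        # First info log ("Handling message" in the tests' comment).
        logger.info("Handling message destined for DLQ topic")
        headers = self._build_headers(failure_details)
        # Second info log ("DLQ message built").
        logger.info("DLQ message built")
        self._send_message(message.value, message.key, headers)

    def _build_headers(
        self, failure_details: list[FailedMessageData] | None
    ) -> list[tuple[str, bytes]]:
        # None and an empty list both yield no headers, as the tests assert.
        if not failure_details:
            return []
        # Assumes a pydantic-style model_dump(); the real model may serialize differently.
        payload = json.dumps([detail.model_dump() for detail in failure_details])
        return [("failures_details", payload.encode("utf-8"))]

    def _send_message(
        self,
        value: str | None,
        key: str | None,
        headers: list[tuple[str, bytes]],
    ) -> None:
        self.producer.send(topic=self.dlq_topic, value=value, key=key, headers=headers)
        # Third info log; the tests assert on this exact phrase.
        logger.info("Message sent to DLQ topic successfully")

One design point the tests make explicit: the original payload and key pass through untouched so the DLQ message stays replayable, while all failure metadata travels in the header rather than in the message body.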