cledar-sdk 2.0.2__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cledar/__init__.py +1 -0
- cledar/kafka/README.md +239 -0
- cledar/kafka/__init__.py +42 -0
- cledar/kafka/clients/base.py +117 -0
- cledar/kafka/clients/consumer.py +138 -0
- cledar/kafka/clients/producer.py +97 -0
- cledar/kafka/config/schemas.py +262 -0
- cledar/kafka/exceptions.py +17 -0
- cledar/kafka/handlers/dead_letter.py +88 -0
- cledar/kafka/handlers/parser.py +83 -0
- cledar/kafka/logger.py +5 -0
- cledar/kafka/models/input.py +17 -0
- cledar/kafka/models/message.py +14 -0
- cledar/kafka/models/output.py +12 -0
- cledar/kafka/tests/.env.test.kafka +3 -0
- cledar/kafka/tests/README.md +216 -0
- cledar/kafka/tests/conftest.py +104 -0
- cledar/kafka/tests/integration/__init__.py +1 -0
- cledar/kafka/tests/integration/conftest.py +78 -0
- cledar/kafka/tests/integration/helpers.py +47 -0
- cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
- cledar/kafka/tests/integration/test_integration.py +394 -0
- cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
- cledar/kafka/tests/integration/test_producer_integration.py +217 -0
- cledar/kafka/tests/unit/__init__.py +1 -0
- cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
- cledar/kafka/tests/unit/test_config_validation.py +609 -0
- cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
- cledar/kafka/tests/unit/test_error_handling.py +674 -0
- cledar/kafka/tests/unit/test_input_parser.py +310 -0
- cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
- cledar/kafka/tests/unit/test_utils.py +25 -0
- cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
- cledar/kafka/utils/callbacks.py +28 -0
- cledar/kafka/utils/messages.py +39 -0
- cledar/kafka/utils/topics.py +15 -0
- cledar/kserve/README.md +352 -0
- cledar/kserve/__init__.py +5 -0
- cledar/kserve/tests/__init__.py +0 -0
- cledar/kserve/tests/test_utils.py +64 -0
- cledar/kserve/utils.py +30 -0
- cledar/logging/README.md +53 -0
- cledar/logging/__init__.py +5 -0
- cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
- cledar/logging/universal_plaintext_formatter.py +99 -0
- cledar/monitoring/README.md +71 -0
- cledar/monitoring/__init__.py +5 -0
- cledar/monitoring/monitoring_server.py +156 -0
- cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
- cledar/monitoring/tests/test_monitoring_server.py +59 -0
- cledar/nonce/README.md +99 -0
- cledar/nonce/__init__.py +5 -0
- cledar/nonce/nonce_service.py +62 -0
- cledar/nonce/tests/__init__.py +0 -0
- cledar/nonce/tests/test_nonce_service.py +136 -0
- cledar/redis/README.md +536 -0
- cledar/redis/__init__.py +17 -0
- cledar/redis/async_example.py +112 -0
- cledar/redis/example.py +67 -0
- cledar/redis/exceptions.py +25 -0
- cledar/redis/logger.py +5 -0
- cledar/redis/model.py +14 -0
- cledar/redis/redis.py +764 -0
- cledar/redis/redis_config_store.py +333 -0
- cledar/redis/tests/test_async_integration_redis.py +158 -0
- cledar/redis/tests/test_async_redis_service.py +380 -0
- cledar/redis/tests/test_integration_redis.py +119 -0
- cledar/redis/tests/test_redis_service.py +319 -0
- cledar/storage/README.md +529 -0
- cledar/storage/__init__.py +6 -0
- cledar/storage/constants.py +5 -0
- cledar/storage/exceptions.py +79 -0
- cledar/storage/models.py +41 -0
- cledar/storage/object_storage.py +1274 -0
- cledar/storage/tests/conftest.py +18 -0
- cledar/storage/tests/test_abfs.py +164 -0
- cledar/storage/tests/test_integration_filesystem.py +359 -0
- cledar/storage/tests/test_integration_s3.py +453 -0
- cledar/storage/tests/test_local.py +384 -0
- cledar/storage/tests/test_s3.py +521 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/METADATA +1 -1
- cledar_sdk-2.1.0.dist-info/RECORD +84 -0
- cledar_sdk-2.0.2.dist-info/RECORD +0 -4
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/WHEEL +0 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
# Kafka Service Tests
|
|
2
|
+
|
|
3
|
+
This directory contains the test suite for the Kafka service, organized into unit and integration tests.
|
|
4
|
+
|
|
5
|
+
## Directory Structure
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
tests/
|
|
9
|
+
├── conftest.py # Test-wide teardown (cleans Kafka client threads)
|
|
10
|
+
├── README.md
|
|
11
|
+
├── unit/ # Unit tests (176 tests)
|
|
12
|
+
│ ├── test_base_kafka_client.py
|
|
13
|
+
│ ├── test_config_validation.py
|
|
14
|
+
│ ├── test_dead_letter_handler.py
|
|
15
|
+
│ ├── test_error_handling.py
|
|
16
|
+
│ ├── test_input_parser.py
|
|
17
|
+
│ ├── test_input_parser_comprehensive.py
|
|
18
|
+
│ ├── test_utils.py
|
|
19
|
+
│ ├── test_utils_comprehensive.py
|
|
20
|
+
│ └── requirements-test.txt
|
|
21
|
+
└── integration/ # Integration tests (41 tests)
|
|
22
|
+
├── conftest.py # Shared Kafka fixtures (container, configs, clients)
|
|
23
|
+
├── helpers.py # E2EData, consume_until, ensure_topic_and_subscribe
|
|
24
|
+
├── test_integration.py
|
|
25
|
+
├── test_producer_integration.py
|
|
26
|
+
├── test_consumer_integration.py
|
|
27
|
+
└── test_producer_consumer_interaction.py
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## Test Categories
|
|
31
|
+
|
|
32
|
+
### Unit Tests (`unit/`)
|
|
33
|
+
Unit tests focus on testing individual components in isolation using mocks and stubs. They are fast, reliable, and don't require external dependencies.
|
|
34
|
+
|
|
35
|
+
- **Base Client Tests**: Test the base Kafka client functionality
|
|
36
|
+
- **Config Validation**: Test configuration validation and schema validation
|
|
37
|
+
- **Dead Letter Handler**: Test dead letter queue handling with mocked producers
|
|
38
|
+
- **Error Handling**: Test error scenarios and exception handling
|
|
39
|
+
- **Input Parser**: Test message parsing and validation
|
|
40
|
+
- **Utils**: Test utility functions and helper methods
|
|
41
|
+
|
|
42
|
+
### Integration Tests (`integration/`)
|
|
43
|
+
Integration tests use real external dependencies (like Kafka via testcontainers) to test the complete flow of the system.
|
|
44
|
+
|
|
45
|
+
- **Real Kafka Integration**: Tests with actual Kafka instance using testcontainers
|
|
46
|
+
- **Producer Integration**: Real producer operations and message sending
|
|
47
|
+
- **Consumer Integration**: Real consumer operations and message consumption
|
|
48
|
+
- **Producer-Consumer Interaction**: Real interaction patterns between producer and consumer
|
|
49
|
+
- **End-to-End Flows**: Complete producer-consumer workflows
|
|
50
|
+
- **Connection Recovery**: Real connection failure and recovery scenarios
|
|
51
|
+
- **Performance Tests**: Stress tests and large message handling
|
|
52
|
+
|
|
53
|
+
## Running Tests
|
|
54
|
+
|
|
55
|
+
### Run All Tests
|
|
56
|
+
```bash
|
|
57
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### Run Only Unit Tests
|
|
61
|
+
```bash
|
|
62
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/unit/
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
### Run Only Integration Tests
|
|
66
|
+
```bash
|
|
67
|
+
# Run all integration tests
|
|
68
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/
|
|
69
|
+
|
|
70
|
+
# Run specific integration test file
|
|
71
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_producer_integration.py
|
|
72
|
+
|
|
73
|
+
# Run single test
|
|
74
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_integration.py::test_end_to_end_message_flow -v
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
### Run Specific Test Files
|
|
78
|
+
```bash
|
|
79
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/unit/test_config_validation.py
|
|
80
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_integration.py
|
|
81
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_producer_integration.py
|
|
82
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_consumer_integration.py
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Test Requirements
|
|
86
|
+
|
|
87
|
+
- **Unit Tests**: No external dependencies required
|
|
88
|
+
- **Integration Tests**: Requires Docker to be running for testcontainers
|
|
89
|
+
- **Slow Integration Tests**: Marked as skipped by default due to execution time (2-5 minutes each)
|
|
90
|
+
|
|
91
|
+
## Performance Notes
|
|
92
|
+
|
|
93
|
+
- **Unit Tests**: Fast execution (~10–15 seconds for all 176 tests)
|
|
94
|
+
- **Integration Tests**: Moderate execution (~2–2.5 minutes for 41 tests)
|
|
95
|
+
- Helpers reduce flakiness: `consume_until()` polls with timeout instead of fixed sleeps
|
|
96
|
+
|
|
97
|
+
## Docker Setup for Integration Tests
|
|
98
|
+
|
|
99
|
+
The integration tests use testcontainers to spin up real Kafka instances for testing. This requires Docker to be installed and running.
|
|
100
|
+
|
|
101
|
+
### Prerequisites
|
|
102
|
+
|
|
103
|
+
1. **Install Docker Desktop** (recommended):
|
|
104
|
+
- [Docker Desktop for Mac](https://docs.docker.com/desktop/mac/install/)
|
|
105
|
+
- [Docker Desktop for Windows](https://docs.docker.com/desktop/windows/install/)
|
|
106
|
+
- [Docker Desktop for Linux](https://docs.docker.com/desktop/linux/install/)
|
|
107
|
+
|
|
108
|
+
2. **Or install Docker Engine** (alternative):
|
|
109
|
+
```bash
|
|
110
|
+
# macOS (using Homebrew)
|
|
111
|
+
brew install docker
|
|
112
|
+
|
|
113
|
+
# Ubuntu/Debian
|
|
114
|
+
sudo apt-get update
|
|
115
|
+
sudo apt-get install docker.io
|
|
116
|
+
|
|
117
|
+
# CentOS/RHEL
|
|
118
|
+
sudo yum install docker
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
### Starting Docker
|
|
122
|
+
|
|
123
|
+
#### Docker Desktop
|
|
124
|
+
1. Launch Docker Desktop application
|
|
125
|
+
2. Wait for Docker to start (you'll see the Docker whale icon in your system tray)
|
|
126
|
+
3. Verify Docker is running:
|
|
127
|
+
```bash
|
|
128
|
+
docker --version
|
|
129
|
+
docker ps
|
|
130
|
+
```
|
|
131
|
+
|
|
132
|
+
#### Docker Engine (Linux)
|
|
133
|
+
```bash
|
|
134
|
+
# Start Docker service
|
|
135
|
+
sudo systemctl start docker
|
|
136
|
+
sudo systemctl enable docker
|
|
137
|
+
|
|
138
|
+
# Add your user to docker group (optional, to avoid sudo)
|
|
139
|
+
sudo usermod -aG docker $USER
|
|
140
|
+
# Log out and back in for group changes to take effect
|
|
141
|
+
|
|
142
|
+
# Verify Docker is running
|
|
143
|
+
docker --version
|
|
144
|
+
docker ps
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
### Running Integration Tests
|
|
148
|
+
|
|
149
|
+
Once Docker is running, you can execute the integration tests:
|
|
150
|
+
|
|
151
|
+
```bash
|
|
152
|
+
# Run all integration tests
|
|
153
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/
|
|
154
|
+
|
|
155
|
+
# Run a specific integration test
|
|
156
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/test_integration.py::test_producer_consumer_basic_flow
|
|
157
|
+
|
|
158
|
+
# Run integration tests with verbose output
|
|
159
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/ -v
|
|
160
|
+
|
|
161
|
+
# Run integration tests and show logs
|
|
162
|
+
PYTHONPATH=. uv run pytest cledar/kafka/tests/integration/ -s
|
|
163
|
+
```
|
|
164
|
+
|
|
165
|
+
### Troubleshooting Docker Issues
|
|
166
|
+
|
|
167
|
+
#### Docker not running
|
|
168
|
+
```bash
|
|
169
|
+
# Check if Docker is running
|
|
170
|
+
docker ps
|
|
171
|
+
# If you get "Cannot connect to the Docker daemon", Docker is not running
|
|
172
|
+
|
|
173
|
+
# Start Docker Desktop or Docker service
|
|
174
|
+
# Docker Desktop: Launch the application
|
|
175
|
+
# Docker Engine: sudo systemctl start docker
|
|
176
|
+
```
|
|
177
|
+
|
|
178
|
+
#### Permission denied errors
|
|
179
|
+
```bash
|
|
180
|
+
# Add user to docker group (Linux)
|
|
181
|
+
sudo usermod -aG docker $USER
|
|
182
|
+
# Log out and back in
|
|
183
|
+
|
|
184
|
+
# Or run with sudo (not recommended)
|
|
185
|
+
sudo docker ps
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
#### Port conflicts
|
|
189
|
+
If you have Kafka running locally on port 9092, the testcontainers will automatically use different ports. No action needed.
|
|
190
|
+
|
|
191
|
+
#### Resource constraints
|
|
192
|
+
If tests fail due to memory/CPU constraints:
|
|
193
|
+
```bash
|
|
194
|
+
# Check Docker resource limits in Docker Desktop settings
|
|
195
|
+
# Increase memory allocation if needed (recommended: 4GB+)
|
|
196
|
+
```
|
|
197
|
+
|
|
198
|
+
### Test Container Details
|
|
199
|
+
|
|
200
|
+
The integration tests use:
|
|
201
|
+
- **Kafka Image**: `confluentinc/cp-kafka:7.4.0`
|
|
202
|
+
- **Automatic Port Assignment**: testcontainers handles port conflicts
|
|
203
|
+
- **Automatic Cleanup**: containers are removed after tests complete
|
|
204
|
+
- **Session Scope**: Kafka container is shared across all integration tests in a session
|
|
205
|
+
|
|
206
|
+
## Test Statistics
|
|
207
|
+
|
|
208
|
+
- **Total Tests**: 217
|
|
209
|
+
- **Unit Tests**: 176
|
|
210
|
+
- **Integration Tests**: 41
|
|
211
|
+
|
|
212
|
+
## Notes
|
|
213
|
+
|
|
214
|
+
- All tests use `PYTHONPATH=.` to ensure proper module imports
|
|
215
|
+
- Integration tests use shared fixtures in `integration/conftest.py` and helpers in `integration/helpers.py`
|
|
216
|
+
- Test-wide teardown in `tests/conftest.py` ensures Kafka client threads don’t block process exit
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Pytest configuration and fixtures to ensure proper teardown of Kafka clients and
|
|
3
|
+
their background threads during tests.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import threading
|
|
7
|
+
import time
|
|
8
|
+
import weakref
|
|
9
|
+
from collections.abc import Callable, Generator
|
|
10
|
+
from typing import Any, cast
|
|
11
|
+
|
|
12
|
+
import pytest
|
|
13
|
+
|
|
14
|
+
from cledar.kafka.clients.base import BaseKafkaClient
|
|
15
|
+
from cledar.kafka.logger import logger
|
|
16
|
+
|
|
17
|
+
# Weak registry of every BaseKafkaClient constructed during the test session.
# Entries disappear automatically once a client is garbage-collected, so the
# registry never keeps a client alive on its own.
_active_clients: "weakref.WeakSet[BaseKafkaClient]" = weakref.WeakSet()
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _wrap_post_init() -> tuple[
    Callable[[BaseKafkaClient], None], Callable[[BaseKafkaClient], None]
]:
    """Build a replacement ``BaseKafkaClient.__post_init__`` that records
    every constructed client in the weak registry.

    Returns:
        A ``(original, wrapped)`` pair so the caller can install the wrapper
        and later restore the untouched implementation.
    """
    unpatched = BaseKafkaClient.__post_init__

    def registering_post_init(self: BaseKafkaClient) -> None:
        unpatched(self)
        try:
            _active_clients.add(self)
        except Exception:
            # Registration is best-effort and only matters for test teardown.
            pass

    return unpatched, registering_post_init
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _wrap_start_connection_check_thread() -> tuple[
    Callable[[BaseKafkaClient], None], Callable[[BaseKafkaClient], None]
]:
    """Build a replacement ``start_connection_check_thread`` that spawns a
    daemon thread, so forgotten ``shutdown()`` calls in tests cannot block
    interpreter exit.

    Returns:
        A ``(original, wrapped)`` pair for install/restore by the caller.
    """
    unpatched = BaseKafkaClient.start_connection_check_thread

    def daemonized_start(self: BaseKafkaClient) -> None:
        # Guard clause: a monitor thread is only ever started once per client.
        if self.connection_check_thread is not None:
            return
        watcher = threading.Thread(target=self._monitor_connection, daemon=True)
        self.connection_check_thread = watcher
        watcher.start()
        logger.info(
            f"Started {self.__class__.__name__} connection check thread.",
            extra={"interval": self.config.kafka_connection_check_interval_sec},
        )

    return unpatched, daemonized_start
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _cleanup_all_clients() -> None:
    """Shut down every registered client, swallowing teardown errors."""
    # Snapshot the weak set first: shutdown() can drop the last strong
    # reference and mutate the set mid-iteration.
    for kafka_client in tuple(_active_clients):
        try:
            kafka_client.shutdown()
        except Exception:
            # Cleanup is best-effort; a failing client must not break teardown.
            pass
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@pytest.fixture(scope="session", autouse=True)
|
|
74
|
+
def _session_monkeypatch() -> Generator[None, None, None]:
|
|
75
|
+
"""Apply monkeypatches for the entire test session and ensure final cleanup."""
|
|
76
|
+
# Monkeypatch __post_init__ to register instances
|
|
77
|
+
orig_post_init, wrapped_post_init = _wrap_post_init()
|
|
78
|
+
cast(Any, BaseKafkaClient).__post_init__ = wrapped_post_init
|
|
79
|
+
|
|
80
|
+
# Monkeypatch start_connection_check_thread to create daemon threads
|
|
81
|
+
orig_start, wrapped_start = _wrap_start_connection_check_thread()
|
|
82
|
+
cast(Any, BaseKafkaClient).start_connection_check_thread = wrapped_start
|
|
83
|
+
|
|
84
|
+
try:
|
|
85
|
+
yield
|
|
86
|
+
finally:
|
|
87
|
+
# Restore originals
|
|
88
|
+
cast(Any, BaseKafkaClient).__post_init__ = orig_post_init
|
|
89
|
+
cast(Any, BaseKafkaClient).start_connection_check_thread = orig_start
|
|
90
|
+
|
|
91
|
+
# Final cleanup at session end
|
|
92
|
+
_cleanup_all_clients()
|
|
93
|
+
|
|
94
|
+
# Give threads a small grace period to finish
|
|
95
|
+
time.sleep(0.1)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@pytest.fixture(autouse=True)
def _per_test_cleanup() -> Generator[None, None, None]:
    """Shut down any clients a test created once that test finishes."""
    try:
        yield
    finally:
        _cleanup_all_clients()
        # Short grace period so monitor threads exit promptly.
        time.sleep(0.05)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Integration tests for kafka_service."""
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
from collections.abc import Generator
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
from testcontainers.kafka import KafkaContainer
|
|
5
|
+
|
|
6
|
+
from cledar.kafka.clients.consumer import KafkaConsumer
|
|
7
|
+
from cledar.kafka.clients.producer import KafkaProducer
|
|
8
|
+
from cledar.kafka.config.schemas import KafkaConsumerConfig, KafkaProducerConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@pytest.fixture(scope="session")
|
|
12
|
+
def kafka_container() -> Generator[KafkaContainer, None, None]:
|
|
13
|
+
kafka = KafkaContainer("confluentinc/cp-kafka:7.4.0")
|
|
14
|
+
kafka = kafka.with_env("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true")
|
|
15
|
+
kafka = kafka.with_env("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1")
|
|
16
|
+
kafka = kafka.with_env("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1")
|
|
17
|
+
kafka = kafka.with_env("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1")
|
|
18
|
+
kafka = kafka.with_env("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0")
|
|
19
|
+
kafka = kafka.with_env("KAFKA_LOG_RETENTION_HOURS", "1")
|
|
20
|
+
with kafka as container:
|
|
21
|
+
yield container
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@pytest.fixture
def kafka_bootstrap_servers(kafka_container: KafkaContainer) -> str:
    """Bootstrap address of the session Kafka container."""
    # str() pins the type: testcontainers lacks stubs, so mypy sees Any here.
    return str(kafka_container.get_bootstrap_server())
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@pytest.fixture
def producer_config(kafka_bootstrap_servers: str) -> KafkaProducerConfig:
    """Producer settings aimed at the test container.

    Timeouts and intervals are kept short so integration tests stay fast.
    """
    return KafkaProducerConfig(
        kafka_servers=kafka_bootstrap_servers,
        kafka_group_id="integration-test-producer",
        kafka_topic_prefix="integration-test.",
        kafka_block_buffer_time_sec=1,
        kafka_connection_check_timeout_sec=5,
        kafka_connection_check_interval_sec=10,
    )
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@pytest.fixture
def consumer_config(kafka_bootstrap_servers: str) -> KafkaConsumerConfig:
    """Consumer settings aimed at the test container.

    Reads from the earliest offset so tests see messages produced before the
    consumer subscribed; short timeouts keep the suite fast.
    """
    return KafkaConsumerConfig(
        kafka_servers=kafka_bootstrap_servers,
        kafka_group_id="integration-test-consumer",
        kafka_offset="earliest",
        kafka_topic_prefix="integration-test.",
        kafka_block_consumer_time_sec=1,
        kafka_connection_check_timeout_sec=5,
        kafka_auto_commit_interval_ms=1000,
        kafka_connection_check_interval_sec=10,
    )
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@pytest.fixture
def producer(
    producer_config: KafkaProducerConfig,
) -> Generator[KafkaProducer, None, None]:
    """Connected KafkaProducer; shut down at fixture teardown."""
    kafka_producer = KafkaProducer(producer_config)
    kafka_producer.connect()
    try:
        yield kafka_producer
    finally:
        # Always release the producer, even if the test failed.
        kafka_producer.shutdown()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@pytest.fixture
def consumer(
    consumer_config: KafkaConsumerConfig,
) -> Generator[KafkaConsumer, None, None]:
    """Connected KafkaConsumer; shut down at fixture teardown."""
    kafka_consumer = KafkaConsumer(consumer_config)
    kafka_consumer.connect()
    try:
        yield kafka_consumer
    finally:
        # Always release the consumer, even if the test failed.
        kafka_consumer.shutdown()
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import time
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel
|
|
4
|
+
|
|
5
|
+
from cledar.kafka.clients.consumer import KafkaConsumer
|
|
6
|
+
from cledar.kafka.clients.producer import KafkaProducer
|
|
7
|
+
from cledar.kafka.models.message import KafkaMessage
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class E2EData(BaseModel):
    """Payload exchanged in end-to-end producer/consumer tests."""

    # Unique identifier used to correlate sent and received messages.
    id: str
    # Free-form message body.
    message: str
    # Send time as a UNIX timestamp in seconds.
    timestamp: float

    def to_json(self) -> str:
        """Serialize this payload to a JSON string via pydantic."""
        return self.model_dump_json()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def ensure_topic_and_subscribe(
    producer: KafkaProducer,
    consumer: KafkaConsumer,
    topic: str,
    init_payload: str = '{"id":"init","message":"init"}',
    create_wait: float = 2.0,
    subscribe_wait: float = 1.0,
) -> None:
    """Force *topic* into existence, then subscribe *consumer* to it.

    Producing one init message triggers broker-side topic auto-creation
    (enabled in the test container). The two waits give the broker time to
    create the topic and to settle the subscription, respectively.
    """
    # Step 1: provoke topic creation with a throwaway message.
    producer.send(topic=topic, value=init_payload, key="init-key")
    time.sleep(create_wait)
    # Step 2: subscribe once the topic is known to exist.
    consumer.subscribe([topic])
    time.sleep(subscribe_wait)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def consume_until(
    consumer: KafkaConsumer,
    expected_count: int,
    timeout_seconds: float = 10.0,
    idle_sleep: float = 0.2,
) -> list[KafkaMessage]:
    """Poll *consumer* until *expected_count* messages arrive or time runs out.

    Args:
        consumer: Connected consumer whose ``consume_next()`` returns the next
            message or ``None`` when nothing is currently available.
        expected_count: Number of messages to wait for.
        timeout_seconds: Maximum total time to keep polling.
        idle_sleep: Pause between polls when no message was available.

    Returns:
        The messages received so far; may be shorter than ``expected_count``
        if the timeout elapsed first.
    """
    # Fix: use time.monotonic() for the deadline. time.time() follows the wall
    # clock and can jump (NTP sync, DST), which would silently stretch or cut
    # the timeout; monotonic time cannot go backwards.
    deadline = time.monotonic() + timeout_seconds
    received: list[KafkaMessage] = []
    while len(received) < expected_count and time.monotonic() < deadline:
        message = consumer.consume_next()
        if message is None:
            # Nothing available yet: back off briefly instead of busy-spinning.
            time.sleep(idle_sleep)
        else:
            received.append(message)
    return received
|