pytest-kafka-broker 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,16 @@
1
+ Metadata-Version: 2.4
2
+ Name: pytest-kafka-broker
3
+ Version: 0.0.0
4
+ Summary: Pytest plugin to run a single-broker Kafka cluster
5
+ Author-email: Leo Singer <leo.singer@ligo.org>
6
+ License-Expression: Apache-2.0
7
+ Classifier: Framework :: Pytest
8
+ Classifier: Topic :: System :: Networking
9
+ Requires-Python: >=3.11
10
+ Requires-Dist: astropy
11
+ Requires-Dist: confluent-kafka
12
+ Requires-Dist: rich
13
+ Requires-Dist: pytest-asyncio
14
+ Provides-Extra: docs
15
+ Requires-Dist: sphinx-astropy[confv2]; extra == "docs"
16
+ Requires-Dist: sphinx-automodapi>=0.20.0; extra == "docs"
@@ -0,0 +1,6 @@
1
+ pytest_kafka_broker.py,sha256=h0D80ZYRw_cxB7wdwKvhctSiGyB_GNX4G75w8M_BQF0,6036
2
+ pytest_kafka_broker-0.0.0.dist-info/METADATA,sha256=TOPcXLoS_VeoDr_DiXmZKorulCQlhSZZk9zBCKV8dkk,537
3
+ pytest_kafka_broker-0.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
4
+ pytest_kafka_broker-0.0.0.dist-info/entry_points.txt,sha256=mjFsRbu6FOiZkUOlSaXpo4QdLHwniNB-p_NEIgRbDnw,46
5
+ pytest_kafka_broker-0.0.0.dist-info/top_level.txt,sha256=nTrYx9xVeK5hsqbhBRL2bgBV_ea-J66_f4Dk8eD-Ci0,20
6
+ pytest_kafka_broker-0.0.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.10.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [pytest11]
2
+ kafka_broker = pytest_kafka_broker
@@ -0,0 +1 @@
1
+ pytest_kafka_broker
pytest_kafka_broker.py ADDED
@@ -0,0 +1,187 @@
import asyncio
import os
import subprocess
from dataclasses import dataclass
from pathlib import Path
from tarfile import TarFile
from tempfile import TemporaryDirectory
from uuid import uuid4

import pytest
import pytest_asyncio
from astropy.config import get_cache_dir_path
from astropy.utils.data import get_readable_fileobj
from confluent_kafka import Consumer, Producer
from confluent_kafka.aio import AIOConsumer, AIOProducer
from rich.status import Status
16
+
# Public API of the plugin: the fixture itself and the context object it
# yields. Everything else in this module is an implementation detail.
__all__ = (
    "kafka_broker",
    "KafkaBrokerContext",
)


# Kafka binary distributions are named kafka_<scala>-<kafka>.tgz; these two
# pins select exactly which archive the kafka_home fixture downloads.
SCALA_VERSION = "2.13"
KAFKA_VERSION = "4.1.1"
async def wait_port(port: int, timeout: float = 0.25):
    """Wait until a connection is detected listening on the given port.

    Polls by attempting a TCP connection to ``localhost:port``; each
    successfully opened connection is closed again immediately.  Note that
    ``timeout`` is the delay between polls, not an overall deadline — this
    coroutine waits indefinitely until something accepts the connection.
    """
    connected = False
    while not connected:
        try:
            _, writer = await asyncio.open_connection("localhost", port)
            connected = True
        except OSError:
            # Nothing is listening yet (connection refused / unreachable);
            # back off briefly and try again.
            await asyncio.sleep(timeout)
    writer.close()
    await writer.wait_closed()
@pytest.fixture(scope="session")
def kafka_home() -> Path:
    """Download and install Kafka into a cached directory.

    Returns the path where Kafka is installed.  The install is cached
    across sessions under astropy's cache directory; the presence of
    ``dest_path`` is the marker that installation completed.
    """
    dirname = f"kafka_{SCALA_VERSION}-{KAFKA_VERSION}"
    cache_path = get_cache_dir_path() / __package__
    dest_path = cache_path / dirname
    if not dest_path.exists():
        # Create only the parent cache directory here.  Creating dest_path
        # itself before the download completes (as this code previously did)
        # poisons the cache: an interrupted download leaves an empty
        # dest_path behind, and the exists() check above then skips
        # installation forever.
        cache_path.mkdir(parents=True, exist_ok=True)
        with (
            Status("Downloading Kafka"),
            get_readable_fileobj(
                f"https://dlcdn.apache.org/kafka/{KAFKA_VERSION}/{dirname}.tgz",
                encoding="binary",
                cache=True,
            ) as download,
            # NOTE(review): get_readable_fileobj transparently decompresses
            # the .tgz, so a plain (uncompressed-mode) TarFile appears
            # correct here — confirm against astropy's docs.
            TarFile(fileobj=download) as tarfile,
            # Extract into a sibling temp dir on the same filesystem so the
            # final rename is a cheap same-device move.
            TemporaryDirectory(dir=cache_path) as temp_dir,
        ):
            tarfile.extractall(temp_dir)
            # Publish atomically: rename into place only after a complete,
            # successful extraction.  dest_path does not exist at this
            # point, so the rename succeeds on all platforms.
            (Path(temp_dir) / dirname).rename(dest_path)
    return dest_path
# Shared docstring template for KafkaBrokerContext's factory methods.  The
# ``{}`` placeholder is filled with a per-method summary line via
# _doc.format(...) inside the class body, so all five methods document the
# same ``config`` parameter consistently.
_doc = """{}

Parameters
----------
config
    Extra Kafka client configuration properties. See list in the
    `librdkafka documentation <https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md>`_.
"""
@dataclass
class KafkaBrokerContext:
    """Information and convenience methods for a temporary Kafka cluster.

    This object is returned by :func:`kafka_broker`.
    """

    bootstrap_server: str
    """Kafka bootstrap server in the form :samp:`{host}:{port}`."""

    def config(self, config: dict | None = None):
        # Copy the caller's settings (never mutate their dict) and force
        # the bootstrap server to point at the temporary cluster.
        merged = dict(config or {})
        merged["bootstrap.servers"] = self.bootstrap_server
        return merged

    def producer(self, config: dict | None = None) -> Producer:
        return Producer(self.config(config))

    def aio_producer(self, config: dict | None = None) -> AIOProducer:
        return AIOProducer(self.config(config))

    def consumer(self, config: dict | None = None) -> Consumer:
        return Consumer(self.config(config))

    def aio_consumer(self, config: dict | None = None) -> AIOConsumer:
        return AIOConsumer(self.config(config))

    # Render each method's docstring from the shared module-level template
    # so the ``config`` parameter is documented once, uniformly.
    config.__doc__ = _doc.format("Get the configuration for a Kafka client.")
    producer.__doc__ = _doc.format("Create a Kafka producer connected to the cluster.")
    consumer.__doc__ = _doc.format("Create a Kafka consumer connected to the cluster.")
    aio_producer.__doc__ = _doc.format(
        "Create an asynchronous Kafka producer connected to the cluster."
    )
    aio_consumer.__doc__ = _doc.format(
        "Create an asynchronous Kafka consumer connected to the cluster."
    )
# The template has been baked into the method docstrings above; remove it
# from the module namespace so it is not mistaken for public API.
del _doc
@pytest_asyncio.fixture
async def kafka_broker(kafka_home, tmp_path, unused_tcp_port_factory):
    """Pytest fixture to run a local, temporary Kafka broker.

    Formats a fresh single-node KRaft storage directory under the test's
    tmp_path, starts the broker as a subprocess on unused ports, waits for
    it to accept connections, and tears it down after the test.

    Returns
    -------
    : KafkaBrokerContext
    """
    kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
    kafka_server_start = kafka_home / "bin" / "kafka-server-start.sh"
    config_path = tmp_path / "server.properties"
    data_path = tmp_path / "run"
    data_path.mkdir()
    log_path = tmp_path / "log"
    log_path.mkdir()
    # Inherit the parent environment and override only LOG_DIR.  Passing a
    # bare {"LOG_DIR": ...} (as this code previously did) wipes PATH and
    # JAVA_HOME, which the Kafka shell scripts rely on to locate java.
    env = {**os.environ, "LOG_DIR": str(log_path)}
    plaintext_port = unused_tcp_port_factory()
    controller_port = unused_tcp_port_factory()
    # Single node acts as both broker and controller; all replication
    # factors are 1 because there is only one broker.
    config_path.write_text(
        f"""
process.roles=broker,controller
node.id=1
controller.quorum.bootstrap.servers=127.0.0.1:{controller_port}
listeners=PLAINTEXT://127.0.0.1:{plaintext_port},CONTROLLER://127.0.0.1:{controller_port}
controller.listener.names=CONTROLLER
listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
log.dirs={data_path}
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
share.coordinator.state.topic.replication.factor=1
share.coordinator.state.topic.min.isr=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1
"""
    )
    with Status("Starting Kafka broker"):
        # Format the KRaft storage directory with a fresh cluster id before
        # the first start; Kafka refuses to boot on unformatted log.dirs.
        subprocess.run(
            [
                kafka_storage,
                "format",
                "--standalone",
                "-t",
                str(uuid4()),
                "-c",
                config_path,
            ],
            env=env,
            check=True,
            stdout=subprocess.DEVNULL,
        )
        process = await asyncio.create_subprocess_exec(
            kafka_server_start,
            config_path,
            env=env,
            stdin=None,
            stdout=subprocess.DEVNULL,
            stderr=None,  # inherit stderr so startup failures are visible
        )
    with Status(f"Waiting for connection on port {plaintext_port}"):
        # Race "broker is listening" against "broker died" so a crashing
        # broker fails the test promptly instead of hanging the wait.
        exited = asyncio.create_task(process.wait())
        port = asyncio.create_task(wait_port(plaintext_port))
        done, _ = await asyncio.wait(
            (exited, port), return_when=asyncio.FIRST_COMPLETED
        )
        if exited in done:
            port.cancel()
            raise RuntimeError("Kafka broker terminated unexpectedly")
    try:
        yield KafkaBrokerContext(f"127.0.0.1:{plaintext_port}")
    finally:
        with Status("Stopping Kafka broker"):
            process.terminate()
            # Reuse the wait() task created above to reap the child.
            await exited