pytest-kafka-broker 0.4.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
--- pytest_kafka_broker/__init__.py
+++ pytest_kafka_broker/__init__.py
@@ -1,71 +1,12 @@
-import asyncio
-import subprocess
-from collections.abc import AsyncGenerator
 from dataclasses import dataclass
-from errno import EADDRINUSE
-from pathlib import Path
-from socket import socket
-from tarfile import TarFile
-from tempfile import TemporaryDirectory
-from uuid import uuid4
-
-import pytest
-import pytest_asyncio
-from astropy.config import get_cache_dir_path  # type: ignore[import-untyped]
-from astropy.utils.data import get_readable_fileobj  # type: ignore[import-untyped]
+
 from confluent_kafka import Consumer, Producer
 from confluent_kafka.admin import AdminClient
 from confluent_kafka.aio import AIOConsumer, AIOProducer
-from rich.status import Status
-
-__all__ = (
-    "kafka_broker",
-    "KafkaBrokerContext",
-)
-
-
-SCALA_VERSION = "2.13"
-KAFKA_VERSION = "4.1.1"
-
 
-async def wait_port(port: int, timeout: float = 0.25) -> None:
-    """Wait until a connection is detected listening on the given port."""
-    while True:
-        try:
-            _, writer = await asyncio.open_connection("localhost", port)
-        except OSError:
-            await asyncio.sleep(timeout)
-        else:
-            writer.close()
-            await writer.wait_closed()
-            return
-
-
-@pytest.fixture(scope="session")
-def kafka_home() -> Path:
-    """Download and install Kafka into a cached directory.
-
-    Returns the path where Kafka is installed.
-    """
-    dirname = f"kafka_{SCALA_VERSION}-{KAFKA_VERSION}"
-    cache_path = get_cache_dir_path() / __package__
-    dest_path = cache_path / dirname
-    if not dest_path.exists():
-        dest_path.mkdir(parents=True, exist_ok=True)
-        with (
-            Status("Downloading Kafka"),
-            get_readable_fileobj(
-                f"https://dlcdn.apache.org/kafka/{KAFKA_VERSION}/{dirname}.tgz",
-                encoding="binary",
-                cache=True,
-            ) as download,
-            TarFile(fileobj=download) as tarfile,
-            TemporaryDirectory(dir=cache_path) as temp_dir,
-        ):
-            tarfile.extractall(temp_dir)
-            (Path(temp_dir) / dirname).rename(dest_path)
-    return dest_path
+from .version import __version__  # noqa: F401
 
+__all__ = ("KafkaBrokerContext",)
 
 _doc = """{}
 
@@ -77,36 +18,6 @@ config
 """
 
 
-def _unused_tcp_port(default: int = 0) -> int:
-    with socket() as sock:
-        try:
-            sock.bind(("127.0.0.1", default))
-        except OSError as e:
-            if e.errno != EADDRINUSE:
-                raise
-            sock.bind(("127.0.0.1", 0))
-        _, port = sock.getsockname()
-        return port
-
-
-@pytest.fixture
-def find_unused_tcp_port():
-    """Unused TCP port factory.
-
-    This is similar to `unused_tcp_port_factory` from pytest_asyncio, but it
-    supports a default port argument, and is not session-scoped.
-    """
-    used = set()
-
-    def factory(default: int = 0) -> int:
-        while (port := _unused_tcp_port(default)) in used:
-            pass
-        used.add(port)
-        return port
-
-    return factory
-
-
 @dataclass
 class KafkaBrokerContext:
     """Information and convenience methods for a temporary Kafka cluster.
@@ -148,78 +59,3 @@ class KafkaBrokerContext
 
 
 del _doc
-
-
-@pytest_asyncio.fixture
-async def kafka_broker(
-    kafka_home, tmp_path, find_unused_tcp_port
-) -> AsyncGenerator[KafkaBrokerContext]:
-    """Pytest fixture to run a local, temporary Kafka broker."""
-    kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
-    kafka_server_start = kafka_home / "bin" / "kafka-server-start.sh"
-    config_path = tmp_path / "server.properties"
-    data_path = tmp_path / "run"
-    data_path.mkdir()
-    log_path = tmp_path / "log"
-    log_path.mkdir()
-    env = {"LOG_DIR": str(log_path)}
-    plaintext_port = find_unused_tcp_port(9092)
-    controller_port = find_unused_tcp_port(9093)
-    config_path.write_text(
-        f"""
-        process.roles=broker,controller
-        node.id=1
-        controller.quorum.bootstrap.servers=127.0.0.1:{controller_port}
-        listeners=PLAINTEXT://127.0.0.1:{plaintext_port},CONTROLLER://127.0.0.1:{controller_port}
-        controller.listener.names=CONTROLLER
-        listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
-        log.dirs={data_path}
-        num.recovery.threads.per.data.dir=1
-        offsets.topic.replication.factor=1
-        share.coordinator.state.topic.replication.factor=1
-        share.coordinator.state.topic.min.isr=1
-        transaction.state.log.replication.factor=1
-        transaction.state.log.min.isr=1
-        """
-    )
-    with Status("Starting Kafka broker"):
-        subprocess.run(
-            [
-                kafka_storage,
-                "format",
-                "--standalone",
-                "-t",
-                str(uuid4()),
-                "-c",
-                config_path,
-            ],
-            env=env,
-            check=True,
-            stdout=subprocess.DEVNULL,
-        )
-    process = await asyncio.create_subprocess_exec(
-        kafka_server_start,
-        config_path,
-        env=env,
-        stdin=None,
-        stdout=subprocess.DEVNULL,
-        stderr=None,
-    )
-    with Status(f"Waiting for connection on port {plaintext_port}"):
-        exited = asyncio.create_task(process.wait())
-        port = asyncio.create_task(wait_port(plaintext_port))
-        done, _ = await asyncio.wait(
-            (exited, port), return_when=asyncio.FIRST_COMPLETED
-        )
-        if exited in done:
-            port.cancel()
-            raise RuntimeError("Kafka broker terminated unexpectedly")
-    try:
-        yield KafkaBrokerContext(f"127.0.0.1:{plaintext_port}")
-    finally:
-        with Status("Stopping Kafka broker"):
-            try:
-                process.terminate()
-            except ProcessLookupError:
-                pass  # Process has already terminated
-            await exited
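
The three hunks above strip the fixtures and Kafka bootstrap helpers out of pytest_kafka_broker/__init__.py, leaving only the KafkaBrokerContext dataclass, the setuptools-scm version re-export, and the shared docstring template. A minimal sketch of what downstream code can still import after this change (import paths inferred from the hunks above, not verified against the published wheel):

    # Sketch: the public class and version remain importable from the package root;
    # the fixtures now live in pytest_kafka_broker.plugin (added below) and are loaded
    # automatically through the [pytest11] entry point shown later in this diff.
    from pytest_kafka_broker import KafkaBrokerContext, __version__
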
--- /dev/null
+++ pytest_kafka_broker/plugin.py
@@ -0,0 +1,186 @@
+import asyncio
+import subprocess
+from collections.abc import AsyncGenerator
+from errno import EADDRINUSE
+from pathlib import Path
+from socket import socket
+from tarfile import TarFile
+from tempfile import TemporaryDirectory
+from uuid import uuid4
+
+import pytest
+import pytest_asyncio
+from astropy.config import get_cache_dir_path  # type: ignore[import-untyped]
+from astropy.utils.data import get_readable_fileobj  # type: ignore[import-untyped]
+from rich.status import Status
+
+from . import KafkaBrokerContext
+
+SCALA_VERSION = "2.13"
+KAFKA_VERSION = "4.1.1"
+
+
+async def wait_port(port: int, timeout: float = 0.25) -> None:
+    """Wait until a connection is detected listening on the given port."""
+    while True:
+        try:
+            _, writer = await asyncio.open_connection("localhost", port)
+        except OSError:
+            await asyncio.sleep(timeout)
+        else:
+            writer.close()
+            await writer.wait_closed()
+            return
+
+
+@pytest.fixture(scope="session")
+def kafka_home() -> Path:
+    """Download and install Kafka into a cached directory.
+
+    Returns the path where Kafka is installed.
+    """
+    dirname = f"kafka_{SCALA_VERSION}-{KAFKA_VERSION}"
+    cache_path = get_cache_dir_path() / __package__
+    dest_path = cache_path / dirname
+    if not dest_path.exists():
+        dest_path.mkdir(parents=True, exist_ok=True)
+        with (
+            Status("Downloading Kafka"),
+            get_readable_fileobj(
+                f"https://dlcdn.apache.org/kafka/{KAFKA_VERSION}/{dirname}.tgz",
+                encoding="binary",
+                cache=True,
+            ) as download,
+            TarFile(fileobj=download) as tarfile,
+            TemporaryDirectory(dir=cache_path) as temp_dir,
+        ):
+            tarfile.extractall(temp_dir)
+            (Path(temp_dir) / dirname).rename(dest_path)
+    return dest_path
+
+
+_doc = """{}
+
+Parameters
+----------
+config
+    Extra Kafka client configuration properties. See list in the
+    `librdkafka documentation <https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md>`_.
+"""
+
+
+def _unused_tcp_port(default: int = 0) -> int:
+    with socket() as sock:
+        try:
+            sock.bind(("127.0.0.1", default))
+        except OSError as e:
+            if e.errno != EADDRINUSE:
+                raise
+            sock.bind(("127.0.0.1", 0))
+        _, port = sock.getsockname()
+        return port
+
+
+@pytest.fixture
+def find_unused_tcp_port():
+    """Unused TCP port factory.
+
+    This is similar to `unused_tcp_port_factory` from pytest_asyncio, but it
+    supports a default port argument, and is not session-scoped.
+    """
+    used = set()
+
+    def factory(default: int = 0) -> int:
+        while (port := _unused_tcp_port(default)) in used:
+            pass
+        used.add(port)
+        return port
+
+    return factory
+
+
+@pytest_asyncio.fixture
+async def kafka_broker(
+    kafka_home, tmp_path, find_unused_tcp_port, pytestconfig
+) -> AsyncGenerator[KafkaBrokerContext]:
+    """Pytest fixture to run a local, temporary Kafka broker."""
+    kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
+    kafka_server_start = kafka_home / "bin" / "kafka-server-start.sh"
+    config_path = tmp_path / "server.properties"
+    data_path = tmp_path / "run"
+    data_path.mkdir()
+    log_path = tmp_path / "log"
+    log_path.mkdir()
+    env = {"LOG_DIR": str(log_path)}
+    plaintext_port = find_unused_tcp_port(9092)
+    controller_port = find_unused_tcp_port(9093)
+    extra_config = "\n".join(pytestconfig.getini("kafka_broker_extra_config"))
+    config_path.write_text(
+        f"""
+        process.roles=broker,controller
+        node.id=1
+        controller.quorum.bootstrap.servers=127.0.0.1:{controller_port}
+        listeners=PLAINTEXT://127.0.0.1:{plaintext_port},CONTROLLER://127.0.0.1:{controller_port}
+        controller.listener.names=CONTROLLER
+        listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+        log.dirs={data_path}
+        num.recovery.threads.per.data.dir=1
+        offsets.topic.replication.factor=1
+        share.coordinator.state.topic.replication.factor=1
+        share.coordinator.state.topic.min.isr=1
+        transaction.state.log.replication.factor=1
+        transaction.state.log.min.isr=1
+        {extra_config}
+        """
+    )
+    with Status("Starting Kafka broker"):
+        subprocess.run(
+            [
+                kafka_storage,
+                "format",
+                "--standalone",
+                "-t",
+                str(uuid4()),
+                "-c",
+                config_path,
+            ],
+            env=env,
+            check=True,
+            stdout=subprocess.DEVNULL,
+        )
+    process = await asyncio.create_subprocess_exec(
+        kafka_server_start,
+        config_path,
+        env=env,
+        stdin=None,
+        stdout=subprocess.DEVNULL,
+        stderr=None,
+    )
+    with Status(f"Waiting for connection on port {plaintext_port}"):
+        exited = asyncio.create_task(process.wait())
+        port = asyncio.create_task(wait_port(plaintext_port))
+        done, _ = await asyncio.wait(
+            (exited, port), return_when=asyncio.FIRST_COMPLETED
+        )
+        if exited in done:
+            port.cancel()
+            raise RuntimeError("Kafka broker terminated unexpectedly")
+    try:
+        bootstrap_server = f"127.0.0.1:{plaintext_port}"
+        print(f"Kafka broker running at {bootstrap_server}")
+        yield KafkaBrokerContext(bootstrap_server)
+    finally:
+        with Status("Stopping Kafka broker"):
+            try:
+                process.terminate()
+            except ProcessLookupError:
+                pass  # Process has already terminated
+            await exited
+
+
+def pytest_addoption(parser: pytest.Parser):
+    parser.addini(
+        "kafka_broker_extra_config",
+        type="linelist",
+        help="Extra broker configuration settings. See https://kafka.apache.org/41/configuration/broker-configs/",
+    )
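
The new plugin.py combines the relocated fixtures with a kafka_broker_extra_config ini option (a pytest linelist: one broker property per line, joined and appended verbatim to the generated server.properties at {extra_config}). A minimal usage sketch under those assumptions; the option values and test body below are illustrative and not taken from this diff:

    # pytest.ini (sketch): extra broker settings for the temporary cluster
    # [pytest]
    # kafka_broker_extra_config =
    #     auto.create.topics.enable=true
    #     num.partitions=4

    # test_smoke.py (sketch): kafka_broker is an async pytest_asyncio fixture
    import pytest

    from pytest_kafka_broker import KafkaBrokerContext


    @pytest.mark.asyncio
    async def test_broker_comes_up(kafka_broker):
        # The fixture yields a KafkaBrokerContext once the broker accepts
        # connections on the chosen PLAINTEXT port (see wait_port above).
        assert isinstance(kafka_broker, KafkaBrokerContext)
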
--- /dev/null
+++ pytest_kafka_broker/version.py
@@ -0,0 +1,34 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple
+    from typing import Union
+
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
+else:
+    VERSION_TUPLE = object
+    COMMIT_ID = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
+
+__version__ = version = '0.6.0'
+__version_tuple__ = version_tuple = (0, 6, 0)
+
+__commit_id__ = commit_id = 'g025e5871a'
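
version.py is the standard stamp generated by setuptools-scm at build time; the package root re-exports __version__ (see the `from .version import __version__` line in the first hunk). A quick introspection sketch:

    # Sketch: read the build-time version metadata of the installed package
    import pytest_kafka_broker
    from pytest_kafka_broker import version

    print(pytest_kafka_broker.__version__)  # '0.6.0'
    print(version.version_tuple)            # (0, 6, 0)
    print(version.commit_id)                # 'g025e5871a'
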
--- pytest_kafka_broker-0.4.0.dist-info/METADATA
+++ pytest_kafka_broker-0.6.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytest-kafka-broker
-Version: 0.4.0
+Version: 0.6.0
 Summary: Pytest plugin to run a single-broker Kafka cluster
 Author-email: Leo Singer <leo.singer@ligo.org>
 License-Expression: Apache-2.0
--- /dev/null
+++ pytest_kafka_broker-0.6.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+pytest_kafka_broker/__init__.py,sha256=tajXiFFA3UDUWhQ-Og7tSJ2LdtY7Uyn7hA5JMyEXH5c,2063
+pytest_kafka_broker/plugin.py,sha256=xSxN94AEwqhDgT9p19MmBYtkpVwWf88sUU7lpQcw0vo,5993
+pytest_kafka_broker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pytest_kafka_broker/version.py,sha256=M3F2qtkZWSFVyzXk0yRXuDuEZlCOpZUCp10aRBxLSmA,712
+pytest_kafka_broker-0.6.0.dist-info/METADATA,sha256=QKfDpsAZxchzP_7Lxh_0UsQ3RtUCB6JBk01XlPTeqb8,683
+pytest_kafka_broker-0.6.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pytest_kafka_broker-0.6.0.dist-info/entry_points.txt,sha256=HBjuONsJJfvMwZGUs4bWFTWithmb_tOk0L5Nn5Fvduk,53
+pytest_kafka_broker-0.6.0.dist-info/top_level.txt,sha256=nTrYx9xVeK5hsqbhBRL2bgBV_ea-J66_f4Dk8eD-Ci0,20
+pytest_kafka_broker-0.6.0.dist-info/RECORD,,
--- /dev/null
+++ pytest_kafka_broker-0.6.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+kafka_broker = pytest_kafka_broker.plugin
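
The pytest11 entry point now targets the new plugin submodule instead of the package root (compare the removed entry_points.txt below), so pytest keeps auto-loading the plugin on installation. A sketch of how to confirm the registration of an installed wheel (the keyword form of entry_points requires Python 3.10+):

    # Sketch: inspect the pytest plugin registration via importlib.metadata
    from importlib.metadata import entry_points

    (ep,) = entry_points(group="pytest11", name="kafka_broker")
    print(ep.value)  # 'pytest_kafka_broker.plugin' in 0.6.0; was 'pytest_kafka_broker' in 0.4.0
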
--- pytest_kafka_broker-0.4.0.dist-info/RECORD
+++ /dev/null
@@ -1,7 +0,0 @@
-pytest_kafka_broker/__init__.py,sha256=c3QqN1zeed-X-IoL1aAKqY6h10GfGpkuo1-WJEBKuVs,7310
-pytest_kafka_broker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pytest_kafka_broker-0.4.0.dist-info/METADATA,sha256=kSMiKX3AjTfzvBbOPUaOxyK36IA-SM0MWRuaHE-18SA,683
-pytest_kafka_broker-0.4.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-pytest_kafka_broker-0.4.0.dist-info/entry_points.txt,sha256=mjFsRbu6FOiZkUOlSaXpo4QdLHwniNB-p_NEIgRbDnw,46
-pytest_kafka_broker-0.4.0.dist-info/top_level.txt,sha256=nTrYx9xVeK5hsqbhBRL2bgBV_ea-J66_f4Dk8eD-Ci0,20
-pytest_kafka_broker-0.4.0.dist-info/RECORD,,
--- pytest_kafka_broker-0.4.0.dist-info/entry_points.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-[pytest11]
-kafka_broker = pytest_kafka_broker