pytest-kafka-broker 0.5.0.tar.gz → 0.6.0.tar.gz
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/.gitignore +1 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/.pre-commit-config.yaml +1 -7
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/PKG-INFO +1 -1
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/pyproject.toml +7 -1
- pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/__init__.py +61 -0
- pytest_kafka_broker-0.5.0/src/pytest_kafka_broker/__init__.py → pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/plugin.py +12 -53
- pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/version.py +34 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/PKG-INFO +1 -1
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/SOURCES.txt +3 -0
- pytest_kafka_broker-0.6.0/src/pytest_kafka_broker.egg-info/entry_points.txt +2 -0
- pytest_kafka_broker-0.6.0/tests/test_config.py +11 -0
- pytest_kafka_broker-0.5.0/src/pytest_kafka_broker.egg-info/entry_points.txt +0 -2
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/.gitlab-ci.yml +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/.readthedocs.yml +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/README.md +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/docs/Makefile +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/docs/conf.py +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/docs/index.rst +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/docs/make.bat +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/setup.cfg +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker/py.typed +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/dependency_links.txt +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/requires.txt +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/top_level.txt +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/tests/__init__.py +0 -0
- {pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/tests/test_kafka.py +0 -0
{pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/.pre-commit-config.yaml

@@ -35,14 +35,8 @@ repos:
           - tomli
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.
+    rev: v0.15.0
     hooks:
       - id: ruff-check
         args: ["--extend-select", "I", "--fix"]
      - id: ruff-format
-
-  - repo: https://github.com/tofuutils/pre-commit-opentofu
-    rev: v2.2.2
-    hooks:
-      - id: tofu_fmt
-      - id: tofu_validate
{pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/pyproject.toml

@@ -24,7 +24,7 @@ classifiers = [
 ]
 
 [project.entry-points.pytest11]
-kafka_broker = "pytest_kafka_broker"
+kafka_broker = "pytest_kafka_broker.plugin"
 
 [project.optional-dependencies]
 docs = [
@@ -50,3 +50,9 @@ exclude = [
 exclude_gitignore = true
 
 [tool.setuptools_scm]
+version_file = "src/pytest_kafka_broker/version.py"
+
+[tool.pytest]
+kafka_broker_extra_config = [
+    "message.max.bytes = 123456",
+]
pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/__init__.py

@@ -0,0 +1,61 @@
+from dataclasses import dataclass
+
+from confluent_kafka import Consumer, Producer
+from confluent_kafka.admin import AdminClient
+from confluent_kafka.aio import AIOConsumer, AIOProducer
+
+from .version import __version__  # noqa: F401
+
+__all__ = ("KafkaBrokerContext",)
+
+_doc = """{}
+
+Parameters
+----------
+config
+    Extra Kafka client configuration properties. See list in the
+    `librdkafka documentation <https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md>`_.
+"""
+
+
+@dataclass
+class KafkaBrokerContext:
+    """Information and convenience methods for a temporary Kafka cluster.
+
+    This object is returned by :func:`kafka_broker`.
+    """
+
+    bootstrap_server: str
+    """Kafka bootstrap server in the form :samp:`{host}:{port}`."""
+
+    def config(self, config: dict | None = None) -> dict:
+        return {**(config or {}), "bootstrap.servers": self.bootstrap_server}
+
+    def admin(self, config: dict | None = None) -> AdminClient:
+        return AdminClient(self.config(config))
+
+    def producer(self, config: dict | None = None) -> Producer:
+        return Producer(self.config(config))
+
+    def consumer(self, config: dict | None = None) -> Consumer:
+        return Consumer(self.config(config))
+
+    def aio_producer(self, config: dict | None = None) -> AIOProducer:
+        return AIOProducer(self.config(config))
+
+    def aio_consumer(self, config: dict | None = None) -> AIOConsumer:
+        return AIOConsumer(self.config(config))
+
+    config.__doc__ = _doc.format("Get the configuration for a Kafka client.")
+    admin.__doc__ = _doc.format("Create a Kafka admin client connected to the cluster.")
+    producer.__doc__ = _doc.format("Create a Kafka producer connected to the cluster.")
+    consumer.__doc__ = _doc.format("Create a Kafka consumer connected to the cluster.")
+    aio_producer.__doc__ = _doc.format(
+        "Create an asynchronous Kafka producer connected to the cluster."
+    )
+    aio_consumer.__doc__ = _doc.format(
+        "Create an asynchronous Kafka consumer connected to the cluster."
+    )
+
+
+del _doc
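The new KafkaBrokerContext dataclass above is what the kafka_broker fixture yields to a test. As a hedged sketch (not part of this diff — the topic name, group id, and poll timeout are illustrative assumptions), a downstream test could exercise the convenience constructors like so:

    from pytest_kafka_broker import KafkaBrokerContext


    def test_round_trip(kafka_broker: KafkaBrokerContext):
        # Produce one record against the fixture's bootstrap server.
        producer = kafka_broker.producer()
        producer.produce("demo-topic", b"hello")
        producer.flush()

        # Read it back; per-client settings are merged on top of the
        # bootstrap.servers entry by KafkaBrokerContext.config().
        consumer = kafka_broker.consumer(
            {"group.id": "demo", "auto.offset.reset": "earliest"}
        )
        consumer.subscribe(["demo-topic"])
        message = consumer.poll(timeout=30)
        assert message is not None and message.value() == b"hello"
        consumer.close()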
pytest_kafka_broker-0.5.0/src/pytest_kafka_broker/__init__.py → pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/plugin.py

@@ -1,7 +1,6 @@
 import asyncio
 import subprocess
 from collections.abc import AsyncGenerator
-from dataclasses import dataclass
 from errno import EADDRINUSE
 from pathlib import Path
 from socket import socket
@@ -13,16 +12,9 @@ import pytest
 import pytest_asyncio
 from astropy.config import get_cache_dir_path  # type: ignore[import-untyped]
 from astropy.utils.data import get_readable_fileobj  # type: ignore[import-untyped]
-from confluent_kafka import Consumer, Producer
-from confluent_kafka.admin import AdminClient
-from confluent_kafka.aio import AIOConsumer, AIOProducer
 from rich.status import Status
 
-__all__ = (
-    "kafka_broker",
-    "KafkaBrokerContext",
-)
-
+from . import KafkaBrokerContext
 
 SCALA_VERSION = "2.13"
 KAFKA_VERSION = "4.1.1"
@@ -107,52 +99,9 @@ def find_unused_tcp_port():
     return factory
 
 
-@dataclass
-class KafkaBrokerContext:
-    """Information and convenience methods for a temporary Kafka cluster.
-
-    This object is returned by :func:`kafka_broker`.
-    """
-
-    bootstrap_server: str
-    """Kafka bootstrap server in the form :samp:`{host}:{port}`."""
-
-    def config(self, config: dict | None = None) -> dict:
-        return {**(config or {}), "bootstrap.servers": self.bootstrap_server}
-
-    def admin(self, config: dict | None = None) -> AdminClient:
-        return AdminClient(self.config(config))
-
-    def producer(self, config: dict | None = None) -> Producer:
-        return Producer(self.config(config))
-
-    def consumer(self, config: dict | None = None) -> Consumer:
-        return Consumer(self.config(config))
-
-    def aio_producer(self, config: dict | None = None) -> AIOProducer:
-        return AIOProducer(self.config(config))
-
-    def aio_consumer(self, config: dict | None = None) -> AIOConsumer:
-        return AIOConsumer(self.config(config))
-
-    config.__doc__ = _doc.format("Get the configuration for a Kafka client.")
-    admin.__doc__ = _doc.format("Create a Kafka admin client connected to the cluster.")
-    producer.__doc__ = _doc.format("Create a Kafka producer connected to the cluster.")
-    consumer.__doc__ = _doc.format("Create a Kafka consumer connected to the cluster.")
-    aio_producer.__doc__ = _doc.format(
-        "Create an asynchronous Kafka producer connected to the cluster."
-    )
-    aio_consumer.__doc__ = _doc.format(
-        "Create an asynchronous Kafka consumer connected to the cluster."
-    )
-
-
-del _doc
-
-
 @pytest_asyncio.fixture
 async def kafka_broker(
-    kafka_home, tmp_path, find_unused_tcp_port
+    kafka_home, tmp_path, find_unused_tcp_port, pytestconfig
 ) -> AsyncGenerator[KafkaBrokerContext]:
     """Pytest fixture to run a local, temporary Kafka broker."""
     kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
@@ -165,6 +114,7 @@ async def kafka_broker(
     env = {"LOG_DIR": str(log_path)}
     plaintext_port = find_unused_tcp_port(9092)
     controller_port = find_unused_tcp_port(9093)
+    extra_config = "\n".join(pytestconfig.getini("kafka_broker_extra_config"))
     config_path.write_text(
         f"""
 process.roles=broker,controller
@@ -180,6 +130,7 @@ async def kafka_broker(
 share.coordinator.state.topic.min.isr=1
 transaction.state.log.replication.factor=1
 transaction.state.log.min.isr=1
+{extra_config}
 """
     )
     with Status("Starting Kafka broker"):
@@ -225,3 +176,11 @@ async def kafka_broker(
         except ProcessLookupError:
             pass  # Process has already terminated
         await exited
+
+
+def pytest_addoption(parser: pytest.Parser):
+    parser.addini(
+        "kafka_broker_extra_config",
+        type="linelist",
+        help="Extra broker configuration settings. See https://kafka.apache.org/41/configuration/broker-configs/",
+    )
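The pytest_addoption hook above registers kafka_broker_extra_config as a "linelist" ini option, and the fixture joins those lines with "\n".join(...) before appending them to the generated broker properties file. A minimal sketch of that flow (the second property is an illustrative assumption, not from this diff):

    # Each linelist entry becomes exactly one broker property line.
    ini_lines = ["message.max.bytes = 123456", "num.partitions = 4"]
    extra_config = "\n".join(ini_lines)
    assert extra_config == "message.max.bytes = 123456\nnum.partitions = 4"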
pytest_kafka_broker-0.6.0/src/pytest_kafka_broker/version.py

@@ -0,0 +1,34 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple
+    from typing import Union
+
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
+else:
+    VERSION_TUPLE = object
+    COMMIT_ID = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
+
+__version__ = version = '0.6.0'
+__version_tuple__ = version_tuple = (0, 6, 0)
+
+__commit_id__ = commit_id = 'g025e5871a'
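Because the new __init__.py re-exports __version__ from this generated module, the release number is available at the package root. A trivial check (assuming the installed 0.6.0 release):

    import pytest_kafka_broker

    assert pytest_kafka_broker.__version__ == "0.6.0"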
{pytest_kafka_broker-0.5.0 → pytest_kafka_broker-0.6.0}/src/pytest_kafka_broker.egg-info/SOURCES.txt

@@ -9,7 +9,9 @@ docs/conf.py
 docs/index.rst
 docs/make.bat
 src/pytest_kafka_broker/__init__.py
+src/pytest_kafka_broker/plugin.py
 src/pytest_kafka_broker/py.typed
+src/pytest_kafka_broker/version.py
 src/pytest_kafka_broker.egg-info/PKG-INFO
 src/pytest_kafka_broker.egg-info/SOURCES.txt
 src/pytest_kafka_broker.egg-info/dependency_links.txt
@@ -17,4 +19,5 @@ src/pytest_kafka_broker.egg-info/entry_points.txt
 src/pytest_kafka_broker.egg-info/requires.txt
 src/pytest_kafka_broker.egg-info/top_level.txt
 tests/__init__.py
+tests/test_config.py
 tests/test_kafka.py
pytest_kafka_broker-0.6.0/tests/test_config.py

@@ -0,0 +1,11 @@
+from confluent_kafka.admin import ConfigResource, ResourceType
+
+from pytest_kafka_broker import KafkaBrokerContext
+
+
+def test_kafka_broker_extra_config(kafka_broker: KafkaBrokerContext):
+    with kafka_broker.admin() as admin:
+        (future,) = admin.describe_configs(
+            [ConfigResource(ResourceType.BROKER, name="1")]
+        ).values()
+        assert future.result()["message.max.bytes"].value == "123456"
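The bundled test above reads the broker's effective configuration back through the fixture's admin client. A hedged companion check (not in the package; list_topics is the standard confluent-kafka AdminClient metadata call) would assert liveness the same way:

    from pytest_kafka_broker import KafkaBrokerContext


    def test_broker_is_reachable(kafka_broker: KafkaBrokerContext):
        # A healthy single-node cluster reports the fixture's one broker.
        metadata = kafka_broker.admin().list_topics(timeout=30)
        assert metadata.brokers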