pytest-kafka-broker 0.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- /dev/null
+++ .gitignore
@@ -0,0 +1,5 @@
+ __pycache__
+ *.egg-info
+ dist
+ _build
+ api
--- /dev/null
+++ .gitlab-ci.yml
@@ -0,0 +1,14 @@
+ include:
+   - component: git.ligo.org/computing/gitlab/components/python/all@2.3.2
+     inputs:
+       code_quality_analyzer: ruff
+       python_versions:
+         - "3.11"
+         - "3.12"
+         - "3.13"
+         - "3.14"
+
+ python_test:
+   before_script:
+     - apt update
+     - apt install -y --no-install-recommends default-jdk-headless
--- /dev/null
+++ .pre-commit-config.yaml
@@ -0,0 +1,48 @@
+ repos:
+   - repo: https://github.com/pre-commit/pre-commit-hooks
+     rev: v6.0.0
+     hooks:
+       - id: check-added-large-files
+         # Prevent giant files from being committed.
+       - id: check-case-conflict
+         # Check for files with names that would conflict on a case-insensitive
+         # filesystem like macOS HFS+ or Windows FAT.
+       - id: check-json
+         # Attempts to load all JSON files to verify syntax.
+       - id: check-merge-conflict
+         # Check for files that contain merge conflict strings.
+       - id: check-symlinks
+         # Checks for symlinks which do not point to anything.
+       - id: check-toml
+         # Attempts to load all TOML files to verify syntax.
+       - id: check-xml
+         # Attempts to load all XML files to verify syntax.
+       - id: check-yaml
+         # Attempts to load all YAML files to verify syntax.
+       - id: detect-private-key
+         # Checks for the existence of private keys.
+       - id: end-of-file-fixer
+         # Makes sure files end in a newline and only a newline.
+       - id: trailing-whitespace
+         # Trims trailing whitespace.
+
+   - repo: https://github.com/codespell-project/codespell
+     rev: v2.4.1
+     hooks:
+       - id: codespell
+         args: ["--write-changes"]
+         additional_dependencies:
+           - tomli
+
+   - repo: https://github.com/astral-sh/ruff-pre-commit
+     rev: v0.14.14
+     hooks:
+       - id: ruff-check
+         args: ["--extend-select", "I", "--fix"]
+       - id: ruff-format
+
+   - repo: https://github.com/tofuutils/pre-commit-opentofu
+     rev: v2.2.2
+     hooks:
+       - id: tofu_fmt
+       - id: tofu_validate
--- /dev/null
+++ .readthedocs.yml
@@ -0,0 +1,18 @@
+ version: 2
+
+ build:
+   apt_packages:
+     - graphviz
+   os: ubuntu-24.04
+   tools:
+     python: '3.11'
+
+ python:
+   install:
+     - method: pip
+       path: .
+       extra_requirements:
+         - docs
+
+ sphinx:
+   configuration: docs/conf.py
--- /dev/null
+++ PKG-INFO
@@ -0,0 +1,16 @@
+ Metadata-Version: 2.4
+ Name: pytest-kafka-broker
+ Version: 0.0.0
+ Summary: Pytest plugin to run a single-broker Kafka cluster
+ Author-email: Leo Singer <leo.singer@ligo.org>
+ License-Expression: Apache-2.0
+ Classifier: Framework :: Pytest
+ Classifier: Topic :: System :: Networking
+ Requires-Python: >=3.11
+ Requires-Dist: astropy
+ Requires-Dist: confluent-kafka
+ Requires-Dist: rich
+ Requires-Dist: pytest-asyncio
+ Provides-Extra: docs
+ Requires-Dist: sphinx-astropy[confv2]; extra == "docs"
+ Requires-Dist: sphinx-automodapi>=0.20.0; extra == "docs"
--- /dev/null
+++ README.md
@@ -0,0 +1,3 @@
+ # pytest-kafka-broker
+
+ A Pytest plugin to run a single-broker Kafka cluster.
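For orientation, here is a minimal usage sketch distilled from tests/test_kafka.py (included in full at the end of this listing). Once the package is installed, pytest picks up the kafka_broker fixture automatically via the pytest11 entry point declared in pyproject.toml; the test name and topic/group strings below are illustrative.

    # Sketch mirroring tests/test_kafka.py; test name is hypothetical.
    def test_round_trip(kafka_broker):
        with kafka_broker.producer() as producer:
            producer.produce("topic", b"hello world")
        with kafka_broker.consumer(
            {"group.id": "group", "auto.offset.reset": "earliest"}
        ) as consumer:
            consumer.subscribe(["topic"])
            (message,) = consumer.consume()
            assert message.value() == b"hello world"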
--- /dev/null
+++ docs/Makefile
@@ -0,0 +1,20 @@
+ # Minimal makefile for Sphinx documentation
+ #
+
+ # You can set these variables from the command line, and also
+ # from the environment for the first two.
+ SPHINXOPTS ?=
+ SPHINXBUILD ?= sphinx-build
+ SOURCEDIR = .
+ BUILDDIR = _build
+
+ # Put it first so that "make" without argument is like "make help".
+ help:
+ 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+ .PHONY: help Makefile
+
+ # Catch-all target: route all unknown targets to Sphinx using the new
+ # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+ %: Makefile
+ 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--- /dev/null
+++ docs/conf.py
@@ -0,0 +1,210 @@
+ # -*- coding: utf-8 -*-
+ # Licensed under a 3-clause BSD style license - see LICENSE.rst
+ #
+ # Astropy documentation build configuration file.
+ #
+ # This file is execfile()d with the current directory set to its containing dir.
+ #
+ # Note that not all possible configuration values are present in this file.
+ #
+ # All configuration values have a default. Some values are defined in
+ # the global Astropy configuration which is loaded here before anything else.
+ # See astropy.sphinx.conf for which values are set there.
+
+ # If extensions (or modules to document with autodoc) are in another directory,
+ # add these directories to sys.path here. If the directory is relative to the
+ # documentation root, use os.path.abspath to make it absolute, like shown here.
+ # sys.path.insert(0, os.path.abspath('..'))
+ # IMPORTANT: the above commented section was generated by sphinx-quickstart, but
+ # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
+ # commented out with this explanation to make it clear why this should not be
+ # done. If the sys.path entry above is added, when the astropy.sphinx.conf
+ # import occurs, it will import the *source* version of astropy instead of the
+ # version installed (if invoked as "make html" or directly with sphinx), or the
+ # version in the build directory (if "python setup.py build_sphinx" is used).
+ # Thus, any C-extensions that are needed to build the documentation will *not*
+ # be accessible, and the documentation will not build correctly.
+
+ import datetime
+ import os
+ import sys
+ import tomllib
+
+ sys.path.insert(0, os.path.realpath(".."))
+
+ try:
+     from sphinx_astropy.conf.v2 import *  # noqa
+ except ImportError:
+     print(
+         "ERROR: the documentation requires the sphinx-astropy package to be installed"
+     )
+     sys.exit(1)
+
+ # Get configuration information from pyproject.toml
+
+ with open(os.path.join(os.path.dirname(__file__), "..", "pyproject.toml"), "rb") as f:
+     project_metadata = tomllib.load(f)["project"]
+
+ # -- General configuration ----------------------------------------------------
+
+ # By default, highlight as Python 3.
+ highlight_language = "python3"
+
+ # If your documentation needs a minimal Sphinx version, state it here.
+ # needs_sphinx = '1.2'
+
+ # To perform a Sphinx version check that needs to be more specific than
+ # major.minor, call `check_sphinx_version("X.Y.Z")` here.
+ # check_sphinx_version("1.2.1")
+
+ # List of patterns, relative to source directory, that match files and
+ # directories to ignore when looking for source files.
+ exclude_patterns.append("_templates")  # noqa: F405
+
+ # This is added to the end of RST files - a good place to put substitutions to
+ # be used globally.
+ # rst_epilog += r"""
+ # """  # noqa: F405
+
+ # -- Project information ------------------------------------------------------
+
+ # This does not *have* to match the package name, but typically does
+ project = project_metadata["name"]
+ author = project_metadata["authors"][0]["name"]
+ copyright = "{0}, {1}".format(datetime.datetime.now().year, author)
+
+ # The version info for the project you're documenting, acts as replacement for
+ # |version| and |release|, also used in various other places throughout the
+ # built documents.
+
+ # import_module(project_metadata["name"])
+ # package = sys.modules[project_metadata["name"]]
+
+ # The short X.Y version.
+ # version = package.__version__.split("-", 1)[0]
+ # The full version, including alpha/beta/rc tags.
+ # release = package.__version__
+
+ # Only include dev docs in dev version.
+ # dev = "dev" in release
+
+
+ # -- Options for HTML output --------------------------------------------------
+
+ # A NOTE ON HTML THEMES
+ # The global astropy configuration uses a custom theme, 'bootstrap-astropy',
+ # which is installed along with astropy. A different theme can be used or
+ # the options for this theme can be modified by overriding some of the
+ # variables set in the global configuration. The variables set in the
+ # global configuration are listed below, commented out.
+
+
+ # Add any paths that contain custom themes here, relative to this directory.
+ # To use a different custom theme, add the directory containing the theme.
+ # html_theme_path = []
+
+ # The theme to use for HTML and HTML Help pages. See the documentation for
+ # a list of builtin themes. To override the custom theme, set this to the
+ # name of a builtin theme or the name of a custom theme in html_theme_path.
+ # html_theme = None
+
+ html_static_path = ["_static"]
+ # html_css_files = []
+
+
+ # Custom sidebar templates, maps document names to template names.
+ # html_sidebars = {}
+
+ # The name of an image file (relative to this directory) to place at the top
+ # of the sidebar.
+ # html_logo = ''
+
+ # The name of an image file (within the static path) to use as favicon of the
+ # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+ # pixels large.
+ # html_favicon = ''
+
+ # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+ # using the given strftime format.
+ # html_last_updated_fmt = ''
+
+ # The name for this set of Sphinx documents. If None, it defaults to
+ # "<project> v<release> documentation".
+ # html_title = ""
+
+ # Output file base name for HTML help builder.
+ htmlhelp_basename = project + "doc"
+
+ # Prefixes that are ignored for sorting the Python module index
+ modindex_common_prefix = ["pytest_kafka_broker."]
+
+ html_context = {
+     "default_mode": "light",
+     "to_be_indexed": ["stable", "latest"],
+     "is_development": True,
+     "doc_path": "docs",
+ }
+
+ # -- Options for LaTeX output -------------------------------------------------
+
+ # Grouping the document tree into LaTeX files. List of tuples
+ # (source start file, target name, title, author, documentclass [howto/manual]).
+ latex_documents = [
+     ("index", project + ".tex", project + " Documentation", author, "manual")
+ ]
+
+
+ # -- Options for manual page output -------------------------------------------
+
+ # One entry per manual page. List of tuples
+ # (source start file, name, description, authors, manual section).
+ man_pages = [("index", project.lower(), project + " Documentation", [author], 1)]
+
+
+ # -- Options for linkcheck output -------------------------------------------
+ linkcheck_retry = 5
+ linkcheck_ignore = []
+ linkcheck_timeout = 180
+ linkcheck_anchors = False
+
+ # -- Turn on nitpicky mode for sphinx (to warn about references not found) ----
+ #
+ # nitpicky = True
+ # nitpick_ignore = []
+ #
+ # Some warnings are impossible to suppress, and you can list specific references
+ # that should be ignored in a nitpick-exceptions file which should be inside
+ # the docs/ directory. The format of the file should be:
+ #
+ # <type> <class>
+ #
+ # for example:
+ #
+ # py:class astropy.io.votable.tree.Element
+ # py:class astropy.io.votable.tree.SimpleElement
+ # py:class astropy.io.votable.tree.SimpleElementWithContent
+ #
+ # Uncomment the following lines to enable the exceptions:
+ #
+ # for line in open('nitpick-exceptions'):
+ #     if line.strip() == "" or line.startswith("#"):
+ #         continue
+ #     dtype, target = line.split(None, 1)
+ #     target = target.strip()
+ #     nitpick_ignore.append((dtype, six.u(target)))
+
+
+ # -- Options for intersphinx --------------------------------------------------
+ intersphinx_mapping.update(  # noqa: F405
+     {
+         "confluent-kafka": (
+             "https://docs.confluent.io/platform/current/clients/confluent-kafka-python/html/",
+             None,
+         ),
+     }
+ )
+
+ # -- Merge type annotations with numpydoc parameter docstrings ----------------
+ extensions.remove("numpydoc")  # noqa: F405
+ extensions.append("sphinx.ext.napoleon")  # noqa: F405
+ autodoc_typehints = "description"
--- /dev/null
+++ docs/index.rst
@@ -0,0 +1,16 @@
+ pytest-kafka-broker documentation
+ =================================
+
+ This is a pytest plugin to run a temporary, local, single-broker Kafka cluster.
+
+ *******
+ Example
+ *******
+
+ .. literalinclude:: ../tests/test_kafka.py
+
+ *********
+ Reference
+ *********
+
+ .. automodapi:: pytest_kafka_broker
--- /dev/null
+++ docs/make.bat
@@ -0,0 +1,35 @@
+ @ECHO OFF
+
+ pushd %~dp0
+
+ REM Command file for Sphinx documentation
+
+ if "%SPHINXBUILD%" == "" (
+ 	set SPHINXBUILD=sphinx-build
+ )
+ set SOURCEDIR=.
+ set BUILDDIR=_build
+
+ %SPHINXBUILD% >NUL 2>NUL
+ if errorlevel 9009 (
+ 	echo.
+ 	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ 	echo.installed, then set the SPHINXBUILD environment variable to point
+ 	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ 	echo.may add the Sphinx directory to PATH.
+ 	echo.
+ 	echo.If you don't have Sphinx installed, grab it from
+ 	echo.https://www.sphinx-doc.org/
+ 	exit /b 1
+ )
+
+ if "%1" == "" goto help
+
+ %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+ goto end
+
+ :help
+ %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+ :end
+ popd
--- /dev/null
+++ pyproject.toml
@@ -0,0 +1,33 @@
+ [build-system]
+ requires = [
+     "setuptools",
+     "setuptools-scm>=8",
+ ]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "pytest-kafka-broker"
+ authors = [{name = "Leo Singer", email = "leo.singer@ligo.org"}]
+ description = "Pytest plugin to run a single-broker Kafka cluster"
+ license = "Apache-2.0"
+ dynamic = ["version"]
+ requires-python = ">=3.11"
+ dependencies = [
+     "astropy",
+     "confluent-kafka",
+     "rich",
+     "pytest-asyncio",
+ ]
+ classifiers = [
+     "Framework :: Pytest",
+     "Topic :: System :: Networking",
+ ]
+
+ [project.entry-points.pytest11]
+ kafka_broker = "pytest_kafka_broker"
+
+ [project.optional-dependencies]
+ docs = [
+     "sphinx-astropy[confv2]",
+     "sphinx-automodapi>=0.20.0",
+ ]
--- /dev/null
+++ setup.cfg
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
--- /dev/null
+++ src/pytest_kafka_broker.egg-info/PKG-INFO
@@ -0,0 +1,16 @@
+ Metadata-Version: 2.4
+ Name: pytest-kafka-broker
+ Version: 0.0.0
+ Summary: Pytest plugin to run a single-broker Kafka cluster
+ Author-email: Leo Singer <leo.singer@ligo.org>
+ License-Expression: Apache-2.0
+ Classifier: Framework :: Pytest
+ Classifier: Topic :: System :: Networking
+ Requires-Python: >=3.11
+ Requires-Dist: astropy
+ Requires-Dist: confluent-kafka
+ Requires-Dist: rich
+ Requires-Dist: pytest-asyncio
+ Provides-Extra: docs
+ Requires-Dist: sphinx-astropy[confv2]; extra == "docs"
+ Requires-Dist: sphinx-automodapi>=0.20.0; extra == "docs"
--- /dev/null
+++ src/pytest_kafka_broker.egg-info/SOURCES.txt
@@ -0,0 +1,19 @@
+ .gitignore
+ .gitlab-ci.yml
+ .pre-commit-config.yaml
+ .readthedocs.yml
+ README.md
+ pyproject.toml
+ docs/Makefile
+ docs/conf.py
+ docs/index.rst
+ docs/make.bat
+ src/pytest_kafka_broker.py
+ src/pytest_kafka_broker.egg-info/PKG-INFO
+ src/pytest_kafka_broker.egg-info/SOURCES.txt
+ src/pytest_kafka_broker.egg-info/dependency_links.txt
+ src/pytest_kafka_broker.egg-info/entry_points.txt
+ src/pytest_kafka_broker.egg-info/requires.txt
+ src/pytest_kafka_broker.egg-info/top_level.txt
+ tests/__init__.py
+ tests/test_kafka.py
--- /dev/null
+++ src/pytest_kafka_broker.egg-info/entry_points.txt
@@ -0,0 +1,2 @@
+ [pytest11]
+ kafka_broker = pytest_kafka_broker
--- /dev/null
+++ src/pytest_kafka_broker.egg-info/requires.txt
@@ -0,0 +1,8 @@
+ astropy
+ confluent-kafka
+ rich
+ pytest-asyncio
+
+ [docs]
+ sphinx-astropy[confv2]
+ sphinx-automodapi>=0.20.0
--- /dev/null
+++ src/pytest_kafka_broker.egg-info/top_level.txt
@@ -0,0 +1 @@
+ pytest_kafka_broker
--- /dev/null
+++ src/pytest_kafka_broker.py
@@ -0,0 +1,187 @@
+ import asyncio
+ import subprocess
+ from dataclasses import dataclass
+ from pathlib import Path
+ from tarfile import TarFile
+ from tempfile import TemporaryDirectory
+ from uuid import uuid4
+
+ import pytest
+ import pytest_asyncio
+ from astropy.config import get_cache_dir_path
+ from astropy.utils.data import get_readable_fileobj
+ from confluent_kafka import Consumer, Producer
+ from confluent_kafka.aio import AIOConsumer, AIOProducer
+ from rich.status import Status
+
+ __all__ = (
+     "kafka_broker",
+     "KafkaBrokerContext",
+ )
+
+
+ SCALA_VERSION = "2.13"
+ KAFKA_VERSION = "4.1.1"
+
+
+ async def wait_port(port: int, timeout: float = 0.25):
+     """Wait until a server is accepting connections on the given port."""
+     while True:
+         try:
+             _, writer = await asyncio.open_connection("localhost", port)
+         except OSError:
+             await asyncio.sleep(timeout)
+         else:
+             writer.close()
+             await writer.wait_closed()
+             return
+
+
+ @pytest.fixture(scope="session")
+ def kafka_home() -> Path:
+     """Download and install Kafka into a cached directory.
+
+     Returns the path where Kafka is installed.
+     """
+     dirname = f"kafka_{SCALA_VERSION}-{KAFKA_VERSION}"
+     cache_path = get_cache_dir_path() / __package__
+     dest_path = cache_path / dirname
+     if not dest_path.exists():
+         dest_path.mkdir(parents=True, exist_ok=True)
+         with (
+             Status("Downloading Kafka"),
+             get_readable_fileobj(
+                 f"https://dlcdn.apache.org/kafka/{KAFKA_VERSION}/{dirname}.tgz",
+                 encoding="binary",
+                 cache=True,
+             ) as download,
+             TarFile(fileobj=download) as tarfile,
+             TemporaryDirectory(dir=cache_path) as temp_dir,
+         ):
+             tarfile.extractall(temp_dir)
+             (Path(temp_dir) / dirname).rename(dest_path)
+     return dest_path
+
+
+ _doc = """{}
+
+ Parameters
+ ----------
+ config
+     Extra Kafka client configuration properties. See list in the
+     `librdkafka documentation <https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md>`_.
+ """
+
+
+ @dataclass
+ class KafkaBrokerContext:
+     """Information and convenience methods for a temporary Kafka cluster.
+
+     This object is returned by :func:`kafka_broker`.
+     """
+
+     bootstrap_server: str
+     """Kafka bootstrap server in the form :samp:`{host}:{port}`."""
+
+     def config(self, config: dict | None = None):
+         return {**(config or {}), "bootstrap.servers": self.bootstrap_server}
+
+     def producer(self, config: dict | None = None) -> Producer:
+         return Producer(self.config(config))
+
+     def consumer(self, config: dict | None = None) -> Consumer:
+         return Consumer(self.config(config))
+
+     def aio_producer(self, config: dict | None = None) -> AIOProducer:
+         return AIOProducer(self.config(config))
+
+     def aio_consumer(self, config: dict | None = None) -> AIOConsumer:
+         return AIOConsumer(self.config(config))
+
+     config.__doc__ = _doc.format("Get the configuration for a Kafka client.")
+     producer.__doc__ = _doc.format("Create a Kafka producer connected to the cluster.")
+     consumer.__doc__ = _doc.format("Create a Kafka consumer connected to the cluster.")
+     aio_producer.__doc__ = _doc.format(
+         "Create an asynchronous Kafka producer connected to the cluster."
+     )
+     aio_consumer.__doc__ = _doc.format(
+         "Create an asynchronous Kafka consumer connected to the cluster."
+     )
+
+
+ del _doc
+
+
+ @pytest_asyncio.fixture
+ async def kafka_broker(kafka_home, tmp_path, unused_tcp_port_factory):
+     """Pytest fixture to run a local, temporary Kafka broker.
+
+     Returns
+     -------
+     : KafkaBrokerContext
+     """
+     kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
+     kafka_server_start = kafka_home / "bin" / "kafka-server-start.sh"
+     config_path = tmp_path / "server.properties"
+     data_path = tmp_path / "run"
+     data_path.mkdir()
+     log_path = tmp_path / "log"
+     log_path.mkdir()
+     env = {"LOG_DIR": str(log_path)}
+     plaintext_port = unused_tcp_port_factory()
+     controller_port = unused_tcp_port_factory()
+     config_path.write_text(
+         f"""
+ process.roles=broker,controller
+ node.id=1
+ controller.quorum.bootstrap.servers=127.0.0.1:{controller_port}
+ listeners=PLAINTEXT://127.0.0.1:{plaintext_port},CONTROLLER://127.0.0.1:{controller_port}
+ controller.listener.names=CONTROLLER
+ listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+ log.dirs={data_path}
+ num.recovery.threads.per.data.dir=1
+ offsets.topic.replication.factor=1
+ share.coordinator.state.topic.replication.factor=1
+ share.coordinator.state.topic.min.isr=1
+ transaction.state.log.replication.factor=1
+ transaction.state.log.min.isr=1
+ """
+     )
+     with Status("Starting Kafka broker"):
+         subprocess.run(
+             [
+                 kafka_storage,
+                 "format",
+                 "--standalone",
+                 "-t",
+                 str(uuid4()),
+                 "-c",
+                 config_path,
+             ],
+             env=env,
+             check=True,
+             stdout=subprocess.DEVNULL,
+         )
+         process = await asyncio.create_subprocess_exec(
+             kafka_server_start,
+             config_path,
+             env=env,
+             stdin=None,
+             stdout=subprocess.DEVNULL,
+             stderr=None,
+         )
+     with Status(f"Waiting for connection on port {plaintext_port}"):
+         exited = asyncio.create_task(process.wait())
+         port = asyncio.create_task(wait_port(plaintext_port))
+         done, _ = await asyncio.wait(
+             (exited, port), return_when=asyncio.FIRST_COMPLETED
+         )
+         if exited in done:
+             port.cancel()
+             raise RuntimeError("Kafka broker terminated unexpectedly")
+     try:
+         yield KafkaBrokerContext(f"127.0.0.1:{plaintext_port}")
+     finally:
+         with Status("Stopping Kafka broker"):
+             process.terminate()
+             await exited
--- /dev/null
+++ tests/__init__.py
(empty file)
--- /dev/null
+++ tests/test_kafka.py
@@ -0,0 +1,38 @@
+ import pytest
+
+ topic = "topic"
+ payload = b"hello world"
+ group_id = "group_id"
+
+
+ def test_sync(kafka_broker):
+     """Demonstrate using the kafka_broker fixture in an ordinary test."""
+     with kafka_broker.producer() as producer:
+         producer.produce(topic, payload)
+     with kafka_broker.consumer(
+         {"group.id": group_id, "auto.offset.reset": "earliest"}
+     ) as consumer:
+         consumer.subscribe([topic])
+         (message,) = consumer.consume()
+         assert message.value() == payload
+
+
+ @pytest.mark.asyncio
+ async def test_async(kafka_broker):
+     """Demonstrate using the kafka_broker fixture in an async test."""
+     producer = kafka_broker.aio_producer()
+     try:
+         await producer.produce(topic, payload)
+     finally:
+         # FIXME: use async context manager; see https://github.com/confluentinc/confluent-kafka-python/pull/2180
+         await producer.close()
+     consumer = kafka_broker.aio_consumer(
+         {"group.id": group_id, "auto.offset.reset": "earliest"}
+     )
+     try:
+         await consumer.subscribe([topic])
+         (message,) = await consumer.consume()
+     finally:
+         # FIXME: use async context manager; see https://github.com/confluentinc/confluent-kafka-python/pull/2180
+         await consumer.close()
+     assert message.value() == payload