pytest-kafka-broker 0.1.1__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/.gitlab-ci.yml +4 -0
  2. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/PKG-INFO +1 -1
  3. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/pyproject.toml +6 -0
  4. pytest_kafka_broker-0.1.1/src/pytest_kafka_broker.py → pytest_kafka_broker-0.3.0/src/pytest_kafka_broker/__init__.py +14 -11
  5. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/PKG-INFO +1 -1
  6. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/SOURCES.txt +2 -1
  7. pytest_kafka_broker-0.3.0/tests/__init__.py +0 -0
  8. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/tests/test_kafka.py +19 -11
  9. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/.gitignore +0 -0
  10. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/.pre-commit-config.yaml +0 -0
  11. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/.readthedocs.yml +0 -0
  12. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/README.md +0 -0
  13. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/docs/Makefile +0 -0
  14. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/docs/conf.py +0 -0
  15. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/docs/index.rst +0 -0
  16. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/docs/make.bat +0 -0
  17. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/setup.cfg +0 -0
  18. /pytest_kafka_broker-0.1.1/tests/__init__.py → /pytest_kafka_broker-0.3.0/src/pytest_kafka_broker/py.typed +0 -0
  19. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/dependency_links.txt +0 -0
  20. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/entry_points.txt +0 -0
  21. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/requires.txt +0 -0
  22. {pytest_kafka_broker-0.1.1 → pytest_kafka_broker-0.3.0}/src/pytest_kafka_broker.egg-info/top_level.txt +0 -0
@@ -7,6 +7,10 @@ include:
7
7
  - "3.12"
8
8
  - "3.13"
9
9
  - "3.14"
10
+ - component: git.ligo.org/computing/gitlab/components/python/type-checking@2.3.2
11
+ inputs:
12
+ fail_on_findings: true
13
+ requirements: .
10
14
 
11
15
  python_test:
12
16
  before_script:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytest-kafka-broker
3
- Version: 0.1.1
3
+ Version: 0.3.0
4
4
  Summary: Pytest plugin to run a single-broker Kafka cluster
5
5
  Author-email: Leo Singer <leo.singer@ligo.org>
6
6
  License-Expression: Apache-2.0
@@ -36,4 +36,10 @@ docs = [
36
36
  source = "https://git.ligo.org/ultra-swift/pytest-kafka-broker"
37
37
  documentation = "https://pytest-kafka-broker.readthedocs.io/"
38
38
 
39
+ [tool.mypy]
40
+ exclude = [
41
+ "^docs/conf.py$",
42
+ ]
43
+ exclude_gitignore = true
44
+
39
45
  [tool.setuptools_scm]
@@ -1,5 +1,6 @@
1
1
  import asyncio
2
2
  import subprocess
3
+ from collections.abc import AsyncGenerator
3
4
  from dataclasses import dataclass
4
5
  from pathlib import Path
5
6
  from tarfile import TarFile
@@ -8,9 +9,10 @@ from uuid import uuid4
8
9
 
9
10
  import pytest
10
11
  import pytest_asyncio
11
- from astropy.config import get_cache_dir_path
12
- from astropy.utils.data import get_readable_fileobj
12
+ from astropy.config import get_cache_dir_path # type: ignore[import-untyped]
13
+ from astropy.utils.data import get_readable_fileobj # type: ignore[import-untyped]
13
14
  from confluent_kafka import Consumer, Producer
15
+ from confluent_kafka.admin import AdminClient
14
16
  from confluent_kafka.aio import AIOConsumer, AIOProducer
15
17
  from rich.status import Status
16
18
 
@@ -24,7 +26,7 @@ SCALA_VERSION = "2.13"
24
26
  KAFKA_VERSION = "4.1.1"
25
27
 
26
28
 
27
- async def wait_port(port: int, timeout: float = 0.25):
29
+ async def wait_port(port: int, timeout: float = 0.25) -> None:
28
30
  """Wait until a connection is detected listening on the given port."""
29
31
  while True:
30
32
  try:
@@ -83,9 +85,12 @@ class KafkaBrokerContext:
83
85
  bootstrap_server: str
84
86
  """Kafka bootstrap server in the form :samp:`{host}:{port}`."""
85
87
 
86
- def config(self, config: dict | None = None):
88
+ def config(self, config: dict | None = None) -> dict:
87
89
  return {**(config or {}), "bootstrap.servers": self.bootstrap_server}
88
90
 
91
+ def admin(self, config: dict | None = None) -> AdminClient:
92
+ return AdminClient(self.config(config))
93
+
89
94
  def producer(self, config: dict | None = None) -> Producer:
90
95
  return Producer(self.config(config))
91
96
 
@@ -99,6 +104,7 @@ class KafkaBrokerContext:
99
104
  return AIOConsumer(self.config(config))
100
105
 
101
106
  config.__doc__ = _doc.format("Get the configuration for a Kafka client.")
107
+ admin.__doc__ = _doc.format("Create a Kafka admin client connected to the cluster.")
102
108
  producer.__doc__ = _doc.format("Create a Kafka producer connected to the cluster.")
103
109
  consumer.__doc__ = _doc.format("Create a Kafka consumer connected to the cluster.")
104
110
  aio_producer.__doc__ = _doc.format(
@@ -113,13 +119,10 @@ del _doc
113
119
 
114
120
 
115
121
  @pytest_asyncio.fixture
116
- async def kafka_broker(kafka_home, tmp_path, unused_tcp_port_factory):
117
- """Pytest fixture to run a local, temporary Kafka broker.
118
-
119
- Returns
120
- -------
121
- : KafkaBrokerContext
122
- """
122
+ async def kafka_broker(
123
+ kafka_home, tmp_path, unused_tcp_port_factory
124
+ ) -> AsyncGenerator[KafkaBrokerContext]:
125
+ """Pytest fixture to run a local, temporary Kafka broker."""
123
126
  kafka_storage = kafka_home / "bin" / "kafka-storage.sh"
124
127
  kafka_server_start = kafka_home / "bin" / "kafka-server-start.sh"
125
128
  config_path = tmp_path / "server.properties"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytest-kafka-broker
3
- Version: 0.1.1
3
+ Version: 0.3.0
4
4
  Summary: Pytest plugin to run a single-broker Kafka cluster
5
5
  Author-email: Leo Singer <leo.singer@ligo.org>
6
6
  License-Expression: Apache-2.0
@@ -8,7 +8,8 @@ docs/Makefile
8
8
  docs/conf.py
9
9
  docs/index.rst
10
10
  docs/make.bat
11
- src/pytest_kafka_broker.py
11
+ src/pytest_kafka_broker/__init__.py
12
+ src/pytest_kafka_broker/py.typed
12
13
  src/pytest_kafka_broker.egg-info/PKG-INFO
13
14
  src/pytest_kafka_broker.egg-info/SOURCES.txt
14
15
  src/pytest_kafka_broker.egg-info/dependency_links.txt
File without changes
@@ -1,20 +1,24 @@
1
1
  import pytest
2
2
 
3
- topic = "topic"
4
- payload = b"hello world"
5
- group_id = "group_id"
3
+ TOPIC = "topic"
4
+ PAYLOAD = b"hello world"
5
+ GROUP_ID = "group_id"
6
6
 
7
7
 
8
8
  def test_sync(kafka_broker):
9
9
  """Demonstrate using the kafka_broker fixture in an ordinary test."""
10
10
  with kafka_broker.producer() as producer:
11
- producer.produce(topic, payload)
11
+ producer.produce(TOPIC, PAYLOAD)
12
+
12
13
  with kafka_broker.consumer(
13
- {"group.id": group_id, "auto.offset.reset": "earliest"}
14
+ {"group.id": GROUP_ID, "auto.offset.reset": "earliest"}
14
15
  ) as consumer:
15
- consumer.subscribe([topic])
16
+ consumer.subscribe([TOPIC])
16
17
  (message,) = consumer.consume()
17
- assert message.value() == payload
18
+ assert message.value() == PAYLOAD
19
+
20
+ with kafka_broker.admin() as admin:
21
+ assert TOPIC in admin.list_topics().topics
18
22
 
19
23
 
20
24
  @pytest.mark.asyncio
@@ -22,17 +26,21 @@ async def test_async(kafka_broker):
22
26
  """Demonstrate using the kafka_broker fixture in an async test."""
23
27
  producer = kafka_broker.aio_producer()
24
28
  try:
25
- await producer.produce(topic, payload)
29
+ await producer.produce(TOPIC, PAYLOAD)
26
30
  finally:
27
31
  # FIXME: use async context manager; see https://github.com/confluentinc/confluent-kafka-python/pull/2180
28
32
  await producer.close()
29
33
  consumer = kafka_broker.aio_consumer(
30
- {"group.id": group_id, "auto.offset.reset": "earliest"}
34
+ {"group.id": GROUP_ID, "auto.offset.reset": "earliest"}
31
35
  )
36
+
32
37
  try:
33
- await consumer.subscribe([topic])
38
+ await consumer.subscribe([TOPIC])
34
39
  (message,) = await consumer.consume()
35
40
  finally:
36
41
  # FIXME: use async context manager; see https://github.com/confluentinc/confluent-kafka-python/pull/2180
37
42
  await consumer.close()
38
- assert message.value() == payload
43
+ assert message.value() == PAYLOAD
44
+
45
+ with kafka_broker.admin() as admin:
46
+ assert TOPIC in admin.list_topics().topics