explicit-python-kafka 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
File without changes
@@ -0,0 +1,202 @@
1
+ """Реализация адаптера обмена сообщениями."""
2
+ from typing import Dict
3
+ from typing import Generator
4
+ from typing import Optional
5
+ import logging
6
+
7
+ from confluent_kafka import Consumer as Subscriber
8
+ from confluent_kafka import KafkaError
9
+ from confluent_kafka import KafkaException
10
+ from confluent_kafka import Message as KafkaMessage
11
+ from confluent_kafka import Producer as Publisher
12
+ from confluent_kafka.admin import AdminClient
13
+ from confluent_kafka.cimpl import NewTopic
14
+
15
+ from explicit.adapters.messaging import AbstractAdapter
16
+ from explicit.kafka.domain.model import PublishConfig
17
+ from explicit.kafka.domain.model import SubscribeConfig
18
+
19
+
20
+ logger = logging.getLogger(__name__)
21
+ logger.setLevel(logging.INFO)
22
+
23
+
24
def publish_callback(error, message: KafkaMessage):
    """Delivery-report callback: log the outcome of a publish attempt.

    :param error: delivery error, or None on success.
    :param message: the produced message (topic/partition/value are logged).
    """
    if error is None:
        logger.info('Сообщение доставлено: %s [%s]', message.topic(), message.partition())
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('Тело сообщения: %s', message.value())
        return
    logger.error('Ошибка при публикации сообщения: %s', error)
31
+
32
+
33
class Adapter(AbstractAdapter):

    """Messaging adapter backed by Apache Kafka (confluent-kafka)."""

    def __init__(
        self, *,
        subscribe_config: Optional[SubscribeConfig] = None,
        publish_config: Optional[PublishConfig] = None,
        must_ensure_topics: Optional[bool] = True
    ) -> None:
        """Initialize the adapter.

        :param subscribe_config: subscriber-role configuration.
        :param publish_config: publisher-role configuration.
        :param must_ensure_topics: verify (and create) topics before
            publishing or subscribing.
        """
        self._subscribe_config = subscribe_config
        self._publish_config = publish_config
        # Fixed annotation: the producer is created lazily, so it starts as None.
        self._publisher: Optional[Publisher] = None
        self._subscribers: Dict[str, Subscriber] = {}
        self._must_ensure_topics = must_ensure_topics

        # Either config suffices for admin (topic-management) operations.
        self._available_config = self._publish_config or self._subscribe_config

    def _ensure_topics(self, *topics: str):
        """Create any of *topics* that do not exist yet."""
        existing_topics = self.list_topics()

        new_topics = [topic for topic in topics if topic not in existing_topics]

        if not new_topics:  # All required topics already exist.
            return

        self.create_topics(*new_topics)

    def _ensure_publisher(self) -> Publisher:
        """Return the lazily-created producer.

        :raises RuntimeError: if no publisher configuration was provided.
        """
        if self._publish_config is None:
            raise RuntimeError('Publisher is not configured')

        if self._publisher is None:
            self._publisher = Publisher(self._publish_config.dict(by_alias=True))

        return self._publisher

    def _make_subscriber_key(self, *topics: str) -> str:
        """Build a stable, order-independent cache key for a set of topics."""
        return ','.join(sorted(topics))

    def _ensure_subscriber(self, *topics: str) -> Subscriber:
        """Return a cached consumer for *topics*, creating it on first use.

        :raises RuntimeError: if no subscriber configuration was provided.
        """
        if self._subscribe_config is None:
            raise RuntimeError('Subscriber is not configured')

        key = self._make_subscriber_key(*topics)

        subscriber = self._subscribers.get(key)

        if subscriber is None:
            subscriber = Subscriber(self._subscribe_config.dict(by_alias=True))
            self._subscribers[key] = subscriber

        # BUG FIX: previously the cached-subscriber path fell through and
        # implicitly returned None; always return the instance.
        return subscriber

    def _should_continue_polling(
        self, message: KafkaMessage, break_on_eof: bool, break_on_error: bool
    ) -> Optional[bool]:
        """Decide whether polling should continue after an error or EOF.

        :param message: message returned by the consumer poll (may be None).
        :param break_on_eof: stop polling when the partition is exhausted.
        :param break_on_error: stop polling on errors.
        :return: True to keep polling, False to stop, None when the message
            is a regular one and no special action is required.
        """
        eof = message is None or message.error() == KafkaError._PARTITION_EOF  # pylint: disable=protected-access
        if eof:
            return not break_on_eof
        elif error := message.error():
            logger.error('Ошибка при получении сообщения: %s', error)
            return not break_on_error
        return None

    def _poll_subscriber(
        self,
        subscriber: Subscriber,
        break_on_eof: bool = False,
        break_on_error: bool = False,
    ) -> Generator[KafkaMessage, None, None]:
        """Poll the consumer and yield received messages.

        :param subscriber: the consumer to poll.
        :param break_on_eof: stop polling when the partition is exhausted.
        :param break_on_error: stop polling on errors.
        :return: generator of Kafka messages.
        """

        while True:
            message: KafkaMessage = subscriber.poll(1.0)

            continue_polling = self._should_continue_polling(message, break_on_eof, break_on_error)
            if continue_polling is True:
                continue
            elif continue_polling is False:
                break

            logger.info('Получено сообщение из %s', message.topic())
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug('Тело сообщения: %s', message.value())

            yield message

    def publish(self, topic: str, message: str, *args, **kwargs) -> None:  # pylint: disable=unused-argument
        """Publish *message* to *topic*, flushing synchronously."""
        publisher = self._ensure_publisher()

        if self._must_ensure_topics:
            self._ensure_topics(topic)

        # poll(0) serves queued delivery callbacks before producing.
        publisher.poll(0)
        publisher.produce(topic, message, callback=publish_callback)
        publisher.flush()

    def subscribe(self, *topics: str) -> Generator[KafkaMessage, None, None]:
        """Subscribe to *topics* and yield incoming messages."""
        subscriber: Subscriber = self._ensure_subscriber(*topics)

        if self._must_ensure_topics:
            self._ensure_topics(*topics)

        subscriber.subscribe(list(topics))

        yield from self._poll_subscriber(subscriber)

    def list_topics(self, timeout=30.0):
        """Return the names of all topics known to the cluster."""
        admin = self._get_admin_client()
        return list(admin.list_topics(timeout=timeout).topics.keys())

    def create_topics(self, *topics: str, timeout=30.0):
        """Create *topics* (one partition each), tolerating pre-existing ones."""
        admin = self._get_admin_client()
        futures = admin.create_topics(
            [
                NewTopic(topic, num_partitions=1) for topic in topics
            ],
            request_timeout=timeout
        )

        for topic, future in futures.items():
            try:
                future.result()
            except KafkaException as e:
                error = e.args[0]
                # BUG FIX: KafkaError.code() returns an int error code, so the
                # old comparison against the string 'TOPIC_ALREADY_EXISTS'
                # never matched and every KafkaException was swallowed.
                if error.code() == KafkaError.TOPIC_ALREADY_EXISTS:
                    logger.warning('Топик уже существует')
                else:
                    # Previously silently ignored; at least record the failure.
                    logger.exception('Невозможно создать topic')

            except Exception:  # pylint: disable=broad-exception-caught
                logger.exception('Невозможно создать topic')

            else:
                logger.info('Topic %s создан', topic)

    def delete_topic(self, topic: str):
        """Delete a single topic; returns the (topic, result) pair's result side."""
        return self.delete_topics(topic)[0]

    def delete_topics(self, *topics: str):
        """Delete a set of topics; returns a list of (topic, result) pairs."""
        admin = self._get_admin_client()
        return [
            (topic, future.result())
            for topic, future in admin.delete_topics(list(topics)).items()
        ]

    def _get_admin_client(self):
        """Build an AdminClient from whichever config is available.

        :raises RuntimeError: if neither role was configured.
        """
        if self._available_config is None:
            raise RuntimeError('Не указаны параметры подключения')

        return AdminClient(self._available_config.dict(by_alias=True))
@@ -0,0 +1,7 @@
1
+ """Модель предметной области адаптера к Apache Kafka."""
2
+ from .model import AbstractAuth
3
+ from .model import AuthDisabled
4
+ from .model import BaseConfig
5
+ from .model import PublishConfig
6
+ from .model import SASLPlainAuth
7
+ from .model import SubscribeConfig
@@ -0,0 +1,7 @@
1
+ """Модель предметной области адаптера к Apache Kafka."""
2
+ from .auth import AbstractAuth
3
+ from .auth import AuthDisabled
4
+ from .auth import SASLPlainAuth
5
+ from .config import BaseConfig
6
+ from .config import PublishConfig
7
+ from .config import SubscribeConfig
@@ -0,0 +1,39 @@
1
+ """Параметры аутентификации."""
2
+ from typing import Final
3
+ from typing import Literal
4
+
5
+ from pydantic.main import BaseModel
6
+
7
+
8
# Security-protocol identifiers accepted by the underlying Kafka client.
SASL_PLAINTEXT: Final = 'SASL_PLAINTEXT'
SASL_SSL: Final = 'SASL_SSL'

# Protocols that require SASL credentials.
SASL_PROTOCOLS = [SASL_PLAINTEXT, SASL_SSL]
12
+
13
+
14
class AbstractAuth(BaseModel):
    """Authentication parameters.

    Subclasses may carry default values and additional validation.
    """
19
+
20
+
21
class AuthDisabled(AbstractAuth):
    """Authentication is disabled (contributes no connection options)."""
23
+
24
+
25
class SASLPlainAuth(AbstractAuth):
    """SASL/PLAIN authentication parameters."""

    sasl_username: str
    sasl_password: str
    sasl_mechanism: str = 'PLAIN'
    security_protocol: Literal['SASL_PLAINTEXT'] = SASL_PLAINTEXT

    class Config:
        allow_population_by_field_name = True
        # Aliases: map field names to the dotted option names expected by
        # the low-level Kafka client configuration.
        fields = {
            'sasl_username': 'sasl.username',
            'sasl_password': 'sasl.password',
            'sasl_mechanism': 'sasl.mechanism',
            'security_protocol': 'security.protocol'
        }
@@ -0,0 +1,63 @@
1
+ """Набор конфигураций."""
2
+ from typing import TYPE_CHECKING
3
+ from typing import Dict
4
+ from typing import Union
5
+
6
+ from pydantic.fields import Field
7
+ from pydantic.main import BaseModel
8
+ from pydantic.utils import import_string
9
+
10
+ from .auth import AbstractAuth
11
+ from .auth import AuthDisabled
12
+
13
+
14
+ if TYPE_CHECKING:
15
+ from pydantic.typing import AbstractSetIntStr
16
+
17
+
18
class BaseConfig(BaseModel):
    """Parameters common to both publisher and subscriber."""

    bootstrap__servers: str

    auth: AbstractAuth = AuthDisabled()

    class Config:
        fields = {  # Aliases: dotted option names for the Kafka client.
            'bootstrap__servers': 'bootstrap.servers',
        }

    def set_auth(self, auth: Dict) -> AbstractAuth:
        """Instantiate and attach auth parameters from a declarative mapping.

        :param auth: mapping with 'BACKEND' (dotted path of an AbstractAuth
            subclass) and 'OPTIONS' (constructor kwargs for that class).
        :raises TypeError: if the backend is not an AbstractAuth subclass.
        """
        auth_cls = import_string(auth['BACKEND'])
        # BUG FIX: `assert` is stripped under `python -O`, silently skipping
        # validation; raise an explicit error instead.
        if not issubclass(auth_cls, AbstractAuth):
            raise TypeError(f'{auth["BACKEND"]} is not an AbstractAuth subclass')
        self.auth = auth_cls(**auth['OPTIONS'])
        return self.auth

    def dict(
        self, *args, exclude: Union['AbstractSetIntStr', None] = None, **kwargs
    ) -> dict:
        """Return options suitable for the low-level messaging package."""
        # The `auth` field is excluded from the model dump and its own
        # aliased options are merged in flat, as the client expects.
        self_dict = super().dict(*args, exclude=(exclude or set()) | {'auth'}, **kwargs)
        auth_dict = self.auth.dict(by_alias=True)
        return self_dict | auth_dict
43
+
44
+
45
class PublishConfig(BaseConfig):
    """Publisher (producer) configuration; inherits the base aliases."""

    class Config(BaseConfig.Config):
        fields = {
            **BaseConfig.Config.fields
        }
51
+
52
+
53
class SubscribeConfig(BaseConfig):
    """Subscriber (consumer) configuration."""

    group__id: str
    # Pinned to 'earliest' via const=True; maps to Kafka's auto.offset.reset.
    auto__offset__reset: str = Field('earliest', const=True)

    class Config(BaseConfig.Config):
        fields = {  # Aliases: dotted option names for the Kafka client.
            'auto__offset__reset': 'auto.offset.reset',
            'group__id': 'group.id',
            **BaseConfig.Config.fields
        }
File without changes
@@ -0,0 +1,95 @@
1
+ Metadata-Version: 2.1
2
+ Name: explicit-python-kafka
3
+ Version: 1.1.1
4
+ Summary: Набор компонентов для интеграции explicit с kafka
5
+ Author: BARS Group
6
+ Author-email: education_dev@bars-open.ru
7
+ License: MIT
8
+ Classifier: Intended Audience :: Developers
9
+ Classifier: Environment :: Web Environment
10
+ Classifier: Natural Language :: Russian
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python
13
+ Classifier: Programming Language :: Python :: 3.9
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Development Status :: 5 - Production/Stable
18
+ Description-Content-Type: text/markdown
19
+ Requires-Dist: explicit <3,>=2
20
+ Requires-Dist: confluent-kafka
21
+
22
+ # Explicit-Kafka
23
+ ## Набор компонентов для интеграции explicit с kafka.
24
+
25
+ Содержит реализацию адаптера обмена сообщениями через Kafka.
26
+
27
+ ### Пример использования
28
+ Настройка адаптера
29
+ ```python
30
+ # persons/core/apps.py
31
+ from django.apps.config import AppConfig as AppConfigBase
32
+
33
+
34
+ class AppConfig(AppConfigBase):
35
+
36
+ name = __package__
37
+
38
+ def _setup_adapter(self):
39
+ from explicit.kafka.adapters.messaging import Adapter
40
+ from explicit.kafka.adapters.messaging import PublishConfig
41
+ from explicit.kafka.adapters.messaging import SubscribeConfig
42
+
43
+ from persons import core
44
+
45
+ # конфигурация адаптера
46
+ adapter_base_config = {'bootstrap.servers': 'kafka:9092'}
47
+ publish_config = PublishConfig(adapter_base_config)
48
+ subscribe_config = SubscribeConfig(adapter_base_config | {'group.id': f'edu.persons'})
49
+
50
+ adapter = Adapter(subscribe_config=subscribe_config, publish_config=publish_config)
51
+ core.adapter = adapter
52
+
53
+ def ready(self):
54
+ self._setup_adapter()
55
+ ```
56
+ Отправка сообщений
57
+ ```python
58
+ # persons/core/persons/services/handlers/events.py
59
+
60
+ def on_person_created(
61
+ event: 'PersonCreated',
62
+ messaging_adapter: 'AbstractMessagingAdapter'
63
+ ):
64
+ messaging_adapter.publish('edu.persons.person', event.dump())
65
+ ```
66
+
67
+ Подписка на сообщения
68
+ ```python
69
+ # education/entrypoints/eventconsumer.py
70
+
71
+ def bootstrap():
72
+ import json
73
+
74
+ from education.core import adapter
75
+ from education.core import bus
76
+ from education.core.persons.domain.events import PersonCreated
77
+
78
+ TOPIC_EVENTS = {
79
+ 'edu.persons.person': PersonCreated,
80
+ }
81
+
82
+ for message in adapter.subscribe(*TOPIC_EVENTS):
84
+ event = TOPIC_EVENTS[message.topic()](
85
+ **json.loads(message.value())
86
+ )
87
+ bus.handle(event)
88
+
89
+ bootstrap()
90
+ ```
91
+
92
+ ### Запуск тестов
93
+ ```sh
94
+ $ tox
95
+ ```
@@ -0,0 +1,13 @@
1
+ explicit/kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ explicit/kafka/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
+ explicit/kafka/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ explicit/kafka/adapters/messaging.py,sha256=V1DfOcaj_Wq9JfwzqmJIC24T-_XyaqOvnR0sLa2Jqg0,8063
5
+ explicit/kafka/domain/__init__.py,sha256=mkYZMtbCNw90NUxGEusosWe5EQsA4iucdMLQmS6q7SI,284
6
+ explicit/kafka/domain/model/__init__.py,sha256=BKA_dqvNOhOrUI7pUn3rYQO1X5YUijCmjyEHpuUkfp0,284
7
+ explicit/kafka/domain/model/auth.py,sha256=h2-P-aDP3_6JLYM3e51wSJqhDBEwAKI2Rfo1uoSMHg0,1025
8
+ explicit/kafka/domain/model/config.py,sha256=9HMY4ei180j6BEfR3w-D1FQsIMEh40S6I7Xw9Btrgrs,1808
9
+ explicit_python_kafka-1.1.1.dist-info/METADATA,sha256=Ju_DUsZAO9iJ9cR1WmAGoyfW4VggvzdrW_1NBtW-UBo,2914
10
+ explicit_python_kafka-1.1.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
11
+ explicit_python_kafka-1.1.1.dist-info/dependency_links.txt,sha256=z2Fi-sEElLMsunjPT4vpjRUYQ4oGu_tX_9NeLaEKlgo,44
12
+ explicit_python_kafka-1.1.1.dist-info/top_level.txt,sha256=tt6T8l4Yji4ww87qZQcD4CbcwTIHy7NAPmU7QAfMcpY,9
13
+ explicit_python_kafka-1.1.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (74.1.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1 @@
1
+ https://pypi.bars-open.ru/simple/m3-builder
@@ -0,0 +1 @@
1
+ explicit