tp_common-0.0.3-py3-none-any.whl → tp_common-0.0.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tp_common/base_client/client.py CHANGED
@@ -16,9 +16,9 @@ from aiohttp import (
 )
 
 from tp_common.base_client.exceptions import (
-    BaseClientException,
     ClientConnectionException,
     ClientDNSException,
+    ClientException,
     ClientProxyException,
     ClientResponseErrorException,
     ClientTimeoutException,
@@ -471,7 +471,7 @@ class BaseClient:
                     "response": response_body_text or "",
                 },
            )
-           raise BaseClientException(
+           raise ClientException(
                f"Unexpected HTTP error: {str(e)}",
                url=url,
            ) from e
tp_common/base_client/exceptions.py CHANGED
@@ -17,7 +17,6 @@ class ClientException(Exception):
         self.response_body = response_body
 
 
-
 class ClientResponseErrorException(ClientException):
     """Exception raised on an unsuccessful HTTP status (>=400)."""
 
@@ -30,6 +29,7 @@ class ClientResponseErrorException(ClientException):
     ) -> None:
         super().__init__(message, url, status_code, response_body)
 
+
 class ClientConnectionException(ClientException):
     """Exception raised on a connection error (the connection could not be established)."""
 
@@ -40,6 +40,7 @@ class ClientConnectionException(ClientException):
     ) -> None:
         super().__init__(message, url)
 
+
 class ClientTimeoutException(ClientConnectionException):
     """Exception raised on a connection timeout."""
 
@@ -64,9 +65,6 @@ class ClientProxyException(ClientConnectionException):
         self.proxy = proxy
 
 
-
-
-
 class ClientDNSException(ClientConnectionException):
     """Exception raised on a DNS error (the domain name could not be resolved)."""
 
@@ -90,7 +88,7 @@ class ClientServerErrorException(ClientException):
     pass
 
 
-class BaseNetworkErrorException(ClientException):
+class ClientNetworkErrorException(ClientException):
     """Base exception for network errors (timeouts, connection, DNS)."""
 
     pass
tp_common/base_items/base_logging_service.py ADDED
@@ -0,0 +1,26 @@
+from logging import Logger
+from typing import Any
+
+from tp_helper.functions import get_full_class_name
+
+
+class BaseLoggingService:
+    def __init__(self, logger: Logger | None = None) -> None:
+        self.logger = logger
+
+    def set_logger(self, logger: Logger) -> None:
+        self.logger = logger
+
+    def logging_error(
+        self, exception: Any, message: str, retry_delay: float | None = None
+    ) -> None:
+        if self.logger is None:
+            return
+        error_type = get_full_class_name(exception)
+        error_text = str(exception)
+
+        self.logger.error(message)
+        self.logger.error(f"{error_type}: {error_text}")
+
+        if retry_delay is not None:
+            self.logger.info(f"🔁 Retrying in {retry_delay:.1f} s...")
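For orientation, a minimal usage sketch of the new BaseLoggingService. The import path mirrors the RECORD entry tp_common/base_items/base_logging_service.py; the DataFetcher class is an assumption, not part of the package.

```python
import logging

# Assumed import path, based on the RECORD entry for this module.
from tp_common.base_items.base_logging_service import BaseLoggingService


class DataFetcher(BaseLoggingService):
    """Hypothetical service that reports failures through the shared logger."""

    def fetch(self) -> None:
        try:
            raise TimeoutError("upstream did not answer")
        except TimeoutError as exc:
            # Logs the message, the fully qualified exception type, and a retry hint.
            self.logging_error(exc, "Fetching data failed", retry_delay=5.0)


fetcher = DataFetcher(logger=logging.getLogger(__name__))
fetcher.fetch()
```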
tp_common/base_items/base_request.py ADDED
@@ -0,0 +1,8 @@
+from pydantic import BaseModel, ConfigDict
+from pydantic.alias_generators import to_camel
+
+
+class BaseRequest(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_camel, populate_by_name=True, from_attributes=True
+    )
tp_common/base_items/base_response.py ADDED
@@ -0,0 +1,8 @@
+from pydantic import BaseModel, ConfigDict
+from pydantic.alias_generators import to_camel
+
+
+class BaseResponse(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_camel, populate_by_name=True, from_attributes=True
+    )
tp_common/base_items/base_schema.py ADDED
@@ -0,0 +1,6 @@
+from pydantic import BaseModel
+
+
+class BaseSchema(BaseModel):
+    def to_json(self) -> str:
+        return self.model_dump_json()
tp_common/base_items/base_timestamp_model.py ADDED
@@ -0,0 +1,21 @@
+from datetime import UTC, datetime
+
+from sqlalchemy import TIMESTAMP, text
+from sqlalchemy.orm import Mapped, mapped_column
+
+
+class BaseTimestampModel:
+    created_at: Mapped[datetime] = mapped_column(
+        TIMESTAMP(timezone=True),
+        server_default=text("TIMEZONE('utc', now())"),
+        default=lambda: datetime.now(UTC),  # aware datetime by default
+        nullable=False,
+    )
+
+    updated_at: Mapped[datetime] = mapped_column(
+        TIMESTAMP(timezone=True),
+        server_default=text("TIMEZONE('utc', now())"),
+        onupdate=text("TIMEZONE('utc', now())"),
+        default=lambda: datetime.now(UTC),  # aware datetime by default
+        nullable=False,
+    )
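A sketch of how this mixin might be attached to a SQLAlchemy 2.x model. The declarative base and the users table are assumptions for illustration only.

```python
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

# Assumed import path, based on the RECORD entry for this module.
from tp_common.base_items.base_timestamp_model import BaseTimestampModel


class Base(DeclarativeBase):
    """Hypothetical declarative base for the project."""


class User(BaseTimestampModel, Base):
    """Hypothetical model: inherits created_at/updated_at columns from the mixin."""

    __tablename__ = "users"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
```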
tp_common/base_items/base_worker.py ADDED
@@ -0,0 +1,11 @@
+from logging import Logger
+
+from tp_helper.base_items.base_discord import BaseDiscord
+from tp_helper.base_items.base_logging_service import BaseLoggingService
+from tp_helper.discord_helper import DiscordHelper
+
+
+class BaseWorker(BaseLoggingService, BaseDiscord):
+    def __init__(self, logger: Logger, discord: DiscordHelper):
+        BaseLoggingService.__init__(self, logger)
+        BaseDiscord.__init__(self, discord)
tp_common/base_items/base_worker_service.py ADDED
@@ -0,0 +1,14 @@
+from logging import Logger
+
+from redis.asyncio import Redis
+from tp_helper.base_items.base_logging_service import BaseLoggingService
+
+
+class BaseWorkerService(BaseLoggingService):
+    def __init__(
+        self,
+        logger: Logger,
+        redis_client: Redis | None = None,
+    ) -> None:
+        super().__init__(logger)
+        self.redis_client = redis_client
tp_common/decorators/decorator_retry_forever.py ADDED
@@ -0,0 +1,123 @@
+import asyncio
+import functools
+import inspect
+import logging
+from collections.abc import Awaitable, Callable
+from typing import ParamSpec, Protocol, TypeVar, cast
+
+P = ParamSpec("P")  # parameters of the original function
+R = TypeVar("R")  # return type of the original function
+
+
+class _HasLogger(Protocol):
+    logger: logging.Logger
+
+
+def retry_forever(
+    start_message: str,
+    error_message: str,
+    delay: int = 10,
+    backoff: float = 1.2,
+    max_delay: int = 60,
+    discord_every: int = 3,
+    ignore_exceptions: list[type[Exception | BaseException]] | None = None,
+) -> Callable[
+    [Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]
+]:  # takes and returns an async function with the original signature
+    """
+    Wraps class methods only.
+    """
+    ignored_list: list[type[Exception] | type[BaseException]] = (
+        ignore_exceptions if ignore_exceptions is not None else []
+    )
+
+    def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
+        sig = inspect.signature(func)
+
+        @functools.wraps(func)
+        async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+            # --- Collect context for interpolation into the messages
+            self = args[0] if args else None  # self is already present in *args
+            if self is None:
+                raise ValueError(
+                    "@retry_forever can only be applied to a class method, "
+                    f"but {func.__qualname__} takes no arguments"
+                )
+            try:
+                bound = sig.bind(*args, **kwargs)
+                bound.apply_defaults()
+                context = dict(bound.arguments)
+            except Exception:
+                context = {"self": self}
+
+            str_context = {k: str(v) for k, v in context.items()}
+
+            # --- Expand self.* attributes into the context
+            if "self" in context:
+                self_obj = context["self"]
+                try:
+                    for attr in dir(self_obj):
+                        if not attr.startswith("_"):
+                            val = getattr(self_obj, attr)
+                            if not callable(val):
+                                str_context[attr] = str(val)
+                except Exception:
+                    pass
+
+            # --- Log the start
+            self_typed = cast(_HasLogger, self)
+            try:
+                self_typed.logger.debug(start_message.format_map(str_context))
+            except Exception:
+                self_typed.logger.debug(start_message)
+
+            # --- Retry loop
+            current_delay: float = float(delay)
+            retry_count = 0
+
+            while True:
+                try:
+                    return await func(*args, **kwargs)
+                except Exception as e:
+                    if type(e) in ignored_list:
+                        raise e from e
+                    retry_count += 1
+
+                    str_context_with_exception = {
+                        **str_context,
+                        "e": str(e),
+                        "retry_count": retry_count,
+                    }
+
+                    try:
+                        err_msg = error_message.format_map(str_context_with_exception)
+                    except Exception:
+                        err_msg = error_message
+
+                    self_typed.logger.exception(f"❌ {err_msg}")
+
+                    discord_obj = getattr(self, "discord", None)
+                    if (
+                        retry_count % discord_every == 0
+                        and discord_obj is not None
+                        and callable(
+                            getattr(discord_obj, "send_traceback_report", None)
+                        )
+                    ):
+                        try:
+                            await discord_obj.send_traceback_report(e, err_msg)
+                        except Exception as discord_error:
+                            self_typed.logger.warning(
+                                f"⚠️ Failed to send the report to Discord: {discord_error}"
+                            )
+
+                    self_typed.logger.info(
+                        f"🔁 Retry #{retry_count} in {current_delay:.1f} s..."
+                    )
+
+                    await asyncio.sleep(current_delay)
+                    current_delay = min(current_delay * backoff, float(max_delay))
+
+        return wrapper
+
+    return decorator
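A usage sketch for the decorator under stated assumptions: the PriceWorker class, its attributes, and the import path are illustrative, not part of the package. Placeholders in the messages are filled from the bound call arguments and from non-callable attributes of self.

```python
import logging

# Assumed import path, based on the RECORD entry tp_common/decorators/decorator_retry_forever.py.
from tp_common.decorators.decorator_retry_forever import retry_forever


class PriceWorker:
    """Hypothetical worker; the decorator only requires a .logger attribute on self."""

    def __init__(self) -> None:
        self.logger = logging.getLogger("price-worker")
        self.source = "https://example.invalid/prices"

    @retry_forever(
        start_message="Loading prices from {source}",
        error_message="Loading prices failed (attempt {retry_count}): {e}",
        delay=5,
        backoff=2.0,
        max_delay=30,
        ignore_exceptions=[ValueError],  # a ValueError is re-raised instead of retried
    )
    async def load_prices(self) -> dict:
        raise ConnectionError("simulated outage")  # retried forever with backoff


# asyncio.run(PriceWorker().load_prices())  # would loop until the call finally succeeds
```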
tp_common/event_service/base_event_service.py ADDED
@@ -0,0 +1,222 @@
+import json
+from collections.abc import Sequence
+from enum import Enum
+from logging import Logger
+from typing import TypeVar, cast
+
+from confluent_kafka import Consumer, KafkaError, KafkaException, Message, Producer
+from pydantic import ValidationError
+
+from tp_common.event_service.event_config import (
+    KAFKA_STREAM_PREFIX,
+    create_consumer,
+    create_producer,
+)
+from tp_common.event_service.schemas import BaseEventPayload, BaseEventType
+
+EventEnumT = TypeVar("EventEnumT", bound=Enum)
+
+
+class BaseEventService:
+    def __init__(self, logger: Logger) -> None:
+        self.logger = logger
+
+
+class ConsumerBaseEventService[EventEnumT: Enum](BaseEventService):
+    def __init__(
+        self,
+        group_id: str,
+        group_name: str,
+        logger: Logger,
+        poll_timeout: float = 1.0,
+        event_enum: type[EventEnumT] = cast(type[EventEnumT], BaseEventType),
+        subscribe_to: Sequence[EventEnumT] | None = None,
+        auto_ack_ignored: bool = True,
+    ) -> None:
+        super().__init__(logger=logger)
+        self.consumer: Consumer = create_consumer(group_id=group_id)
+        self.group_name: str = group_name
+        self.poll_timeout: float = poll_timeout
+        self.event_enum = event_enum
+        self.subscribe_to = subscribe_to
+        self.auto_ack_ignored = auto_ack_ignored
+
+    def subscribe(self) -> None:
+        """
+        Subscribe to the topic derived from group_name.
+        """
+        self.consumer.subscribe([f"{KAFKA_STREAM_PREFIX}.{self.group_name}"])
+
+    def consume_filtered(
+        self,
+        event_types: list[EventEnumT] | None = None,
+    ) -> tuple[Message, BaseEventPayload] | None:
+        """
+        Return the message and payload for external processing and committing.
+        """
+        msg = self._poll_message()
+        if msg is None:
+            return None
+
+        try:
+            payload = self.parse_payload(msg)
+            normalized_event = self._normalize_event(payload.event)
+            snapshot_value = getattr(self.event_enum, "SNAPSHOT", None)
+            if snapshot_value is not None and normalized_event == snapshot_value:
+                # SNAPSHOT: the initial full dump of table data emitted when the Debezium connector starts
+                self._log_snapshot_message(msg)
+                self.commit(msg)
+                return None
+            if not self._should_process_event(
+                normalized_event, event_types=event_types
+            ):
+                # Does not pass the filter
+                self._handle_ignored_message(msg)
+                return None
+            # Passes the filter
+            return msg, payload
+        except ValidationError:
+            # Service or other non-payload messages in the WAL
+            self._log_skipped_message(msg)
+            self.commit(msg)
+            return None
+        except Exception:
+            # Any other errors
+            self.logger.exception(
+                "Failed to process message (topic=%s, partition=%s, offset=%s) — offset NOT committed",
+                msg.topic(),
+                msg.partition(),
+                msg.offset(),
+            )
+            return None
+
+    def commit(self, msg: Message) -> None:
+        """
+        Commit the offset for the processed message.
+        """
+        self.consumer.commit(message=msg, asynchronous=False)
+
+    def close(self) -> None:
+        """
+        Close the connection to the queue.
+        """
+        self.consumer.close()
+
+    def _poll_message(self) -> Message | None:
+        msg = self.consumer.poll(self.poll_timeout)
+        if msg is None:
+            return None
+
+        if msg.error():
+            err = msg.error()
+            eof_code = getattr(KafkaError, "_PARTITION_EOF", 1)
+            if err is not None and err.code() == eof_code:
+                self.logger.debug(
+                    "Partition EOF (topic=%s, partition=%s, offset=%s)",
+                    msg.topic(),
+                    msg.partition(),
+                    msg.offset(),
+                )
+                return None
+            raise KafkaException(err)
+
+        return msg
+
+    def _handle_ignored_message(self, msg: Message) -> None:
+        if not self.auto_ack_ignored:
+            return
+        self.logger.debug(
+            "Ignored message by filter (topic=%s, partition=%s, offset=%s) — committed",
+            msg.topic(),
+            msg.partition(),
+            msg.offset(),
+        )
+        self.commit(msg)
+
+    def _log_skipped_message(self, msg: Message) -> None:
+        # Not a Debezium payload (test/service messages) — skip and commit.
+        self.logger.warning(
+            "Skipped non-Debezium message (topic=%s, partition=%s, offset=%s) — committed anyway",
+            msg.topic(),
+            msg.partition(),
+            msg.offset(),
+            exc_info=True,
+        )
+
+    def _log_snapshot_message(self, msg: Message) -> None:
+        self.logger.debug(
+            "Skipped snapshot message (topic=%s, partition=%s, offset=%s)",
+            msg.topic(),
+            msg.partition(),
+            msg.offset(),
+        )
+
+    def _should_process_event(
+        self,
+        event: EventEnumT | None,
+        event_types: Sequence[EventEnumT] | None = None,
+    ) -> bool:
+        active_filter = event_types if event_types is not None else self.subscribe_to
+        if active_filter is None:
+            return True
+        if event is None:
+            return False
+        return event in list(active_filter)
+
+    def _normalize_event(self, value: object) -> EventEnumT | None:
+        if value is None:
+            return None
+        if isinstance(value, self.event_enum):
+            return value
+        try:
+            return self.event_enum(value)
+        except Exception:
+            return None
+
+    def parse_payload(self, msg: Message) -> BaseEventPayload:
+        raw = msg.value()
+        if raw is None:
+            raise ValueError("Message value is None")
+        event_dict = json.loads(raw)
+        return BaseEventPayload.model_validate(event_dict)
+
+
+class ProducerBaseEventService(BaseEventService):
+    def __init__(self, logger: Logger) -> None:
+        super().__init__(logger=logger)
+        self.producer: Producer = create_producer()
+
+    def publish_payload(
+        self,
+        payload: BaseEventPayload,
+        *,
+        topic: str,
+        key: str | None = None,
+    ) -> None:
+        data = json.dumps(payload.model_dump(mode="json"), ensure_ascii=False).encode(
+            "utf-8"
+        )
+        key_bytes = key.encode("utf-8") if key is not None else None
+        self.producer.produce(
+            topic=topic,
+            value=data,
+            key=key_bytes,
+        )
+        self.producer.poll(0)
+
+    def flush(self, timeout: float = 10.0) -> None:
+        self.producer.flush(timeout)
+
+    def close(self, timeout: float = 10.0) -> None:
+        self.flush(timeout)
+
+    def _on_delivery(self, err: KafkaError | None, msg: Message) -> None:
+        if err:
+            self.logger.error("Kafka delivery error: %s", err)
+            return
+        self.logger.debug(
+            "Kafka delivered: topic=%s partition=%s offset=%s",
+            msg.topic(),
+            msg.partition(),
+            msg.offset(),
+        )
tp_common/event_service/consumer_event_service.py ADDED
@@ -0,0 +1,88 @@
+import typing
+from enum import Enum
+from logging import Logger
+from typing import cast
+
+from confluent_kafka import Message
+from pydantic import BaseModel, ValidationError
+
+from tp_common.event_service.base_event_service import ConsumerBaseEventService
+from tp_common.event_service.schemas import BaseEventType, TypedEventPayload
+
+
+class ConsumeEventService[EventEnumT: Enum, ModelT: BaseModel](
+    ConsumerBaseEventService[EventEnumT]
+):
+    def __init__(
+        self,
+        group_id: str,
+        group_name: str,
+        logger: Logger,
+        payload_schema: type[ModelT],
+        poll_timeout: float = 1.0,
+        event_enum: type[EventEnumT] = cast(type[EventEnumT], BaseEventType),
+        subscribe_to: list[EventEnumT] | None = None,
+        auto_ack_ignored: bool = True,
+    ) -> None:
+        super().__init__(
+            group_id=group_id,
+            group_name=group_name,
+            logger=logger,
+            poll_timeout=poll_timeout,
+            event_enum=event_enum,
+            subscribe_to=subscribe_to,
+            auto_ack_ignored=auto_ack_ignored,
+        )
+        self.payload_schema = payload_schema
+
+    def parse_payload(self, msg: Message) -> TypedEventPayload[ModelT]:  # type: ignore[override]
+        base_payload = super().parse_payload(msg)
+        before = self._parse_model_part(base_payload.before)
+        after = self._parse_model_part(base_payload.after)
+        normalized_event = self._normalize_event(base_payload.event)
+        return TypedEventPayload[ModelT](
+            before=before,
+            after=after,
+            event=normalized_event,
+        )
+
+    def consume_filtered(  # type: ignore[override]
+        self,
+        event_types: list[EventEnumT] | None = None,
+    ) -> tuple[Message, TypedEventPayload[ModelT]] | None:
+        msg = self._poll_message()
+        if msg is None:
+            return None
+        try:
+            payload = self.parse_payload(msg)
+            snapshot_value = getattr(self.event_enum, "SNAPSHOT", None)
+            if snapshot_value is not None and payload.event == snapshot_value:
+                self._log_snapshot_message(msg)
+                self.commit(msg)
+                return None
+            if not self._should_process_event(
+                typing.cast(EventEnumT | None, payload.event),
+                event_types=event_types,
+            ):
+                self._handle_ignored_message(msg)
+                return None
+            return msg, payload
+        except ValidationError:
+            self._log_skipped_message(msg)
+            self.commit(msg)
+            return None
+        except Exception:
+            self.logger.exception(
+                "Failed to process message (topic=%s, partition=%s, offset=%s) — offset NOT committed",
+                msg.topic(),
+                msg.partition(),
+                msg.offset(),
+            )
+            return None
+
+    def _parse_model_part(self, data: dict | BaseModel | None) -> ModelT | None:
+        if data is None:
+            return None
+        if isinstance(data, self.payload_schema):
+            return typing.cast(ModelT, data)
+        return typing.cast(ModelT, self.payload_schema.model_validate(data))
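A consumption sketch under stated assumptions: the group names and the shape of the poll loop are illustrative, and it reuses the package's example UserPayload schema from tp_common.event_service.schemas.

```python
import logging

from tp_common.event_service.consumer_event_service import ConsumeEventService
from tp_common.event_service.schemas import BaseEventType, UserPayload

logger = logging.getLogger("user-consumer")

# Assumed group_id/group_name; the subscribed topic becomes f"{KAFKA_STREAM_PREFIX}.users".
service = ConsumeEventService(
    group_id="user-consumer",
    group_name="users",
    logger=logger,
    payload_schema=UserPayload,
    subscribe_to=[BaseEventType.CREATE, BaseEventType.UPDATE],
)
service.subscribe()

try:
    while True:
        result = service.consume_filtered()
        if result is None:
            continue  # nothing matching the filter on this poll
        msg, payload = result
        logger.info("User %s -> %s", payload.before, payload.after)
        service.commit(msg)  # commit the offset only after successful handling
finally:
    service.close()
```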
tp_common/event_service/event_config.py ADDED
@@ -0,0 +1,46 @@
+import os
+
+from confluent_kafka import Consumer, Producer
+
+KAFKA_BOOTSTRAP_SERVERS = os.getenv("KAFKA_BOOTSTRAP_SERVERS", "192.168.81.61:9094")
+KAFKA_STREAM_PREFIX = os.getenv("KAFKA_STREAM_PREFIX", "events.public")
+DEFAULT_GROUP_ID = os.getenv("DEFAULT_GROUP_ID", "domain-tpc-consumer")
+
+SASL_USERNAME = os.getenv("KAFKA_SASL_USERNAME", "user")
+SASL_PASSWORD = os.getenv("KAFKA_SASL_PASSWORD", "user1122@")
+SASL_MECHANISM = os.getenv("KAFKA_SASL_MECHANISM", "SCRAM-SHA-256")
+SECURITY_PROTOCOL = os.getenv("KAFKA_SECURITY_PROTOCOL", "SASL_PLAINTEXT")
+
+
+def create_consumer(group_id: str = DEFAULT_GROUP_ID) -> Consumer:
+    return Consumer(
+        {
+            "bootstrap.servers": KAFKA_BOOTSTRAP_SERVERS,
+            "group.id": group_id,
+            "auto.offset.reset": "earliest",
+            "enable.auto.commit": False,
+            "session.timeout.ms": 30000,
+            "max.poll.interval.ms": 300000,
+            # SASL
+            "security.protocol": SECURITY_PROTOCOL,
+            "sasl.mechanism": SASL_MECHANISM,
+            "sasl.username": SASL_USERNAME,
+            "sasl.password": SASL_PASSWORD,
+        }
+    )
+
+
+def create_producer() -> Producer:
+    return Producer(
+        {
+            "bootstrap.servers": KAFKA_BOOTSTRAP_SERVERS,
+            "acks": "all",
+            "retries": 3,
+            "retry.backoff.ms": 100,
+            # SASL
+            "security.protocol": SECURITY_PROTOCOL,
+            "sasl.mechanism": SASL_MECHANISM,
+            "sasl.username": SASL_USERNAME,
+            "sasl.password": SASL_PASSWORD,
+        }
+    )
tp_common/event_service/producer_event_service.py ADDED
@@ -0,0 +1,97 @@
+import json
+from enum import Enum
+from logging import Logger
+from typing import TypeVar
+
+from pydantic import BaseModel
+
+from tp_common.event_service.base_event_service import ProducerBaseEventService
+from tp_common.event_service.event_config import KAFKA_STREAM_PREFIX
+from tp_common.event_service.schemas import BaseEventPayload, TypedEventPayload
+
+ModelT = TypeVar("ModelT", bound=BaseModel)
+
+
+class ProducerEventService(ProducerBaseEventService):
+    def __init__(
+        self,
+        logger: Logger,
+        group_name: str,
+        payload_schema: type[ModelT] | None = None,
+    ) -> None:
+        super().__init__(logger=logger)
+        self.topic_name = f"{KAFKA_STREAM_PREFIX}.{group_name}"
+        self.payload_schema = payload_schema
+
+    def publish(
+        self,
+        payload: BaseEventPayload | TypedEventPayload[ModelT],
+    ) -> None:
+        normalized_payload = self._normalize_payload(payload)
+        data = json.dumps(
+            normalized_payload.model_dump(mode="json"),
+            ensure_ascii=False,
+        ).encode("utf-8")
+        self.producer.produce(
+            topic=self.topic_name,
+            value=data,
+            on_delivery=self._on_delivery,
+        )
+
+        self.producer.poll(0)
+
+    def publish_to_group(
+        self,
+        payload: BaseEventPayload | TypedEventPayload[ModelT],
+    ) -> None:
+        self.publish(payload)
+
+    def _normalize_payload(
+        self,
+        payload: BaseEventPayload | TypedEventPayload[ModelT],
+    ) -> BaseEventPayload:
+        if isinstance(payload, BaseEventPayload):
+            return self._normalize_base_payload(payload)
+        before = self._normalize_model_part(payload.before)
+        after = self._normalize_model_part(payload.after)
+        event = self._normalize_event_value(payload.event)
+        return BaseEventPayload(
+            before=before,
+            after=after,
+            event=event,
+        )
+
+    def _normalize_base_payload(self, payload: BaseEventPayload) -> BaseEventPayload:
+        before = self._normalize_model_part(payload.before)
+        after = self._normalize_model_part(payload.after)
+        event = self._normalize_event_value(payload.event)
+        if (
+            before is payload.before
+            and after is payload.after
+            and event == payload.event
+        ):
+            return payload
+        return BaseEventPayload(
+            before=before,
+            after=after,
+            event=event,
+        )
+
+    def _normalize_model_part(self, data: dict | BaseModel | None) -> dict | None:
+        if data is None:
+            return None
+        if isinstance(data, BaseModel):
+            return data.model_dump(mode="json")
+        if self.payload_schema is None:
+            return data
+        if isinstance(data, self.payload_schema):
+            return data.model_dump(mode="json")
+        return self.payload_schema.model_validate(data).model_dump(mode="json")
+
+    @staticmethod
+    def _normalize_event_value(value: object) -> str | None:
+        if value is None:
+            return None
+        if isinstance(value, Enum):
+            return str(value.value)
+        return str(value)
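A publishing sketch; the group name and field values are illustrative, and the schema is the package's example UserPayload. Typed payloads are normalized to plain dicts before being serialized to JSON.

```python
import logging
from datetime import UTC, datetime

from tp_common.event_service.producer_event_service import ProducerEventService
from tp_common.event_service.schemas import BaseEventType, TypedEventPayload, UserPayload

# Assumed group_name; the service publishes to f"{KAFKA_STREAM_PREFIX}.users".
producer = ProducerEventService(
    logger=logging.getLogger("user-producer"),
    group_name="users",
    payload_schema=UserPayload,
)

user = UserPayload(
    id=1,
    name="Alice",
    is_active=True,
    created_at=datetime.now(UTC),
)

producer.publish(
    TypedEventPayload[UserPayload](before=None, after=user, event=BaseEventType.CREATE)
)
producer.flush()  # wait for delivery before shutting down
```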
tp_common/event_service/schemas.py ADDED
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from datetime import datetime
+from enum import Enum
+from typing import Any, TypeVar
+
+from pydantic import AliasChoices, BaseModel, ConfigDict, Field
+
+
+class BaseEventType(str, Enum):
+    CREATE = "create"
+    UPDATE = "update"
+    DELETE = "delete"
+    SNAPSHOT = "snapshot"
+
+    @classmethod
+    def _missing_(cls, value: object) -> BaseEventType | None:
+        if not isinstance(value, str):
+            return None
+        mapping = {
+            "c": cls.CREATE,
+            "u": cls.UPDATE,
+            "d": cls.DELETE,
+            "r": cls.SNAPSHOT,
+        }
+        return mapping.get(value.lower())
+
+
+class BaseEventPayload(BaseModel):
+    before: dict[str, Any] | None = Field(None, description="Schema before the changes")
+    after: dict[str, Any] | None = Field(None, description="Schema after the changes")
+    event: str | None = Field(
+        default=None,
+        validation_alias=AliasChoices("event"),
+        serialization_alias="event",
+    )
+
+    model_config = ConfigDict(extra="allow", populate_by_name=True)
+
+
+ModelT = TypeVar("ModelT", bound=BaseModel)
+
+
+class TypedEventPayload[ModelT: BaseModel](BaseModel):
+    before: ModelT | None = None
+    after: ModelT | None = None
+    event: Enum | None = None
+
+    model_config = ConfigDict(extra="allow")
+
+
+# Example schema
+class UserPayload(BaseModel):
+    id: int
+    name: str
+    description: str | None = None
+    is_active: bool
+    created_at: datetime
+    updated_at: datetime | None = None
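The _missing_ hook maps Debezium's single-letter operation codes onto the enum, so both spellings resolve to the same member; a quick illustration (REPL-style, assumptions limited to the example values):

```python
from tp_common.event_service.schemas import BaseEventType

# Debezium op codes and the long names land on the same enum members.
assert BaseEventType("c") is BaseEventType.CREATE
assert BaseEventType("U") is BaseEventType.UPDATE   # case-insensitive via value.lower()
assert BaseEventType("r") is BaseEventType.SNAPSHOT  # "r" = Debezium snapshot read
assert BaseEventType("update") is BaseEventType.UPDATE
```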
tp_common/event_service/workers/base_event_worker.py ADDED
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from enum import Enum
+from logging import Logger
+from typing import TypeVar, cast
+
+from pydantic import BaseModel
+
+from tp_common.event_service.consumer_event_service import ConsumeEventService
+from tp_common.event_service.producer_event_service import ProducerEventService
+from tp_common.event_service.schemas import (
+    BaseEventPayload,
+    BaseEventType,
+    TypedEventPayload,
+)
+
+ModelT = TypeVar("ModelT", bound=BaseModel)
+EventEnumT = TypeVar("EventEnumT", bound=Enum)
+
+
+class BaseEventWorker[ModelT: BaseModel, EventEnumT: Enum]:
+    def __init__(
+        self,
+        logger: Logger,
+        group_id: str,
+        group_name: str,
+        producer_group_name: str,
+        payload_schema: type[ModelT],
+        event_enum: type[EventEnumT] = cast(type[EventEnumT], BaseEventType),
+        subscribe_to: list[EventEnumT] | None = None,
+    ) -> None:
+        self.consumer_service = ConsumeEventService(
+            group_id=group_id,
+            group_name=group_name,
+            logger=logger,
+            payload_schema=payload_schema,
+            event_enum=event_enum,
+            subscribe_to=subscribe_to,
+        )
+        self.producer_service = ProducerEventService(
+            logger=logger,
+            group_name=producer_group_name,
+            payload_schema=payload_schema,
+        )
+
+    def start(self) -> None:
+        self.subscribe_to_queue()
+        while True:
+            print("PROCESS")
+            self.process()
+
+    def subscribe_to_queue(self) -> None:
+        self.consumer_service.subscribe()
+
+    def process(self) -> None:
+        result = self.consumer_service.consume_filtered()
+        if result is None:
+            return
+        msg, payload = result
+        print(msg, payload)
+        self.handle_update(payload)
+        self.consumer_service.commit(msg)
+
+    def handle_update(self, payload: TypedEventPayload[ModelT]) -> None:
+        before = payload.before
+        after = payload.after
+        if not before or not after:
+            return
+
+        events = self.get_event_types(before, after)
+        for event_type in events:
+            out_payload = TypedEventPayload[ModelT](
+                before=before,
+                after=after,
+                event=event_type,
+            )
+            self.produce(out_payload)
+
+    def get_event_types(self, before: ModelT, after: ModelT) -> list[Enum]:
+        raise NotImplementedError("get_event_types must be implemented in subclass")
+
+    def produce(self, payload: BaseEventPayload | TypedEventPayload[ModelT]) -> None:
+        self.producer_service.publish(payload)
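A sketch of a concrete worker built on this base. The UserEvent enum, the status-change rule, and the group names are illustrative assumptions; only BaseEventWorker, UserPayload, and get_event_types come from the package.

```python
import logging
from enum import Enum

from tp_common.event_service.schemas import UserPayload
from tp_common.event_service.workers.base_event_worker import BaseEventWorker


class UserEvent(str, Enum):
    """Hypothetical domain events derived from a user row update."""

    ACTIVATED = "user.activated"
    DEACTIVATED = "user.deactivated"


class UserEventWorker(BaseEventWorker[UserPayload, UserEvent]):
    def get_event_types(self, before: UserPayload, after: UserPayload) -> list[Enum]:
        # Emit a domain event only when is_active flips between row versions.
        if before.is_active == after.is_active:
            return []
        return [UserEvent.ACTIVATED if after.is_active else UserEvent.DEACTIVATED]


# worker = UserEventWorker(
#     logger=logging.getLogger("user-worker"),
#     group_id="user-worker",
#     group_name="users",              # consumes from events.public.users
#     producer_group_name="user_events",
#     payload_schema=UserPayload,
# )
# worker.start()  # blocks: poll, filter, handle_update, commit
```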
tp_common/route/shames.py CHANGED
@@ -1,3 +1,7 @@
+from pydantic import BaseModel, ConfigDict
+from pydantic.alias_generators import to_camel
+
+
 class BaseResponse(BaseModel):
     model_config = ConfigDict(
         alias_generator=to_camel,
tp_common-0.0.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: tp-common
-Version: 0.0.3
+Version: 0.0.5
 Summary:
 Author: Developer
 Author-email: front-gold@mail.ru
@@ -9,6 +9,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: aiohttp (>=3.13.3,<4.0.0)
+Requires-Dist: confluent-kafka (>=2.13.0,<3.0.0)
 Requires-Dist: pydantic (>=2.12.5,<3.0.0)
 Requires-Dist: python-json-logger (>=4.0.0,<5.0.0)
 Requires-Dist: tp-helper (>=0.4.87,<0.5.0)
tp_common-0.0.5.dist-info/RECORD ADDED
@@ -0,0 +1,20 @@
+tp_common/base_client/client.py,sha256=tgYSk2XK5TbvyHVICl9fVtL-DeNkrYfgcBgpJ6ePSF0,24391
+tp_common/base_client/exceptions.py,sha256=94ZY6qP3oHWJhz9W4-llDrP4vS-ZtHrvy94d3Yg_9H8,3839
+tp_common/base_items/base_logging_service.py,sha256=8otay6kxesADMDMokz7iU27Beg-DEFDx9vRhZK7OkwM,777
+tp_common/base_items/base_request.py,sha256=3D-u7gdgtkE6ewvj6mqaPmJJvP2EeLuywplLN9-iqDc,237
+tp_common/base_items/base_response.py,sha256=seh2ho2Uye17SMDTfc0XgZIwHGA2GdK_TJCUFdyfKXQ,238
+tp_common/base_items/base_schema.py,sha256=Af1mAsIsm-6J4Nnb21ljQ4oJTX72eZ0Qzm1hmLwQuJ0,130
+tp_common/base_items/base_timestamp_model.py,sha256=B3Etn3d3sG5pNb7WmiePMOI2cEoHdCm6i9IObVlksL4,710
+tp_common/base_items/base_worker.py,sha256=R3T8S97N_Hjryf3WMvk-p562HHnvv-UEFmpJnpjH9uw,421
+tp_common/base_items/base_worker_service.py,sha256=T_UYxVk2LI8uHGrTFUddFvVUVVnIsCJDoPXIf03Uax8,368
+tp_common/decorators/decorator_retry_forever.py,sha256=Nu6cUMT-e9ttAaZqyaVCsAkpvkWAd-yFK_Adj4rlnOc,4722
+tp_common/event_service/base_event_service.py,sha256=Ree0dK7D8DK9Ip8zESS6DWMZEw1dnlRCGSK5sKETfrY,7560
+tp_common/event_service/consumer_event_service.py,sha256=8NErMxSOOZ1aysxdVeFi0GD4TtqOIdemS9fddhonnlk,3200
+tp_common/event_service/event_config.py,sha256=Z1VgcVgxypQ61JQMp0fQds7PBenD-jXrTXMjjH4QEEs,1572
+tp_common/event_service/producer_event_service.py,sha256=qsjBZyuk1peAM9p8TxjumOgKZEjOr8TNkTdevXKy_lY,3162
+tp_common/event_service/schemas.py,sha256=PbTORh20kSNvd4BCSqOj-7h9UaAoiqz75UI0oZWYLUQ,1541
+tp_common/event_service/workers/base_event_worker.py,sha256=3DRgUf30nERiU2hZdx4ODTFr9r6u45efqDVPnICxJvU,2621
+tp_common/route/shames.py,sha256=zmAZ0K1mkzuZSSeqC_vQL7xqAu-l7pH0ZwOirWP05KA,520
+tp_common-0.0.5.dist-info/METADATA,sha256=70CLhiZOM9H50JGPGgYwM5TIn10J8AWYTJJWuOMqbC4,6631
+tp_common-0.0.5.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+tp_common-0.0.5.dist-info/RECORD,,
tp_common-0.0.3.dist-info/RECORD REMOVED
@@ -1,6 +0,0 @@
-tp_common/base_client/client.py,sha256=uE-eYkMXGis6czGjm10euvv9j2QBjHJMHNp_qP5HDNw,24399
-tp_common/base_client/exceptions.py,sha256=l5CjxkBH3bUBCDuNc3sz_ygNG4ccO1odRlDiL7EmAwg,3839
-tp_common/route/shames.py,sha256=2R63rNmkuK942bCitnjonAU8aPvqCr2G52N1gefwN1U,428
-tp_common-0.0.3.dist-info/METADATA,sha256=iG-RfBU4cTTP7oqWq1AtGe_G7RMmcTb7p3NzVW7lv60,6582
-tp_common-0.0.3.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-tp_common-0.0.3.dist-info/RECORD,,