tp-shared 0.2.27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. tp_shared/autoins_mpg_service/repos/autoins_results_ack_list_queue_repo.py +14 -0
  2. tp_shared/autoins_mpg_service/schemas/autoins_result_message.py +34 -0
  3. tp_shared/autoins_mpg_service/worker_services/base_autoins_results_ack_list_queue_worker_service.py +34 -0
  4. tp_shared/base/base_message.py +5 -0
  5. tp_shared/gibdd_service/repo/gibdd_dc_results_stream_queue_repo.py +17 -0
  6. tp_shared/gibdd_service/schemas/gibdd_dc_result_message.py +39 -0
  7. tp_shared/gibdd_service/worker_services/base_gibdd_dc_results_stream_queue_worker_service.py +122 -0
  8. tp_shared/mos_passes_service/repo/mos_passes_results_stream_queue_repo.py +17 -0
  9. tp_shared/mos_passes_service/schemas/mos_passes_result_message.py +23 -0
  10. tp_shared/mos_passes_service/worker_services/base_mos_passes_results_stream_queue_worker_service.py +109 -0
  11. tp_shared/nsis_service/repos/nsis_results_ack_list_queue_repo.py +13 -0
  12. tp_shared/nsis_service/schemas/nsis_result_message.py +29 -0
  13. tp_shared/nsis_service/types/nsis_task_type.py +6 -0
  14. tp_shared/nsis_service/worker_services/base_nsis_results_queue_worker_service.py +32 -0
  15. tp_shared/policies_service/repos/policies_event_stream_queue_repo.py +17 -0
  16. tp_shared/policies_service/schemas/policies_result_message.py +28 -0
  17. tp_shared/policies_service/worker_services/base_policies_results_queue_worker_service.py +109 -0
  18. tp_shared/rnis_check_service/repos/rnis_check_results_stream_queue_repo.py +17 -0
  19. tp_shared/rnis_check_service/schemas/rnis_check_result_message.py +9 -0
  20. tp_shared/rnis_check_service/worker_services/base_rnis_results_queue_worker_service.py +109 -0
  21. tp_shared/types/dc_operator_status.py +7 -0
  22. tp_shared/types/pass_allowed_zone.py +8 -0
  23. tp_shared/types/pass_series.py +14 -0
  24. tp_shared/types/pass_time_of_date.py +6 -0
  25. tp_shared/types/policy_series.py +19 -0
  26. tp_shared/types/policy_status.py +7 -0
  27. tp_shared-0.2.27.dist-info/METADATA +76 -0
  28. tp_shared-0.2.27.dist-info/RECORD +29 -0
  29. tp_shared-0.2.27.dist-info/WHEEL +4 -0
@@ -0,0 +1,14 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class AutoinsResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Ack-list queue repository for autoins MPG result messages.

    Payloads are (de)serialized through :class:`AutoinsResultMessage`.
    """

    # Redis key of the underlying ack-list queue.
    QUEUE_NAME = "autoins:service:results:ack:list"

    def __init__(self, redis_client: Redis):
        """Bind the repo to a Redis client; the schema is fixed for this queue."""
        super().__init__(redis_client=redis_client, schema=AutoinsResultMessage)
@@ -0,0 +1,34 @@
1
from datetime import date

from pydantic import BaseModel

from tp_shared.types.policy_series import PolicySeries


class AutoinsResultPolicy(BaseModel):
    """One OSAGO policy record returned by the autoins MPG service."""

    insurer_name: str
    reg_number: str
    series: PolicySeries
    number: str
    start_date: date
    end_date: date
    # Up to three optional usage periods declared on the policy.
    period1_start: date | None = None
    period1_end: date | None = None
    period2_start: date | None = None
    period2_end: date | None = None
    period3_start: date | None = None
    period3_end: date | None = None
    # Vehicle identifiers; any of them may be absent in the source data.
    vin: str | None = None
    body_number: str | None = None
    chassis_number: str | None = None
    car_mark: str | None = None
    car_model: str | None = None
    # Upstream bookkeeping fields.
    external_policy_id: int | None = None
    policy_state: str | None = None
    policy_status_t_use: str | None = None


class AutoinsResultMessage(BaseModel):
    """Result message for one policy lookup (series + number) with all matches."""

    series: PolicySeries
    number: str
    policies: list[AutoinsResultPolicy] = []
@@ -0,0 +1,34 @@
1
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.autoins_mpg_service.repos.autoins_results_ack_list_queue_repo import (
    AutoinsResultsAckListQueueRepo,
)
from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class BaseAutoinsResultsAckListQueueWorkerService(
    BaseWorkerService, AutoinsResultsAckListQueueRepo
):
    """Worker-side facade over the autoins results ack-list queue.

    Wraps the raw repo's ``pop``/``ack`` with retry-forever semantics so a
    transient Redis failure never kills the worker loop.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        """Initialise both bases explicitly; they do not cooperate via super()."""
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        AutoinsResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="πŸ“₯ Π§Ρ‚Π΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Ρ‡Ρ‚Π΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> AutoinsResultMessage | None:
        """Fetch the next message, retrying forever on errors."""
        # Explicit base call keeps the repo implementation regardless of MRO.
        return await AutoinsResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="πŸ—‘οΈ ack Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ack Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def ack(self) -> None:
        """Acknowledge the previously popped message, retrying forever on errors."""
        return await AutoinsResultsAckListQueueRepo.ack(self)
@@ -0,0 +1,5 @@
1
from tp_helper import BaseSchema


class BaseMessage(BaseSchema):
    """Common base for all queue messages; carries a wire-format version tag."""

    # Bump when the serialized shape of a message changes.
    version: str = "1.0"
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class GibddDcResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Stream-queue repository for GIBDD diagnostic-card lookup results."""

    # Redis stream key used by this repo.
    QUEUE_NAME = "gibdd:service:dc:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind to Redis with the fixed stream name and message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=GibddDcResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,39 @@
1
from datetime import date

from pydantic import ConfigDict

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.dc_operator_status import DcOperatorStatus


class GibddDcResultOperator(BaseMessage):
    """A technical-inspection operator attached to a diagnostic card."""

    operator_id: int
    status: DcOperatorStatus
    name: str
    address_line: str
    phone_number: str
    email: str
    site: str
    # Populated only when the operator's accreditation was revoked.
    canceled_date: date | None
    canceled_at: int | None


class GibddDcResultCard(BaseMessage):
    """One diagnostic card found for a VIN."""

    card_number: str
    vin: str
    start_date: date
    end_date: date
    odometer_value: int
    is_active: bool
    # Epoch timestamps maintained by the upstream service.
    updated_at: int
    created_at: int

    operator: GibddDcResultOperator

    # Allow ORM-object construction and alias-or-name population.
    model_config = ConfigDict(from_attributes=True, populate_by_name=True)


class GibddDcResultMessage(BaseMessage):
    """Result message for one VIN lookup with every matching diagnostic card."""

    version: str = "1.0"
    vin: str
    diagnostic_cards: list[GibddDcResultCard] = []
@@ -0,0 +1,122 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.gibdd_service.repo.gibdd_dc_results_stream_queue_repo import (
    GibddDcResultsStreamQueueRepo,
)
from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class BaseGibddDcResultsStreamQueueWorkerService(
    GibddDcResultsStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the GIBDD DC results stream queue.

    Re-exposes the repo operations with retry-forever semantics and a fixed
    consumer-group identity.

    FIX(review): the original referenced ``GibddDcResultStreamQueueRepo``
    (singular "Result"), but the repo module defines
    ``GibddDcResultsStreamQueueRepo`` — importing this module raised
    ``ImportError``. All references are corrected to the real class name.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        """Initialise both bases explicitly and pin the consumer identity."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        GibddDcResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: GibddDcResultMessage) -> None:
        """Publish one result message to the stream."""
        await GibddDcResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, GibddDcResultMessage]] | None:
        """Read up to ``count`` messages for this consumer (claimed ones first)."""
        return await GibddDcResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message within this worker's consumer group."""
        await GibddDcResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, GibddDcResultMessage]]:
        """Reclaim messages idle longer than ``min_idle_time`` ms for this consumer."""
        return await GibddDcResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create the consumer group from stream id ``"0"`` (best-effort)."""
        try:
            await GibddDcResultsStreamQueueRepo.create_consumer_group(
                self,
                group_name=self.group_name,
                create_stream=create_stream,
                stream_id="0",
            )
        except Exception as e:
            # Was a bare print(e): keep the deliberate best-effort swallow
            # (most likely BUSYGROUP when the group already exists) but route
            # it through the service logger instead of stdout.
            # NOTE(review): assumes BaseWorkerService exposes self.logger — confirm.
            self.logger.warning("create_consumer_group failed: %s", e)

    @retry_forever(
        start_message="ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention`` via XTRIM MINID.

        :param retention: maximum message age, e.g. ``timedelta(days=1)``
        :return: number of deleted messages
        """
        return await GibddDcResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop every consumer group and the stream itself (full wipe)."""
        await GibddDcResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class MosPassesResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Stream-queue repository for Moscow truck-pass lookup results."""

    # Redis stream key used by this repo.
    QUEUE_NAME = "mos:passes:service:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind to Redis with the fixed stream name and message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=MosPassesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,23 @@
1
from datetime import date

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.pass_allowed_zone import PassAllowedZone
from tp_shared.types.pass_series import PassSeries
from tp_shared.types.pass_time_of_date import PassTimeOfDate


class MosPassesResultPass(BaseMessage):
    """One truck pass found for a registration number."""

    reg_number: str
    time_of_day: PassTimeOfDate
    series: PassSeries
    number: str
    allowed_zone: PassAllowedZone
    start_date: date
    end_date: date
    # Set only when the pass was revoked before its end date.
    cancel_date: date | None


class MosPassesResultMessage(BaseMessage):
    """Result message for one reg-number lookup with every matching pass."""

    version: str = "1.0"
    reg_number: str
    passes: list[MosPassesResultPass] = []
@@ -0,0 +1,109 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.mos_passes_service.repo.mos_passes_results_stream_queue_repo import (
    MosPassesResultsStreamQueueRepo,
)
from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class BaseMosPassesResultsStreamQueueWorkerService(
    MosPassesResultsStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the Moscow passes results stream queue.

    Each repo operation is re-exposed with retry-forever semantics; the
    consumer group/consumer identity is fixed at construction time.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        """Initialise both bases explicitly; they do not cooperate via super()."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        MosPassesResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: MosPassesResultMessage) -> None:
        """Publish one result message to the stream."""
        await MosPassesResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, MosPassesResultMessage]] | None:
        """Read up to ``count`` messages for this consumer (claimed ones first)."""
        return await MosPassesResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message within this worker's consumer group."""
        await MosPassesResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, MosPassesResultMessage]]:
        """Reclaim messages idle longer than ``min_idle_time`` ms for this consumer."""
        return await MosPassesResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group on the stream."""
        await MosPassesResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention``; returns the deleted count."""
        return await MosPassesResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop every consumer group and the stream itself (full wipe)."""
        await MosPassesResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,13 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.nsis_service.schemas.nsis_result_message import (
    NsisResultMessage,
)


class NsisResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Ack-list queue repository for NSIS result messages."""

    # Redis key of the underlying ack-list queue.
    QUEUE_NAME = "nsis:service:nsis:results:ack:list"

    def __init__(self, redis_client: Redis):
        """Bind the repo to a Redis client; the schema is fixed for this queue."""
        # Pass redis_client by keyword for consistency with the other repos
        # in this package, which all use keyword arguments here.
        super().__init__(redis_client=redis_client, schema=NsisResultMessage)
@@ -0,0 +1,29 @@
1
from datetime import date

from tp_helper.base_items.base_schema import BaseSchema

from tp_shared.nsis_service.types.nsis_task_type import NsisTaskType
from tp_shared.types.policy_series import PolicySeries
from tp_shared.types.policy_status import PolicyStatus


class NsisResultPolicy(BaseSchema):
    """One policy record returned by an NSIS lookup."""

    status: PolicyStatus
    vin: str | None = None
    reg_number: str
    series: PolicySeries
    number: str
    start_date: date | None = None
    end_date: date | None = None
    insurer_id: int
    insurer_name: str | None = None
    # When the upstream request was made, plus epoch bookkeeping timestamps.
    request_date: date
    created_at: int
    updated_at: int


class NsisResultMessage(BaseSchema):
    """Result of one NSIS query (by reg number or VIN) with all matches."""

    task_type: NsisTaskType
    query: str
    request_date: date
    policies: list[NsisResultPolicy] = []
@@ -0,0 +1,6 @@
1
from enum import Enum


class NsisTaskType(Enum):
    """Kind of identifier an NSIS lookup task was issued for.

    NOTE(review): unlike the other enums in this package this is a plain
    ``Enum`` rather than ``(str, Enum)`` — presumably intentional; confirm
    before relying on string comparison of its members.
    """

    REG_NUMBER = "REG_NUMBER"  # vehicle registration plate
    VIN = "VIN"  # vehicle identification number
@@ -0,0 +1,32 @@
1
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.nsis_service.repos.nsis_results_ack_list_queue_repo import (
    NsisResultsAckListQueueRepo,
)
from tp_shared.nsis_service.schemas.nsis_result_message import NsisResultMessage


class BaseNsisResultsAckListQueueWorkerService(
    NsisResultsAckListQueueRepo, BaseWorkerService
):
    """Worker-side facade over the NSIS results ack-list queue.

    Wraps the raw repo's ``pop``/``ack`` with retry-forever semantics so a
    transient Redis failure never kills the worker loop.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        """Initialise both bases explicitly; they do not cooperate via super()."""
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        NsisResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="πŸ“₯ Начало чтСния Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Ρ‡Ρ‚Π΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> NsisResultMessage | None:
        """Fetch the next message, retrying forever on errors."""
        # Explicit base call keeps the repo implementation regardless of MRO.
        return await NsisResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="πŸ—‘οΈ Π£Π΄Π°Π»Π΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def ack(self) -> None:
        """Acknowledge the previously popped message, retrying forever on errors."""
        return await NsisResultsAckListQueueRepo.ack(self)
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.policies_service.schemas.policies_result_message import (
    PoliciesResultMessage,
)


class PoliciesEventStreamQueueRepo(BaseStreamQueueRepo):
    """Stream-queue repository for policy-event messages."""

    # Redis stream key used by this repo.
    QUEUE_NAME = "policies:service:policies:event:stream"

    def __init__(self, redis_client: Redis):
        """Bind to Redis with the fixed stream name and message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=PoliciesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,28 @@
1
from datetime import date

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.policy_series import PolicySeries
from tp_shared.types.policy_status import PolicyStatus


class PoliciesResultPolicy(BaseMessage):
    """One policy record attached to a policies-service result."""

    series: PolicySeries
    number: str
    status: PolicyStatus
    start_date: date
    end_date: date
    # Three usage periods of the policy.
    # NOTE(review): all periods are required here, while the autoins schema
    # models them as optional — confirm the upstream always supplies them.
    period1_start: date
    period1_end: date
    period2_start: date
    period2_end: date
    period3_start: date
    period3_end: date
    vin: str
    car_mark: str
    car_model: str


class PoliciesResultMessage(BaseMessage):
    """Result message for one reg-number lookup with every matching policy."""

    version: str = "1.0"
    reg_number: str
    policies: list[PoliciesResultPolicy] = []
@@ -0,0 +1,109 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.policies_service.repos.policies_event_stream_queue_repo import (
    PoliciesEventStreamQueueRepo,
)
from tp_shared.policies_service.schemas.policies_result_message import (
    PoliciesResultMessage,
)


class BasePolicyEventStreamQueueWorkerService(
    PoliciesEventStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the policies event stream queue.

    Each repo operation is re-exposed with retry-forever semantics; the
    consumer group/consumer identity is fixed at construction time.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        # FIX(review): annotations were `str = None`, which is a type error;
        # the defaults are unchanged, so callers are unaffected.
        group_name: str | None = None,
        consumer_name: str | None = None,
    ):
        """Initialise both bases explicitly and pin the consumer identity."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        PoliciesEventStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="βž• Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: PoliciesResultMessage) -> None:
        """Publish one result message to the stream."""
        await PoliciesEventStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="πŸ“₯ ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❗ Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, PoliciesResultMessage]] | None:
        """Read up to ``count`` messages for this consumer (claimed ones first)."""
        return await PoliciesEventStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="βœ… ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message within this worker's consumer group."""
        await PoliciesEventStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="πŸ” Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="🚫 Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, PoliciesResultMessage]]:
        """Reclaim messages idle longer than ``min_idle_time`` ms for this consumer."""
        return await PoliciesEventStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="πŸ‘₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="❌ Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group on the stream."""
        await PoliciesEventStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="🧹 ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention``; returns the deleted count."""
        return await PoliciesEventStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="πŸ—‘οΈ Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop every consumer group and the stream itself (full wipe)."""
        await PoliciesEventStreamQueueRepo.delete_all(self)
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
    RNISCheckResultMessage,
)


class RNISCheckResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Stream-queue repository for RNIS presence-check results."""

    # Redis stream key used by this repo.
    QUEUE_NAME = "rnis:check:service:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind to Redis with the fixed stream name and message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=RNISCheckResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,9 @@
1
from tp_shared.base.base_message import BaseMessage


class RNISCheckResultMessage(BaseMessage):
    """Result of checking whether a vehicle is registered in RNIS."""

    version: str = "1.0"
    reg_number: str
    # True when the vehicle is present in RNIS.
    exists: bool
    # Epoch of the last telemetry mark; None when the vehicle never reported.
    last_mark: int | None
    terminals_amount: int
@@ -0,0 +1,109 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.rnis_check_service.repos.rnis_check_results_stream_queue_repo import (
    RNISCheckResultsStreamQueueRepo,
)
from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
    RNISCheckResultMessage,
)


# NOTE(review): sibling services are prefixed "Base..."; this one is not.
# The public name is kept unchanged for compatibility — confirm intent.
class RNISCheckResultsStreamQueueWorkerService(
    RNISCheckResultsStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the RNIS check results stream queue.

    Each repo operation is re-exposed with retry-forever semantics; the
    consumer group/consumer identity is fixed at construction time.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        """Initialise both bases explicitly; they do not cooperate via super()."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        RNISCheckResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="βž• Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: RNISCheckResultMessage) -> None:
        """Publish one result message to the stream."""
        await RNISCheckResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="πŸ“₯ ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="⚠️ Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, RNISCheckResultMessage]] | None:
        """Read up to ``count`` messages for this consumer (claimed ones first)."""
        return await RNISCheckResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="βœ… ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message within this worker's consumer group."""
        await RNISCheckResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="πŸ” Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="🚫 Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, RNISCheckResultMessage]]:
        """Reclaim messages idle longer than ``min_idle_time`` ms for this consumer."""
        return await RNISCheckResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="πŸ‘₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="❌ Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group on the stream."""
        await RNISCheckResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="🧹 ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention``; returns the deleted count."""
        return await RNISCheckResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="πŸ—‘οΈ Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop every consumer group and the stream itself (full wipe)."""
        await RNISCheckResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,7 @@
1
from enum import Enum


class DcOperatorStatus(str, Enum):
    """Accreditation status of a technical-inspection operator.

    NOTE(review): member names do not literally match their values
    (PAUSE -> "PAUSED", CANCEL -> "CANCELLED"); the values are the wire
    format and must not change.
    """

    ACTIVE = "ACTIVE"
    PAUSE = "PAUSED"
    CANCEL = "CANCELLED"
@@ -0,0 +1,8 @@
1
from enum import Enum


class PassAllowedZone(str, Enum):
    """Moscow zone a truck pass grants access to (values are Cyrillic codes)."""

    MKAD = "ΠœΠšΠΠ”"  # Moscow ring road
    SK = "БК"  # Sadovoye Koltso (Garden Ring)
    TTK = "ВВК"  # Third Transport Ring
    MO = "МО"  # Moscow Oblast
@@ -0,0 +1,14 @@
1
from enum import Enum


class PassSeries(str, Enum):
    """Series code of a Moscow truck pass.

    NOTE(review): the values mix Latin and Cyrillic letters (e.g. AA and MO
    appear Latin while most others are Cyrillic). They are reproduced
    byte-for-byte as the wire format — confirm against the issuing service
    before "normalising" them.
    """

    AA = "AA"
    BA = "БА"
    AB = "АБ"
    BB = "Π‘Π‘"
    MB = "ΠœΠ‘"
    MK = "МК"
    MA = "МА"
    MO = "MO"
    II = "Π―Π―"
    MOJD = "ΠœΠžΠ–Π”"
@@ -0,0 +1,6 @@
1
from enum import Enum


# NOTE(review): the name reads like a typo for "PassTimeOfDay"; it is kept
# unchanged because it is part of the public API.
class PassTimeOfDate(str, Enum):
    """Time window during which a truck pass is valid."""

    DAY = "DAY"
    NIGHT = "NIGHT"
@@ -0,0 +1,19 @@
1
+ from enum import Enum
2
+
3
+
4
+ class PolicySeries(str, Enum):
5
+ XXX = "Π₯Π₯Π₯"
6
+ TTT = "Π’Π’Π’"
7
+ AAA = "ААА"
8
+ AAV = "ААВ"
9
+ AAK = "ААК"
10
+ AAM = "ААМ"
11
+ AAN = "ААН"
12
+ AAS = "ААБ"
13
+ VVV = "Π’Π’Π’"
14
+ EEE = "Π•Π•Π•"
15
+ KKK = "ККК"
16
+ MMM = "МММ"
17
+ NNN = "ННН"
18
+ RRR = "Π Π Π "
19
+ SSS = "Π‘Π‘Π‘"
@@ -0,0 +1,7 @@
1
from enum import Enum


class PolicyStatus(str, Enum):
    """Lifecycle state of an OSAGO policy."""

    ACTIVE = "ACTIVE"
    WAITING_ACTIVATION = "WAITING_ACTIVATION"
    EXPIRED = "EXPIRED"
@@ -0,0 +1,76 @@
1
+ Metadata-Version: 2.3
2
+ Name: tp-shared
3
+ Version: 0.2.27
4
+ Summary: Pydantic схСмы для всСх ΠΏΡ€ΠΎΠ΅ΠΊΡ‚ΠΎΠ²
5
+ Requires-Python: >=3.12
6
+ Classifier: Programming Language :: Python :: 3
7
+ Classifier: Programming Language :: Python :: 3.12
8
+ Classifier: Programming Language :: Python :: 3.13
9
+ Requires-Dist: pydantic (>=2.11.7,<3.0.0)
10
+ Requires-Dist: tp-helper (>=0.4.34,<0.5.0)
11
+ Description-Content-Type: text/markdown
12
+
13
+ # 🧩 tp-shared
14
+
15
+ ΠžΠ±Ρ‰ΠΈΠΉ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ схСм для использования Π² Π½Π΅ΡΠΊΠΎΠ»ΡŒΠΊΠΈΡ… ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°Ρ….
16
+
17
+ ---
18
+
19
+ ## Установка:
20
+ `poetry add tp-shared`
21
+
22
+ ## ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° ΠΏΡ€ΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠΈ
23
+ - `poetry cache clear --all PyPI`
24
+ - `poetry add tp-shared`
25
+ - `poetry update`
26
+
27
+
28
+
29
+ ## ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ:
30
+ Π‘ΠΎΠ±ΠΈΡ€Π°Π΅Ρ‚ ΠΈ Π·Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ собранный ΠΏΠ°ΠΊΠ΅Ρ‚ Π² PyPI.
31
+
32
+ `poetry publish --build`
33
+
34
+ ## Π‘Ρ‚Ρ€ΡƒΠΊΡ‚ΡƒΡ€Π° ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°
35
+
36
+ **messages**
37
+ -------------------------
38
+ Π‘Ρ…Π΅ΠΌΡ‹ сообщСний ΠΎΡ‚ сСрвисов
39
+
40
+ ΠŸΡ€ΠΈΠΌΠ΅Ρ€ ΠΈΠΌΠΏΠΎΡ€Ρ‚Π°
41
+
42
+ from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import GibddDcResultMessage
43
+
44
+ Π’ ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΏΠ°ΠΏΠΊΠ΅ Π»Π΅ΠΆΠ°Ρ‚ ΡΠΎΠΎΡ‚Π²Π΅Ρ‚ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠ΅ Pydantic-схСмы, сгруппированныС ΠΏΠΎ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΎΠ½Π°Π»Ρƒ.
45
+ --------------------------
46
+ ---
47
+
48
+ ## Как ΠΏΠΎΠ΄ΠΊΠ»ΡŽΡ‡ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ ΠΊ ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰Π΅ΠΌΡƒ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Ρƒ
49
+
50
+ Если Ρƒ вас Π΅ΡΡ‚ΡŒ Π»ΠΎΠΊΠ°Π»ΡŒΠ½Ρ‹ΠΉ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚ ΠΈ Π²Ρ‹ Ρ…ΠΎΡ‚ΠΈΡ‚Π΅ Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ с ΠΎΠ±Ρ‰ΠΈΠΌΠΈ схСмами, Π²Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚Π΅ ΠΊΠΎΠΌΠ°Π½Π΄Ρ‹:
51
+ Π² Ρ„Π°ΠΉΠ»Π΅ pyproject.toml ΠΏΡ€ΠΎΠΏΠΈΡΠ°Ρ‚ΡŒ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡ‚ΡŒ:
52
+ 1)
53
+
54
+ ```toml
55
+ [tool.poetry.dependencies]
56
+ tp-shared = { git = "https://gitlab.8525.ru/modules/tp-shared.git", rev = "main" }
57
+ ```
58
+
59
+ poetry add git
60
+
61
+ ```bash
62
+ poetry add git+https://gitlab.8525.ru/modules/tp-shared.git
63
+ ```
64
+
65
+ 2) Π’Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚ΡŒ ΠΊΠΎΠΌΠ°Π½Π΄Ρƒ poetry install ΠΈΠ»ΠΈ poetry update
66
+
67
+
68
+ ## Π Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ
69
+ ```
70
+ cd existing_repo
71
+ git remote add origin https://gitlab.8525.ru/modules/tp-shared.git
72
+ git branch -M main
73
+ git push -uf origin main
74
+ ```
75
+
76
+
@@ -0,0 +1,29 @@
1
+ tp_shared/autoins_mpg_service/repos/autoins_results_ack_list_queue_repo.py,sha256=fDOF7DiaXKUvJDqih3Bix-MR-Fh7gWT3KfYBeSpE4qA,503
2
+ tp_shared/autoins_mpg_service/schemas/autoins_result_message.py,sha256=bc1fK_S6m22OMXFbE_7cYWCTuK_GgAKNEfLEzXVAOws,946
3
+ tp_shared/autoins_mpg_service/worker_services/base_autoins_results_ack_list_queue_worker_service.py,sha256=aauusMNRwetPxZaSYbxFUCQXs9wT1SDxByzCnZj40VI,1400
4
+ tp_shared/base/base_message.py,sha256=VdTP5beDfsNT5uLuR4WIs9S1szUIcJwARxZ65--qZEY,96
5
+ tp_shared/gibdd_service/repo/gibdd_dc_results_stream_queue_repo.py,sha256=tKAqHyRsb5N5CLBfvoq3PeEjT5xdnTVkj9KcWz1lz0o,538
6
+ tp_shared/gibdd_service/schemas/gibdd_dc_result_message.py,sha256=-7k7mxykw5JpxWwnsUjjYZfkyc2hOr1feB4dBeaR7t8,898
7
+ tp_shared/gibdd_service/worker_services/base_gibdd_dc_results_stream_queue_worker_service.py,sha256=PDIa8QrmbPnwVz4fQ1lKhNuolZBwjcMqjx6r1OABnGs,5001
8
+ tp_shared/mos_passes_service/repo/mos_passes_results_stream_queue_repo.py,sha256=AW-e8J8_oYmzjPkEt0myZ2-iGwiSVHLyt-dEujolZpw,553
9
+ tp_shared/mos_passes_service/schemas/mos_passes_result_message.py,sha256=WR9G7Xy268nPr2vlb-SgTZEEShsTAwO4r3VbY2y3GMw,645
10
+ tp_shared/mos_passes_service/worker_services/base_mos_passes_results_stream_queue_worker_service.py,sha256=9xPViK59pB077enyQ5GilIZZyhvoxTxeCb7CnrVdunc,4463
11
+ tp_shared/nsis_service/repos/nsis_results_ack_list_queue_repo.py,sha256=EmM24OEQny2U4ukuON_LFDkDLiU37C1sv3BhWIgC5uA,440
12
+ tp_shared/nsis_service/schemas/nsis_result_message.py,sha256=XU7lYU2AdpnYGlPFNpkyNscRI-lKlCYnl6drhfRY4SU,777
13
+ tp_shared/nsis_service/types/nsis_task_type.py,sha256=eNcqzojojnpo2esBjbAU8d_SSh0kq92Je7J_T7e49sc,102
14
+ tp_shared/nsis_service/worker_services/base_nsis_results_queue_worker_service.py,sha256=ZF_3y96cG2ufLXKIjd6g_N0n_hvJSBS5GkEHRnaqYqU,1410
15
+ tp_shared/policies_service/repos/policies_event_stream_queue_repo.py,sha256=igfDBdLc2P-ER-Hqdl3k-GgrLqkmDGKftqnA6gJKGy8,549
16
+ tp_shared/policies_service/schemas/policies_result_message.py,sha256=UiH3e6lyAvcuAoS_tR4kRgzMcS4AmDwsverUvAwFWx0,689
17
+ tp_shared/policies_service/worker_services/base_policies_results_queue_worker_service.py,sha256=LemQbwNzG20Ivh_OrixAIm_MGNeuAQEhlxnjQmHOYdg,4500
18
+ tp_shared/rnis_check_service/repos/rnis_check_results_stream_queue_repo.py,sha256=JNx7DMz0YAT19GWjvlVWBFIuhk179nVbjPNZa6tRpEE,553
19
+ tp_shared/rnis_check_service/schemas/rnis_check_result_message.py,sha256=AjPwwRUuPqHWm-wEHtV_68dw7YaLQeEViq60MQkeFEM,220
20
+ tp_shared/rnis_check_service/worker_services/base_rnis_results_queue_worker_service.py,sha256=Vidx0LEUDHic86E6bqOeN5jzo9WZF74oh1kyl7VLuSg,4534
21
+ tp_shared/types/dc_operator_status.py,sha256=JJ8ke9pCKLwIyES7C2WjYGybZwTbgG20IYe6NTxS5Cc,129
22
+ tp_shared/types/pass_allowed_zone.py,sha256=WRLZienfaFKssDag2ClFLoVOvFZONYtqlUlPqBZYr8A,139
23
+ tp_shared/types/pass_series.py,sha256=fpiDGUFSuHDYVARUXUWgoJeYnIWDtxVImyOHZotQYyY,229
24
+ tp_shared/types/pass_time_of_date.py,sha256=rjeYh8h_6ncXRu2C8kfFaaZNY9AJWpubLABpzSqg_8c,99
25
+ tp_shared/types/policy_series.py,sha256=YxIDrwYanaZtCLkrGMcwXY7iqh74W9jPFMkkPTtt3yM,359
26
+ tp_shared/types/policy_status.py,sha256=U3x4FY52-_RDZYkEp2FGAtxp0XEbOQvcy8OPACpJ2lo,154
27
+ tp_shared-0.2.27.dist-info/METADATA,sha256=cNWZ5Rq9sntHpU5hlU8gF9uqprDT0zEWlPLmU29lXQ8,2086
28
+ tp_shared-0.2.27.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
29
+ tp_shared-0.2.27.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 2.1.3
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any