tp-shared 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. tp_shared-0.1.0/PKG-INFO +80 -0
  2. tp_shared-0.1.0/README.md +65 -0
  3. tp_shared-0.1.0/pyproject.toml +46 -0
  4. tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/repos/autoins_results_ack_list_queue_repo.py +14 -0
  5. tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/schemas/autoins_result_message.py +34 -0
  6. tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/worker_services/base_autoins_results_ack_list_queue_worker_service.py +34 -0
  7. tp_shared-0.1.0/src/tp_shared/base/base_message.py +5 -0
  8. tp_shared-0.1.0/src/tp_shared/gibdd_service/repo/gibdd_dc_results_stream_queue_repo.py +17 -0
  9. tp_shared-0.1.0/src/tp_shared/gibdd_service/schemas/gibdd_dc_result_message.py +39 -0
  10. tp_shared-0.1.0/src/tp_shared/gibdd_service/worker_services/base_gibdd_dc_results_stream_queue_worker_service.py +122 -0
  11. tp_shared-0.1.0/src/tp_shared/mos_passes_service/repo/mos_passes_results_stream_queue_repo.py +17 -0
  12. tp_shared-0.1.0/src/tp_shared/mos_passes_service/schemas/mos_passes_result_message.py +23 -0
  13. tp_shared-0.1.0/src/tp_shared/mos_passes_service/worker_services/base_mos_passes_results_stream_queue_worker_service.py +109 -0
  14. tp_shared-0.1.0/src/tp_shared/nsis_service/repos/nsis_results_ack_list_queue_repo.py +13 -0
  15. tp_shared-0.1.0/src/tp_shared/nsis_service/schemas/nsis_result_message.py +23 -0
  16. tp_shared-0.1.0/src/tp_shared/nsis_service/types/nsis_task_type.py +6 -0
  17. tp_shared-0.1.0/src/tp_shared/nsis_service/worker_services/base_nsis_results_queue_worker_service.py +32 -0
  18. tp_shared-0.1.0/src/tp_shared/policies_service/repos/policies_event_stream_queue_repo.py +17 -0
  19. tp_shared-0.1.0/src/tp_shared/policies_service/schemas/policies_result_message.py +28 -0
  20. tp_shared-0.1.0/src/tp_shared/policies_service/worker_services/base_policies_results_queue_worker_service.py +109 -0
  21. tp_shared-0.1.0/src/tp_shared/rnis_check_service/repos/rnis_check_results_stream_queue_repo.py +17 -0
  22. tp_shared-0.1.0/src/tp_shared/rnis_check_service/schemas/rnis_check_result_message.py +9 -0
  23. tp_shared-0.1.0/src/tp_shared/rnis_check_service/worker_services/base_rnis_results_queue_worker_service.py +109 -0
  24. tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/repos/rnis_emulator_results_stream_queue_repo.py +17 -0
  25. tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/schemas/rnis_emulator_result_message.py +57 -0
  26. tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/types/rnis_emulator_types.py +35 -0
  27. tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/worker_services/rnis_emulator_results_stream_worker_service.py +102 -0
  28. tp_shared-0.1.0/src/tp_shared/types/dc_operator_status.py +7 -0
  29. tp_shared-0.1.0/src/tp_shared/types/pass_allowed_zone.py +8 -0
  30. tp_shared-0.1.0/src/tp_shared/types/pass_series.py +14 -0
  31. tp_shared-0.1.0/src/tp_shared/types/pass_time_of_date.py +6 -0
  32. tp_shared-0.1.0/src/tp_shared/types/policy_series.py +19 -0
  33. tp_shared-0.1.0/src/tp_shared/types/policy_status.py +7 -0
@@ -0,0 +1,80 @@
1
+ Metadata-Version: 2.3
2
+ Name: tp-shared
3
+ Version: 0.1.0
4
+ Summary:
5
+ Author: Developer
6
+ Author-email: front-gold@mail.ru
7
+ Requires-Python: >=3.12,<=3.14
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.12
10
+ Classifier: Programming Language :: Python :: 3.13
11
+ Requires-Dist: pydantic (>=2.11.7,<3.0.0)
12
+ Requires-Dist: tp-helper (>=0.4.62,<0.5.0)
13
+ Description-Content-Type: text/markdown
14
+
15
+ # 🧩 tp-shared
16
+
17
+ ΠžΠ±Ρ‰ΠΈΠΉ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ схСм для использования Π² Π½Π΅ΡΠΊΠΎΠ»ΡŒΠΊΠΈΡ… ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°Ρ….
18
+
19
+ ---
20
+
21
+ ## Установка:
22
+ `poetry add tp-shared`
23
+
24
+ ## ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° ΠΏΡ€ΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠΈ
25
+ ```
26
+ poetry cache clear pypi --all --no-interaction; poetry add tp-shared@latest
27
+ ```
28
+
29
+ ```
30
+ poetry cache clear pypi --all --no-interaction && poetry add tp-shared@latest
31
+ ```
32
+
33
+ ## ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ:
34
+ Π‘ΠΎΠ±ΠΈΡ€Π°Π΅Ρ‚ ΠΈ Π·Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ собранный ΠΏΠ°ΠΊΠ΅Ρ‚ Π² PyPI.
35
+
36
+ `poetry publish --build`
37
+
38
+ ## Π‘Ρ‚Ρ€ΡƒΠΊΡ‚ΡƒΡ€Π° ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°
39
+
40
+ **messages**
41
+ -------------------------
42
+ Π‘Ρ…Π΅ΠΌΡ‹ сообщСний ΠΎΡ‚ сСрвисов
43
+
44
+ ΠŸΡ€ΠΈΠΌΠ΅Ρ€ ΠΈΠΌΠΏΠΎΡ€Ρ‚Π°
45
+
46
+ from tp_shared_schemas.messages import GibddDcResultMessage
47
+
48
+ Π’ ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΏΠ°ΠΏΠΊΠ΅ Π»Π΅ΠΆΠ°Ρ‚ ΡΠΎΠΎΡ‚Π²Π΅Ρ‚ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠ΅ Pydantic-схСмы, сгруппированныС ΠΏΠΎ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΎΠ½Π°Π»Ρƒ.
49
+ --------------------------
50
+ ---
51
+
52
+ ## Как ΠΏΠΎΠ΄ΠΊΠ»ΡŽΡ‡ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ ΠΊ ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰Π΅ΠΌΡƒ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Ρƒ
53
+
54
+ Если Ρƒ вас Π΅ΡΡ‚ΡŒ Π»ΠΎΠΊΠ°Π»ΡŒΠ½Ρ‹ΠΉ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚ ΠΈ Π²Ρ‹ Ρ…ΠΎΡ‚ΠΈΡ‚Π΅ Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ с ΠΎΠ±Ρ‰ΠΈΠΌΠΈ схСмами, Π²Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚Π΅ ΠΊΠΎΠΌΠ°Π½Π΄Ρ‹:
55
+ Π² Ρ„Π°ΠΉΠ»Π΅ pyproject.toml ΠΏΡ€ΠΎΠΏΠΈΡΠ°Ρ‚ΡŒ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡ‚ΡŒ:
56
+ 1)
57
+
58
+ ```toml
59
+ [tool.poetry.dependencies]
60
+ tp-shared = { git = "https://gitlab.8525.ru/modules/tp-shared.git", rev = "main" }
61
+ ```
62
+
63
+ ΠΈΠ»ΠΈ Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡ‚ΡŒ ΠΊΠΎΠΌΠ°Π½Π΄ΠΎΠΉ `poetry add` ΠΏΠΎ git-ссылкС:
64
+
65
+ ```bash
66
+ poetry add git+https://gitlab.8525.ru/modules/tp-shared.git
67
+ ```
68
+
69
+ 2) Π’Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚ΡŒ ΠΊΠΎΠΌΠ°Π½Π΄Ρƒ poetry install ΠΈΠ»ΠΈ poetry update
70
+
71
+
72
+ ## Π Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ
73
+ ```
74
+ cd existing_repo
75
+ git remote add origin https://gitlab.8525.ru/modules/tp-shared.git
76
+ git branch -M main
77
+ git push -uf origin main
78
+ ```
79
+
80
+
@@ -0,0 +1,65 @@
1
+ # 🧩 tp-shared
2
+
3
+ ΠžΠ±Ρ‰ΠΈΠΉ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ схСм для использования Π² Π½Π΅ΡΠΊΠΎΠ»ΡŒΠΊΠΈΡ… ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°Ρ….
4
+
5
+ ---
6
+
7
+ ## Установка:
8
+ `poetry add tp-shared`
9
+
10
+ ## ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° ΠΏΡ€ΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠΈ
11
+ ```
12
+ poetry cache clear pypi --all --no-interaction; poetry add tp-shared@latest
13
+ ```
14
+
15
+ ```
16
+ poetry cache clear pypi --all --no-interaction && poetry add tp-shared@latest
17
+ ```
18
+
19
+ ## ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ:
20
+ Π‘ΠΎΠ±ΠΈΡ€Π°Π΅Ρ‚ ΠΈ Π·Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ собранный ΠΏΠ°ΠΊΠ΅Ρ‚ Π² PyPI.
21
+
22
+ `poetry publish --build`
23
+
24
+ ## Π‘Ρ‚Ρ€ΡƒΠΊΡ‚ΡƒΡ€Π° ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Π°
25
+
26
+ **messages**
27
+ -------------------------
28
+ Π‘Ρ…Π΅ΠΌΡ‹ сообщСний ΠΎΡ‚ сСрвисов
29
+
30
+ ΠŸΡ€ΠΈΠΌΠ΅Ρ€ ΠΈΠΌΠΏΠΎΡ€Ρ‚Π°
31
+
32
+ from tp_shared_schemas.messages import GibddDcResultMessage
33
+
34
+ Π’ ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΏΠ°ΠΏΠΊΠ΅ Π»Π΅ΠΆΠ°Ρ‚ ΡΠΎΠΎΡ‚Π²Π΅Ρ‚ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠ΅ Pydantic-схСмы, сгруппированныС ΠΏΠΎ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΎΠ½Π°Π»Ρƒ.
35
+ --------------------------
36
+ ---
37
+
38
+ ## Как ΠΏΠΎΠ΄ΠΊΠ»ΡŽΡ‡ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ ΠΊ ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰Π΅ΠΌΡƒ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚Ρƒ
39
+
40
+ Если Ρƒ вас Π΅ΡΡ‚ΡŒ Π»ΠΎΠΊΠ°Π»ΡŒΠ½Ρ‹ΠΉ ΠΏΡ€ΠΎΠ΅ΠΊΡ‚ ΠΈ Π²Ρ‹ Ρ…ΠΎΡ‚ΠΈΡ‚Π΅ Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ с ΠΎΠ±Ρ‰ΠΈΠΌΠΈ схСмами, Π²Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚Π΅ ΠΊΠΎΠΌΠ°Π½Π΄Ρ‹:
41
+ Π² Ρ„Π°ΠΉΠ»Π΅ pyproject.toml ΠΏΡ€ΠΎΠΏΠΈΡΠ°Ρ‚ΡŒ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡ‚ΡŒ:
42
+ 1)
43
+
44
+ ```toml
45
+ [tool.poetry.dependencies]
46
+ tp-shared = { git = "https://gitlab.8525.ru/modules/tp-shared.git", rev = "main" }
47
+ ```
48
+
49
+ ΠΈΠ»ΠΈ Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡ‚ΡŒ ΠΊΠΎΠΌΠ°Π½Π΄ΠΎΠΉ `poetry add` ΠΏΠΎ git-ссылкС:
50
+
51
+ ```bash
52
+ poetry add git+https://gitlab.8525.ru/modules/tp-shared.git
53
+ ```
54
+
55
+ 2) Π’Ρ‹ΠΏΠΎΠ»Π½ΠΈΡ‚ΡŒ ΠΊΠΎΠΌΠ°Π½Π΄Ρƒ poetry install ΠΈΠ»ΠΈ poetry update
56
+
57
+
58
+ ## Π Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ
59
+ ```
60
+ cd existing_repo
61
+ git remote add origin https://gitlab.8525.ru/modules/tp-shared.git
62
+ git branch -M main
63
+ git push -uf origin main
64
+ ```
65
+
@@ -0,0 +1,46 @@
1
+ [project]
2
+ name = "tp-shared"
3
+ version = "0.1.0"
4
+ description = ""
5
+ authors = [
6
+ {name = "Developer",email = "front-gold@mail.ru"}
7
+ ]
8
+ readme = "README.md"
9
+ requires-python = ">=3.12,<=3.14"
10
+ dependencies = [
11
+ "pydantic (>=2.11.7,<3.0.0)",
12
+ "tp-helper (>=0.4.62,<0.5.0)"
13
+ ]
14
+
15
+
16
+ [build-system]
17
+ requires = ["poetry-core>=2.0.0,<3.0.0"]
18
+ build-backend = "poetry.core.masonry.api"
19
+
20
+ [tool.ruff]
21
+ line-length = 88
22
+ target-version = "py313"
23
+ fix = true
24
+
25
+ [tool.ruff.lint]
26
+ select = [
27
+ "E", # pycodestyle (ΡΡ‚ΠΈΠ»ΡŒ)
28
+ "F", # pyflakes (ошибки исполнСния)
29
+ "I", # isort (сортировка ΠΈΠΌΠΏΠΎΡ€Ρ‚ΠΎΠ²)
30
+ "UP", # pyupgrade (ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠ΅ синтаксиса)
31
+ "B", # bugbear (ΠΏΠΎΡ‚Π΅Π½Ρ†ΠΈΠ°Π»ΡŒΠ½Ρ‹Π΅ Π±Π°Π³ΠΈ)
32
+ "A", # flake8-builtins (ΠΊΠΎΠ½Ρ„Π»ΠΈΠΊΡ‚Ρ‹ с встроСнными ΠΈΠΌΠ΅Π½Π°ΠΌΠΈ)
33
+ "C4", # flake8-comprehensions
34
+ "SIM", # flake8-simplify
35
+ ]
36
+ ignore = [
37
+ "B008", # Depends(...) Π² Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Π°Ρ… FastAPI β€” бСзопасно ΠΈ распространённо
38
+ "E501", # Π”Π»ΠΈΠ½Π° строки β€” ΠΈΠ³Π½ΠΎΡ€ΠΈΡ€ΡƒΠ΅ΠΌ, Ρ‚.ΠΊ. ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ black с line-length = 88
39
+ "SIM117", # Π’Π»ΠΎΠΆΠ΅Π½Π½Ρ‹Π΅ async with β€” Ρ‡ΠΈΡ‚Π°Π΅ΠΌΠΎΡΡ‚ΡŒ Π² Π½Π΅ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Ρ… случаях Π²Π°ΠΆΠ½Π΅Π΅
40
+ ]
41
+
42
+ [tool.poetry.group.dev.dependencies]
43
+ ruff = "^0.13.0"
44
+ pre-commit = "^4.3.0"
45
+ pylint = "^3.3.8"
46
+
@@ -0,0 +1,14 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class AutoinsResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Redis-backed ack-list queue repository for AUTOINS MPG result messages.

    FIX: removed the dead commented-out import of ``src.config`` that leaked
    in from a consuming project — this shared package must not reference it.
    """

    # Redis key of the acknowledgement list queue.
    QUEUE_NAME = "autoins:service:results:ack:list"

    def __init__(self, redis_client: Redis):
        """Bind the repo to a Redis client.

        Messages in this queue are (de)serialized as ``AutoinsResultMessage``.
        """
        super().__init__(redis_client=redis_client, message_type=AutoinsResultMessage)
@@ -0,0 +1,34 @@
1
from datetime import date

from pydantic import BaseModel

from tp_shared.types.policy_series import PolicySeries


class AutoinsResultPolicy(BaseModel):
    """One OSAGO policy record returned by the AUTOINS MPG service."""

    insurer_name: str
    reg_number: str
    series: PolicySeries
    number: str
    start_date: date
    end_date: date
    # Up to three optional usage periods inside the policy term.
    period1_start: date | None = None
    period1_end: date | None = None
    period2_start: date | None = None
    period2_end: date | None = None
    period3_start: date | None = None
    period3_end: date | None = None
    # Vehicle identifiers; any of these may be missing in the source data.
    vin: str | None = None
    body_number: str | None = None
    chassis_number: str | None = None
    car_mark: str | None = None
    car_model: str | None = None
    external_policy_id: int | None = None
    policy_state: str | None = None
    policy_status_t_use: str | None = None


class AutoinsResultMessage(BaseModel):
    """Result for a series/number lookup: every policy found for the query."""

    series: PolicySeries
    number: str
    # Pydantic deep-copies field defaults per instance, so [] is safe here.
    policies: list[AutoinsResultPolicy] = []
@@ -0,0 +1,34 @@
1
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.autoins_mpg_service.repos.autoins_results_ack_list_queue_repo import (
    AutoinsResultsAckListQueueRepo,
)
from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class BaseAutoinsResultsAckListQueueWorkerService(
    BaseWorkerService, AutoinsResultsAckListQueueRepo
):
    """Worker-side facade over the AUTOINS results ack-list queue.

    Each repo call is wrapped in ``retry_forever`` so transient Redis
    failures are logged and retried instead of crashing the worker.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        # Both bases are initialized explicitly with the same Redis client.
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        AutoinsResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="πŸ“₯ Π§Ρ‚Π΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Ρ‡Ρ‚Π΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> AutoinsResultMessage | None:
        """Take the next message from the queue (None when it is empty)."""
        return await AutoinsResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="πŸ—‘οΈ ack Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ack Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def ack(self) -> None:
        """Acknowledge (remove) the message taken by the last ``pop``."""
        return await AutoinsResultsAckListQueueRepo.ack(self)
@@ -0,0 +1,5 @@
1
from tp_helper import BaseSchema


class BaseMessage(BaseSchema):
    """Common base for all queue messages in this package.

    Carries a schema ``version`` so consumers can branch on message format.
    """

    version: str = "1.0"
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class GibddDcResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis stream repository for GIBDD diagnostic-card results."""

    # Redis stream key.
    QUEUE_NAME = "gibdd:service:dc:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind the stream repo to a Redis client with its message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=GibddDcResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,39 @@
1
from datetime import date

from pydantic import ConfigDict

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.dc_operator_status import DcOperatorStatus


class GibddDcResultOperator(BaseMessage):
    """Technical-inspection operator that issued a diagnostic card."""

    operator_id: int
    status: DcOperatorStatus
    name: str
    address_line: str
    phone_number: str
    email: str
    site: str
    # Set only for operators whose accreditation was revoked.
    canceled_date: date | None
    canceled_at: int | None


class GibddDcResultCard(BaseMessage):
    """A single diagnostic card for a vehicle."""

    card_number: str
    vin: str
    start_date: date
    end_date: date
    odometer_value: int
    is_active: bool
    # Unix timestamps of the record's lifecycle.
    updated_at: int
    created_at: int

    operator: GibddDcResultOperator

    # Allow building from ORM objects and populating by field name.
    model_config = ConfigDict(from_attributes=True, populate_by_name=True)


class GibddDcResultMessage(BaseMessage):
    """All diagnostic cards found for a VIN lookup."""

    version: str = "1.0"
    vin: str
    diagnostic_cards: list[GibddDcResultCard] = []
@@ -0,0 +1,122 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.gibdd_service.repo.gibdd_dc_results_stream_queue_repo import (
    GibddDcResultsStreamQueueRepo,
)
from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class BaseGibddDcResultsStreamQueueWorkerService(
    GibddDcResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side facade over the GIBDD diagnostic-card results stream.

    Every repo operation is wrapped in ``retry_forever`` so transient Redis
    failures are retried with logging instead of crashing the worker.

    FIX: ``create_consumer_group`` previously swallowed every exception with
    ``print(e)``; it now reports the failure through the worker's logger.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        """Initialize both bases and remember the consumer-group identity."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        GibddDcResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: GibddDcResultMessage) -> None:
        """Append a result message to the stream."""
        await GibddDcResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, GibddDcResultMessage]] | None:
        """Read up to ``count`` messages for this consumer.

        With ``prioritize_claimed`` the repo first re-reads messages idle
        longer than ``min_idle_time`` ms before fetching new ones.
        """
        return await GibddDcResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one stream entry for this group."""
        await GibddDcResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, GibddDcResultMessage]]:
        """Claim messages stuck with dead consumers (XAUTOCLAIM semantics)."""
        return await GibddDcResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create the consumer group starting from stream id "0".

        Failures are non-fatal (e.g. BUSYGROUP when the group already
        exists), but are now logged instead of printed to stdout.
        """
        try:
            await GibddDcResultsStreamQueueRepo.create_consumer_group(
                self,
                group_name=self.group_name,
                create_stream=create_stream,
                stream_id="0",
            )
        except Exception as e:
            # NOTE(review): assumes BaseWorkerService exposes the logger as
            # ``self.logger`` — confirm against tp_helper.
            self.logger.warning("Consumer group creation failed: %s", e)

    @retry_forever(
        start_message="ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention`` via XTRIM MINID.

        :param retention: maximum message age, e.g. ``timedelta(days=1)``
        :return: number of deleted messages
        """
        return await GibddDcResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop all consumer groups and the stream itself (full wipe)."""
        await GibddDcResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class MosPassesResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis stream repository for Moscow truck-pass lookup results."""

    # Redis stream key.
    QUEUE_NAME = "mos:passes:service:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind the stream repo to a Redis client with its message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=MosPassesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,23 @@
1
from datetime import date

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.pass_allowed_zone import PassAllowedZone
from tp_shared.types.pass_series import PassSeries
from tp_shared.types.pass_time_of_date import PassTimeOfDate


class MosPassesResultPass(BaseMessage):
    """A single Moscow truck pass found for a registration number."""

    reg_number: str
    time_of_day: PassTimeOfDate
    series: PassSeries
    number: str
    allowed_zone: PassAllowedZone
    start_date: date
    end_date: date
    # Present only when the pass was revoked.
    cancel_date: date | None


class MosPassesResultMessage(BaseMessage):
    """All passes found for one registration-number query."""

    version: str = "1.0"
    reg_number: str
    # Pydantic deep-copies field defaults per instance, so [] is safe here.
    passes: list[MosPassesResultPass] = []
@@ -0,0 +1,109 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.mos_passes_service.repo.mos_passes_results_stream_queue_repo import (
    MosPassesResultsStreamQueueRepo,
)
from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class BaseMosPassesResultsStreamQueueWorkerService(
    MosPassesResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side facade over the Moscow-passes results stream.

    Wraps each repo call in ``retry_forever`` so transient Redis errors are
    logged and retried rather than crashing the worker.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        """Initialize both bases and remember the consumer-group identity."""
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        MosPassesResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: MosPassesResultMessage) -> None:
        """Append a result message to the stream."""
        await MosPassesResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, MosPassesResultMessage]] | None:
        """Read up to ``count`` messages for this consumer, claimed first."""
        return await MosPassesResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one stream entry for this group."""
        await MosPassesResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, MosPassesResultMessage]]:
        """Claim messages stuck with dead consumers (XAUTOCLAIM semantics)."""
        return await MosPassesResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create the consumer group for this stream."""
        await MosPassesResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention``; returns count removed."""
        return await MosPassesResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop all consumer groups and the stream itself (full wipe)."""
        await MosPassesResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,13 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.nsis_service.schemas.nsis_result_message import (
    NsisResultMessage,
)


class NsisResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Redis-backed ack-list queue repository for NSIS result messages."""

    # Redis key of the acknowledgement list queue.
    QUEUE_NAME = "nsis:service:results:ack:list"

    def __init__(self, redis_client: Redis):
        """Bind the repo to a Redis client; messages are NsisResultMessage."""
        # Keyword form matches the sibling ack-list repos in this package.
        super().__init__(redis_client=redis_client, message_type=NsisResultMessage)
@@ -0,0 +1,23 @@
1
from datetime import date

from tp_helper.base_items.base_schema import BaseSchema

from tp_shared.nsis_service.types.nsis_task_type import NsisTaskType
from tp_shared.types.policy_series import PolicySeries
from tp_shared.types.policy_status import PolicyStatus


class NsisResultPolicy(BaseSchema):
    """One policy record returned by an NSIS lookup."""

    status: PolicyStatus
    series: PolicySeries
    number: str
    start_date: date | None = None
    end_date: date | None = None
    insurer_name: str


class NsisResultMessage(BaseSchema):
    """NSIS lookup result: the query, its date, and all matched policies."""

    # What was searched: a registration number or a VIN.
    task_type: NsisTaskType
    query: str
    request_date: date
    # Pydantic deep-copies field defaults per instance, so [] is safe here.
    policies: list[NsisResultPolicy] = []
@@ -0,0 +1,6 @@
1
from enum import Enum


class NsisTaskType(Enum):
    """Kind of identifier an NSIS lookup was performed by."""

    REG_NUMBER = "REG_NUMBER"  # vehicle registration (license) number
    VIN = "VIN"  # vehicle identification number
@@ -0,0 +1,32 @@
1
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.nsis_service.repos.nsis_results_ack_list_queue_repo import (
    NsisResultsAckListQueueRepo,
)
from tp_shared.nsis_service.schemas.nsis_result_message import NsisResultMessage


class BaseNsisResultsAckListQueueWorkerService(
    NsisResultsAckListQueueRepo, BaseWorkerService
):
    """Worker-side facade over the NSIS results ack-list queue.

    Each repo call is wrapped in ``retry_forever`` so transient Redis
    failures are logged and retried instead of crashing the worker.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        # Both bases are initialized explicitly with the same Redis client.
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        NsisResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="πŸ“₯ Начало чтСния Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Ρ‡Ρ‚Π΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> NsisResultMessage | None:
        """Take the next message from the queue (None when it is empty)."""
        return await NsisResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="πŸ—‘οΈ Π£Π΄Π°Π»Π΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ Π·Π°Π΄Π°Ρ‡ ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def ack(self) -> None:
        """Acknowledge (remove) the message taken by the last ``pop``."""
        return await NsisResultsAckListQueueRepo.ack(self)
@@ -0,0 +1,17 @@
1
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.policies_service.schemas.policies_result_message import (
    PoliciesResultMessage,
)


class PoliciesResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis stream repository for insurance-policy lookup results."""

    # Redis stream key.
    QUEUE_NAME = "policies:service:results:stream"

    def __init__(self, redis_client: Redis):
        """Bind the stream repo to a Redis client with its message schema."""
        super().__init__(
            redis_client=redis_client,
            schema=PoliciesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
@@ -0,0 +1,28 @@
1
from datetime import date

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.policy_series import PolicySeries
from tp_shared.types.policy_status import PolicyStatus


class PoliciesResultPolicy(BaseMessage):
    """One policy record found for a registration-number lookup."""

    series: PolicySeries
    number: str
    status: PolicyStatus
    start_date: date | None = None
    end_date: date | None = None
    # Up to three optional usage periods inside the policy term.
    period1_start: date | None = None
    period1_end: date | None = None
    period2_start: date | None = None
    period2_end: date | None = None
    period3_start: date | None = None
    period3_end: date | None = None
    # Vehicle details; may be absent in the source data.
    vin: str | None = None
    car_mark: str | None = None
    car_model: str | None = None


class PoliciesResultMessage(BaseMessage):
    """All policies found for one registration-number query."""

    version: str = "1.0"
    reg_number: str
    # Pydantic deep-copies field defaults per instance, so [] is safe here.
    policies: list[PoliciesResultPolicy] = []
@@ -0,0 +1,109 @@
1
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.policies_service.repos.policies_event_stream_queue_repo import (
    PoliciesResultsStreamQueueRepo,
)
from tp_shared.policies_service.schemas.policies_result_message import (
    PoliciesResultMessage,
)


class BasePolicyResultsStreamQueueWorkerService(
    PoliciesResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side facade over the policies results stream.

    Wraps each repo call in ``retry_forever`` so transient Redis errors are
    logged and retried rather than crashing the worker.

    FIX: ``group_name``/``consumer_name`` were annotated ``str`` while
    defaulting to ``None``; the annotations are now ``str | None``.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str | None = None,
        consumer_name: str | None = None,
    ):
        """Initialize both bases and remember the consumer-group identity.

        ``group_name``/``consumer_name`` may be omitted for producer-only
        usage (``add``/``trim_by_age``) that never reads from the group.
        """
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        PoliciesResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="βž• Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: PoliciesResultMessage) -> None:
        """Append a result message to the stream."""
        await PoliciesResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="πŸ“₯ ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="❗ Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, PoliciesResultMessage]] | None:
        """Read up to ``count`` messages for this consumer, claimed first."""
        return await PoliciesResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="βœ… ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one stream entry for this group."""
        await PoliciesResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="πŸ” Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="🚫 Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, PoliciesResultMessage]]:
        """Claim messages stuck with dead consumers (XAUTOCLAIM semantics)."""
        return await PoliciesResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="πŸ‘₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="❌ Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create the consumer group for this stream."""
        await PoliciesResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="🧹 ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than ``retention``; returns count removed."""
        return await PoliciesResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="πŸ—‘οΈ Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Drop all consumer groups and the stream itself (full wipe)."""
        await PoliciesResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,17 @@
1
+ from redis.asyncio import Redis
2
+ from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo
3
+
4
+ from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
5
+ RNISCheckResultMessage,
6
+ )
7
+
8
+
9
class RNISCheckResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for RNIS check result messages.

    Binds the generic stream repo to a fixed queue name and the
    ``RNISCheckResultMessage`` schema.
    """

    # Well-known stream key shared by producers and consumers.
    QUEUE_NAME = "rnis:check:service:results:stream"

    def __init__(self, redis_client: Redis):
        super().__init__(
            redis_client=redis_client,
            queue_name=self.QUEUE_NAME,
            schema=RNISCheckResultMessage,
        )
@@ -0,0 +1,9 @@
1
+ from tp_shared.base.base_message import BaseMessage
2
+
3
+
4
class RNISCheckResultMessage(BaseMessage):
    """Result of a single RNIS registration check for one vehicle."""

    # Schema version of this message payload.
    version: str = "1.0"
    # Vehicle registration (plate) number the check was performed for.
    reg_number: str
    # Whether the vehicle was found in RNIS.
    exists: bool
    # Last telemetry mark; required field but may be null.
    # NOTE(review): presumably an epoch timestamp β€” confirm with the producer.
    last_mark: int | None
    # Number of telematics terminals reported for the vehicle.
    terminals_amount: int
@@ -0,0 +1,109 @@
1
+ from datetime import timedelta
2
+ from logging import Logger
3
+
4
+ from redis.asyncio import Redis
5
+ from tp_helper.base_items.base_worker_service import BaseWorkerService
6
+ from tp_helper.decorators.decorator_retry_forever import retry_forever
7
+
8
+ from tp_shared.rnis_check_service.repos.rnis_check_results_stream_queue_repo import (
9
+ RNISCheckResultsStreamQueueRepo,
10
+ )
11
+ from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
12
+ RNISCheckResultMessage,
13
+ )
14
+
15
+
16
class BaseRNISCheckResultsStreamQueueWorkerService(
    RNISCheckResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side facade over the RNIS check results Redis stream.

    Every repo operation is wrapped in ``retry_forever`` so transient
    Redis failures are logged and retried indefinitely.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        # Explicit base-class initialization because of multiple inheritance:
        # each base receives exactly the arguments it consumes.
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        RNISCheckResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    # Fixed log strings: the message is added *to* the queue ("Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ",
    # not "ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ") and the error happens "ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠΈ" (correct case).
    @retry_forever(
        start_message="βž• Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠΈ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: RNISCheckResultMessage) -> None:
        """Publish a single check-result message to the stream."""
        await RNISCheckResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="πŸ“₯ ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="⚠️ Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, RNISCheckResultMessage]] | None:
        """Read up to ``count`` messages for this consumer group.

        ``block=0`` blocks until data is available.  When
        ``prioritize_claimed`` is set, entries pending longer than
        ``min_idle_time`` ms are presumably served first β€” see the repo
        implementation for the exact semantics.
        """
        return await RNISCheckResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="βœ… ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str) -> None:
        """Acknowledge one stream message for this worker's consumer group."""
        await RNISCheckResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="πŸ” Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="🚫 Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, RNISCheckResultMessage]]:
        """Re-claim messages idle longer than ``min_idle_time`` ms."""
        return await RNISCheckResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="πŸ‘₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="❌ Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True) -> None:
        """Ensure this worker's consumer group (and optionally the stream) exists."""
        await RNISCheckResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="🧹 ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Drop messages older than ``retention``; return the removed count."""
        return await RNISCheckResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="πŸ—‘οΈ Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Remove every message from the stream (destructive)."""
        await RNISCheckResultsStreamQueueRepo.delete_all(self)
@@ -0,0 +1,17 @@
1
+ from redis.asyncio import Redis
2
+ from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo
3
+
4
+ from tp_shared.rnis_emulator_service.schemas.rnis_emulator_result_message import (
5
+ RnisEmulatorResultMessage,
6
+ )
7
+
8
+
9
class RnisEmulatorResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for RNIS emulator result messages.

    Binds the generic stream repo to a fixed queue name and the
    ``RnisEmulatorResultMessage`` schema.
    """

    # Well-known stream key shared by producers and consumers.
    QUEUE_NAME = "rnis:emulator:service:results:stream"

    def __init__(self, redis_client: Redis):
        super().__init__(
            redis_client=redis_client,
            queue_name=self.QUEUE_NAME,
            schema=RnisEmulatorResultMessage,
        )
@@ -0,0 +1,57 @@
1
+ import uuid
2
+ from datetime import date
3
+
4
+ from pydantic import model_validator
5
+ from tp_helper.base_items.base_schema import BaseSchema
6
+
7
+ from tp_shared.rnis_emulator_service.types.rnis_emulator_types import (
8
+ RnisEmulatorActionType,
9
+ RnisEmulatorResultType,
10
+ RnisEmulatorSubscriptionStatus,
11
+ RnisEmulatorTaskStatus,
12
+ )
13
+ from tp_shared.types.pass_time_of_date import PassTimeOfDate
14
+
15
+
16
class RnisEmulatorResultTask(BaseSchema):
    """Snapshot of one emulator task carried inside a result message."""

    # Identifier of the emulation task this result describes.
    task_id: uuid.UUID
    # Subscription the task belongs to.
    subscription_id: uuid.UUID
    # Vehicle registration (plate) number.
    reg_number: str
    # NOTE(review): flag name suggests a trial/test run β€” confirm semantics.
    is_test_drive: bool
    # DAY/NIGHT window the task runs in.
    time_of_day: PassTimeOfDate
    # Current lifecycle state of the task.
    status: RnisEmulatorTaskStatus
    # Failure description; only expected for failed tasks.
    error_message: str | None = None
    # Start/end markers; None until the respective event occurs.
    # NOTE(review): presumably epoch timestamps β€” confirm with the producer.
    started_at: int | None = None
    ended_at: int | None = None
26
+
27
+
28
class RnisEmulatorResultSubscription(BaseSchema):
    """Snapshot of an emulator subscription carried inside a result message."""

    subscription_id: uuid.UUID
    reg_number: str
    time_of_day: PassTimeOfDate
    status: RnisEmulatorSubscriptionStatus
    monthly_run_count: int
    start_date: date
    end_date: date
    # NOTE(review): presumably an epoch timestamp β€” confirm with the producer.
    created_at: int

    @model_validator(mode="after")
    def _check_dates(self):
        # Reject inverted ranges; start_date == end_date is allowed.
        if self.end_date < self.start_date:
            raise ValueError("start_date Π½Π΅ ΠΌΠΎΠΆΠ΅Ρ‚ Π±Ρ‹Ρ‚ΡŒ ΠΏΠΎΠ·ΠΆΠ΅ end_date")
        return self
43
+
44
+
45
class RnisEmulatorResultMessage(BaseSchema):
    """Envelope for one emulator result: a task and/or a subscription payload."""

    type: RnisEmulatorResultType
    task: RnisEmulatorResultTask | None = None
    subscription: RnisEmulatorResultSubscription | None = None
    action_type: RnisEmulatorActionType

    @model_validator(mode="after")
    def _one_of_task_or_subscription(self):
        # At least one payload must be present; carrying both is allowed.
        if self.task is None and self.subscription is None:
            raise ValueError(
                "Π”ΠΎΠ»ΠΆΠ½ΠΎ Π±Ρ‹Ρ‚ΡŒ Π·Π°ΠΏΠΎΠ»Π½Π΅Π½ΠΎ хотя Π±Ρ‹ ΠΎΠ΄Π½ΠΎ ΠΏΠΎΠ»Π΅: 'task' ΠΈΠ»ΠΈ 'subscription'."
            )
        return self
@@ -0,0 +1,35 @@
1
+ from enum import Enum, StrEnum
2
+
3
+
4
+ class RnisEmulatorResultType(StrEnum):
5
+ TASK = "TASK"
6
+ SUBSCRIPTION = "SUBSCRIPTION"
7
+
8
+
9
+ class RnisEmulatorActionType(StrEnum):
10
+ CREATE = "CREATE"
11
+ UPDATE = "UPDATE"
12
+ DELETE = "DELETE"
13
+
14
+
15
+ class RnisEmulatorTaskStatus(str, Enum):
16
+ WAITING = "WAITING"
17
+ CALCULATING = "CALCULATING"
18
+ CALCULATED = "CALCULATED" # Расчёт ΠΏΡ€ΠΎΠ²Π΅Π΄Ρ‘Π½, Π³ΠΎΡ‚ΠΎΠ²Π° ΠΊ эмуляции
19
+ IN_WORK = "IN_WORK"
20
+ UNLOADING = "UNLOADING"
21
+ PARKING = "PARKING"
22
+ COMPLETED = "COMPLETED"
23
+ CANCELED = "CANCELED"
24
+ ERROR = "ERROR"
25
+
26
+
27
+ class RnisEmulatorSubscriptionStatus(str, Enum):
28
+ # ΠΏΠ΅Ρ€Π²ΠΈΡ‡Π½Ρ‹ΠΉ, Ρ‚ΠΎΠ»ΡŒΠΊΠΎ Ρ‡Ρ‚ΠΎ создана, Π΅Ρ‰Ρ‘ Π½Π΅ Π°ΠΊΡ‚ΠΈΠ²Π½Π°
29
+ CREATED = "CREATED"
30
+ # Ρ€Π°Π±ΠΎΡ‚Π°Π΅Ρ‚
31
+ ACTIVE = "ACTIVE"
32
+ # Π²Ρ€ΡƒΡ‡Π½ΡƒΡŽ приостановлСна
33
+ SUSPENDED = "SUSPENDED"
34
+ # Π·Π°ΠΊΠΎΠ½Ρ‡ΠΈΠ»Π°ΡΡŒ ΠΏΠΎ сроку
35
+ EXPIRED = "EXPIRED"
@@ -0,0 +1,102 @@
1
+ from datetime import timedelta
2
+ from logging import Logger
3
+
4
+ from tp_helper.base_items.base_worker_service import BaseWorkerService
5
+ from tp_helper.decorators.decorator_retry_forever import retry_forever
6
+
7
+ from tp_shared.rnis_emulator_service.repos.rnis_emulator_results_stream_queue_repo import (
8
+ RnisEmulatorResultsStreamQueueRepo,
9
+ )
10
+ from tp_shared.rnis_emulator_service.schemas.rnis_emulator_result_message import (
11
+ RnisEmulatorResultMessage,
12
+ )
13
+
14
+
15
class RnisEmulatorResultsStreamQueueWorkerService(BaseWorkerService):
    """Worker facade over the RNIS emulator results Redis stream.

    Unlike the inheritance-based workers elsewhere in this package, this
    service holds the repo by composition.  Every repo operation is wrapped
    in ``retry_forever`` so transient Redis failures are logged and retried
    indefinitely.
    """

    def __init__(
        self,
        repo: RnisEmulatorResultsStreamQueueRepo,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        super().__init__(logger=logger)
        self.repo = repo
        self.group_name = group_name
        self.consumer_name = consumer_name

    # Fixed log strings: the message is added *to* the queue ("Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ",
    # not "ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ") and the error happens "ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠΈ" (correct case).
    @retry_forever(
        start_message="βž• Π”ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠΈ сообщСния Π² ΠΎΡ‡Π΅Ρ€Π΅Π΄ΡŒ {queue_name}",
    )
    async def add(self, message: RnisEmulatorResultMessage) -> None:
        """Publish a single emulator result message to the stream."""
        await self.repo.add(message)

    @retry_forever(
        start_message="πŸ“₯ ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
        error_message="⚠️ Ошибка получСния сообщСний ΠΈΠ· ΠΎΡ‡Π΅Ρ€Π΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 60,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, RnisEmulatorResultMessage]] | None:
        """Read up to ``count`` messages for this consumer group.

        NOTE(review): the default ``block=60`` differs from the other
        workers in this package (which use 0 = block forever) β€” confirm
        it is intentional.
        """
        return await self.repo.pop(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="βœ… ΠŸΠΎΠ΄Ρ‚Π²Π΅Ρ€ΠΆΠ΄Π΅Π½ΠΈΠ΅ сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка подтвСрТдСния сообщСния Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str) -> None:
        """Acknowledge one stream message for this worker's consumer group."""
        await self.repo.ack(self.group_name, message_id)

    @retry_forever(
        start_message="πŸ” Поиск Π·Π°Π²ΠΈΡΡˆΠΈΡ… сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="🚫 Ошибка ΠΏΡ€ΠΈ auto-claim сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, RnisEmulatorResultMessage]]:
        """Re-claim messages idle longer than ``min_idle_time`` ms."""
        return await self.repo.claim_reassign(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="πŸ‘₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³Ρ€ΡƒΠΏΠΏΡ‹ ΠΏΠΎΡ‚Ρ€Π΅Π±ΠΈΡ‚Π΅Π»Π΅ΠΉ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="❌ Ошибка создания Π³Ρ€ΡƒΠΏΠΏΡ‹ Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True) -> None:
        """Ensure this worker's consumer group (and optionally the stream) exists."""
        await self.repo.create_consumer_group(
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="🧹 ΠžΡ‡ΠΈΡΡ‚ΠΊΠ° сообщСний ΡΡ‚Π°Ρ€ΡˆΠ΅ {retention} Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
        error_message="⚠️ Ошибка ΠΏΡ€ΠΈ очисткС сообщСний Π² ΠΏΠΎΡ‚ΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Drop messages older than ``retention``; return the removed count."""
        return await self.repo.trim_by_age(retention)

    @retry_forever(
        start_message="πŸ—‘οΈ Полная очистка ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
        error_message="❌ Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ очисткС ΠΏΠΎΡ‚ΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Remove every message from the stream (destructive)."""
        await self.repo.delete_all()
@@ -0,0 +1,7 @@
1
+ import enum
2
+
3
+
4
class DcOperatorStatus(str, enum.Enum):
    """Status values for a DC (diagnostic card) operator record."""

    ACTIVE = "ACTIVE"
    PAUSED = "PAUSED"
    CANCELLED = "CANCELLED"
@@ -0,0 +1,8 @@
1
+ from enum import Enum
2
+
3
+
4
class PassAllowedZone(str, Enum):
    """Moscow traffic zones a pass may permit (values are the Cyrillic codes)."""

    MKAD = "ΠœΠšΠΠ”"
    SK = "БК"
    TTK = "ВВК"
    MO = "МО"
@@ -0,0 +1,14 @@
1
+ from enum import Enum
2
+
3
+
4
+ class PassSeries(str, Enum):
5
+ AA = "АА"
6
+ BA = "БА"
7
+ AB = "АБ"
8
+ BB = "Π‘Π‘"
9
+ MB = "ΠœΠ‘"
10
+ MK = "МК"
11
+ MA = "МА"
12
+ MO = "МО"
13
+ II = "Π―Π―"
14
+ MOJD = "ΠœΠžΠ–Π”"
@@ -0,0 +1,6 @@
1
+ from enum import Enum
2
+
3
+
4
class PassTimeOfDate(str, Enum):
    """Time-of-day window a pass is valid for."""

    DAY = "DAY"
    NIGHT = "NIGHT"
@@ -0,0 +1,19 @@
1
+ from enum import Enum
2
+
3
+
4
+ class PolicySeries(str, Enum):
5
+ XXX = "Π₯Π₯Π₯"
6
+ TTT = "Π’Π’Π’"
7
+ AAA = "ААА"
8
+ AAV = "ААВ"
9
+ AAK = "ААК"
10
+ AAM = "ААМ"
11
+ AAN = "ААН"
12
+ AAS = "ААБ"
13
+ VVV = "Π’Π’Π’"
14
+ EEE = "Π•Π•Π•"
15
+ KKK = "ККК"
16
+ MMM = "МММ"
17
+ NNN = "ННН"
18
+ RRR = "Π Π Π "
19
+ SSS = "Π‘Π‘Π‘"
@@ -0,0 +1,7 @@
1
+ from enum import Enum
2
+
3
+
4
class PolicyStatus(str, Enum):
    """Lifecycle status of an insurance policy."""

    ACTIVE = "ACTIVE"
    WAITING_ACTIVATION = "WAITING_ACTIVATION"
    EXPIRED = "EXPIRED"