tp-shared 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tp_shared-0.1.0/PKG-INFO +80 -0
- tp_shared-0.1.0/README.md +65 -0
- tp_shared-0.1.0/pyproject.toml +46 -0
- tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/repos/autoins_results_ack_list_queue_repo.py +14 -0
- tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/schemas/autoins_result_message.py +34 -0
- tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/worker_services/base_autoins_results_ack_list_queue_worker_service.py +34 -0
- tp_shared-0.1.0/src/tp_shared/base/base_message.py +5 -0
- tp_shared-0.1.0/src/tp_shared/gibdd_service/repo/gibdd_dc_results_stream_queue_repo.py +17 -0
- tp_shared-0.1.0/src/tp_shared/gibdd_service/schemas/gibdd_dc_result_message.py +39 -0
- tp_shared-0.1.0/src/tp_shared/gibdd_service/worker_services/base_gibdd_dc_results_stream_queue_worker_service.py +122 -0
- tp_shared-0.1.0/src/tp_shared/mos_passes_service/repo/mos_passes_results_stream_queue_repo.py +17 -0
- tp_shared-0.1.0/src/tp_shared/mos_passes_service/schemas/mos_passes_result_message.py +23 -0
- tp_shared-0.1.0/src/tp_shared/mos_passes_service/worker_services/base_mos_passes_results_stream_queue_worker_service.py +109 -0
- tp_shared-0.1.0/src/tp_shared/nsis_service/repos/nsis_results_ack_list_queue_repo.py +13 -0
- tp_shared-0.1.0/src/tp_shared/nsis_service/schemas/nsis_result_message.py +23 -0
- tp_shared-0.1.0/src/tp_shared/nsis_service/types/nsis_task_type.py +6 -0
- tp_shared-0.1.0/src/tp_shared/nsis_service/worker_services/base_nsis_results_queue_worker_service.py +32 -0
- tp_shared-0.1.0/src/tp_shared/policies_service/repos/policies_event_stream_queue_repo.py +17 -0
- tp_shared-0.1.0/src/tp_shared/policies_service/schemas/policies_result_message.py +28 -0
- tp_shared-0.1.0/src/tp_shared/policies_service/worker_services/base_policies_results_queue_worker_service.py +109 -0
- tp_shared-0.1.0/src/tp_shared/rnis_check_service/repos/rnis_check_results_stream_queue_repo.py +17 -0
- tp_shared-0.1.0/src/tp_shared/rnis_check_service/schemas/rnis_check_result_message.py +9 -0
- tp_shared-0.1.0/src/tp_shared/rnis_check_service/worker_services/base_rnis_results_queue_worker_service.py +109 -0
- tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/repos/rnis_emulator_results_stream_queue_repo.py +17 -0
- tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/schemas/rnis_emulator_result_message.py +57 -0
- tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/types/rnis_emulator_types.py +35 -0
- tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/worker_services/rnis_emulator_results_stream_worker_service.py +102 -0
- tp_shared-0.1.0/src/tp_shared/types/dc_operator_status.py +7 -0
- tp_shared-0.1.0/src/tp_shared/types/pass_allowed_zone.py +8 -0
- tp_shared-0.1.0/src/tp_shared/types/pass_series.py +14 -0
- tp_shared-0.1.0/src/tp_shared/types/pass_time_of_date.py +6 -0
- tp_shared-0.1.0/src/tp_shared/types/policy_series.py +19 -0
- tp_shared-0.1.0/src/tp_shared/types/policy_status.py +7 -0
tp_shared-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: tp-shared
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary:
|
|
5
|
+
Author: Developer
|
|
6
|
+
Author-email: front-gold@mail.ru
|
|
7
|
+
Requires-Python: >=3.12,<=3.14
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
11
|
+
Requires-Dist: pydantic (>=2.11.7,<3.0.0)
|
|
12
|
+
Requires-Dist: tp-helper (>=0.4.62,<0.5.0)
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
|
|
15
|
+
# π§© tp-shared
|
|
16
|
+
|
|
17
|
+
ΠΠ±ΡΠΈΠΉ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ ΡΡ
Π΅ΠΌ Π΄Π»Ρ ΠΈΡΠΏΠΎΠ»ΡΠ·ΠΎΠ²Π°Π½ΠΈΡ Π² Π½Π΅ΡΠΊΠΎΠ»ΡΠΊΠΈΡ
ΠΏΡΠΎΠ΅ΠΊΡΠ°Ρ
.
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
## Π£ΡΡΠ°Π½ΠΎΠ²ΠΊΠ°:
|
|
22
|
+
`poetry add tp-shared`
|
|
23
|
+
|
|
24
|
+
## ΠΡΠΈΡΡΠΊΠ° ΠΏΡΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠΈ
|
|
25
|
+
```
|
|
26
|
+
poetry cache clear pypi --all --no-interaction; poetry add tp-shared@latest
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
```
|
|
30
|
+
poetry cache clear pypi --all --no-interaction && poetry add tp-shared@latest
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## ΠΡΠ±Π»ΠΈΠΊΠ°ΡΠΈΡ:
|
|
34
|
+
Π‘ΠΎΠ±ΠΈΡΠ°Π΅Ρ ΠΈ Π·Π°Π³ΡΡΠΆΠ°Π΅Ρ ΡΠΎΠ±ΡΠ°Π½Π½ΡΠΉ ΠΏΠ°ΠΊΠ΅Ρ Π² PyPI.
|
|
35
|
+
|
|
36
|
+
`poetry publish --build`
|
|
37
|
+
|
|
38
|
+
## Π‘ΡΡΡΠΊΡΡΡΠ° ΠΏΡΠΎΠ΅ΠΊΡΠ°
|
|
39
|
+
|
|
40
|
+
**messages**
|
|
41
|
+
-------------------------
|
|
42
|
+
Π‘Ρ
Π΅ΠΌΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΎΡ ΡΠ΅ΡΠ²ΠΈΡΠΎΠ²
|
|
43
|
+
|
|
44
|
+
ΠΡΠΈΠΌΠ΅Ρ ΠΈΠΌΠΏΠΎΡΡΠ°
|
|
45
|
+
|
|
46
|
+
from tp_shared_schemas.messages import GibddDcResultMessage
|
|
47
|
+
|
|
48
|
+
Π ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΏΠ°ΠΏΠΊΠ΅ Π»Π΅ΠΆΠ°Ρ ΡΠΎΠΎΡΠ²Π΅ΡΡΡΠ²ΡΡΡΠΈΠ΅ Pydantic-ΡΡ
Π΅ΠΌΡ, ΡΠ³ΡΡΠΏΠΏΠΈΡΠΎΠ²Π°Π½Π½ΡΠ΅ ΠΏΠΎ ΡΡΠ½ΠΊΡΠΈΠΎΠ½Π°Π»Ρ.
|
|
49
|
+
--------------------------
|
|
50
|
+
---
|
|
51
|
+
|
|
52
|
+
## ΠΠ°ΠΊ ΠΏΠΎΠ΄ΠΊΠ»ΡΡΠΈΡΡ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ ΠΊ ΡΡΡΠ΅ΡΡΠ²ΡΡΡΠ΅ΠΌΡ ΠΏΡΠΎΠ΅ΠΊΡΡ
|
|
53
|
+
|
|
54
|
+
ΠΡΠ»ΠΈ Ρ Π²Π°Ρ Π΅ΡΡΡ Π»ΠΎΠΊΠ°Π»ΡΠ½ΡΠΉ ΠΏΡΠΎΠ΅ΠΊΡ ΠΈ Π²Ρ Ρ
ΠΎΡΠΈΡΠ΅ Π΄ΠΎΠ±Π°Π²ΠΈΡΡ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ Ρ ΠΎΠ±ΡΠΈΠΌΠΈ ΡΡ
Π΅ΠΌΠ°ΠΌΠΈ, Π²ΡΠΏΠΎΠ»Π½ΠΈΡΠ΅ ΠΊΠΎΠΌΠ°Π½Π΄Ρ:
|
|
55
|
+
Π² ΡΠ°ΠΉΠ»Π΅ pyproject.toml ΠΏΡΠΎΠΏΠΈΡΠ°ΡΡ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡΡ:
|
|
56
|
+
1)
|
|
57
|
+
|
|
58
|
+
```toml
|
|
59
|
+
[tool.poetry.dependencies]
|
|
60
|
+
tp-shared = { git = "https://gitlab.8525.ru/modules/tp-shared.git", rev = "main" }
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
poetry add git
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
poetry add git+https://gitlab.8525.ru/modules/tp-shared.git
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
2) ΠΡΠΏΠΎΠ»Π½ΠΈΡΡ ΠΊΠΎΠΌΠ°Π½Π΄Ρ poetry install ΠΈΠ»ΠΈ poetry update
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
## Π Π΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ
|
|
73
|
+
```
|
|
74
|
+
cd existing_repo
|
|
75
|
+
git remote add origin https://gitlab.8525.ru/modules/tp-shared.git
|
|
76
|
+
git branch -M main
|
|
77
|
+
git push -uf origin main
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# π§© tp-shared
|
|
2
|
+
|
|
3
|
+
ΠΠ±ΡΠΈΠΉ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ ΡΡ
Π΅ΠΌ Π΄Π»Ρ ΠΈΡΠΏΠΎΠ»ΡΠ·ΠΎΠ²Π°Π½ΠΈΡ Π² Π½Π΅ΡΠΊΠΎΠ»ΡΠΊΠΈΡ
ΠΏΡΠΎΠ΅ΠΊΡΠ°Ρ
.
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
## Π£ΡΡΠ°Π½ΠΎΠ²ΠΊΠ°:
|
|
8
|
+
`poetry add tp-shared`
|
|
9
|
+
|
|
10
|
+
## ΠΡΠΈΡΡΠΊΠ° ΠΏΡΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠΈ
|
|
11
|
+
```
|
|
12
|
+
poetry cache clear pypi --all --no-interaction; poetry add tp-shared@latest
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
```
|
|
16
|
+
poetry cache clear pypi --all --no-interaction && poetry add tp-shared@latest
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## ΠΡΠ±Π»ΠΈΠΊΠ°ΡΠΈΡ:
|
|
20
|
+
Π‘ΠΎΠ±ΠΈΡΠ°Π΅Ρ ΠΈ Π·Π°Π³ΡΡΠΆΠ°Π΅Ρ ΡΠΎΠ±ΡΠ°Π½Π½ΡΠΉ ΠΏΠ°ΠΊΠ΅Ρ Π² PyPI.
|
|
21
|
+
|
|
22
|
+
`poetry publish --build`
|
|
23
|
+
|
|
24
|
+
## Π‘ΡΡΡΠΊΡΡΡΠ° ΠΏΡΠΎΠ΅ΠΊΡΠ°
|
|
25
|
+
|
|
26
|
+
**messages**
|
|
27
|
+
-------------------------
|
|
28
|
+
Π‘Ρ
Π΅ΠΌΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΎΡ ΡΠ΅ΡΠ²ΠΈΡΠΎΠ²
|
|
29
|
+
|
|
30
|
+
ΠΡΠΈΠΌΠ΅Ρ ΠΈΠΌΠΏΠΎΡΡΠ°
|
|
31
|
+
|
|
32
|
+
from tp_shared_schemas.messages import GibddDcResultMessage
|
|
33
|
+
|
|
34
|
+
Π ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΏΠ°ΠΏΠΊΠ΅ Π»Π΅ΠΆΠ°Ρ ΡΠΎΠΎΡΠ²Π΅ΡΡΡΠ²ΡΡΡΠΈΠ΅ Pydantic-ΡΡ
Π΅ΠΌΡ, ΡΠ³ΡΡΠΏΠΏΠΈΡΠΎΠ²Π°Π½Π½ΡΠ΅ ΠΏΠΎ ΡΡΠ½ΠΊΡΠΈΠΎΠ½Π°Π»Ρ.
|
|
35
|
+
--------------------------
|
|
36
|
+
---
|
|
37
|
+
|
|
38
|
+
## ΠΠ°ΠΊ ΠΏΠΎΠ΄ΠΊΠ»ΡΡΠΈΡΡ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ ΠΊ ΡΡΡΠ΅ΡΡΠ²ΡΡΡΠ΅ΠΌΡ ΠΏΡΠΎΠ΅ΠΊΡΡ
|
|
39
|
+
|
|
40
|
+
ΠΡΠ»ΠΈ Ρ Π²Π°Ρ Π΅ΡΡΡ Π»ΠΎΠΊΠ°Π»ΡΠ½ΡΠΉ ΠΏΡΠΎΠ΅ΠΊΡ ΠΈ Π²Ρ Ρ
ΠΎΡΠΈΡΠ΅ Π΄ΠΎΠ±Π°Π²ΠΈΡΡ ΡΠ΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ Ρ ΠΎΠ±ΡΠΈΠΌΠΈ ΡΡ
Π΅ΠΌΠ°ΠΌΠΈ, Π²ΡΠΏΠΎΠ»Π½ΠΈΡΠ΅ ΠΊΠΎΠΌΠ°Π½Π΄Ρ:
|
|
41
|
+
Π² ΡΠ°ΠΉΠ»Π΅ pyproject.toml ΠΏΡΠΎΠΏΠΈΡΠ°ΡΡ Π·Π°Π²ΠΈΡΠΈΠΌΠΎΡΡΡ:
|
|
42
|
+
1)
|
|
43
|
+
|
|
44
|
+
```toml
|
|
45
|
+
[tool.poetry.dependencies]
|
|
46
|
+
tp-shared = { git = "https://gitlab.8525.ru/modules/tp-shared.git", rev = "main" }
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
poetry add git
|
|
50
|
+
|
|
51
|
+
```bash
|
|
52
|
+
poetry add git+https://gitlab.8525.ru/modules/tp-shared.git
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
2) ΠΡΠΏΠΎΠ»Π½ΠΈΡΡ ΠΊΠΎΠΌΠ°Π½Π΄Ρ poetry install ΠΈΠ»ΠΈ poetry update
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
## Π Π΅ΠΏΠΎΠ·ΠΈΡΠΎΡΠΈΠΉ
|
|
59
|
+
```
|
|
60
|
+
cd existing_repo
|
|
61
|
+
git remote add origin https://gitlab.8525.ru/modules/tp-shared.git
|
|
62
|
+
git branch -M main
|
|
63
|
+
git push -uf origin main
|
|
64
|
+
```
|
|
65
|
+
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "tp-shared"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = ""
|
|
5
|
+
authors = [
|
|
6
|
+
{name = "Developer",email = "front-gold@mail.ru"}
|
|
7
|
+
]
|
|
8
|
+
readme = "README.md"
|
|
9
|
+
requires-python = ">=3.12,<=3.14"
|
|
10
|
+
dependencies = [
|
|
11
|
+
"pydantic (>=2.11.7,<3.0.0)",
|
|
12
|
+
"tp-helper (>=0.4.62,<0.5.0)"
|
|
13
|
+
]
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
[build-system]
|
|
17
|
+
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
|
18
|
+
build-backend = "poetry.core.masonry.api"
|
|
19
|
+
|
|
20
|
+
[tool.ruff]
|
|
21
|
+
line-length = 88
|
|
22
|
+
target-version = "py313"
|
|
23
|
+
fix = true
|
|
24
|
+
|
|
25
|
+
[tool.ruff.lint]
|
|
26
|
+
select = [
|
|
27
|
+
"E", # pycodestyle (ΡΡΠΈΠ»Ρ)
|
|
28
|
+
"F", # pyflakes (ΠΎΡΠΈΠ±ΠΊΠΈ ΠΈΡΠΏΠΎΠ»Π½Π΅Π½ΠΈΡ)
|
|
29
|
+
"I", # isort (ΡΠΎΡΡΠΈΡΠΎΠ²ΠΊΠ° ΠΈΠΌΠΏΠΎΡΡΠΎΠ²)
|
|
30
|
+
"UP", # pyupgrade (ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠ΅ ΡΠΈΠ½ΡΠ°ΠΊΡΠΈΡΠ°)
|
|
31
|
+
"B", # bugbear (ΠΏΠΎΡΠ΅Π½ΡΠΈΠ°Π»ΡΠ½ΡΠ΅ Π±Π°Π³ΠΈ)
|
|
32
|
+
"A", # flake8-builtins (ΠΊΠΎΠ½ΡΠ»ΠΈΠΊΡΡ Ρ Π²ΡΡΡΠΎΠ΅Π½Π½ΡΠΌΠΈ ΠΈΠΌΠ΅Π½Π°ΠΌΠΈ)
|
|
33
|
+
"C4", # flake8-comprehensions
|
|
34
|
+
"SIM", # flake8-simplify
|
|
35
|
+
]
|
|
36
|
+
ignore = [
|
|
37
|
+
"B008", # Depends(...) Π² Π°ΡΠ³ΡΠΌΠ΅Π½ΡΠ°Ρ
FastAPI β Π±Π΅Π·ΠΎΠΏΠ°ΡΠ½ΠΎ ΠΈ ΡΠ°ΡΠΏΡΠΎΡΡΡΠ°Π½ΡΠ½Π½ΠΎ
|
|
38
|
+
"E501", # ΠΠ»ΠΈΠ½Π° ΡΡΡΠΎΠΊΠΈ β ΠΈΠ³Π½ΠΎΡΠΈΡΡΠ΅ΠΌ, Ρ.ΠΊ. ΠΈΡΠΏΠΎΠ»ΡΠ·ΡΠ΅ΠΌ black Ρ line-length = 88
|
|
39
|
+
"SIM117", # ΠΠ»ΠΎΠΆΠ΅Π½Π½ΡΠ΅ async with β ΡΠΈΡΠ°Π΅ΠΌΠΎΡΡΡ Π² Π½Π΅ΠΊΠΎΡΠΎΡΡΡ
ΡΠ»ΡΡΠ°ΡΡ
Π²Π°ΠΆΠ½Π΅Π΅
|
|
40
|
+
]
|
|
41
|
+
|
|
42
|
+
[tool.poetry.group.dev.dependencies]
|
|
43
|
+
ruff = "^0.13.0"
|
|
44
|
+
pre-commit = "^4.3.0"
|
|
45
|
+
pylint = "^3.3.8"
|
|
46
|
+
|
tp_shared-0.1.0/src/tp_shared/autoins_mpg_service/repos/autoins_results_ack_list_queue_repo.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class AutoinsResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Ack-list queue repository for autoins MPG service results.

    Messages popped from the queue are parsed as ``AutoinsResultMessage``.
    """

    # Redis key of the ack-list queue; presumably consumed by
    # BaseAckListQueueRepo — TODO confirm against tp_helper.
    QUEUE_NAME = "autoins:service:results:ack:list"

    def __init__(self, redis_client: Redis):
        super().__init__(redis_client=redis_client, message_type=AutoinsResultMessage)
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from datetime import date

from pydantic import BaseModel

from tp_shared.types.policy_series import PolicySeries


class AutoinsResultPolicy(BaseModel):
    """One insurance policy record returned by the autoins MPG service."""

    insurer_name: str
    reg_number: str
    series: PolicySeries
    number: str
    start_date: date
    end_date: date
    # Up to three optional usage periods within the policy term.
    period1_start: date | None = None
    period1_end: date | None = None
    period2_start: date | None = None
    period2_end: date | None = None
    period3_start: date | None = None
    period3_end: date | None = None
    # Vehicle identification — any of these may be absent in the upstream data.
    vin: str | None = None
    body_number: str | None = None
    chassis_number: str | None = None
    car_mark: str | None = None
    car_model: str | None = None
    # Identifier and state as reported by the external system — TODO confirm
    # exact semantics against the upstream autoins API.
    external_policy_id: int | None = None
    policy_state: str | None = None
    policy_status_t_use: str | None = None


class AutoinsResultMessage(BaseModel):
    """Queue message with all policies found for one (series, number) query."""

    series: PolicySeries
    number: str
    # Empty list means the lookup succeeded but found no policies.
    policies: list[AutoinsResultPolicy] = []
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.autoins_mpg_service.repos.autoins_results_ack_list_queue_repo import (
    AutoinsResultsAckListQueueRepo,
)
from tp_shared.autoins_mpg_service.schemas.autoins_result_message import (
    AutoinsResultMessage,
)


class BaseAutoinsResultsAckListQueueWorkerService(
    BaseWorkerService, AutoinsResultsAckListQueueRepo
):
    """Worker-side wrapper over the autoins results ack-list queue.

    Adds infinite-retry behaviour (via ``retry_forever``) around the bare
    repo operations so transient Redis failures do not kill the worker.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        # Both bases are initialised explicitly because this class uses
        # multiple inheritance with differing __init__ signatures.
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        AutoinsResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="π₯ Π§ΡΠ΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΡΡΠ΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> AutoinsResultMessage | None:
        # Explicit repo call (not super()) to bypass any MRO ambiguity.
        return await AutoinsResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="ποΈ ack Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ack Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def ack(self) -> None:
        # Acknowledge the most recently popped message — presumably the repo
        # tracks it internally; TODO confirm against tp_helper.
        return await AutoinsResultsAckListQueueRepo.ack(self)
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class GibddDcResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for GIBDD diagnostic-card lookup results."""

    # Redis stream key shared by all producers/consumers of this queue.
    QUEUE_NAME = "gibdd:service:dc:results:stream"

    def __init__(self, redis_client: Redis):
        super().__init__(
            redis_client=redis_client,
            schema=GibddDcResultMessage,
            queue_name=self.QUEUE_NAME,
        )
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from datetime import date

from pydantic import ConfigDict

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.dc_operator_status import DcOperatorStatus


class GibddDcResultOperator(BaseMessage):
    """Diagnostic-card operator (inspection station) details."""

    operator_id: int
    status: DcOperatorStatus
    name: str
    address_line: str
    phone_number: str
    email: str
    site: str
    # Set only when the operator's accreditation was revoked.
    canceled_date: date | None
    # Unix timestamp of the revocation — TODO confirm units (s vs ms).
    canceled_at: int | None


class GibddDcResultCard(BaseMessage):
    """A single diagnostic card found for a VIN."""

    card_number: str
    vin: str
    start_date: date
    end_date: date
    odometer_value: int
    is_active: bool
    # Unix timestamps of record lifecycle — TODO confirm units (s vs ms).
    updated_at: int
    created_at: int

    operator: GibddDcResultOperator

    # from_attributes allows building from ORM objects; populate_by_name
    # accepts field names as well as any aliases.
    model_config = ConfigDict(from_attributes=True, populate_by_name=True)


class GibddDcResultMessage(BaseMessage):
    """Stream message with all diagnostic cards found for one VIN."""

    # Message schema version for forward compatibility.
    version: str = "1.0"
    vin: str
    # Empty list means the lookup succeeded but found no cards.
    diagnostic_cards: list[GibddDcResultCard] = []
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.gibdd_service.repo.gibdd_dc_results_stream_queue_repo import (
    GibddDcResultsStreamQueueRepo,
)
from tp_shared.gibdd_service.schemas.gibdd_dc_result_message import (
    GibddDcResultMessage,
)


class BaseGibddDcResultsStreamQueueWorkerService(
    GibddDcResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side wrapper over the GIBDD DC results stream.

    Wraps the bare repo operations in ``retry_forever`` so transient Redis
    failures do not kill the worker, and binds a consumer-group identity
    (group_name / consumer_name) for stream reads.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        # Both bases are initialised explicitly because this class uses
        # multiple inheritance with differing __init__ signatures.
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        GibddDcResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="ΠΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def add(self, message: GibddDcResultMessage) -> None:
        """Publish a result message to the stream."""
        await GibddDcResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠΠΎΠ»ΡΡΠ΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ»ΡΡΠ΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, GibddDcResultMessage]] | None:
        """Read up to *count* messages for this consumer.

        ``block=0`` presumably blocks indefinitely (XREADGROUP semantics) —
        TODO confirm against tp_helper. When *prioritize_claimed* is true,
        pending messages idle longer than *min_idle_time* ms are claimed first.
        """
        return await GibddDcResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message for this consumer group."""
        await GibddDcResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="ΠΠΎΠΈΡΠΊ Π·Π°Π²ΠΈΡΡΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ auto-claim ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, GibddDcResultMessage]]:
        """Claim messages stuck with other consumers for longer than
        *min_idle_time* ms and reassign them to this consumer."""
        return await GibddDcResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³ΡΡΠΏΠΏΡ ΠΏΠΎΡΡΠ΅Π±ΠΈΡΠ΅Π»Π΅ΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΡΠΎΠ·Π΄Π°Π½ΠΈΡ Π³ΡΡΠΏΠΏΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Idempotently create the consumer group (from stream start, id "0")."""
        try:
            await GibddDcResultsStreamQueueRepo.create_consumer_group(
                self,
                group_name=self.group_name,
                create_stream=create_stream,
                stream_id="0",
            )
        except Exception as e:
            # The group usually already exists (Redis BUSYGROUP) — that is
            # benign, and letting it propagate would make retry_forever loop
            # endlessly. Log it instead of printing to stdout.
            # NOTE(review): self.logger is presumably set by BaseWorkerService
            # — confirm against tp_helper.
            self.logger.warning(
                "create_consumer_group on %s failed (likely already exists): %s",
                self.group_name,
                e,
            )

    @retry_forever(
        start_message="ΠΡΠΈΡΡΠΊΠ° ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΡΡΠ°ΡΡΠ΅ {retention} Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΎΡΠΈΡΡΠΊΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than *retention* via XTRIM MINID.

        :param retention: maximum message age, e.g. ``timedelta(days=1)``
        :return: number of deleted messages
        """
        return await GibddDcResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="ΠΠΎΠ»Π½Π°Ρ ΠΎΡΠΈΡΡΠΊΠ° ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΎΡΠΈΡΡΠΊΠ΅ ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Delete all consumer groups and the stream itself (full wipe)."""
        await GibddDcResultsStreamQueueRepo.delete_all(self)
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class MosPassesResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for Moscow truck-pass lookup results."""

    # Redis stream key shared by all producers/consumers of this queue.
    QUEUE_NAME = "mos:passes:service:results:stream"

    def __init__(self, redis_client: Redis):
        super().__init__(
            redis_client=redis_client,
            schema=MosPassesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from datetime import date

from tp_shared.base.base_message import BaseMessage
from tp_shared.types.pass_allowed_zone import PassAllowedZone
from tp_shared.types.pass_series import PassSeries
from tp_shared.types.pass_time_of_date import PassTimeOfDate


class MosPassesResultPass(BaseMessage):
    """A single truck pass found for a registration number."""

    reg_number: str
    time_of_day: PassTimeOfDate
    series: PassSeries
    number: str
    allowed_zone: PassAllowedZone
    start_date: date
    end_date: date
    # Set only when the pass was revoked before its end date.
    cancel_date: date | None


class MosPassesResultMessage(BaseMessage):
    """Stream message with all passes found for one registration number."""

    # Message schema version for forward compatibility.
    version: str = "1.0"
    reg_number: str
    # Empty list means the lookup succeeded but found no passes.
    passes: list[MosPassesResultPass] = []
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
from datetime import timedelta
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.mos_passes_service.repo.mos_passes_results_stream_queue_repo import (
    MosPassesResultsStreamQueueRepo,
)
from tp_shared.mos_passes_service.schemas.mos_passes_result_message import (
    MosPassesResultMessage,
)


class BaseMosPassesResultsStreamQueueWorkerService(
    MosPassesResultsStreamQueueRepo, BaseWorkerService
):
    """Worker-side wrapper over the Moscow passes results stream.

    Wraps the bare repo operations in ``retry_forever`` so transient Redis
    failures do not kill the worker, and binds a consumer-group identity
    (group_name / consumer_name) for stream reads.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        # Both bases are initialised explicitly because this class uses
        # multiple inheritance with differing __init__ signatures.
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        MosPassesResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="ΠΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def add(self, message: MosPassesResultMessage) -> None:
        """Publish a result message to the stream."""
        await MosPassesResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="ΠΠΎΠ»ΡΡΠ΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ»ΡΡΠ΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, MosPassesResultMessage]] | None:
        """Read up to *count* messages for this consumer.

        ``block=0`` presumably blocks indefinitely (XREADGROUP semantics) —
        TODO confirm against tp_helper. When *prioritize_claimed* is true,
        pending messages idle longer than *min_idle_time* ms are claimed first.
        """
        return await MosPassesResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="ΠΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message for this consumer group."""
        await MosPassesResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="ΠΠΎΠΈΡΠΊ Π·Π°Π²ΠΈΡΡΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ auto-claim ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, MosPassesResultMessage]]:
        """Claim messages stuck with other consumers for longer than
        *min_idle_time* ms and reassign them to this consumer."""
        return await MosPassesResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³ΡΡΠΏΠΏΡ ΠΏΠΎΡΡΠ΅Π±ΠΈΡΠ΅Π»Π΅ΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΡΠΎΠ·Π΄Π°Π½ΠΈΡ Π³ΡΡΠΏΠΏΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create the consumer group for this stream.

        NOTE(review): unlike the GIBDD worker, errors (e.g. Redis BUSYGROUP
        when the group already exists) are not swallowed here and will be
        retried by retry_forever — confirm this is intended.
        """
        await MosPassesResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="ΠΡΠΈΡΡΠΊΠ° ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΡΡΠ°ΡΡΠ΅ {retention} Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΎΡΠΈΡΡΠΊΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Delete messages older than *retention*; return the deleted count."""
        return await MosPassesResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="ΠΠΎΠ»Π½Π°Ρ ΠΎΡΠΈΡΡΠΊΠ° ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
        error_message="ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΎΡΠΈΡΡΠΊΠ΅ ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Delete all consumer groups and the stream itself (full wipe)."""
        await MosPassesResultsStreamQueueRepo.delete_all(self)
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from redis.asyncio import Redis
from tp_helper.base_queues.base_ack_list_queue_repo import BaseAckListQueueRepo

from tp_shared.nsis_service.schemas.nsis_result_message import (
    NsisResultMessage,
)


class NsisResultsAckListQueueRepo(BaseAckListQueueRepo):
    """Ack-list queue repository for NSIS service results.

    Messages popped from the queue are parsed as ``NsisResultMessage``.
    """

    # Redis key of the ack-list queue.
    QUEUE_NAME = "nsis:service:results:ack:list"

    def __init__(self, redis_client: Redis):
        # Keyword argument for consistency with the sibling repos
        # (AutoinsResultsAckListQueueRepo passes redis_client by keyword).
        super().__init__(redis_client=redis_client, message_type=NsisResultMessage)
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from datetime import date

from tp_helper.base_items.base_schema import BaseSchema

from tp_shared.nsis_service.types.nsis_task_type import NsisTaskType
from tp_shared.types.policy_series import PolicySeries
from tp_shared.types.policy_status import PolicyStatus


class NsisResultPolicy(BaseSchema):
    """One policy record returned by the NSIS lookup."""

    status: PolicyStatus
    series: PolicySeries
    number: str
    start_date: date | None = None
    end_date: date | None = None
    insurer_name: str


class NsisResultMessage(BaseSchema):
    """Queue message with the outcome of one NSIS query."""

    task_type: NsisTaskType
    # The original query string — presumably a reg number or VIN depending
    # on task_type; TODO confirm against the NSIS service.
    query: str
    # Date the upstream request was made (result freshness marker).
    request_date: date
    # Empty list means the lookup succeeded but found no policies.
    policies: list[NsisResultPolicy] = []
|
tp_shared-0.1.0/src/tp_shared/nsis_service/worker_services/base_nsis_results_queue_worker_service.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from logging import Logger

from redis.asyncio import Redis
from tp_helper.base_items.base_worker_service import BaseWorkerService
from tp_helper.decorators.decorator_retry_forever import retry_forever

from tp_shared.nsis_service.repos.nsis_results_ack_list_queue_repo import (
    NsisResultsAckListQueueRepo,
)
from tp_shared.nsis_service.schemas.nsis_result_message import NsisResultMessage


class BaseNsisResultsAckListQueueWorkerService(
    NsisResultsAckListQueueRepo, BaseWorkerService
):
    """Worker-side wrapper over the NSIS results ack-list queue.

    Adds infinite-retry behaviour (via ``retry_forever``) around the bare
    repo operations so transient Redis failures do not kill the worker.
    """

    def __init__(self, redis_client: Redis, logger: Logger):
        # Both bases are initialised explicitly because this class uses
        # multiple inheritance with differing __init__ signatures.
        BaseWorkerService.__init__(self, logger=logger, redis_client=redis_client)
        NsisResultsAckListQueueRepo.__init__(self, redis_client=redis_client)

    @retry_forever(
        start_message="π₯ ΠΠ°ΡΠ°Π»ΠΎ ΡΡΠ΅Π½ΠΈΡ Π·Π°Π΄Π°Ρ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΡΡΠ΅Π½ΠΈΠΈ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(self) -> NsisResultMessage | None:
        # Explicit repo call (not super()) to bypass any MRO ambiguity.
        return await NsisResultsAckListQueueRepo.pop(self)

    @retry_forever(
        start_message="ποΈ Π£Π΄Π°Π»Π΅Π½ΠΈΠ΅ Π·Π°Π΄Π°Ρ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΡΠ΄Π°Π»Π΅Π½ΠΈΠΈ Π·Π°Π΄Π°Ρ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def ack(self) -> None:
        # Acknowledge the most recently popped message — presumably tracked
        # internally by the repo; TODO confirm against tp_helper.
        return await NsisResultsAckListQueueRepo.ack(self)
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from redis.asyncio import Redis
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo

from tp_shared.policies_service.schemas.policies_result_message import (
    PoliciesResultMessage,
)


class PoliciesResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for insurance-policy lookup results."""

    # Redis stream key shared by all producers/consumers of this queue.
    QUEUE_NAME = "policies:service:results:stream"

    def __init__(self, redis_client: Redis):
        super().__init__(
            redis_client=redis_client,
            schema=PoliciesResultMessage,
            queue_name=self.QUEUE_NAME,
        )
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from datetime import date
|
|
2
|
+
|
|
3
|
+
from tp_shared.base.base_message import BaseMessage
|
|
4
|
+
from tp_shared.types.policy_series import PolicySeries
|
|
5
|
+
from tp_shared.types.policy_status import PolicyStatus
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class PoliciesResultPolicy(BaseMessage):
|
|
9
|
+
series: PolicySeries
|
|
10
|
+
number: str
|
|
11
|
+
status: PolicyStatus
|
|
12
|
+
start_date: date | None = None
|
|
13
|
+
end_date: date | None = None
|
|
14
|
+
period1_start: date | None = None
|
|
15
|
+
period1_end: date | None = None
|
|
16
|
+
period2_start: date | None = None
|
|
17
|
+
period2_end: date | None = None
|
|
18
|
+
period3_start: date | None = None
|
|
19
|
+
period3_end: date | None = None
|
|
20
|
+
vin: str | None = None
|
|
21
|
+
car_mark: str | None = None
|
|
22
|
+
car_model: str | None = None
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class PoliciesResultMessage(BaseMessage):
    """Result message carrying all policies found for one registration number."""

    version: str = "1.0"  # message schema version
    reg_number: str  # vehicle registration number the lookup ran for
    # NOTE: pydantic copies mutable defaults per instance, so the shared-[]
    # pitfall of plain classes does not apply here.
    policies: list[PoliciesResultPolicy] = []
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
from datetime import timedelta
|
|
2
|
+
from logging import Logger
|
|
3
|
+
|
|
4
|
+
from redis.asyncio import Redis
|
|
5
|
+
from tp_helper.base_items.base_worker_service import BaseWorkerService
|
|
6
|
+
from tp_helper.decorators.decorator_retry_forever import retry_forever
|
|
7
|
+
|
|
8
|
+
from tp_shared.policies_service.repos.policies_event_stream_queue_repo import (
|
|
9
|
+
PoliciesResultsStreamQueueRepo,
|
|
10
|
+
)
|
|
11
|
+
from tp_shared.policies_service.schemas.policies_result_message import (
|
|
12
|
+
PoliciesResultMessage,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BasePolicyResultsStreamQueueWorkerService(
    PoliciesResultsStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the policies results Redis stream.

    Every queue operation delegates to ``PoliciesResultsStreamQueueRepo`` and
    is wrapped in ``retry_forever``, so transient failures are retried
    indefinitely. Consumer-group reads use the ``group_name`` /
    ``consumer_name`` pair supplied at construction time.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        # Fixed annotations: defaults are None, so the type must be optional.
        group_name: str | None = None,
        consumer_name: str | None = None,
    ):
        # The two bases take different constructor arguments, so they are
        # initialized explicitly rather than via cooperative super().
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        PoliciesResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        # NOTE(review): these may stay None; the group-based operations below
        # (pop/ack/claim_reassign/create_consumer_group) presumably require
        # them to be set — confirm with callers.
        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="β ΠΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def add(self, message: PoliciesResultMessage) -> None:
        """Publish one result message to the stream."""
        await PoliciesResultsStreamQueueRepo.add(self, message)

    @retry_forever(
        start_message="π₯ ΠΠΎΠ»ΡΡΠ΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ»ΡΡΠ΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, PoliciesResultMessage]] | None:
        """Read up to ``count`` messages for this worker's consumer group.

        All parameters are forwarded unchanged to the repo implementation.
        """
        return await PoliciesResultsStreamQueueRepo.pop(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="β ΠΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message for this worker's consumer group."""
        await PoliciesResultsStreamQueueRepo.ack(self, self.group_name, message_id)

    @retry_forever(
        start_message="π ΠΠΎΠΈΡΠΊ Π·Π°Π²ΠΈΡΡΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="π« ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ auto-claim ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, PoliciesResultMessage]]:
        """Claim messages idle longer than ``min_idle_time`` for this consumer."""
        return await PoliciesResultsStreamQueueRepo.claim_reassign(
            self,
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="π₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³ΡΡΠΏΠΏΡ ΠΏΠΎΡΡΠ΅Π±ΠΈΡΠ΅Π»Π΅ΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΡΠΎΠ·Π΄Π°Π½ΠΈΡ Π³ΡΡΠΏΠΏΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group (optionally creating the stream)."""
        await PoliciesResultsStreamQueueRepo.create_consumer_group(
            self,
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="π§Ή ΠΡΠΈΡΡΠΊΠ° ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΡΡΠ°ΡΡΠ΅ {retention} Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΎΡΠΈΡΡΠΊΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Trim messages older than ``retention``; returns the trimmed count."""
        return await PoliciesResultsStreamQueueRepo.trim_by_age(self, retention)

    @retry_forever(
        start_message="ποΈ ΠΠΎΠ»Π½Π°Ρ ΠΎΡΠΈΡΡΠΊΠ° ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΎΡΠΈΡΡΠΊΠ΅ ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Delete every message in the stream."""
        await PoliciesResultsStreamQueueRepo.delete_all(self)
|
tp_shared-0.1.0/src/tp_shared/rnis_check_service/repos/rnis_check_results_stream_queue_repo.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from redis.asyncio import Redis
|
|
2
|
+
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo
|
|
3
|
+
|
|
4
|
+
from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
|
|
5
|
+
RNISCheckResultMessage,
|
|
6
|
+
)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class RNISCheckResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for RNIS-check result messages.

    Ties the shared stream-queue base to the RNIS-check results stream key
    and the ``RNISCheckResultMessage`` schema.
    """

    # Redis stream key for RNIS-check results.
    QUEUE_NAME = "rnis:check:service:results:stream"

    def __init__(self, redis_client: Redis):
        # QUEUE_NAME is resolved via self so a subclass can override the key.
        super().__init__(
            schema=RNISCheckResultMessage,
            queue_name=self.QUEUE_NAME,
            redis_client=redis_client,
        )
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
from datetime import timedelta
|
|
2
|
+
from logging import Logger
|
|
3
|
+
|
|
4
|
+
from redis.asyncio import Redis
|
|
5
|
+
from tp_helper.base_items.base_worker_service import BaseWorkerService
|
|
6
|
+
from tp_helper.decorators.decorator_retry_forever import retry_forever
|
|
7
|
+
|
|
8
|
+
from tp_shared.rnis_check_service.repos.rnis_check_results_stream_queue_repo import (
|
|
9
|
+
RNISCheckResultsStreamQueueRepo,
|
|
10
|
+
)
|
|
11
|
+
from tp_shared.rnis_check_service.schemas.rnis_check_result_message import (
|
|
12
|
+
RNISCheckResultMessage,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BaseRNISCheckResultsStreamQueueWorkerService(
    RNISCheckResultsStreamQueueRepo, BaseWorkerService
):
    """Worker facade over the RNIS-check results Redis stream.

    Re-exposes the repo's queue operations wrapped in ``retry_forever``.
    Delegation uses zero-arg ``super()``, which resolves to the repo
    implementation next in the MRO.
    """

    def __init__(
        self,
        redis_client: Redis,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        # Bases take different constructor arguments — initialize explicitly.
        BaseWorkerService.__init__(self, redis_client=redis_client, logger=logger)
        RNISCheckResultsStreamQueueRepo.__init__(self, redis_client=redis_client)

        self.group_name = group_name
        self.consumer_name = consumer_name

    @retry_forever(
        start_message="β ΠΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def add(self, message: RNISCheckResultMessage) -> None:
        """Publish one RNIS-check result to the stream."""
        await super().add(message)

    @retry_forever(
        start_message="π₯ ΠΠΎΠ»ΡΡΠ΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ»ΡΡΠ΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        block: int = 0,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, RNISCheckResultMessage]] | None:
        """Read up to ``count`` messages for this worker's consumer group."""
        return await super().pop(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="β ΠΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message for this worker's consumer group."""
        await super().ack(self.group_name, message_id)

    @retry_forever(
        start_message="π ΠΠΎΠΈΡΠΊ Π·Π°Π²ΠΈΡΡΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="π« ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ auto-claim ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, RNISCheckResultMessage]]:
        """Claim messages idle longer than ``min_idle_time`` for this consumer."""
        return await super().claim_reassign(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="π₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³ΡΡΠΏΠΏΡ ΠΏΠΎΡΡΠ΅Π±ΠΈΡΠ΅Π»Π΅ΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΡΠΎΠ·Π΄Π°Π½ΠΈΡ Π³ΡΡΠΏΠΏΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group (optionally creating the stream)."""
        await super().create_consumer_group(
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="π§Ή ΠΡΠΈΡΡΠΊΠ° ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΡΡΠ°ΡΡΠ΅ {retention} Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΎΡΠΈΡΡΠΊΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Trim messages older than ``retention``; returns the trimmed count."""
        return await super().trim_by_age(retention)

    @retry_forever(
        start_message="ποΈ ΠΠΎΠ»Π½Π°Ρ ΠΎΡΠΈΡΡΠΊΠ° ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΎΡΠΈΡΡΠΊΠ΅ ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Delete every message in the stream."""
        await super().delete_all()
|
tp_shared-0.1.0/src/tp_shared/rnis_emulator_service/repos/rnis_emulator_results_stream_queue_repo.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from redis.asyncio import Redis
|
|
2
|
+
from tp_helper.base_queues.base_stream_queue_repo import BaseStreamQueueRepo
|
|
3
|
+
|
|
4
|
+
from tp_shared.rnis_emulator_service.schemas.rnis_emulator_result_message import (
|
|
5
|
+
RnisEmulatorResultMessage,
|
|
6
|
+
)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class RnisEmulatorResultsStreamQueueRepo(BaseStreamQueueRepo):
    """Redis Stream repository for RNIS-emulator result messages."""

    # Redis stream key for emulator results.
    QUEUE_NAME = "rnis:emulator:service:results:stream"

    def __init__(self, redis_client: Redis):
        # Bind the generic stream queue to this service's schema and key;
        # self.QUEUE_NAME keeps the key overridable in subclasses.
        message_schema = RnisEmulatorResultMessage
        super().__init__(
            redis_client=redis_client,
            queue_name=self.QUEUE_NAME,
            schema=message_schema,
        )
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from datetime import date
|
|
3
|
+
|
|
4
|
+
from pydantic import model_validator
|
|
5
|
+
from tp_helper.base_items.base_schema import BaseSchema
|
|
6
|
+
|
|
7
|
+
from tp_shared.rnis_emulator_service.types.rnis_emulator_types import (
|
|
8
|
+
RnisEmulatorActionType,
|
|
9
|
+
RnisEmulatorResultType,
|
|
10
|
+
RnisEmulatorSubscriptionStatus,
|
|
11
|
+
RnisEmulatorTaskStatus,
|
|
12
|
+
)
|
|
13
|
+
from tp_shared.types.pass_time_of_date import PassTimeOfDate
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RnisEmulatorResultTask(BaseSchema):
    """Snapshot of a single emulator task, as carried in result messages."""

    task_id: uuid.UUID  # unique task id
    subscription_id: uuid.UUID  # subscription this task belongs to
    reg_number: str  # vehicle registration number
    is_test_drive: bool  # test-drive flag
    time_of_day: PassTimeOfDate  # time-of-day bucket for the pass
    status: RnisEmulatorTaskStatus  # current task status
    error_message: str | None = None  # set when the task failed
    # Start/end timestamps; epoch unit (s vs ms) is not visible here — TODO confirm.
    started_at: int | None = None
    ended_at: int | None = None
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class RnisEmulatorResultSubscription(BaseSchema):
    """Snapshot of an emulator subscription, as carried in result messages.

    Raises a validation error when the validity window is inverted
    (``start_date`` after ``end_date``).
    """

    subscription_id: uuid.UUID  # unique subscription id
    reg_number: str  # vehicle registration number
    time_of_day: PassTimeOfDate  # time-of-day bucket
    status: RnisEmulatorSubscriptionStatus  # current subscription status
    monthly_run_count: int  # number of runs per month
    # Validity window; validated below.
    start_date: date
    end_date: date
    created_at: int  # creation timestamp; epoch unit not visible here — TODO confirm

    @model_validator(mode="after")
    def _check_dates(self):
        # Reject inverted validity windows.
        if self.start_date > self.end_date:
            raise ValueError("start_date Π½Π΅ ΠΌΠΎΠΆΠ΅Ρ Π±ΡΡΡ ΠΏΠΎΠ·ΠΆΠ΅ end_date")
        return self
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class RnisEmulatorResultMessage(BaseSchema):
    """Envelope for one emulator result: a task and/or a subscription snapshot.

    ``type`` indicates which payload is primary; the validator only enforces
    that at least one of ``task`` / ``subscription`` is present (both may be).
    """

    type: RnisEmulatorResultType  # TASK or SUBSCRIPTION
    task: RnisEmulatorResultTask | None = None
    subscription: RnisEmulatorResultSubscription | None = None
    action_type: RnisEmulatorActionType  # CREATE / UPDATE / DELETE

    @model_validator(mode="after")
    def _one_of_task_or_subscription(self):
        # At least one payload must be filled.
        if (self.task is None) and (self.subscription is None):
            raise ValueError(
                "ΠΠΎΠ»ΠΆΠ½ΠΎ Π±ΡΡΡ Π·Π°ΠΏΠΎΠ»Π½Π΅Π½ΠΎ Ρ ΠΎΡΡ Π±Ρ ΠΎΠ΄Π½ΠΎ ΠΏΠΎΠ»Π΅: 'task' ΠΈΠ»ΠΈ 'subscription'."
            )
        return self
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from enum import Enum, StrEnum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class RnisEmulatorResultType(StrEnum):
    """Which kind of payload a result message carries."""

    TASK = "TASK"
    SUBSCRIPTION = "SUBSCRIPTION"
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class RnisEmulatorActionType(StrEnum):
    """What happened to the reported entity."""

    CREATE = "CREATE"
    UPDATE = "UPDATE"
    DELETE = "DELETE"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class RnisEmulatorTaskStatus(str, Enum):
    """Lifecycle states of an emulator task.

    NOTE(review): declared as ``str, Enum`` while sibling enums use
    ``StrEnum``; converting would change ``str()`` formatting of members,
    so the declaration is deliberately left as-is.
    """

    WAITING = "WAITING"
    CALCULATING = "CALCULATING"
    CALCULATED = "CALCULATED"  # calculation done, ready for emulation
    IN_WORK = "IN_WORK"
    UNLOADING = "UNLOADING"
    PARKING = "PARKING"
    COMPLETED = "COMPLETED"
    CANCELED = "CANCELED"
    ERROR = "ERROR"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class RnisEmulatorSubscriptionStatus(str, Enum):
    """Lifecycle states of an emulator subscription."""

    # initial: just created, not active yet
    CREATED = "CREATED"
    # currently running
    ACTIVE = "ACTIVE"
    # manually suspended
    SUSPENDED = "SUSPENDED"
    # ended when its term expired
    EXPIRED = "EXPIRED"
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
from datetime import timedelta
|
|
2
|
+
from logging import Logger
|
|
3
|
+
|
|
4
|
+
from tp_helper.base_items.base_worker_service import BaseWorkerService
|
|
5
|
+
from tp_helper.decorators.decorator_retry_forever import retry_forever
|
|
6
|
+
|
|
7
|
+
from tp_shared.rnis_emulator_service.repos.rnis_emulator_results_stream_queue_repo import (
|
|
8
|
+
RnisEmulatorResultsStreamQueueRepo,
|
|
9
|
+
)
|
|
10
|
+
from tp_shared.rnis_emulator_service.schemas.rnis_emulator_result_message import (
|
|
11
|
+
RnisEmulatorResultMessage,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class RnisEmulatorResultsStreamQueueWorkerService(BaseWorkerService):
    """Worker facade over the RNIS-emulator results stream.

    Unlike the inheritance-based workers in this package, this service holds
    the repository by composition and forwards each call to it, wrapped in
    ``retry_forever``.
    """

    def __init__(
        self,
        repo: RnisEmulatorResultsStreamQueueRepo,
        logger: Logger,
        group_name: str,
        consumer_name: str,
    ):
        super().__init__(logger=logger)
        self.repo = repo
        self.consumer_name = consumer_name
        self.group_name = group_name

    @retry_forever(
        start_message="β ΠΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΎΡΠ΅ΡΠ΅Π΄Ρ {queue_name}",
    )
    async def add(self, message: RnisEmulatorResultMessage) -> None:
        """Publish one emulator result message."""
        queue = self.repo
        await queue.add(message)

    @retry_forever(
        start_message="π₯ ΠΠΎΠ»ΡΡΠ΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ»ΡΡΠ΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΠΈΠ· ΠΎΡΠ΅ΡΠ΅Π΄ΠΈ {queue_name}",
    )
    async def pop(
        self,
        stream_id: str = ">",
        # NOTE(review): default block=60 differs from the sibling workers'
        # block=0 — confirm this asymmetry is intentional.
        block: int = 60,
        count: int = 100,
        prioritize_claimed: bool = True,
        min_idle_time: int = 60000,
    ) -> list[tuple[str, RnisEmulatorResultMessage]] | None:
        """Read up to ``count`` messages for this worker's consumer group."""
        queue = self.repo
        return await queue.pop(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            stream_id=stream_id,
            block=block,
            count=count,
            prioritize_claimed=prioritize_claimed,
            min_idle_time=min_idle_time,
        )

    @retry_forever(
        start_message="β ΠΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΠΎΠ΄ΡΠ²Π΅ΡΠΆΠ΄Π΅Π½ΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def ack(self, message_id: str):
        """Acknowledge one message for this worker's consumer group."""
        queue = self.repo
        await queue.ack(self.group_name, message_id)

    @retry_forever(
        start_message="π ΠΠΎΠΈΡΠΊ Π·Π°Π²ΠΈΡΡΠΈΡ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="π« ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ auto-claim ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def claim_reassign(
        self,
        min_idle_time: int = 60000,
        count: int = 100,
    ) -> list[tuple[str, RnisEmulatorResultMessage]]:
        """Claim messages idle longer than ``min_idle_time`` for this consumer."""
        queue = self.repo
        return await queue.claim_reassign(
            group_name=self.group_name,
            consumer_name=self.consumer_name,
            min_idle_time=min_idle_time,
            count=count,
        )

    @retry_forever(
        start_message="π₯ Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Π³ΡΡΠΏΠΏΡ ΠΏΠΎΡΡΠ΅Π±ΠΈΡΠ΅Π»Π΅ΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΡΠΎΠ·Π΄Π°Π½ΠΈΡ Π³ΡΡΠΏΠΏΡ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def create_consumer_group(self, create_stream: bool = True):
        """Create this worker's consumer group (optionally creating the stream)."""
        queue = self.repo
        await queue.create_consumer_group(
            group_name=self.group_name,
            create_stream=create_stream,
        )

    @retry_forever(
        start_message="π§Ή ΠΡΠΈΡΡΠΊΠ° ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ ΡΡΠ°ΡΡΠ΅ {retention} Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
        error_message="β οΈ ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΎΡΠΈΡΡΠΊΠ΅ ΡΠΎΠΎΠ±ΡΠ΅Π½ΠΈΠΉ Π² ΠΏΠΎΡΠΎΠΊΠ΅ {queue_name}",
    )
    async def trim_by_age(self, retention: timedelta) -> int:
        """Trim messages older than ``retention``; returns the trimmed count."""
        queue = self.repo
        return await queue.trim_by_age(retention)

    @retry_forever(
        start_message="ποΈ ΠΠΎΠ»Π½Π°Ρ ΠΎΡΠΈΡΡΠΊΠ° ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
        error_message="β ΠΡΠΈΠ±ΠΊΠ° ΠΏΡΠΈ ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΎΡΠΈΡΡΠΊΠ΅ ΠΏΠΎΡΠΎΠΊΠ° {queue_name}",
    )
    async def delete_all(self) -> None:
        """Delete every message in the stream."""
        queue = self.repo
        await queue.delete_all()
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class PolicySeries(str, Enum):
    """Known policy series codes.

    Member names are Latin transliterations; values are the original
    Cyrillic series strings as they appear on policies.
    """

    XXX = "Π₯Π₯Π₯"
    TTT = "Π’Π’Π’"
    AAA = "ΠΠΠ"
    AAV = "ΠΠΠ"
    AAK = "ΠΠΠ"
    AAM = "ΠΠΠ"
    AAN = "ΠΠΠ"
    AAS = "ΠΠΠ‘"
    VVV = "ΠΠΠ"
    EEE = "ΠΠΠ"
    KKK = "ΠΠΠ"
    MMM = "ΠΠΠ"
    NNN = "ΠΠΠ"
    RRR = "Π Π Π "
    SSS = "Π‘Π‘Π‘"
|