arpakitlib 1.5.28__py3-none-any.whl → 1.5.29__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arpakitlib/ar_base_worker.py +12 -12
- arpakitlib/ar_fastapi_util.py +28 -12
- arpakitlib/ar_http_request_util.py +54 -24
- arpakitlib/ar_operation_execution_util.py +311 -51
- arpakitlib/ar_schedule_uust_api_client.py +45 -61
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/METADATA +2 -2
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/RECORD +10 -10
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/LICENSE +0 -0
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/NOTICE +0 -0
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/WHEEL +0 -0
arpakitlib/ar_base_worker.py
CHANGED
@@ -29,28 +29,28 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     def sync_safe_run(self):
-        self._logger.info(f"sync_safe_run")
+        self._logger.info(f"start sync_safe_run")

-        self._logger.info("sync_on_startup")
+        self._logger.info("start sync_on_startup ")
         self.sync_on_startup()
-        self._logger.info("sync_on_startup")
+        self._logger.info("finish sync_on_startup")

         while True:

             try:

-                self._logger.info("sync_run")
+                self._logger.info("start sync_run")
                 self.sync_run()
-                self._logger.info("sync_run")
+                self._logger.info("finish sync_run")

                 if self.timeout_after_run is not None:
                     sync_safe_sleep(self.timeout_after_run)

             except BaseException as exception:

-                self._logger.info("sync_run_on_error")
+                self._logger.info("start sync_run_on_error")
                 self.sync_run_on_error(exception=exception, kwargs={})
-                self._logger.info("sync_run_on_error")
+                self._logger.info("start sync_run_on_error")

                 if self.timeout_after_err_in_run is not None:
                     sync_safe_sleep(self.timeout_after_err_in_run)
@@ -65,11 +65,11 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     async def async_safe_run(self):
-        self._logger.info(f"async_safe_run")
+        self._logger.info(f"start async_safe_run")

-        self._logger.info("async_on_startup")
+        self._logger.info("start async_on_startup")
         await self.async_on_startup()
-        self._logger.info("async_on_startup")
+        self._logger.info("start async_on_startup")

         while True:

@@ -82,9 +82,9 @@ class BaseWorker(ABC):

         except BaseException as exception:

-            self._logger.info("async_run_on_error")
+            self._logger.info("start async_run_on_error")
             await self.async_run_on_error(exception=exception, kwargs={})
-            self._logger.info("async_run_on_error")
+            self._logger.info("finish async_run_on_error")

             if self.timeout_after_err_in_run is not None:
                 await async_safe_sleep(self.timeout_after_err_in_run)
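The net effect is that every lifecycle hook is now bracketed by start/finish log lines (note that, as released, the sync error path logs "start sync_run_on_error" twice, and the async startup path logs "start async_on_startup" twice, where a "finish" line was presumably intended). A minimal sketch of a subclass that rides on this lifecycle; the `HeartbeatWorker` class and its body are illustrative, not part of the package:

```python
# Illustrative sketch only: HeartbeatWorker is hypothetical; BaseWorker,
# sync_safe_run and the timeout_* attributes are the ones shown in the diff.
import logging

from arpakitlib.ar_base_worker import BaseWorker

logging.basicConfig(level=logging.INFO)


class HeartbeatWorker(BaseWorker):
    # If BaseWorker also declares the async hooks as abstract, they would need stubs too.

    def sync_on_startup(self):
        # Runs once, between the "start sync_on_startup" and "finish sync_on_startup" logs.
        self._logger.info("preparing resources")

    def sync_run(self):
        # Runs on every loop iteration, between the "start sync_run" and "finish sync_run" logs.
        self._logger.info("heartbeat")


if __name__ == "__main__":
    worker = HeartbeatWorker()
    worker.timeout_after_run = 5.0  # seconds to sleep after each successful sync_run
    worker.sync_safe_run()  # loops forever; exceptions are routed through sync_run_on_error
```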
arpakitlib/ar_fastapi_util.py
CHANGED
@@ -1,5 +1,7 @@
 # arpakit

+from __future__ import annotations
+
 import asyncio
 import logging
 import os.path
@@ -24,6 +26,9 @@ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
 _logger = logging.getLogger(__name__)


+# ---
+
+
 class BaseAPISchema(BaseModel):
     model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True, from_attributes=True)

@@ -47,14 +52,6 @@ class BaseAPISO(BaseAPISchema):
     pass


-class APIJSONResponse(fastapi.responses.JSONResponse):
-    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
-        super().__init__(
-            content=content.model_dump(mode="json"),
-            status_code=status_code
-        )
-
-
 class APIErrorSO(BaseAPISO):
     class APIErrorCodes(EasyEnumeration):
         cannot_authorize = "CANNOT_AUTHORIZE"
@@ -69,6 +66,17 @@ class APIErrorSO(BaseAPISO):
     error_data: dict[str, Any] = {}


+# ---
+
+
+class APIJSONResponse(fastapi.responses.JSONResponse):
+    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
+        super().__init__(
+            content=content.model_dump(mode="json"),
+            status_code=status_code
+        )
+
+
 class APIException(fastapi.exceptions.HTTPException):
     def __init__(
             self,
@@ -163,6 +171,9 @@ def from_exception_to_api_json_response(
     )


+# ---
+
+
 def add_exception_handler_to_fastapi_app(*, fastapi_app: FastAPI, api_handle_exception_: Callable) -> FastAPI:
     fastapi_app.add_exception_handler(
         exc_class_or_status_code=Exception,
@@ -237,7 +248,8 @@ class BaseAPIStartupEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_startup(self, *args, **kwargs):
-        self._logger.info("on_startup")
+        self._logger.info("on_startup starts")
+        self._logger.info("on_startup ends")


 class BaseAPIShutdownEvent:
@@ -245,15 +257,16 @@ class BaseAPIShutdownEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_shutdown(self, *args, **kwargs):
-        self._logger.info("on_shutdown")
+        self._logger.info("on_shutdown starts")
+        self._logger.info("on_shutdown ends")


 def create_fastapi_app(
         *,
         title: str,
         description: str | None = None,
-        api_startup_event: BaseAPIStartupEvent | None =
-        api_shutdown_event: BaseAPIShutdownEvent | None =
+        api_startup_event: BaseAPIStartupEvent | None = BaseAPIStartupEvent(),
+        api_shutdown_event: BaseAPIShutdownEvent | None = BaseAPIShutdownEvent(),
         api_handle_exception_: Callable | None = simple_api_handle_exception
 ):
     app = FastAPI(
@@ -284,6 +297,9 @@ def create_fastapi_app(
     return app


+# ---
+
+
 def __example():
     pass

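Taken together, the reordering plus the new defaults mean a service can now call `create_fastapi_app` without wiring its own startup/shutdown events. A hedged sketch of how the pieces fit; the `PingSO` schema and the `/ping` route are invented for illustration, while the imported names are the ones in the diff:

```python
# Illustrative sketch; PingSO and the /ping route are hypothetical.
from arpakitlib.ar_fastapi_util import APIJSONResponse, BaseAPISO, create_fastapi_app


class PingSO(BaseAPISO):
    message: str = "pong"


# api_startup_event / api_shutdown_event now default to BaseAPIStartupEvent() /
# BaseAPIShutdownEvent(), so only the title is required here.
app = create_fastapi_app(title="example-api")


@app.get("/ping")
async def ping():
    # APIJSONResponse serializes any BaseAPISO via model_dump(mode="json").
    return APIJSONResponse(content=PingSO())
```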
arpakitlib/ar_http_request_util.py
CHANGED
@@ -1,65 +1,95 @@
 # arpakit

 import asyncio
-import inspect
 import logging
 from datetime import timedelta

 import aiohttp
 import requests
+from aiohttp_socks import ProxyConnector

 from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

+_logger = logging.getLogger(__name__)

-def sync_make_request(*, method: str, url: str, **kwargs) -> requests.Response:
-    _logger = logging.getLogger(inspect.currentframe().f_code.co_name)
-
-
+def sync_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries: int = 7,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        **kwargs
+) -> requests.Response:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
     if "timeout" not in kwargs:
         kwargs["timeout"] = (timedelta(seconds=15).total_seconds(), timedelta(seconds=15).total_seconds())
+    if proxy_url_:
+        kwargs["proxies"] = {
+            "http": proxy_url_,
+            "https": proxy_url_
+        }
+    if "allow_redirects" not in kwargs:
+        kwargs["allow_redirects"] = True

     while True:
-
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            […old request/except body not captured in this extraction…]
+            response = requests.request(**kwargs)
+            if raise_for_status_:
+                response.raise_for_status()
+            return response
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries} {method} {url} {exception}")
+            if tries_counter >= max_tries:
+                raise exception
             sync_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue


-async def async_make_request(
-        […old parameters not captured in this extraction…]
+async def async_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries: int = 7,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        **kwargs
+) -> aiohttp.ClientResponse:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
     if "timeout" not in kwargs:
         kwargs["timeout"] = aiohttp.ClientTimeout(total=timedelta(seconds=15).total_seconds())
+    if "allow_redirects" in kwargs:
+        kwargs["allow_redirects"] = True
+
+    proxy_connector: ProxyConnector | None = None
+    if proxy_url_:
+        proxy_connector = ProxyConnector.from_url(proxy_url_)

     while True:
-
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            async with aiohttp.ClientSession() as session:
+            async with aiohttp.ClientSession(connector=proxy_connector) as session:
                 async with session.request(**kwargs) as response:
+                    if raise_for_status_:
+                        response.raise_for_status()
                     await response.read()
                     return response
-        except
-            _logger.warning(f"{
-            if
-                raise
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries} {method} {url} {exception}")
+            if tries_counter >= max_tries:
+                raise exception
             await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue

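For orientation, a hedged usage sketch of the rewritten helpers; the target URL and SOCKS proxy address are placeholders, while the keyword names (`max_tries`, `proxy_url_`, `raise_for_status_`) are exactly those in the new signatures above:

```python
# Illustrative sketch; the target URL and proxy address are placeholders.
import asyncio

from arpakitlib.ar_http_request_util import async_make_request, sync_make_request


def sync_example():
    # Retries up to max_tries; proxy_url_ is mapped onto requests' proxies dict.
    response = sync_make_request(
        url="https://httpbin.org/get",  # placeholder
        max_tries=3,
        raise_for_status_=True,  # failures re-enter the retry loop until max_tries is hit
    )
    print(response.status_code)


async def async_example():
    # A SOCKS proxy_url_ becomes an aiohttp_socks.ProxyConnector here.
    response = await async_make_request(
        url="https://httpbin.org/get",  # placeholder
        proxy_url_="socks5://127.0.0.1:1080",  # placeholder
        max_tries=3,
    )
    print(response.status)


if __name__ == "__main__":
    sync_example()
    asyncio.run(async_example())
```

One detail worth noticing in the diff itself: the async variant's redirect guard reads `if "allow_redirects" in kwargs`, where the sync variant reads `not in`, so as released the async default is only applied when the caller already passed the key.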
arpakitlib/ar_operation_execution_util.py
CHANGED
@@ -1,51 +1,311 @@
-#
-[…the remaining 50 lines of the previous module were not captured in this extraction…]
+# arpakit
+
+from __future__ import annotations
+
+import logging
+import traceback
+from datetime import datetime, timedelta
+from typing import Any
+
+from sqlalchemy import TIMESTAMP, TEXT, asc
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column, Session
+
+from arpakitlib.ar_base_worker import BaseWorker
+from arpakitlib.ar_datetime_util import now_utc_dt
+from arpakitlib.ar_dict_util import combine_dicts
+from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
+from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_fastapi_util import BaseAPISO
+from arpakitlib.ar_sqlalchemy_model_util import SimpleDBM
+
+_ARPAKIT_LIB_MODULE_VERSION = "3.0"
+
+
+class OperationDBM(SimpleDBM):
+    __tablename__ = "operation"
+
+    class Statuses(EasyEnumeration):
+        waiting_for_execution = "waiting_for_execution"
+        executing = "executing"
+        executed_without_error = "executed_without_error"
+        executed_with_error = "executed_with_error"
+
+    class Types(EasyEnumeration):
+        healthcheck_ = "healthcheck"
+        raise_fake_exception = "raise_fake_exception"
+
+    status: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Statuses.waiting_for_execution,
+        server_default=Statuses.waiting_for_execution, nullable=False
+    )
+    type: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Types.healthcheck_, nullable=False
+    )
+    execution_start_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    execution_finish_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    input_data: Mapped[dict[str, Any]] = mapped_column(
+        JSONB,
+        insert_default={},
+        server_default="{}",
+        nullable=False
+    )
+    output_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+    error_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+
+    def raise_if_executed_with_error(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) executed with error, error_data={self.error_data}"
+            )
+
+    def raise_if_error_data(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) has error_data, error_data={self.error_data}"
+            )
+
+    @property
+    def duration(self) -> timedelta | None:
+        if self.execution_start_dt is None or self.execution_finish_dt is None:
+            return None
+        return self.execution_finish_dt - self.execution_start_dt
+
+    @property
+    def duration_total_seconds(self) -> float | None:
+        if self.duration is None:
+            return None
+        return self.duration.total_seconds()
+
+
+class OperationSO(BaseAPISO):
+    id: int
+    long_id: str
+    creation_dt: datetime
+    execution_start_dt: datetime | None
+    execution_finish_dt: datetime | None
+    status: str
+    type: str
+    input_data: dict[str, Any]
+    output_data: dict[str, Any]
+    error_data: dict[str, Any]
+    duration_total_seconds: float | None
+
+
+def get_operation_for_execution(
+        *,
+        easy_sql_alchemy_db: EasySQLAlchemyDB,
+        filter_operation_type: str | None = None
+) -> OperationDBM | None:
+    with easy_sql_alchemy_db.new_session() as session:
+        query = (
+            session
+            .query(OperationDBM)
+            .filter(OperationDBM.status == OperationDBM.Statuses.waiting_for_execution)
+        )
+        if filter_operation_type:
+            query = query.filter(OperationDBM.type == filter_operation_type)
+        query = query.order_by(asc(OperationDBM.creation_dt))
+        operation_dbm: OperationDBM | None = query.first()
+    return operation_dbm
+
+
+def get_operation_by_id(
+        *,
+        session: Session,
+        filter_operation_id: int,
+        strict: bool = False
+) -> OperationDBM | None:
+    query = (
+        session
+        .query(OperationDBM)
+        .filter(OperationDBM.id == filter_operation_id)
+    )
+    if strict:
+        return query.one()
+    else:
+        return query.one_or_none()
+
+
+class BaseOperationExecutor:
+    def __init__(self, *, easy_sql_alchemy_db: EasySQLAlchemyDB):
+        self._logger = logging.getLogger(self.__class__.__name__)
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    async def async_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            await self.async_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        return operation_dbm
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    def sync_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            self.sync_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+            f", operation_dbm.duration={operation_dbm.duration}"
+        )
+
+        return operation_dbm
+
+
+class ExecuteOperationWorker(BaseWorker):
+
+    def __init__(
+            self,
+            *,
+            easy_sql_alchemy_db: EasySQLAlchemyDB,
+            operation_executor: BaseOperationExecutor,
+            need_operation_type: str | None = None
+    ):
+        super().__init__()
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+        self.timeout_after_run = timedelta(seconds=0.1).total_seconds()
+        self.timeout_after_err_in_run = timedelta(seconds=1).total_seconds()
+        self.operation_executor = operation_executor
+        self.need_operation_type = need_operation_type
+
+    async def async_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        await self.async_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
+
+    def sync_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        self.sync_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
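A hedged sketch of the new queue end to end: enqueue a row, then let the worker poll it. The `EasySQLAlchemyDB` constructor call is an assumption (its API is not part of this diff); everything else uses names shown above:

```python
# Illustrative sketch; the EasySQLAlchemyDB constructor arguments are an assumption.
from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
from arpakitlib.ar_operation_execution_util import (
    BaseOperationExecutor,
    ExecuteOperationWorker,
    OperationDBM,
)

# Assumed constructor signature; adjust to how EasySQLAlchemyDB is actually built.
easy_sql_alchemy_db = EasySQLAlchemyDB("postgresql://user:password@localhost:5432/db")

# Enqueue: new rows default to status=waiting_for_execution (see the column defaults above).
with easy_sql_alchemy_db.new_session() as session:
    session.add(OperationDBM(type=OperationDBM.Types.healthcheck_))
    session.commit()

# Poll-and-execute loop: get_operation_for_execution picks the oldest waiting row, and the
# executor moves it executing -> executed_without_error / executed_with_error, recording
# the exception and traceback in error_data on failure.
worker = ExecuteOperationWorker(
    easy_sql_alchemy_db=easy_sql_alchemy_db,
    operation_executor=BaseOperationExecutor(easy_sql_alchemy_db=easy_sql_alchemy_db),
)
worker.sync_safe_run()  # loops forever with the 0.1s/1s timeouts set in __init__
```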
arpakitlib/ar_schedule_uust_api_client.py
CHANGED
@@ -3,16 +3,15 @@
 import asyncio
 import hashlib
 import logging
-from datetime import
-from typing import
+from datetime import datetime
+from typing import Any

-import aiohttp
 import pytz
-from aiohttp import ClientTimeout
-from aiohttp_socks import ProxyConnector

 from arpakitlib.ar_dict_util import combine_dicts
-from arpakitlib.
+from arpakitlib.ar_http_request_util import async_make_request
+from arpakitlib.ar_logging_util import setup_normal_logging
+from arpakitlib.ar_type_util import raise_for_type

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

@@ -71,42 +70,23 @@ class ScheduleUUSTAPIClient:
             password_first_part + datetime.now(tz=pytz.timezone("Asia/Yekaterinburg")).strftime("%Y-%m-%d")
         )

-    async def
+    async def _async_make_http_get_request(
             self,
             *,
             url: str,
-            params:
+            params: dict | None = None
     ) -> dict[str, Any]:
-        […the start of the old retry body was not captured in this extraction…]
-                else None
-            )
-
-            try:
-                async with aiohttp.ClientSession(connector=connector) as session:
-                    async with session.get(
-                        url=url,
-                        params=params,
-                        timeout=ClientTimeout(total=timedelta(seconds=15).total_seconds())
-                    ) as response:
-                        response.raise_for_status()
-                        return await response.json()
-            except Exception as err:
-                self._logger.warning(f"{tries}/{max_tries} {err} GET {url} {params} proxy={self.api_proxy_url}")
-                if tries >= max_tries:
-                    raise err
-                await async_safe_sleep(timedelta(seconds=1).total_seconds())
-                self._logger.warning(f"{tries}/{max_tries} AGAIN GET {url} {params} proxy={self.api_proxy_url}")
-                continue
+        params = combine_dicts(self.auth_params(), params)
+        response = await async_make_request(
+            url=url,
+            method="GET",
+            params=params,
+            proxy_url_=self.api_proxy_url,
+            max_tries=9
+        )
+        json_data = await response.json()
+        raise_for_type(json_data, dict)
+        return json_data

     async def get_current_week(self) -> int:
         """
@@ -116,10 +96,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=
+            params={"ask": "get_current_week"}
         )
         return json_data["data"][0]

@@ -131,10 +110,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=
+            params={"ask": "get_current_semestr"}
         )
         return json_data["data"][0]

@@ -153,42 +131,37 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=
+            params={"ask": "get_group_list"}
         )
         return list(json_data["data"].values())

     async def get_group_lessons(self, group_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params =
-        […not captured in this extraction…]
-            "id": group_id
-        }
-        )
+        params = {
+            "ask": "get_group_schedule",
+            "id": group_id
+        }
         if semester is not None:
             params["semester"] = semester
-        json_data = await self.
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
         return json_data["data"]

     async def get_teachers(self) -> list[dict[str, Any]]:
-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=
+            params={"ask": "get_teacher_list"}
         )
         return list(json_data["data"].values())

     async def get_teacher_lessons(self, teacher_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params =
+        params = {"ask": "get_teacher_schedule", "id": teacher_id}
         if semester is not None:
             params["semester"] = semester
-        json_data = await self.
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
@@ -205,13 +178,24 @@ class ScheduleUUSTAPIClient:
             return False
         return True

+    async def check_all(self):
+        await self.get_groups()
+        await self.get_teachers()
+        await self.get_current_semester()
+        await self.get_current_week()
+

 def __example():
     pass


 async def __async_example():
-
+    setup_normal_logging()
+    client = ScheduleUUSTAPIClient(
+        api_login="arpakit",
+        api_password_first_part="bAEb2wXJNNZ8"
+    )
+    await client.check_all()


 if __name__ == '__main__':
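A short sketch of the client after this refactor; the credentials below are placeholders (the diff's own `__async_example` ships the package author's), and retries plus proxying now come from `async_make_request` with `max_tries=9` rather than the removed hand-rolled loop:

```python
# Illustrative sketch; login and password are placeholders.
import asyncio

from arpakitlib.ar_schedule_uust_api_client import ScheduleUUSTAPIClient


async def main():
    client = ScheduleUUSTAPIClient(
        api_login="login",  # placeholder
        api_password_first_part="password-first-part",  # placeholder
    )
    # Each call funnels through _async_make_http_get_request -> async_make_request,
    # which retries up to 9 times and applies client.api_proxy_url if set.
    print(await client.get_current_week())
    print(len(await client.get_groups()))


if __name__ == "__main__":
    asyncio.run(main())
```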
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: arpakitlib
-Version: 1.5.28
+Version: 1.5.29
 Summary: arpakitlib
 Home-page: https://github.com/ARPAKIT-Company/arpakitlib
 License: Apache-2.0
@@ -33,7 +33,7 @@ Requires-Dist: pymongo (>=4.10.1,<5.0.0)
 Requires-Dist: pytz (>=2024.2,<2025.0)
 Requires-Dist: pyzabbix (>=1.3.1,<2.0.0)
 Requires-Dist: redis (>=5.2.0,<6.0.0)
-Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: requests[socks] (>=2.32.3,<3.0.0)
 Requires-Dist: sqladmin (>=0.20.1,<0.21.0)
 Requires-Dist: sqlalchemy (>=2.0.36,<3.0.0)
 Requires-Dist: twine (>=6.0.1,<7.0.0)
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/RECORD
CHANGED
@@ -9,7 +9,7 @@ arpakitlib/ar_arpakit_lib_module_util.py,sha256=YzobxRG8-QJ1L5r_8wBdL668CwXoQRIM
 arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=V8n5XxkrsZo7ASNAepD0mqgRtI7qIleCDgk7WAmdLW8,18244
 arpakitlib/ar_arpakitlib_info.py,sha256=cvgrLnEznmYkCAg1adbY46ATjD6GJd-Yk8PTgOPjpKM,248
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
-arpakitlib/ar_base_worker.py,sha256=
+arpakitlib/ar_base_worker.py,sha256=Y6yRFp1nhhTyv-TeGDao-3q4ICVVJ2zVsKdIlHdiGSI,2897
 arpakitlib/ar_cache_file.py,sha256=m73_vU6bMjXsIurSPO9VCLcHsiHk8ITFS0LNjfI_8Uw,3471
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419
@@ -36,11 +36,11 @@ arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css,sha256=jzPZlgJTFwSdSphk9C
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map,sha256=5wq8eXMLU6Zxb45orZPL1zAsBFJReFw6GjYqGpUX3hg,262650
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js,sha256=ffrLZHHEQ_g84A-ul3yWa10Kk09waOAxHcQXPuZuavg,339292
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map,sha256=9UhIW7MqCOZPAz1Sl1IKfZUuhWU0p-LJqrnjjJD9Xhc,1159454
-arpakitlib/ar_fastapi_util.py,sha256=
+arpakitlib/ar_fastapi_util.py,sha256=zru-le-J139KpLlPeo9DUjMuDygh4tR2GhAJjlr7ROs,9959
 arpakitlib/ar_file_storage_in_dir.py,sha256=D3e3rGuHoI6xqAA5mVvEpVVpOWY1jyjNsjj2UhyHRbE,3674
 arpakitlib/ar_generate_env_example.py,sha256=WseNlk_So6mTVQ2amMuigWYV4ZVmd940POvXtodoYj0,325
 arpakitlib/ar_hash_util.py,sha256=Iqy6KBAOLBQMFLWv676boI5sV7atT2B-fb7aCdHOmIQ,340
-arpakitlib/ar_http_request_util.py,sha256=
+arpakitlib/ar_http_request_util.py,sha256=jx0ggOVXLvsh2hVUE7tasWaEsUnXbO0WS8-rJb0Y6BU,3063
 arpakitlib/ar_ip_util.py,sha256=aEAa1Hvobh9DWX7cmBAPLqnXSTiKe2hRk-WJaiKMaI8,1009
 arpakitlib/ar_json_db.py,sha256=CEyhIU4WuNmX5mqwBVYxUKSdpFelXvWmf_tJ1fuxMSE,7187
 arpakitlib/ar_json_util.py,sha256=S8CskZ3uoYuJGCy1GhQ8Ikhn-fxXk-9JpLUbBvXADqI,833
@@ -51,11 +51,11 @@ arpakitlib/ar_logging_util.py,sha256=c5wX2FLqCzb4aLckLVhIJ7go52rJQ4GN9dIkJ6KMc3o
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=qCRSWlSgx-3yU0NRHZDQ5lCOmuZKcz2Na3py9nr6hJM,1618
 arpakitlib/ar_openai_util.py,sha256=d5Aj1O2yo_zYLZCLeOLvuveYYxA2jGOqhMs1oUbuVk8,1210
-arpakitlib/ar_operation_execution_util.py,sha256=
+arpakitlib/ar_operation_execution_util.py,sha256=i-GDXFCAXrPwi-juH1sKWNsIPQZ036QxyC4n9oN_RWw,11802
 arpakitlib/ar_parse_command.py,sha256=qpr2OwG3Bf7DFiL9S3iWgtbvtE80RSC35E5zFJvjG1I,2714
 arpakitlib/ar_postgresql_util.py,sha256=SAHEmAyMkZe516uk2gS830v_Wn2kRUZUYNcTNwmgXJk,1160
 arpakitlib/ar_run_cmd.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072
-arpakitlib/ar_schedule_uust_api_client.py,sha256=
+arpakitlib/ar_schedule_uust_api_client.py,sha256=hShokh3uJuTPC78-AMW2mv1WSJMy4cAuVDJ4sLaoqMs,6043
 arpakitlib/ar_sleep_util.py,sha256=9ZN4Qo4eZ_q3hjM7vNBQjFRcH-9-sqv3QLSjnxVJE90,1405
 arpakitlib/ar_sqlalchemy_model_util.py,sha256=3zscvaloi9XY1NR70rJ4-jJlFUIqhmTbQ9wdvK-Yjf8,1379
 arpakitlib/ar_ssh_runner.py,sha256=jlnss4V4pziBN1rBzoK_lDiWm6nMOqGXfa6NFJSKH-Y,6796
@@ -63,8 +63,8 @@ arpakitlib/ar_str_util.py,sha256=xSEzmsDvRiZVaxyqFFjcgzpphktCbXg2FHcvsd1DYpA,188
 arpakitlib/ar_type_util.py,sha256=-h-SCsVl11eVo1u4hy2Asn0IfD5TIxmX3Ndug4AvnPE,1761
 arpakitlib/ar_yookassa_api_client.py,sha256=BwsTygaXf35AACVBl_09uYlSD_t-U1OOzbj58OOFT4Q,6480
 arpakitlib/ar_zabbix_util.py,sha256=MTQbmS0QpNCKNOGONNQHf6j7KTZsKGlIbd5rCH0R0WI,6313
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
+arpakitlib-1.5.29.dist-info/LICENSE,sha256=1jqWIkbnMxDfs_i0SXP5qbV6PHjBr1g8506oW7uPjfg,11347
+arpakitlib-1.5.29.dist-info/METADATA,sha256=BeKeVptEUesxJ2rNi-UpFTY2XuRep1w_3p1dXPN_4SM,2330
+arpakitlib-1.5.29.dist-info/NOTICE,sha256=wHwmiq3wExfFfgMsE5U5TOBP9_l72ocIG82KurEels0,43
+arpakitlib-1.5.29.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.5.29.dist-info/RECORD,,
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/LICENSE
File without changes
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/NOTICE
File without changes
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.29.dist-info}/WHEEL
File without changes