arpakitlib 1.5.28__py3-none-any.whl → 1.5.30__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arpakitlib/ar_base_worker.py +12 -12
- arpakitlib/ar_fastapi_util.py +28 -12
- arpakitlib/ar_http_request_util.py +60 -28
- arpakitlib/ar_operation_execution_util.py +311 -51
- arpakitlib/ar_schedule_uust_api_client.py +36 -60
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/METADATA +2 -2
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/RECORD +10 -10
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/LICENSE +0 -0
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/NOTICE +0 -0
- {arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/WHEEL +0 -0
arpakitlib/ar_base_worker.py
CHANGED
@@ -29,28 +29,28 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     def sync_safe_run(self):
-        self._logger.info(f"sync_safe_run")
+        self._logger.info(f"start sync_safe_run")

-        self._logger.info("sync_on_startup")
+        self._logger.info("start sync_on_startup")
         self.sync_on_startup()
-        self._logger.info("sync_on_startup")
+        self._logger.info("finish sync_on_startup")

         while True:

             try:

-                self._logger.info("sync_run")
+                self._logger.info("start sync_run")
                 self.sync_run()
-                self._logger.info("sync_run")
+                self._logger.info("finish sync_run")

                 if self.timeout_after_run is not None:
                     sync_safe_sleep(self.timeout_after_run)

             except BaseException as exception:

-                self._logger.info("sync_run_on_error")
+                self._logger.info("start sync_run_on_error")
                 self.sync_run_on_error(exception=exception, kwargs={})
-                self._logger.info("sync_run_on_error")
+                self._logger.info("start sync_run_on_error")

                 if self.timeout_after_err_in_run is not None:
                     sync_safe_sleep(self.timeout_after_err_in_run)
@@ -65,11 +65,11 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     async def async_safe_run(self):
-        self._logger.info(f"async_safe_run")
+        self._logger.info(f"start async_safe_run")

-        self._logger.info("async_on_startup")
+        self._logger.info("start async_on_startup")
         await self.async_on_startup()
-        self._logger.info("async_on_startup")
+        self._logger.info("start async_on_startup")

         while True:
@@ -82,9 +82,9 @@ class BaseWorker(ABC):

             except BaseException as exception:

-                self._logger.info("async_run_on_error")
+                self._logger.info("start async_run_on_error")
                 await self.async_run_on_error(exception=exception, kwargs={})
-                self._logger.info("async_run_on_error")
+                self._logger.info("finish async_run_on_error")

                 if self.timeout_after_err_in_run is not None:
                     await async_safe_sleep(self.timeout_after_err_in_run)
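The renamed messages make each lifecycle hook of BaseWorker log a start/finish pair. For orientation, a minimal usage sketch, assuming BaseWorker() takes no constructor arguments (as super().__init__() in ExecuteOperationWorker later in this diff suggests) and that sync_run is the only hook a sync worker must override:

# Hypothetical sketch; assumptions noted above.
from arpakitlib.ar_base_worker import BaseWorker


class PingWorker(BaseWorker):
    def sync_on_startup(self):
        # bracketed by "start sync_on_startup" / "finish sync_on_startup"
        self._logger.info("preparing resources")

    def sync_run(self):
        # bracketed by "start sync_run" / "finish sync_run" on every iteration
        self._logger.info("ping")


worker = PingWorker()
worker.timeout_after_run = 5  # seconds slept between runs, per the loop above
worker.sync_safe_run()  # blocks forever; exceptions route to sync_run_on_error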
arpakitlib/ar_fastapi_util.py
CHANGED
@@ -1,5 +1,7 @@
 # arpakit

+from __future__ import annotations
+
 import asyncio
 import logging
 import os.path
@@ -24,6 +26,9 @@ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
 _logger = logging.getLogger(__name__)


+# ---
+
+
 class BaseAPISchema(BaseModel):
     model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True, from_attributes=True)

@@ -47,14 +52,6 @@ class BaseAPISO(BaseAPISchema):
     pass


-class APIJSONResponse(fastapi.responses.JSONResponse):
-    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
-        super().__init__(
-            content=content.model_dump(mode="json"),
-            status_code=status_code
-        )
-
-
 class APIErrorSO(BaseAPISO):
     class APIErrorCodes(EasyEnumeration):
         cannot_authorize = "CANNOT_AUTHORIZE"
@@ -69,6 +66,17 @@ class APIErrorSO(BaseAPISO):
     error_data: dict[str, Any] = {}


+# ---
+
+
+class APIJSONResponse(fastapi.responses.JSONResponse):
+    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
+        super().__init__(
+            content=content.model_dump(mode="json"),
+            status_code=status_code
+        )
+
+
 class APIException(fastapi.exceptions.HTTPException):
     def __init__(
         self,
@@ -163,6 +171,9 @@ def from_exception_to_api_json_response(
     )


+# ---
+
+
 def add_exception_handler_to_fastapi_app(*, fastapi_app: FastAPI, api_handle_exception_: Callable) -> FastAPI:
     fastapi_app.add_exception_handler(
         exc_class_or_status_code=Exception,
@@ -237,7 +248,8 @@ class BaseAPIStartupEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_startup(self, *args, **kwargs):
-        self._logger.info("on_startup")
+        self._logger.info("on_startup starts")
+        self._logger.info("on_startup ends")


 class BaseAPIShutdownEvent:
@@ -245,15 +257,16 @@ class BaseAPIShutdownEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_shutdown(self, *args, **kwargs):
-        self._logger.info("on_shutdown")
+        self._logger.info("on_shutdown starts")
+        self._logger.info("on_shutdown ends")


 def create_fastapi_app(
         *,
         title: str,
         description: str | None = None,
-        api_startup_event: BaseAPIStartupEvent | None = …,
-        api_shutdown_event: BaseAPIShutdownEvent | None = …,
+        api_startup_event: BaseAPIStartupEvent | None = BaseAPIStartupEvent(),
+        api_shutdown_event: BaseAPIShutdownEvent | None = BaseAPIShutdownEvent(),
         api_handle_exception_: Callable | None = simple_api_handle_exception
 ):
     app = FastAPI(
@@ -284,6 +297,9 @@ def create_fastapi_app(
     return app


+# ---
+
+
 def __example():
     pass
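With this change create_fastapi_app wires in startup/shutdown event objects by default, and APIJSONResponse moves below APIErrorSO. A consumption sketch, assuming only the names and signatures visible in this diff:

# Hypothetical sketch based only on signatures shown in this diff.
from arpakitlib.ar_fastapi_util import (
    APIJSONResponse, BaseAPISO, BaseAPIStartupEvent, create_fastapi_app
)


class HealthcheckSO(BaseAPISO):
    ok: bool = True


class MyStartupEvent(BaseAPIStartupEvent):
    async def on_startup(self, *args, **kwargs):
        self._logger.info("warming caches")  # replaces the default starts/ends logs


app = create_fastapi_app(
    title="example",
    api_startup_event=MyStartupEvent(),  # since 1.5.30 the default is BaseAPIStartupEvent()
)


@app.get("/healthcheck")
def healthcheck():
    # APIJSONResponse serializes any BaseAPISO via model_dump(mode="json")
    return APIJSONResponse(content=HealthcheckSO())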
arpakitlib/ar_http_request_util.py
CHANGED
@@ -1,65 +1,97 @@
 # arpakit

 import asyncio
-import inspect
 import logging
 from datetime import timedelta

 import aiohttp
 import requests
+from aiohttp_socks import ProxyConnector

 from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

+_logger = logging.getLogger(__name__)

-def sync_make_request(*, method: str, url: str, **kwargs) -> requests.Response:
-    _logger = logging.getLogger(inspect.currentframe().f_code.co_name)
-
-
+def sync_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries_: int = 9,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        timeout_: timedelta = timedelta(seconds=15).total_seconds(),
+        **kwargs
+) -> requests.Response:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
-    if …
-        kwargs["timeout"] = …
+    if timeout_ is not None:
+        kwargs["timeout"] = timeout_.total_seconds()
+    if proxy_url_:
+        kwargs["proxies"] = {
+            "http": proxy_url_,
+            "https": proxy_url_
+        }
+    if "allow_redirects" not in kwargs:
+        kwargs["allow_redirects"] = True

     while True:
-
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            …
+            response = requests.request(**kwargs)
+            if raise_for_status_:
+                response.raise_for_status()
+            return response
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries_} {method} {url} {exception}")
+            if tries_counter >= max_tries_:
+                raise exception
             sync_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue


-async def async_make_request(
-        …
-) -> …:
+async def async_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries_: int = 9,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        timeout_: timedelta | None = timedelta(seconds=15),
+        **kwargs
+) -> aiohttp.ClientResponse:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
-    if …
-        kwargs["timeout"] = aiohttp.ClientTimeout(total=…)
+    if timeout_ is not None:
+        kwargs["timeout"] = aiohttp.ClientTimeout(total=timeout_.total_seconds())
+    if "allow_redirects" not in kwargs:
+        kwargs["allow_redirects"] = True
+
+    proxy_connector: ProxyConnector | None = None
+    if proxy_url_:
+        proxy_connector = ProxyConnector.from_url(proxy_url_)

     while True:
-
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            async with aiohttp.ClientSession() as session:
+            async with aiohttp.ClientSession(connector=proxy_connector) as session:
                 async with session.request(**kwargs) as response:
+                    if raise_for_status_:
+                        response.raise_for_status()
                     await response.read()
                     return response
-        except …
-            _logger.warning(f"{…
-            if …
-                raise …
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries_} {method} {url} {exception}")
+            if tries_counter >= max_tries_:
+                raise exception
             await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue
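Both helpers now share one retry contract: up to max_tries_ attempts with a 0.1-second pause in between, optional proxying, and opt-in raise_for_status_. A usage sketch built only on the signatures above; note that the sync default for timeout_ is declared via .total_seconds() (a float), so passing a timedelta explicitly is the safe choice:

# Usage sketch for the two helpers shown in this diff.
import asyncio
from datetime import timedelta

from arpakitlib.ar_http_request_util import async_make_request, sync_make_request

# Synchronous: extra kwargs go straight through to requests.request().
response = sync_make_request(
    method="GET",
    url="https://example.com/api",
    params={"q": "test"},                  # forwarded to requests, also logged
    raise_for_status_=True,                # HTTP errors are retried, then raised
    proxy_url_="socks5://localhost:1080",  # mapped onto requests' proxies=
    timeout_=timedelta(seconds=10),        # explicit timedelta, see note above
)
print(response.status_code)


async def main():
    # Asynchronous: SOCKS proxying is wired in via aiohttp_socks.ProxyConnector.
    response = await async_make_request(url="https://example.com/api", max_tries_=3)
    print(response.status)  # body was already fetched with response.read()


asyncio.run(main())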
arpakitlib/ar_operation_execution_util.py
CHANGED
@@ -1,51 +1,311 @@
-# …
+# arpakit
+
+from __future__ import annotations
+
+import logging
+import traceback
+from datetime import datetime, timedelta
+from typing import Any
+
+from sqlalchemy import TIMESTAMP, TEXT, asc
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column, Session
+
+from arpakitlib.ar_base_worker import BaseWorker
+from arpakitlib.ar_datetime_util import now_utc_dt
+from arpakitlib.ar_dict_util import combine_dicts
+from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
+from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_fastapi_util import BaseAPISO
+from arpakitlib.ar_sqlalchemy_model_util import SimpleDBM
+
+_ARPAKIT_LIB_MODULE_VERSION = "3.0"
+
+
+class OperationDBM(SimpleDBM):
+    __tablename__ = "operation"
+
+    class Statuses(EasyEnumeration):
+        waiting_for_execution = "waiting_for_execution"
+        executing = "executing"
+        executed_without_error = "executed_without_error"
+        executed_with_error = "executed_with_error"
+
+    class Types(EasyEnumeration):
+        healthcheck_ = "healthcheck"
+        raise_fake_exception = "raise_fake_exception"
+
+    status: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Statuses.waiting_for_execution,
+        server_default=Statuses.waiting_for_execution, nullable=False
+    )
+    type: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Types.healthcheck_, nullable=False
+    )
+    execution_start_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    execution_finish_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    input_data: Mapped[dict[str, Any]] = mapped_column(
+        JSONB,
+        insert_default={},
+        server_default="{}",
+        nullable=False
+    )
+    output_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+    error_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+
+    def raise_if_executed_with_error(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) executed with error, error_data={self.error_data}"
+            )
+
+    def raise_if_error_data(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) has error_data, error_data={self.error_data}"
+            )
+
+    @property
+    def duration(self) -> timedelta | None:
+        if self.execution_start_dt is None or self.execution_finish_dt is None:
+            return None
+        return self.execution_finish_dt - self.execution_start_dt
+
+    @property
+    def duration_total_seconds(self) -> float | None:
+        if self.duration is None:
+            return None
+        return self.duration.total_seconds()
+
+
+class OperationSO(BaseAPISO):
+    id: int
+    long_id: str
+    creation_dt: datetime
+    execution_start_dt: datetime | None
+    execution_finish_dt: datetime | None
+    status: str
+    type: str
+    input_data: dict[str, Any]
+    output_data: dict[str, Any]
+    error_data: dict[str, Any]
+    duration_total_seconds: float | None
+
+
+def get_operation_for_execution(
+        *,
+        easy_sql_alchemy_db: EasySQLAlchemyDB,
+        filter_operation_type: str | None = None
+) -> OperationDBM | None:
+    with easy_sql_alchemy_db.new_session() as session:
+        query = (
+            session
+            .query(OperationDBM)
+            .filter(OperationDBM.status == OperationDBM.Statuses.waiting_for_execution)
+        )
+        if filter_operation_type:
+            query = query.filter(OperationDBM.type == filter_operation_type)
+        query = query.order_by(asc(OperationDBM.creation_dt))
+        operation_dbm: OperationDBM | None = query.first()
+        return operation_dbm
+
+
+def get_operation_by_id(
+        *,
+        session: Session,
+        filter_operation_id: int,
+        strict: bool = False
+) -> OperationDBM | None:
+    query = (
+        session
+        .query(OperationDBM)
+        .filter(OperationDBM.id == filter_operation_id)
+    )
+    if strict:
+        return query.one()
+    else:
+        return query.one_or_none()
+
+
+class BaseOperationExecutor:
+    def __init__(self, *, easy_sql_alchemy_db: EasySQLAlchemyDB):
+        self._logger = logging.getLogger(self.__class__.__name__)
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    async def async_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            await self.async_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        return operation_dbm
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    def sync_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            self.sync_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+            f", operation_dbm.duration={operation_dbm.duration}"
+        )
+
+        return operation_dbm
+
+
+class ExecuteOperationWorker(BaseWorker):
+
+    def __init__(
+            self,
+            *,
+            easy_sql_alchemy_db: EasySQLAlchemyDB,
+            operation_executor: BaseOperationExecutor,
+            need_operation_type: str | None = None
+    ):
+        super().__init__()
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+        self.timeout_after_run = timedelta(seconds=0.1).total_seconds()
+        self.timeout_after_err_in_run = timedelta(seconds=1).total_seconds()
+        self.operation_executor = operation_executor
+        self.need_operation_type = need_operation_type
+
+    async def async_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        await self.async_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
+
+    def sync_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        self.sync_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
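The new module amounts to a small database-backed job queue: operation rows move from waiting_for_execution through executing to one of the executed_* states, and ExecuteOperationWorker polls for the oldest waiting row. A wiring sketch; the EasySQLAlchemyDB constructor and the defaults SimpleDBM supplies (id, long_id, creation_dt) are assumptions, since neither appears in this diff:

# Hypothetical wiring sketch; see the assumptions named above.
from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
from arpakitlib.ar_operation_execution_util import (
    BaseOperationExecutor, ExecuteOperationWorker, OperationDBM
)

easy_sql_alchemy_db = EasySQLAlchemyDB(...)  # connection setup assumed, not shown here

# Enqueue a healthcheck: status defaults to waiting_for_execution.
with easy_sql_alchemy_db.new_session() as session:
    session.add(OperationDBM(type=OperationDBM.Types.healthcheck_))
    session.commit()

# A real executor would subclass BaseOperationExecutor and override
# (a)sync_execute_operation to handle its own Types values.
worker = ExecuteOperationWorker(
    easy_sql_alchemy_db=easy_sql_alchemy_db,
    operation_executor=BaseOperationExecutor(easy_sql_alchemy_db=easy_sql_alchemy_db),
)
worker.sync_safe_run()  # polls every 0.1 s between runs, waits 1 s after errors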
arpakitlib/ar_schedule_uust_api_client.py
CHANGED
@@ -3,16 +3,14 @@
 import asyncio
 import hashlib
 import logging
-from datetime import datetime, timedelta
-from typing import …
+from datetime import datetime
+from typing import Any

-import aiohttp
 import pytz
-from aiohttp import ClientTimeout
-from aiohttp_socks import ProxyConnector

 from arpakitlib.ar_dict_util import combine_dicts
-from arpakitlib.…
+from arpakitlib.ar_http_request_util import async_make_request
+from arpakitlib.ar_type_util import raise_for_type

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

@@ -71,42 +69,21 @@ class ScheduleUUSTAPIClient:
             password_first_part + datetime.now(tz=pytz.timezone("Asia/Yekaterinburg")).strftime("%Y-%m-%d")
         )

-    async def _async_get_request(
+    async def _async_make_http_get_request(
             self,
             *,
             url: str,
-            params: …
+            params: dict | None = None
     ) -> dict[str, Any]:
-        …
-        connector = (
-            ProxyConnector.from_url(self.api_proxy_url)
-            if self.api_proxy_url is not None
-            else None
-        )
-
-        try:
-            async with aiohttp.ClientSession(connector=connector) as session:
-                async with session.get(
-                    url=url,
-                    params=params,
-                    timeout=ClientTimeout(total=timedelta(seconds=15).total_seconds())
-                ) as response:
-                    response.raise_for_status()
-                    return await response.json()
-        except Exception as err:
-            self._logger.warning(f"{tries}/{max_tries} {err} GET {url} {params} proxy={self.api_proxy_url}")
-            if tries >= max_tries:
-                raise err
-            await async_safe_sleep(timedelta(seconds=1).total_seconds())
-            self._logger.warning(f"{tries}/{max_tries} AGAIN GET {url} {params} proxy={self.api_proxy_url}")
-            continue
+        response = await async_make_request(
+            url=url,
+            method="GET",
+            params=combine_dicts(params, self.auth_params()),
+            proxy_url_=self.api_proxy_url,
+        )
+        json_data = await response.json()
+        raise_for_type(json_data, dict)
+        return json_data

 async def get_current_week(self) -> int:
     """
@@ -116,10 +93,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=…
+            params={"ask": "get_current_week"}
         )
         return json_data["data"][0]

@@ -131,10 +107,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=…
+            params={"ask": "get_current_semestr"}
         )
         return json_data["data"][0]

@@ -153,42 +128,37 @@ class ScheduleUUSTAPIClient:
         }
         """

-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=…
+            params={"ask": "get_group_list"}
         )
         return list(json_data["data"].values())

     async def get_group_lessons(self, group_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params = …
-            …
-            "id": group_id
-        }
-        )
+        params = {
+            "ask": "get_group_schedule",
+            "id": group_id
+        }
         if semester is not None:
             params["semester"] = semester
-        json_data = await self.…
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
         return json_data["data"]

     async def get_teachers(self) -> list[dict[str, Any]]:
-
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=…
+            params={"ask": "get_teacher_list"}
        )
         return list(json_data["data"].values())

     async def get_teacher_lessons(self, teacher_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params = …
+        params = {"ask": "get_teacher_schedule", "id": teacher_id}
         if semester is not None:
             params["semester"] = semester
-        json_data = await self.…
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
@@ -205,6 +175,12 @@ class ScheduleUUSTAPIClient:
             return False
         return True

+    async def check_all(self):
+        await self.get_groups()
+        await self.get_teachers()
+        await self.get_current_semester()
+        await self.get_current_week()
+

 def __example():
     pass
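Every endpoint wrapper now funnels through _async_make_http_get_request, which merges auth_params() into the query string and delegates retries, timeouts and proxying to async_make_request. A usage sketch; the constructor arguments are assumptions, since only api_url, api_proxy_url and auth_params() appear in this diff:

# Hypothetical usage sketch; constructor arguments are assumed, not shown in this diff.
import asyncio

from arpakitlib.ar_schedule_uust_api_client import ScheduleUUSTAPIClient


async def main():
    client = ScheduleUUSTAPIClient(...)  # api_url / credentials assumed
    await client.check_all()  # new in 1.5.30: exercises groups, teachers, semester, week
    week = await client.get_current_week()
    groups = await client.get_groups()
    print(week, len(groups))


asyncio.run(main())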
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: arpakitlib
-Version: 1.5.28
+Version: 1.5.30
 Summary: arpakitlib
 Home-page: https://github.com/ARPAKIT-Company/arpakitlib
 License: Apache-2.0
@@ -33,7 +33,7 @@ Requires-Dist: pymongo (>=4.10.1,<5.0.0)
 Requires-Dist: pytz (>=2024.2,<2025.0)
 Requires-Dist: pyzabbix (>=1.3.1,<2.0.0)
 Requires-Dist: redis (>=5.2.0,<6.0.0)
-Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: requests[socks] (>=2.32.3,<3.0.0)
 Requires-Dist: sqladmin (>=0.20.1,<0.21.0)
 Requires-Dist: sqlalchemy (>=2.0.36,<3.0.0)
 Requires-Dist: twine (>=6.0.1,<7.0.0)
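The switch to requests[socks] pulls in PySocks, which requests requires before it accepts socks5:// URLs in a proxies mapping; without it the new proxy_url_ path in sync_make_request would fail for SOCKS proxies. A quick standalone check (the proxy address is a placeholder):

# Verifies the socks extra is usable; the proxy URL is a placeholder.
import requests

proxies = {"http": "socks5://localhost:1080", "https": "socks5://localhost:1080"}
response = requests.get("https://example.com", proxies=proxies, timeout=15)
print(response.status_code)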
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/RECORD
CHANGED
@@ -9,7 +9,7 @@ arpakitlib/ar_arpakit_lib_module_util.py,sha256=YzobxRG8-QJ1L5r_8wBdL668CwXoQRIM…
 arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=V8n5XxkrsZo7ASNAepD0mqgRtI7qIleCDgk7WAmdLW8,18244
 arpakitlib/ar_arpakitlib_info.py,sha256=cvgrLnEznmYkCAg1adbY46ATjD6GJd-Yk8PTgOPjpKM,248
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
-arpakitlib/ar_base_worker.py,sha256=…
+arpakitlib/ar_base_worker.py,sha256=Y6yRFp1nhhTyv-TeGDao-3q4ICVVJ2zVsKdIlHdiGSI,2897
 arpakitlib/ar_cache_file.py,sha256=m73_vU6bMjXsIurSPO9VCLcHsiHk8ITFS0LNjfI_8Uw,3471
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419
@@ -36,11 +36,11 @@ arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css,sha256=jzPZlgJTFwSdSphk9C…
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map,sha256=5wq8eXMLU6Zxb45orZPL1zAsBFJReFw6GjYqGpUX3hg,262650
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js,sha256=ffrLZHHEQ_g84A-ul3yWa10Kk09waOAxHcQXPuZuavg,339292
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map,sha256=9UhIW7MqCOZPAz1Sl1IKfZUuhWU0p-LJqrnjjJD9Xhc,1159454
-arpakitlib/ar_fastapi_util.py,sha256=…
+arpakitlib/ar_fastapi_util.py,sha256=zru-le-J139KpLlPeo9DUjMuDygh4tR2GhAJjlr7ROs,9959
 arpakitlib/ar_file_storage_in_dir.py,sha256=D3e3rGuHoI6xqAA5mVvEpVVpOWY1jyjNsjj2UhyHRbE,3674
 arpakitlib/ar_generate_env_example.py,sha256=WseNlk_So6mTVQ2amMuigWYV4ZVmd940POvXtodoYj0,325
 arpakitlib/ar_hash_util.py,sha256=Iqy6KBAOLBQMFLWv676boI5sV7atT2B-fb7aCdHOmIQ,340
-arpakitlib/ar_http_request_util.py,sha256=…
+arpakitlib/ar_http_request_util.py,sha256=DooIL24jW6Ouz771TMTTvzDZETBc12R1RBmbXp9vNqg,3129
 arpakitlib/ar_ip_util.py,sha256=aEAa1Hvobh9DWX7cmBAPLqnXSTiKe2hRk-WJaiKMaI8,1009
 arpakitlib/ar_json_db.py,sha256=CEyhIU4WuNmX5mqwBVYxUKSdpFelXvWmf_tJ1fuxMSE,7187
 arpakitlib/ar_json_util.py,sha256=S8CskZ3uoYuJGCy1GhQ8Ikhn-fxXk-9JpLUbBvXADqI,833
@@ -51,11 +51,11 @@ arpakitlib/ar_logging_util.py,sha256=c5wX2FLqCzb4aLckLVhIJ7go52rJQ4GN9dIkJ6KMc3o…
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=qCRSWlSgx-3yU0NRHZDQ5lCOmuZKcz2Na3py9nr6hJM,1618
 arpakitlib/ar_openai_util.py,sha256=d5Aj1O2yo_zYLZCLeOLvuveYYxA2jGOqhMs1oUbuVk8,1210
-arpakitlib/ar_operation_execution_util.py,sha256=…
+arpakitlib/ar_operation_execution_util.py,sha256=i-GDXFCAXrPwi-juH1sKWNsIPQZ036QxyC4n9oN_RWw,11802
 arpakitlib/ar_parse_command.py,sha256=qpr2OwG3Bf7DFiL9S3iWgtbvtE80RSC35E5zFJvjG1I,2714
 arpakitlib/ar_postgresql_util.py,sha256=SAHEmAyMkZe516uk2gS830v_Wn2kRUZUYNcTNwmgXJk,1160
 arpakitlib/ar_run_cmd.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072
-arpakitlib/ar_schedule_uust_api_client.py,sha256=…
+arpakitlib/ar_schedule_uust_api_client.py,sha256=1JGUy6rrjAXdWjeAqiAOQlCAEV3xuc5FUDWfXODKB-A,5770
 arpakitlib/ar_sleep_util.py,sha256=9ZN4Qo4eZ_q3hjM7vNBQjFRcH-9-sqv3QLSjnxVJE90,1405
 arpakitlib/ar_sqlalchemy_model_util.py,sha256=3zscvaloi9XY1NR70rJ4-jJlFUIqhmTbQ9wdvK-Yjf8,1379
 arpakitlib/ar_ssh_runner.py,sha256=jlnss4V4pziBN1rBzoK_lDiWm6nMOqGXfa6NFJSKH-Y,6796
@@ -63,8 +63,8 @@ arpakitlib/ar_str_util.py,sha256=xSEzmsDvRiZVaxyqFFjcgzpphktCbXg2FHcvsd1DYpA,188…
 arpakitlib/ar_type_util.py,sha256=-h-SCsVl11eVo1u4hy2Asn0IfD5TIxmX3Ndug4AvnPE,1761
 arpakitlib/ar_yookassa_api_client.py,sha256=BwsTygaXf35AACVBl_09uYlSD_t-U1OOzbj58OOFT4Q,6480
 arpakitlib/ar_zabbix_util.py,sha256=MTQbmS0QpNCKNOGONNQHf6j7KTZsKGlIbd5rCH0R0WI,6313
-arpakitlib-1.5.28.dist-info/LICENSE,sha256=…
-arpakitlib-1.5.28.dist-info/METADATA,sha256=…
-arpakitlib-1.5.28.dist-info/NOTICE,sha256=…
-arpakitlib-1.5.28.dist-info/WHEEL,sha256=…
-arpakitlib-1.5.28.dist-info/RECORD,,
+arpakitlib-1.5.30.dist-info/LICENSE,sha256=1jqWIkbnMxDfs_i0SXP5qbV6PHjBr1g8506oW7uPjfg,11347
+arpakitlib-1.5.30.dist-info/METADATA,sha256=-MrOdtGHS5oI_5ygdbJYMxdl9xJQALm6cA60w0Kxbzk,2330
+arpakitlib-1.5.30.dist-info/NOTICE,sha256=wHwmiq3wExfFfgMsE5U5TOBP9_l72ocIG82KurEels0,43
+arpakitlib-1.5.30.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.5.30.dist-info/RECORD,,
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/LICENSE
File without changes
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/NOTICE
File without changes
{arpakitlib-1.5.28.dist-info → arpakitlib-1.5.30.dist-info}/WHEEL
File without changes