arpakitlib 1.5.27__py3-none-any.whl → 1.5.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

--- a/arpakitlib/ar_arpakit_schedule_uust_api_client.py
+++ b/arpakitlib/ar_arpakit_schedule_uust_api_client.py
@@ -6,7 +6,6 @@ import asyncio
 import hashlib
 import json
 import logging
-from asyncio import sleep
 from datetime import timedelta, datetime, time
 from typing import Any
 from urllib.parse import urljoin
@@ -19,6 +18,7 @@ from pydantic import ConfigDict, BaseModel
 from arpakitlib.ar_dict_util import combine_dicts
 from arpakitlib.ar_enumeration import EasyEnumeration
 from arpakitlib.ar_json_util import safely_transfer_to_json_str
+from arpakitlib.ar_sleep_util import async_safe_sleep
 from arpakitlib.ar_type_util import raise_for_type

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"
@@ -334,7 +334,7 @@ class ARPAKITScheduleUUSTAPIClient:
                 self._logger.warning(f"{tries}/{max_tries} {err} {method} {url}")
                 if tries >= max_tries:
                     raise err
-                await sleep(timedelta(seconds=0.1).total_seconds())
+                await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
                 continue

     async def healthcheck(self) -> bool:

--- a/arpakitlib/ar_base_worker.py
+++ b/arpakitlib/ar_base_worker.py
@@ -6,7 +6,7 @@ from abc import ABC
 from datetime import timedelta
 from typing import Any

-from arpakitlib.ar_safe_sleep import safe_sleep
+from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

@@ -29,31 +29,31 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     def sync_safe_run(self):
-        self._logger.info(f"sync_safe_run")
+        self._logger.info(f"start sync_safe_run")

-        self._logger.info("sync_on_startup starts")
+        self._logger.info("start sync_on_startup ")
         self.sync_on_startup()
-        self._logger.info("sync_on_startup ends")
+        self._logger.info("finish sync_on_startup")

         while True:

             try:

-                self._logger.info("sync_run starts")
+                self._logger.info("start sync_run")
                 self.sync_run()
-                self._logger.info("sync_run ends")
+                self._logger.info("finish sync_run")

                 if self.timeout_after_run is not None:
-                    safe_sleep(self.timeout_after_run)
+                    sync_safe_sleep(self.timeout_after_run)

             except BaseException as exception:

-                self._logger.info("sync_run_on_error starts")
+                self._logger.info("start sync_run_on_error")
                 self.sync_run_on_error(exception=exception, kwargs={})
-                self._logger.info("sync_run_on_error ends")
+                self._logger.info("start sync_run_on_error")

                 if self.timeout_after_err_in_run is not None:
-                    safe_sleep(self.timeout_after_err_in_run)
+                    sync_safe_sleep(self.timeout_after_err_in_run)

     async def async_on_startup(self):
         pass
@@ -65,11 +65,11 @@ class BaseWorker(ABC):
         self._logger.exception(exception)

     async def async_safe_run(self):
-        self._logger.info(f"async_safe_run starts")
+        self._logger.info(f"start async_safe_run")

-        self._logger.info("async_on_startup starts")
+        self._logger.info("start async_on_startup")
         await self.async_on_startup()
-        self._logger.info("async_on_startup ends")
+        self._logger.info("start async_on_startup")

         while True:

@@ -78,16 +78,16 @@ class BaseWorker(ABC):
                 await self.async_run()

                 if self.timeout_after_run is not None:
-                    await asyncio.sleep(self.timeout_after_run)
+                    await async_safe_sleep(self.timeout_after_run)

             except BaseException as exception:

-                self._logger.info("async_run_on_error starts")
+                self._logger.info("start async_run_on_error")
                 await self.async_run_on_error(exception=exception, kwargs={})
-                self._logger.info("async_run_on_error ends")
+                self._logger.info("finish async_run_on_error")

                 if self.timeout_after_err_in_run is not None:
-                    await asyncio.sleep(self.timeout_after_err_in_run)
+                    await async_safe_sleep(self.timeout_after_err_in_run)


 def __example():
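
For orientation, the renamed sleep helpers slot into the worker loop above without any other changes on the caller's side. A minimal sketch of a subclass driving sync_safe_run(), assuming only the BaseWorker API visible in the hunks above (sync_run, timeout_after_run, the class-named logger); the worker name and interval are illustrative:

import logging
from datetime import timedelta

from arpakitlib.ar_base_worker import BaseWorker

logging.basicConfig(level=logging.INFO)


class HeartbeatWorker(BaseWorker):  # hypothetical worker
    def __init__(self):
        super().__init__()
        # sync_safe_run() passes this to sync_safe_sleep after each successful sync_run
        self.timeout_after_run = timedelta(seconds=5).total_seconds()

    def sync_run(self):
        self._logger.info("heartbeat")


if __name__ == "__main__":
    HeartbeatWorker().sync_safe_run()  # loops forever; errors go to sync_run_on_error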

--- a/arpakitlib/ar_dream_ai_api_client.py
+++ b/arpakitlib/ar_dream_ai_api_client.py
@@ -2,7 +2,6 @@

 import asyncio
 import logging
-from asyncio import sleep
 from datetime import timedelta
 from urllib.parse import urljoin

@@ -12,6 +11,7 @@ from pydantic import ConfigDict, BaseModel

 from arpakitlib.ar_base64_util import convert_base64_string_to_bytes
 from arpakitlib.ar_json_util import safely_transfer_to_json_str
+from arpakitlib.ar_sleep_util import async_safe_sleep

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

@@ -72,7 +72,7 @@ class DreamAIAPIClient:
                 self._logger.warning(f"{tries}/{max_tries} {err} {method} {url}")
                 if tries >= max_tries:
                     raise err
-                await sleep(timedelta(seconds=0.1).total_seconds())
+                await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
                 continue

     async def healthcheck(self) -> bool:

--- a/arpakitlib/ar_fastapi_util.py
+++ b/arpakitlib/ar_fastapi_util.py
@@ -1,5 +1,7 @@
 # arpakit

+from __future__ import annotations
+
 import asyncio
 import logging
 import os.path
@@ -24,6 +26,9 @@ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
 _logger = logging.getLogger(__name__)


+# ---
+
+
 class BaseAPISchema(BaseModel):
     model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True, from_attributes=True)

@@ -47,22 +52,13 @@ class BaseAPISO(BaseAPISchema):
     pass


-class APIJSONResponse(fastapi.responses.JSONResponse):
-    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
-        super().__init__(
-            content=content.model_dump(mode="json"),
-            status_code=status_code
-        )
-
-
-class APIErrorCodes(EasyEnumeration):
-    cannot_authorize = "CANNOT_AUTHORIZE"
-    unknown_error = "UNKNOWN_ERROR"
-    error_in_request = "ERROR_IN_REQUEST"
-    not_found = "NOT_FOUND"
-
-
 class APIErrorSO(BaseAPISO):
+    class APIErrorCodes(EasyEnumeration):
+        cannot_authorize = "CANNOT_AUTHORIZE"
+        unknown_error = "UNKNOWN_ERROR"
+        error_in_request = "ERROR_IN_REQUEST"
+        not_found = "NOT_FOUND"
+
     has_error: bool = True
     error_code: str | None = None
     error_code_specification: str | None = None
@@ -70,12 +66,23 @@ class APIErrorSO(BaseAPISO):
     error_data: dict[str, Any] = {}


+# ---
+
+
+class APIJSONResponse(fastapi.responses.JSONResponse):
+    def __init__(self, *, content: BaseAPISO, status_code: int = starlette.status.HTTP_200_OK):
+        super().__init__(
+            content=content.model_dump(mode="json"),
+            status_code=status_code
+        )
+
+
 class APIException(fastapi.exceptions.HTTPException):
     def __init__(
             self,
             *,
             status_code: int = starlette.status.HTTP_400_BAD_REQUEST,
-            error_code: str | None = APIErrorCodes.unknown_error,
+            error_code: str | None = APIErrorSO.APIErrorCodes.unknown_error,
             error_code_specification: str | None = None,
             error_description: str | None = None,
             error_data: dict[str, Any] | None = None
@@ -113,7 +120,7 @@ def from_exception_to_api_json_response(

     easy_api_error_so = APIErrorSO(
         has_error=True,
-        error_code=APIErrorCodes.unknown_error
+        error_code=APIErrorSO.APIErrorCodes.unknown_error
     )

     status_code = starlette.status.HTTP_500_INTERNAL_SERVER_ERROR
@@ -124,11 +131,11 @@ def from_exception_to_api_json_response(
     elif isinstance(exception, starlette.exceptions.HTTPException):
         status_code = exception.status_code
         if status_code in (starlette.status.HTTP_403_FORBIDDEN, starlette.status.HTTP_401_UNAUTHORIZED):
-            easy_api_error_so.error_code = APIErrorCodes.cannot_authorize
+            easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.cannot_authorize
         elif status_code == starlette.status.HTTP_404_NOT_FOUND:
-            easy_api_error_so.error_code = APIErrorCodes.not_found
+            easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.not_found
         else:
-            easy_api_error_so.error_code = APIErrorCodes.unknown_error
+            easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.unknown_error
         if (
                 isinstance(exception.detail, dict)
                 or isinstance(exception.detail, list)
@@ -141,12 +148,12 @@ def from_exception_to_api_json_response(

     elif isinstance(exception, fastapi.exceptions.RequestValidationError):
         status_code = starlette.status.HTTP_422_UNPROCESSABLE_ENTITY
-        easy_api_error_so.error_code = APIErrorCodes.error_in_request
+        easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.error_in_request
         easy_api_error_so.error_data["raw"] = str(exception.errors()) if exception.errors() else {}

     else:
         status_code = starlette.status.HTTP_500_INTERNAL_SERVER_ERROR
-        easy_api_error_so.error_code = APIErrorCodes.unknown_error
+        easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.unknown_error
         easy_api_error_so.error_data["raw"] = str(exception)
         _logger.exception(exception)

@@ -164,6 +171,9 @@ def from_exception_to_api_json_response(
     )


+# ---
+
+
 def add_exception_handler_to_fastapi_app(*, fastapi_app: FastAPI, api_handle_exception_: Callable) -> FastAPI:
     fastapi_app.add_exception_handler(
         exc_class_or_status_code=Exception,
@@ -238,7 +248,8 @@ class BaseAPIStartupEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_startup(self, *args, **kwargs):
-        self._logger.info("on_startup")
+        self._logger.info("on_startup starts")
+        self._logger.info("on_startup ends")


 class BaseAPIShutdownEvent:
@@ -246,15 +257,16 @@ class BaseAPIShutdownEvent:
         self._logger = logging.getLogger(self.__class__.__name__)

     async def on_shutdown(self, *args, **kwargs):
-        self._logger.info("on_shutdown")
+        self._logger.info("on_shutdown starts")
+        self._logger.info("on_shutdown ends")


 def create_fastapi_app(
         *,
         title: str,
         description: str | None = None,
-        api_startup_event: BaseAPIStartupEvent | None = None,
-        api_shutdown_event: BaseAPIShutdownEvent | None = None,
+        api_startup_event: BaseAPIStartupEvent | None = BaseAPIStartupEvent(),
+        api_shutdown_event: BaseAPIShutdownEvent | None = BaseAPIShutdownEvent(),
         api_handle_exception_: Callable | None = simple_api_handle_exception
 ):
     app = FastAPI(
@@ -285,6 +297,9 @@ def create_fastapi_app(
     return app


+# ---
+
+
 def __example():
     pass

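
The practical effect of the reshuffle above is that the error-code enumeration is now addressed through APIErrorSO, and create_fastapi_app installs default startup/shutdown events instead of None. A sketch of the updated call sites (the route and the lookup are hypothetical):

import starlette.status

from arpakitlib.ar_fastapi_util import APIErrorSO, APIException, create_fastapi_app

app = create_fastapi_app(title="example-api")  # events now default to BaseAPIStartupEvent()/BaseAPIShutdownEvent()


@app.get("/thing/{thing_id}")
async def get_thing(thing_id: int):
    if thing_id != 1:  # hypothetical lookup
        # APIErrorCodes moved inside APIErrorSO in 1.5.29
        raise APIException(
            status_code=starlette.status.HTTP_404_NOT_FOUND,
            error_code=APIErrorSO.APIErrorCodes.not_found
        )
    return {"id": thing_id}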

--- a/arpakitlib/ar_http_request_util.py
+++ b/arpakitlib/ar_http_request_util.py
@@ -1,66 +1,96 @@
 # arpakit

 import asyncio
-import inspect
 import logging
 from datetime import timedelta

 import aiohttp
 import requests
+from aiohttp_socks import ProxyConnector

-from arpakitlib.ar_safe_sleep import safe_sleep
+from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

+_logger = logging.getLogger(__name__)

-def sync_make_request(*, method: str, url: str, **kwargs) -> requests.Response:
-    _logger = logging.getLogger(inspect.currentframe().f_code.co_name)

-    max_tries = 7
-    tries = 0
+def sync_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries: int = 7,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        **kwargs
+) -> requests.Response:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
     if "timeout" not in kwargs:
         kwargs["timeout"] = (timedelta(seconds=15).total_seconds(), timedelta(seconds=15).total_seconds())
+    if proxy_url_:
+        kwargs["proxies"] = {
+            "http": proxy_url_,
+            "https": proxy_url_
+        }
+    if "allow_redirects" not in kwargs:
+        kwargs["allow_redirects"] = True

     while True:
-        tries += 1
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            return requests.request(**kwargs)
-        except Exception as err:
-            _logger.warning(f"{tries}/{max_tries} {method} {url} {err}")
-            if tries >= max_tries:
-                raise Exception(err)
-            safe_sleep(timedelta(seconds=0.1).total_seconds())
+            response = requests.request(**kwargs)
+            if raise_for_status_:
+                response.raise_for_status()
+            return response
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries} {method} {url} {exception}")
+            if tries_counter >= max_tries:
+                raise exception
+            sync_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue


-async def async_make_request(*, method: str, url: str, **kwargs) -> aiohttp.ClientResponse:
-    _logger = logging.getLogger(inspect.currentframe().f_code.co_name)
-
-    max_tries = 7
-    tries = 0
+async def async_make_request(
+        *,
+        method: str = "GET",
+        url: str,
+        max_tries: int = 7,
+        proxy_url_: str | None = None,
+        raise_for_status_: bool = False,
+        **kwargs
+) -> aiohttp.ClientResponse:
+    tries_counter = 0

     kwargs["method"] = method
     kwargs["url"] = url
     if "timeout" not in kwargs:
         kwargs["timeout"] = aiohttp.ClientTimeout(total=timedelta(seconds=15).total_seconds())
+    if "allow_redirects" in kwargs:
+        kwargs["allow_redirects"] = True
+
+    proxy_connector: ProxyConnector | None = None
+    if proxy_url_:
+        proxy_connector = ProxyConnector.from_url(proxy_url_)

     while True:
-        tries += 1
-        _logger.info(f"{method} {url}")
+        tries_counter += 1
+        _logger.info(f"{method} {url} {kwargs.get('params')}")
         try:
-            async with aiohttp.ClientSession() as session:
+            async with aiohttp.ClientSession(connector=proxy_connector) as session:
                 async with session.request(**kwargs) as response:
+                    if raise_for_status_:
+                        response.raise_for_status()
                     await response.read()
                     return response
-        except Exception as err:
-            _logger.warning(f"{tries}/{max_tries} {method} {url} {err}")
-            if tries >= max_tries:
-                raise Exception(err)
-            await asyncio.sleep(timedelta(seconds=0.1).total_seconds())
+        except BaseException as exception:
+            _logger.warning(f"{tries_counter}/{max_tries} {method} {url} {exception}")
+            if tries_counter >= max_tries:
+                raise exception
+            await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
             continue

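
Given the new signatures, the keyword-only knobs can be exercised as below (a usage sketch; the URLs and proxy address are placeholders):

import asyncio

from arpakitlib.ar_http_request_util import async_make_request, sync_make_request

# Sync: bounded retries, raising inside the retry loop on HTTP error statuses.
response = sync_make_request(
    url="https://example.com/api/ping",  # placeholder URL
    max_tries=3,
    raise_for_status_=True
)
print(response.status_code)


async def main():
    # Async: proxying goes through aiohttp_socks.ProxyConnector.from_url(...).
    response = await async_make_request(
        url="https://example.com/api/ping",    # placeholder URL
        proxy_url_="socks5://127.0.0.1:9050",  # placeholder proxy
        raise_for_status_=True
    )
    print(response.status)


asyncio.run(main())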

--- /dev/null
+++ b/arpakitlib/ar_operation_execution_util.py
@@ -0,0 +1,311 @@
+# arpakit
+
+from __future__ import annotations
+
+import logging
+import traceback
+from datetime import datetime, timedelta
+from typing import Any
+
+from sqlalchemy import TIMESTAMP, TEXT, asc
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column, Session
+
+from arpakitlib.ar_base_worker import BaseWorker
+from arpakitlib.ar_datetime_util import now_utc_dt
+from arpakitlib.ar_dict_util import combine_dicts
+from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
+from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_fastapi_util import BaseAPISO
+from arpakitlib.ar_sqlalchemy_model_util import SimpleDBM
+
+_ARPAKIT_LIB_MODULE_VERSION = "3.0"
+
+
+class OperationDBM(SimpleDBM):
+    __tablename__ = "operation"
+
+    class Statuses(EasyEnumeration):
+        waiting_for_execution = "waiting_for_execution"
+        executing = "executing"
+        executed_without_error = "executed_without_error"
+        executed_with_error = "executed_with_error"
+
+    class Types(EasyEnumeration):
+        healthcheck_ = "healthcheck"
+        raise_fake_exception = "raise_fake_exception"
+
+    status: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Statuses.waiting_for_execution,
+        server_default=Statuses.waiting_for_execution, nullable=False
+    )
+    type: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Types.healthcheck_, nullable=False
+    )
+    execution_start_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    execution_finish_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    input_data: Mapped[dict[str, Any]] = mapped_column(
+        JSONB,
+        insert_default={},
+        server_default="{}",
+        nullable=False
+    )
+    output_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+    error_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+
+    def raise_if_executed_with_error(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) executed with error, error_data={self.error_data}"
+            )
+
+    def raise_if_error_data(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) has error_data, error_data={self.error_data}"
+            )
+
+    @property
+    def duration(self) -> timedelta | None:
+        if self.execution_start_dt is None or self.execution_finish_dt is None:
+            return None
+        return self.execution_finish_dt - self.execution_start_dt
+
+    @property
+    def duration_total_seconds(self) -> float | None:
+        if self.duration is None:
+            return None
+        return self.duration.total_seconds()
+
+
+class OperationSO(BaseAPISO):
+    id: int
+    long_id: str
+    creation_dt: datetime
+    execution_start_dt: datetime | None
+    execution_finish_dt: datetime | None
+    status: str
+    type: str
+    input_data: dict[str, Any]
+    output_data: dict[str, Any]
+    error_data: dict[str, Any]
+    duration_total_seconds: float | None
+
+
+def get_operation_for_execution(
+        *,
+        easy_sql_alchemy_db: EasySQLAlchemyDB,
+        filter_operation_type: str | None = None
+) -> OperationDBM | None:
+    with easy_sql_alchemy_db.new_session() as session:
+        query = (
+            session
+            .query(OperationDBM)
+            .filter(OperationDBM.status == OperationDBM.Statuses.waiting_for_execution)
+        )
+        if filter_operation_type:
+            query = query.filter(OperationDBM.type == filter_operation_type)
+        query = query.order_by(asc(OperationDBM.creation_dt))
+        operation_dbm: OperationDBM | None = query.first()
+        return operation_dbm
+
+
+def get_operation_by_id(
+        *,
+        session: Session,
+        filter_operation_id: int,
+        strict: bool = False
+) -> OperationDBM | None:
+    query = (
+        session
+        .query(OperationDBM)
+        .filter(OperationDBM.id == filter_operation_id)
+    )
+    if strict:
+        return query.one()
+    else:
+        return query.one_or_none()
+
+
+class BaseOperationExecutor:
+    def __init__(self, *, easy_sql_alchemy_db: EasySQLAlchemyDB):
+        self._logger = logging.getLogger(self.__class__.__name__)
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    async def async_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            await self.async_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish async_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        return operation_dbm
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        if operation_dbm.type == OperationDBM.Types.healthcheck_:
+            self._logger.info("healthcheck")
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception:
+            self._logger.info("raise_fake_exception")
+            raise Exception("raise_fake_exception")
+        else:
+            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
+        return operation_dbm
+
+    def sync_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        self._logger.info(
+            f"start sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+        )
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_start_dt = now_utc_dt()
+            operation_dbm.status = OperationDBM.Statuses.executing
+            session.commit()
+            session.refresh(operation_dbm)
+
+        exception: BaseException | None = None
+        traceback_str: str | None = None
+
+        try:
+            self.sync_execute_operation(operation_dbm=operation_dbm)
+        except BaseException as exception_:
+            self._logger.exception(exception_)
+            exception = exception_
+            traceback_str = traceback.format_exc()
+
+        with self.easy_sql_alchemy_db.new_session() as session:
+            operation_dbm: OperationDBM = get_operation_by_id(
+                session=session, filter_operation_id=operation_dbm.id, strict=True
+            )
+            operation_dbm.execution_finish_dt = now_utc_dt()
+            if exception:
+                operation_dbm.status = OperationDBM.Statuses.executed_with_error
+                operation_dbm.error_data = combine_dicts(
+                    {"exception": str(exception), "traceback_str": traceback_str},
+                    operation_dbm.error_data
+                )
+            else:
+                operation_dbm.status = OperationDBM.Statuses.executed_without_error
+            session.commit()
+            session.refresh(operation_dbm)
+
+        self._logger.info(
+            f"finish sync_safe_execute_operation"
+            f", operation_dbm.id={operation_dbm.id}"
+            f", operation_dbm.type={operation_dbm.type}"
+            f", operation_dbm.duration={operation_dbm.duration}"
+        )
+
+        return operation_dbm
+
+
+class ExecuteOperationWorker(BaseWorker):
+
+    def __init__(
+            self,
+            *,
+            easy_sql_alchemy_db: EasySQLAlchemyDB,
+            operation_executor: BaseOperationExecutor,
+            need_operation_type: str | None = None
+    ):
+        super().__init__()
+        self.easy_sql_alchemy_db = easy_sql_alchemy_db
+        self.timeout_after_run = timedelta(seconds=0.1).total_seconds()
+        self.timeout_after_err_in_run = timedelta(seconds=1).total_seconds()
+        self.operation_executor = operation_executor
+        self.need_operation_type = need_operation_type
+
+    async def async_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        await self.async_execute_operation(operation_dbm=operation_dbm)
+
+    async def async_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
+
+    def sync_on_startup(self):
+        self.easy_sql_alchemy_db.init()
+
+    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+        return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run(self):
+        operation_dbm: OperationDBM | None = get_operation_for_execution(
+            easy_sql_alchemy_db=self.easy_sql_alchemy_db,
+            filter_operation_type=self.need_operation_type
+        )
+
+        if not operation_dbm:
+            return
+
+        self.sync_execute_operation(operation_dbm=operation_dbm)
+
+    def sync_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
+        self._logger.exception(exception)
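
A sketch of how the new module composes: a custom executor falls back to the base healthcheck handling, and the worker polls the queue. The EasySQLAlchemyDB constructor is not shown in this diff, so its db_url argument below is an assumption, and the "send_email" type is hypothetical:

from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
from arpakitlib.ar_operation_execution_util import (
    BaseOperationExecutor,
    ExecuteOperationWorker,
    OperationDBM
)


class MyOperationExecutor(BaseOperationExecutor):
    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
        if operation_dbm.type == "send_email":  # hypothetical custom type
            ...  # do the work; any raise marks the row executed_with_error
            return operation_dbm
        return await super().async_execute_operation(operation_dbm=operation_dbm)


easy_db = EasySQLAlchemyDB(db_url="postgresql://user:pass@localhost/db")  # assumed constructor
worker = ExecuteOperationWorker(
    easy_sql_alchemy_db=easy_db,
    operation_executor=MyOperationExecutor(easy_sql_alchemy_db=easy_db)
)
worker.sync_safe_run()  # picks waiting_for_execution rows, oldest creation_dt first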

--- a/arpakitlib/ar_schedule_uust_api_client.py
+++ b/arpakitlib/ar_schedule_uust_api_client.py
@@ -3,15 +3,15 @@
 import asyncio
 import hashlib
 import logging
-from datetime import timedelta, datetime
-from typing import Optional, Any
+from datetime import datetime
+from typing import Any

-import aiohttp
 import pytz
-from aiohttp import ClientTimeout
-from aiohttp_socks import ProxyConnector

 from arpakitlib.ar_dict_util import combine_dicts
+from arpakitlib.ar_http_request_util import async_make_request
+from arpakitlib.ar_logging_util import setup_normal_logging
+from arpakitlib.ar_type_util import raise_for_type

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

@@ -70,42 +70,23 @@ class ScheduleUUSTAPIClient:
             password_first_part + datetime.now(tz=pytz.timezone("Asia/Yekaterinburg")).strftime("%Y-%m-%d")
         )

-    async def _async_get_request(
+    async def _async_make_http_get_request(
             self,
             *,
             url: str,
-            params: Optional[dict] = None
+            params: dict | None = None
     ) -> dict[str, Any]:
-        max_tries = 7
-        tries = 0
-
-        while True:
-            self._logger.info(f"GET {url} {params} proxy={self.api_proxy_url}")
-
-            tries += 1
-
-            connector = (
-                ProxyConnector.from_url(self.api_proxy_url)
-                if self.api_proxy_url is not None
-                else None
-            )
-
-            try:
-                async with aiohttp.ClientSession(connector=connector) as session:
-                    async with session.get(
-                            url=url,
-                            params=params,
-                            timeout=ClientTimeout(total=timedelta(seconds=15).total_seconds())
-                    ) as response:
-                        response.raise_for_status()
-                        return await response.json()
-            except Exception as err:
-                self._logger.warning(f"{tries}/{max_tries} {err} GET {url} {params} proxy={self.api_proxy_url}")
-                if tries >= max_tries:
-                    raise err
-                await asyncio.sleep(timedelta(seconds=1).total_seconds())
-                self._logger.warning(f"{tries}/{max_tries} AGAIN GET {url} {params} proxy={self.api_proxy_url}")
-                continue
+        params = combine_dicts(self.auth_params(), params)
+        response = await async_make_request(
+            url=url,
+            method="GET",
+            params=params,
+            proxy_url_=self.api_proxy_url,
+            max_tries=9
+        )
+        json_data = await response.json()
+        raise_for_type(json_data, dict)
+        return json_data

     async def get_current_week(self) -> int:
         """
@@ -115,10 +96,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-        params = combine_dicts(self.auth_params(), {"ask": "get_current_week"})
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=params
+            params={"ask": "get_current_week"}
         )
         return json_data["data"][0]

@@ -130,10 +110,9 @@ class ScheduleUUSTAPIClient:
         }
         """

-        params = combine_dicts(self.auth_params(), {"ask": "get_current_semestr"})
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=params
+            params={"ask": "get_current_semestr"}
         )
         return json_data["data"][0]

@@ -152,42 +131,37 @@ class ScheduleUUSTAPIClient:
         }
         """

-        params = combine_dicts(self.auth_params(), {"ask": "get_group_list"})
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=params
+            params={"ask": "get_group_list"}
         )
         return list(json_data["data"].values())

     async def get_group_lessons(self, group_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params = combine_dicts(
-            self.auth_params(),
-            {
-                "ask": "get_group_schedule",
-                "id": group_id
-            }
-        )
+        params = {
+            "ask": "get_group_schedule",
+            "id": group_id
+        }
         if semester is not None:
             params["semester"] = semester
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
         return json_data["data"]

     async def get_teachers(self) -> list[dict[str, Any]]:
-        params = combine_dicts(self.auth_params(), {"ask": "get_teacher_list"})
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
-            params=params
+            params={"ask": "get_teacher_list"}
         )
         return list(json_data["data"].values())

     async def get_teacher_lessons(self, teacher_id: int, semester: str | None = None) -> list[dict[str, Any]]:
-        params = combine_dicts(self.auth_params(), {"ask": "get_teacher_schedule", "id": teacher_id})
+        params = {"ask": "get_teacher_schedule", "id": teacher_id}
         if semester is not None:
             params["semester"] = semester
-        json_data = await self._async_get_request(
+        json_data = await self._async_make_http_get_request(
             url=self.api_url,
             params=params
         )
@@ -204,13 +178,24 @@ class ScheduleUUSTAPIClient:
             return False
         return True

+    async def check_all(self):
+        await self.get_groups()
+        await self.get_teachers()
+        await self.get_current_semester()
+        await self.get_current_week()
+

 def __example():
     pass


 async def __async_example():
-    pass
+    setup_normal_logging()
+    client = ScheduleUUSTAPIClient(
+        api_login="arpakit",
+        api_password_first_part="bAEb2wXJNNZ8"
+    )
+    await client.check_all()


 if __name__ == '__main__':

--- /dev/null
+++ b/arpakitlib/ar_sleep_util.py
@@ -0,0 +1,68 @@
+# arpakit
+
+import asyncio
+import logging
+import math
+from time import sleep
+
+from asyncpg.pgproto.pgproto import timedelta
+
+_ARPAKIT_LIB_MODULE_VERSION = "3.0"
+
+_logger = logging.getLogger(__name__)
+
+
+def sync_safe_sleep(n: timedelta | float | int):
+    _logger.info(f"sync_safe_sleep ({n}) starts")
+
+    if isinstance(n, timedelta):
+        n = n.total_seconds()
+    elif isinstance(n, int):
+        n = float(n)
+    elif isinstance(n, float):
+        n = n
+    else:
+        raise TypeError(f"n={n}, type={type(n)}, n: timedelta | float | int")
+
+    n: float = n
+
+    frac, int_part = math.modf(n)
+    for i in range(int(int_part)):
+        sleep(1)
+    sleep(frac)
+
+    _logger.info(f"sync_safe_sleep ({n}) ends")
+
+
+async def async_safe_sleep(n: timedelta | float | int):
+    _logger.info(f"async_safe_sleep ({n}) starts")
+
+    if isinstance(n, timedelta):
+        n = n.total_seconds()
+    elif isinstance(n, int):
+        n = float(n)
+    elif isinstance(n, float):
+        n = n
+    else:
+        raise TypeError(f"n={n}, type={type(n)}, n: timedelta | float | int")
+
+    n: float = n
+
+    _logger.info(f"sleep_time ({n}) starts")
+    await asyncio.sleep(n)
+    _logger.info(f"sleep_time ({n}) ends")
+
+    _logger.info(f"async_safe_sleep ({n}) ends")
+
+
+def __example():
+    pass
+
+
+async def __async_example():
+    pass
+
+
+if __name__ == '__main__':
+    __example()
+    asyncio.run(__async_example())
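
Both helpers normalize their argument: a timedelta is converted via total_seconds(), an int is coerced to float, and anything else raises TypeError. For example:

import asyncio
from datetime import timedelta

from arpakitlib.ar_sleep_util import async_safe_sleep, sync_safe_sleep

sync_safe_sleep(timedelta(seconds=2.5))  # sleeps in 1-second slices plus the 0.5s remainder
sync_safe_sleep(3)                       # int is accepted and coerced to float
asyncio.run(async_safe_sleep(0.1))       # a single asyncio.sleep under the hood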

--- a/arpakitlib/ar_sqlalchemy_model_util.py
+++ b/arpakitlib/ar_sqlalchemy_model_util.py
@@ -42,3 +42,6 @@ class SimpleDBM(BaseDBM):
     creation_dt: Mapped[datetime] = mapped_column(
         TIMESTAMP(timezone=True), insert_default=now_utc_dt, index=True, nullable=False
     )
+
+    def __repr__(self):
+        return f"{self.__class__.__name__.removesuffix('DBM')} (id={self.id})"

--- a/arpakitlib/ar_yookassa_api_client.py
+++ b/arpakitlib/ar_yookassa_api_client.py
@@ -13,7 +13,7 @@ import requests

 from arpakitlib.ar_dict_util import combine_dicts
 from arpakitlib.ar_enumeration import EasyEnumeration
-from arpakitlib.ar_safe_sleep import safe_sleep
+from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep
 from arpakitlib.ar_type_util import raise_for_type

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"
@@ -63,7 +63,7 @@ class YookassaAPIClient:
                 self._logger.warning(f"{tries}/{max_tries} {err} {method} {url}")
                 if tries >= max_tries:
                     raise YookassaAPIException(err)
-                safe_sleep(timedelta(seconds=0.1).total_seconds())
+                sync_safe_sleep(timedelta(seconds=0.1).total_seconds())
                 continue

     async def _async_make_request(self, method: str, url: str, **kwargs) -> aiohttp.ClientResponse:
@@ -90,7 +90,7 @@ class YookassaAPIClient:
                 self._logger.warning(f"{tries}/{max_tries} {err} {method} {url}")
                 if tries >= max_tries:
                     raise YookassaAPIException(err)
-                await asyncio.sleep(timedelta(seconds=0.1).total_seconds())
+                await async_safe_sleep(timedelta(seconds=0.1).total_seconds())
                 continue

     def sync_create_payment(

--- a/arpakitlib-1.5.27.dist-info/METADATA
+++ b/arpakitlib-1.5.29.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: arpakitlib
-Version: 1.5.27
+Version: 1.5.29
 Summary: arpakitlib
 Home-page: https://github.com/ARPAKIT-Company/arpakitlib
 License: Apache-2.0
@@ -33,7 +33,7 @@ Requires-Dist: pymongo (>=4.10.1,<5.0.0)
 Requires-Dist: pytz (>=2024.2,<2025.0)
 Requires-Dist: pyzabbix (>=1.3.1,<2.0.0)
 Requires-Dist: redis (>=5.2.0,<6.0.0)
-Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: requests[socks] (>=2.32.3,<3.0.0)
 Requires-Dist: sqladmin (>=0.20.1,<0.21.0)
 Requires-Dist: sqlalchemy (>=2.0.36,<3.0.0)
 Requires-Dist: twine (>=6.0.1,<7.0.0)

--- a/arpakitlib-1.5.27.dist-info/RECORD
+++ b/arpakitlib-1.5.29.dist-info/RECORD
@@ -6,14 +6,14 @@ arpakitlib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arpakitlib/ar_additional_model_util.py,sha256=Eq7pvVUgO2L3gYBocm-pP9TrztTb8VNCp7LdRMml-F8,237
 arpakitlib/ar_aiogram_util.py,sha256=IA48PRMIJrPLMhFA0Eb2vQpLcqm98o9tKfC3pDy8qsI,12022
 arpakitlib/ar_arpakit_lib_module_util.py,sha256=YzobxRG8-QJ1L5r_8wBdL668CwXoQRIM1Cpec1o2WBc,5447
-arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=mswH5a7ek-xs-smtTTaRTPPmeoXkrAaoDoi-u6HS0BM,18205
+arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=V8n5XxkrsZo7ASNAepD0mqgRtI7qIleCDgk7WAmdLW8,18244
 arpakitlib/ar_arpakitlib_info.py,sha256=cvgrLnEznmYkCAg1adbY46ATjD6GJd-Yk8PTgOPjpKM,248
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
-arpakitlib/ar_base_worker.py,sha256=NqZKJIij-7rmisPR2k4pl3nEnWCBpMqcvUDYXYIW1_Q,2849
+arpakitlib/ar_base_worker.py,sha256=Y6yRFp1nhhTyv-TeGDao-3q4ICVVJ2zVsKdIlHdiGSI,2897
 arpakitlib/ar_cache_file.py,sha256=m73_vU6bMjXsIurSPO9VCLcHsiHk8ITFS0LNjfI_8Uw,3471
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419
-arpakitlib/ar_dream_ai_api_client.py,sha256=73iNDQc5x4fSiWNNuniNPlAwju4kVI_pL3dbMf30Jyg,3990
+arpakitlib/ar_dream_ai_api_client.py,sha256=hDPL9wbG4MjIuhn2ed6qepueogANIkt-NddhhiPUv0Y,4029
 arpakitlib/ar_easy_sqlalchemy_util.py,sha256=HuKRBD4XoxeZ5tpXlDTol5Y6AOzuCBluJQHfyRjlRqs,3224
 arpakitlib/ar_encrypt_and_decrypt_util.py,sha256=GhWnp7HHkbhwFVVCzO1H07m-5gryr4yjWsXjOaNQm1Y,520
 arpakitlib/ar_enumeration.py,sha256=6KUJYOabHDPLfdigBVN0ZI4ZOUJh8TkL0g4o92Hke2I,2254
@@ -36,11 +36,11 @@ arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css,sha256=jzPZlgJTFwSdSphk9C
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map,sha256=5wq8eXMLU6Zxb45orZPL1zAsBFJReFw6GjYqGpUX3hg,262650
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js,sha256=ffrLZHHEQ_g84A-ul3yWa10Kk09waOAxHcQXPuZuavg,339292
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map,sha256=9UhIW7MqCOZPAz1Sl1IKfZUuhWU0p-LJqrnjjJD9Xhc,1159454
-arpakitlib/ar_fastapi_util.py,sha256=ugnVVI0zFRoeCkoGQKMNEGxaM8ly5wBAyHajPZk04CE,9655
+arpakitlib/ar_fastapi_util.py,sha256=zru-le-J139KpLlPeo9DUjMuDygh4tR2GhAJjlr7ROs,9959
 arpakitlib/ar_file_storage_in_dir.py,sha256=D3e3rGuHoI6xqAA5mVvEpVVpOWY1jyjNsjj2UhyHRbE,3674
 arpakitlib/ar_generate_env_example.py,sha256=WseNlk_So6mTVQ2amMuigWYV4ZVmd940POvXtodoYj0,325
 arpakitlib/ar_hash_util.py,sha256=Iqy6KBAOLBQMFLWv676boI5sV7atT2B-fb7aCdHOmIQ,340
-arpakitlib/ar_http_request_util.py,sha256=ZVM8p5XWrEj7rhlFLrUMvGcS6M8O-GlFHqOsrUXQf2I,2073
+arpakitlib/ar_http_request_util.py,sha256=jx0ggOVXLvsh2hVUE7tasWaEsUnXbO0WS8-rJb0Y6BU,3063
 arpakitlib/ar_ip_util.py,sha256=aEAa1Hvobh9DWX7cmBAPLqnXSTiKe2hRk-WJaiKMaI8,1009
 arpakitlib/ar_json_db.py,sha256=CEyhIU4WuNmX5mqwBVYxUKSdpFelXvWmf_tJ1fuxMSE,7187
 arpakitlib/ar_json_util.py,sha256=S8CskZ3uoYuJGCy1GhQ8Ikhn-fxXk-9JpLUbBvXADqI,833
@@ -51,19 +51,20 @@ arpakitlib/ar_logging_util.py,sha256=c5wX2FLqCzb4aLckLVhIJ7go52rJQ4GN9dIkJ6KMc3o
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=qCRSWlSgx-3yU0NRHZDQ5lCOmuZKcz2Na3py9nr6hJM,1618
 arpakitlib/ar_openai_util.py,sha256=d5Aj1O2yo_zYLZCLeOLvuveYYxA2jGOqhMs1oUbuVk8,1210
+arpakitlib/ar_operation_execution_util.py,sha256=i-GDXFCAXrPwi-juH1sKWNsIPQZ036QxyC4n9oN_RWw,11802
 arpakitlib/ar_parse_command.py,sha256=qpr2OwG3Bf7DFiL9S3iWgtbvtE80RSC35E5zFJvjG1I,2714
 arpakitlib/ar_postgresql_util.py,sha256=SAHEmAyMkZe516uk2gS830v_Wn2kRUZUYNcTNwmgXJk,1160
 arpakitlib/ar_run_cmd.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072
-arpakitlib/ar_safe_sleep.py,sha256=b7vj6BrKLgYlUQup2t3qiwx0cA_jrkIX9MZjaraW0bM,409
-arpakitlib/ar_schedule_uust_api_client.py,sha256=DUke6CQFKKcQQJQnpZc5JPPmQpsOJba4K-6FYKyBj-0,6794
-arpakitlib/ar_sqlalchemy_model_util.py,sha256=OZCciQk-rCBcAzSf4In27Mik9GmeHPKN7CZr3FA3umk,1275
+arpakitlib/ar_schedule_uust_api_client.py,sha256=hShokh3uJuTPC78-AMW2mv1WSJMy4cAuVDJ4sLaoqMs,6043
+arpakitlib/ar_sleep_util.py,sha256=9ZN4Qo4eZ_q3hjM7vNBQjFRcH-9-sqv3QLSjnxVJE90,1405
+arpakitlib/ar_sqlalchemy_model_util.py,sha256=3zscvaloi9XY1NR70rJ4-jJlFUIqhmTbQ9wdvK-Yjf8,1379
 arpakitlib/ar_ssh_runner.py,sha256=jlnss4V4pziBN1rBzoK_lDiWm6nMOqGXfa6NFJSKH-Y,6796
 arpakitlib/ar_str_util.py,sha256=xSEzmsDvRiZVaxyqFFjcgzpphktCbXg2FHcvsd1DYpA,1885
 arpakitlib/ar_type_util.py,sha256=-h-SCsVl11eVo1u4hy2Asn0IfD5TIxmX3Ndug4AvnPE,1761
-arpakitlib/ar_yookassa_api_client.py,sha256=HOzhTggH_-BligDyqRg2fwhxYf8ULTnnFZkyn0PNdQM,6449
+arpakitlib/ar_yookassa_api_client.py,sha256=BwsTygaXf35AACVBl_09uYlSD_t-U1OOzbj58OOFT4Q,6480
 arpakitlib/ar_zabbix_util.py,sha256=MTQbmS0QpNCKNOGONNQHf6j7KTZsKGlIbd5rCH0R0WI,6313
-arpakitlib-1.5.27.dist-info/LICENSE,sha256=1jqWIkbnMxDfs_i0SXP5qbV6PHjBr1g8506oW7uPjfg,11347
-arpakitlib-1.5.27.dist-info/METADATA,sha256=ZJZ7aI5UygBLSoJmcxRN9Ib1Ec1_3Iu9lc7yNJ3CwfA,2323
-arpakitlib-1.5.27.dist-info/NOTICE,sha256=wHwmiq3wExfFfgMsE5U5TOBP9_l72ocIG82KurEels0,43
-arpakitlib-1.5.27.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-arpakitlib-1.5.27.dist-info/RECORD,,
+arpakitlib-1.5.29.dist-info/LICENSE,sha256=1jqWIkbnMxDfs_i0SXP5qbV6PHjBr1g8506oW7uPjfg,11347
+arpakitlib-1.5.29.dist-info/METADATA,sha256=BeKeVptEUesxJ2rNi-UpFTY2XuRep1w_3p1dXPN_4SM,2330
+arpakitlib-1.5.29.dist-info/NOTICE,sha256=wHwmiq3wExfFfgMsE5U5TOBP9_l72ocIG82KurEels0,43
+arpakitlib-1.5.29.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.5.29.dist-info/RECORD,,

--- a/arpakitlib/ar_safe_sleep.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# arpakit
-
-import logging
-import math
-from time import sleep
-
-_ARPAKIT_LIB_MODULE_VERSION = "3.0"
-
-_logger = logging.getLogger(__name__)
-
-
-def safe_sleep(sleep_time: float | int):
-    _logger.info(f"sleep_time={sleep_time}")
-    frac, int_part = math.modf(sleep_time)
-    for i in range(int(int_part)):
-        sleep(1)
-    sleep(frac)
-
-
-def __example():
-    pass
-
-
-if __name__ == '__main__':
-    __example()