arpakitlib 1.5.34__py3-none-any.whl → 1.5.37__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arpakitlib/ar_arpakit_schedule_uust_api_client.py +3 -3
- arpakitlib/{ar_enumeration.py → ar_enumeration_util.py} +1 -1
- arpakitlib/ar_fastapi_util.py +341 -136
- arpakitlib/ar_need_type_util.py +2 -2
- arpakitlib/ar_openai_util.py +1 -1
- arpakitlib/{ar_operation_util.py → ar_operation_execution_util.py} +50 -112
- arpakitlib/ar_sqlalchemy_model_util.py +76 -1
- arpakitlib/{ar_easy_sqlalchemy_util.py → ar_sqlalchemy_util.py} +2 -16
- arpakitlib/ar_type_util.py +4 -8
- arpakitlib/ar_yookassa_api_client.py +2 -2
- {arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/METADATA +1 -1
- {arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/RECORD +16 -17
- arpakitlib/ar_story_log_util.py +0 -40
- /arpakitlib/{ar_ssh_runner.py → ar_ssh_util.py} +0 -0
- {arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/LICENSE +0 -0
- {arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/NOTICE +0 -0
- {arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/WHEEL +0 -0
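Most of the churn in this release is mechanical: modules lose their "Easy" prefixes and gain a _util suffix, and the classes they export are renamed to match (EasyEnumeration becomes Enumeration, EasySQLAlchemyDB becomes SQLAlchemyDB, ar_operation_util.py becomes ar_operation_execution_util.py). A minimal migration sketch for downstream code, assuming the renamed modules keep the same public members as the diffs below show:

# Hypothetical migration sketch; the old names are taken from the removed lines in the diffs below.
# arpakitlib 1.5.34:
#   from arpakitlib.ar_enumeration import EasyEnumeration
#   from arpakitlib.ar_easy_sqlalchemy_util import EasySQLAlchemyDB
#   from arpakitlib.ar_operation_util import ExecuteOperationWorker
# arpakitlib 1.5.37:
from arpakitlib.ar_enumeration_util import Enumeration
from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB
from arpakitlib.ar_operation_execution_util import ExecuteOperationWorker
from arpakitlib.ar_sqlalchemy_model_util import OperationDBM, StoryLogDBM  # StoryLogDBM moved here from ar_story_log_util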
arpakitlib/ar_arpakit_schedule_uust_api_client.py
CHANGED
@@ -16,7 +16,7 @@ from aiohttp import ClientResponse, ClientTimeout, ClientResponseError
 from pydantic import ConfigDict, BaseModel

 from arpakitlib.ar_dict_util import combine_dicts
-from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_enumeration_util import Enumeration
 from arpakitlib.ar_json_util import safely_transfer_to_json_str
 from arpakitlib.ar_sleep_util import async_safe_sleep
 from arpakitlib.ar_type_util import raise_for_type
@@ -24,7 +24,7 @@ from arpakitlib.ar_type_util import raise_for_type
 _ARPAKIT_LIB_MODULE_VERSION = "3.0"


-class Weekdays(EasyEnumeration):
+class Weekdays(Enumeration):
     monday = 1
     tuesday = 2
     wednesday = 3
@@ -34,7 +34,7 @@ class Weekdays(EasyEnumeration):
     sunday = 7


-class Months(EasyEnumeration):
+class Months(Enumeration):
     january = 1
     february = 2
     march = 3
arpakitlib/ar_fastapi_util.py
CHANGED
@@ -6,6 +6,8 @@ import asyncio
 import logging
 import os.path
 import pathlib
+import threading
+import traceback
 from datetime import datetime
 from typing import Any, Callable

@@ -14,21 +16,28 @@ import fastapi.responses
 import starlette.exceptions
 import starlette.requests
 import starlette.status
-from fastapi import FastAPI
+from fastapi import FastAPI, APIRouter, Query
 from fastapi.openapi.docs import get_swagger_ui_html, get_redoc_html
+from jaraco.context import suppress
 from pydantic import BaseModel, ConfigDict
 from starlette.middleware.cors import CORSMiddleware
 from starlette.staticfiles import StaticFiles

-from arpakitlib.
-from arpakitlib.
+from arpakitlib.ar_dict_util import combine_dicts
+from arpakitlib.ar_enumeration_util import Enumeration
+from arpakitlib.ar_json_util import safely_transfer_to_json_str_to_json_obj
+from arpakitlib.ar_logging_util import setup_normal_logging
+from arpakitlib.ar_operation_execution_util import ExecuteOperationWorker
+from arpakitlib.ar_sqlalchemy_model_util import StoryLogDBM
+from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB
+from arpakitlib.ar_type_util import raise_for_type, raise_if_not_async_func

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

 _logger = logging.getLogger(__name__)


-class BaseAPISchema(BaseModel):
+class BaseSchema(BaseModel):
     model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True, from_attributes=True)

     @classmethod
@@ -43,26 +52,22 @@ class BaseAPISchema(BaseModel):
         super().__init_subclass__(**kwargs)


-class
+class BaseSI(BaseSchema):
     pass


-class
+class BaseSO(BaseSchema):
     pass


-class
-    data: dict[str, Any] = {}
-
-
-class BaseAPISimpleSO(BaseAPISO):
+class SimpleSO(BaseSO):
     id: int
     long_id: str
     creation_dt: datetime


-class
-    class APIErrorCodes(
+class ErrorSO(BaseSO):
+    class APIErrorCodes(Enumeration):
         cannot_authorize = "CANNOT_AUTHORIZE"
         unknown_error = "UNKNOWN_ERROR"
         error_in_request = "ERROR_IN_REQUEST"
@@ -75,10 +80,33 @@ class APIErrorSO(BaseAPISO):
     error_data: dict[str, Any] = {}


+class RawDataSO(BaseSO):
+    data: dict[str, Any] = {}
+
+
+class StoryLogSO(SimpleSO):
+    level: str
+    title: str | None
+    data: dict[str, Any]
+
+
+class OperationSO(SimpleSO):
+    execution_start_dt: datetime | None
+    execution_finish_dt: datetime | None
+    status: str
+    type: str
+    input_data: dict[str, Any]
+    output_data: dict[str, Any]
+    error_data: dict[str, Any]
+    duration_total_seconds: float | None
+
+
 class APIJSONResponse(fastapi.responses.JSONResponse):
-    def __init__(self, *, content:
+    def __init__(self, *, content: BaseSO, status_code: int = starlette.status.HTTP_200_OK):
+        self.content_ = content
+        self.status_code_ = status_code
         super().__init__(
-            content=content.model_dump(
+            content=safely_transfer_to_json_str_to_json_obj(content.model_dump()),
             status_code=status_code
         )

@@ -88,7 +116,7 @@ class APIException(fastapi.exceptions.HTTPException):
         self,
         *,
         status_code: int = starlette.status.HTTP_400_BAD_REQUEST,
-        error_code: str | None =
+        error_code: str | None = ErrorSO.APIErrorCodes.unknown_error,
         error_code_specification: str | None = None,
         error_description: str | None = None,
         error_data: dict[str, Any] | None = None
@@ -101,7 +129,7 @@ class APIException(fastapi.exceptions.HTTPException):
             error_data = {}
         self.error_data = error_data

-        self.
+        self.error_so = ErrorSO(
             has_error=True,
             error_code=self.error_code,
             error_specification=self.error_code_specification,
@@ -111,142 +139,240 @@ class APIException(fastapi.exceptions.HTTPException):

         super().__init__(
             status_code=self.status_code,
-            detail=self.
+            detail=self.error_so.model_dump(mode="json")
         )


-def
-
-
-
-
-
-
-
-
-
-
-
-
+def create_handle_exception(
+        *,
+        funcs_before_response: list[Callable] | None = None,
+        async_funcs_after_response: list[Callable] | None = None,
+) -> Any:
+    if funcs_before_response is None:
+        funcs_before_response = []
+
+    if async_funcs_after_response is None:
+        async_funcs_after_response = []
+
+    async def handle_exception(
+            request: starlette.requests.Request, exception: Exception
+    ) -> APIJSONResponse:
+        status_code = starlette.status.HTTP_500_INTERNAL_SERVER_ERROR

-
+        error_so = ErrorSO(
+            has_error=True,
+            error_code=ErrorSO.APIErrorCodes.unknown_error,
+            error_data={
+                "exception_type": str(type(exception)),
+                "exception_str": str(exception),
+                "request.method": str(request.method),
+                "request.url": str(request.url),
+            }
+        )

-
-
+        if isinstance(exception, APIException):
+            old_error_data = error_so.error_data
+            error_so = exception.error_so
+            error_so.error_data = combine_dicts(old_error_data, error_so.error_data)
+            _need_exc_info = False
+
+        elif isinstance(exception, starlette.exceptions.HTTPException):
+            status_code = exception.status_code
+            if status_code in (starlette.status.HTTP_403_FORBIDDEN, starlette.status.HTTP_401_UNAUTHORIZED):
+                error_so.error_code = ErrorSO.APIErrorCodes.cannot_authorize
+                _need_exc_info = False
+            elif status_code == starlette.status.HTTP_404_NOT_FOUND:
+                error_so.error_code = ErrorSO.APIErrorCodes.not_found
+                _need_exc_info = False
+            else:
+                status_code = starlette.status.HTTP_500_INTERNAL_SERVER_ERROR
+                _need_exc_info = True
+            with suppress(Exception):
+                error_so.error_data["exception.detail"] = exception.detail
+
+        elif isinstance(exception, fastapi.exceptions.RequestValidationError):
+            status_code = starlette.status.HTTP_422_UNPROCESSABLE_ENTITY
+            error_so.error_code = ErrorSO.APIErrorCodes.error_in_request
+            with suppress(Exception):
+                error_so.error_data["exception.errors"] = str(exception.errors()) if exception.errors() else {}
+            _need_exc_info = False

-        elif isinstance(exception, starlette.exceptions.HTTPException):
-            status_code = exception.status_code
-            if status_code in (starlette.status.HTTP_403_FORBIDDEN, starlette.status.HTTP_401_UNAUTHORIZED):
-                easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.cannot_authorize
-            elif status_code == starlette.status.HTTP_404_NOT_FOUND:
-                easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.not_found
         else:
-
-
-
-
-                    or isinstance(exception.detail, str)
-                    or isinstance(exception.detail, int)
-                    or isinstance(exception.detail, float)
-                    or isinstance(exception.detail, bool)
-            ):
-                easy_api_error_so.error_data["raw"] = exception.detail
+            status_code = starlette.status.HTTP_500_INTERNAL_SERVER_ERROR
+            error_so.error_code = ErrorSO.APIErrorCodes.unknown_error
+            _logger.exception(exception)
+            _need_exc_info = True

-
-
-            easy_api_error_so.error_code = APIErrorSO.APIErrorCodes.error_in_request
-            easy_api_error_so.error_data["raw"] = str(exception.errors()) if exception.errors() else {}
+        if error_so.error_code:
+            error_so.error_code = error_so.error_code.upper().replace(" ", "_").strip()

-
-
-
-
-            _logger.exception(exception)
-
-        if easy_api_error_so.error_code:
-            easy_api_error_so.error_code = easy_api_error_so.error_code.upper().replace(" ", "_").strip()
+        if error_so.error_code_specification:
+            error_so.error_code_specification = (
+                error_so.error_code_specification.upper().replace(" ", "_").strip()
+            )

-
-
-
+        if _need_exc_info:
+            _logger.error(str(exception), exc_info=exception)
+        else:
+            _logger.error(str(exception))
+
+        _kwargs = {}
+        for func in funcs_before_response:
+            data = func(
+                error_so=error_so, status_code=status_code, request=request, exception=exception, **_kwargs
+            )
+            if asyncio.iscoroutine(data):
+                data = await data
+            if data is not None:
+                status_code, error_so, _kwargs = data[0], data[1], data[2]
+                raise_for_type(status_code, int)
+                raise_for_type(error_so, ErrorSO)
+                raise_for_type(_kwargs, dict)
+
+        for async_func_after_response in async_funcs_after_response:
+            raise_if_not_async_func(async_func_after_response)
+            _ = asyncio.create_task(async_func_after_response(
+                error_so=error_so, status_code=status_code, request=request, exception=exception
+            ))
+
+        return APIJSONResponse(
+            content=error_so,
+            status_code=status_code
         )

-        return
-            content=easy_api_error_so,
-            status_code=status_code
-        )
+    return handle_exception


-def
-
+def create_handle_exception_creating_story_log(
+        *,
+        sqlalchemy_db: SQLAlchemyDB
+) -> Callable:
+    def handle_exception(
+            *,
+            error_so: ErrorSO,
+            status_code: int,
+            request: starlette.requests.Request,
+            exception: Exception,
+            **kwargs
+    ) -> (int, ErrorSO, dict[str, Any]):
+        sqlalchemy_db.init()
+        traceback_str = "".join(traceback.format_exception(type(exception), exception, exception.__traceback__))
+        with sqlalchemy_db.new_session() as session:
+            story_log_dbm = StoryLogDBM(
+                level=StoryLogDBM.Levels.error,
+                title=str(exception),
+                data={
+                    "error_so": error_so.model_dump(),
+                    "traceback_str": traceback_str
+                }
+            )
+            session.add(story_log_dbm)
+            session.commit()
+            session.refresh(story_log_dbm)
+        error_so.error_data.update({"story_log_long_id": story_log_dbm.long_id})
+        kwargs["story_log_id"] = story_log_dbm.id
+        return status_code, error_so, kwargs
+
+    return handle_exception
+
+
+def add_exception_handler_to_app(*, app: FastAPI, handle_exception: Callable) -> FastAPI:
+    app.add_exception_handler(
         exc_class_or_status_code=Exception,
-        handler=
+        handler=handle_exception
     )
-
+    app.add_exception_handler(
         exc_class_or_status_code=ValueError,
-        handler=
+        handler=handle_exception
     )
-
+    app.add_exception_handler(
         exc_class_or_status_code=fastapi.exceptions.RequestValidationError,
-        handler=
+        handler=handle_exception
     )
-
+    app.add_exception_handler(
         exc_class_or_status_code=starlette.exceptions.HTTPException,
-        handler=
-    )
-    return fastapi_app
-
-
-def add_middleware_cors_to_fastapi_app(*, fastapi_app: FastAPI) -> FastAPI:
-    fastapi_app.add_middleware(
-        CORSMiddleware,
-        allow_origins=["*"],
-        allow_credentials=True,
-        allow_methods=["*"],
-        allow_headers=["*"],
-    )
-    return fastapi_app
-
-
-def add_ar_fastapi_static_to_fastapi_app(*, fastapi_app: FastAPI):
-    ar_fastapi_static_dirpath = os.path.join(str(pathlib.Path(__file__).parent), "ar_fastapi_static")
-    fastapi_app.mount(
-        "/ar_fastapi_static",
-        StaticFiles(directory=ar_fastapi_static_dirpath),
-        name="ar_fastapi_static"
+        handler=handle_exception
     )
+    return app


-def
+def add_swagger_to_app(
         *,
-
+        app: FastAPI,
         favicon_url: str | None = None
 ):
-
+    app.mount(
+        "/ar_fastapi_static",
+        StaticFiles(directory=os.path.join(str(pathlib.Path(__file__).parent), "ar_fastapi_static")),
+        name="ar_fastapi_static"
+    )

-    @
+    @app.get("/docs", include_in_schema=False)
     async def custom_swagger_ui_html():
         return get_swagger_ui_html(
-            openapi_url=
-            title=
+            openapi_url=app.openapi_url,
+            title=app.title,
             swagger_js_url="/ar_fastapi_static/swagger-ui/swagger-ui-bundle.js",
             swagger_css_url="/ar_fastapi_static/swagger-ui/swagger-ui.css",
             swagger_favicon_url=favicon_url
         )

-    @
+    @app.get("/redoc", include_in_schema=False)
     async def custom_redoc_html():
         return get_redoc_html(
-            openapi_url=
-            title=
+            openapi_url=app.openapi_url,
+            title=app.title,
             redoc_js_url="/ar_fastapi_static/redoc/redoc.standalone.js",
             redoc_favicon_url=favicon_url
         )

-    return
+    return app
+
+
+def add_cors_to_app(*, app: FastAPI):
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
+    return app
+
+
+def add_needed_api_router_to_app(*, app: FastAPI):
+    api_router = APIRouter()
+
+    @api_router.get(
+        "/healthcheck",
+        response_model=ErrorSO,
+        status_code=starlette.status.HTTP_200_OK,
+        tags=["Healthcheck"]
+    )
+    async def _():
+        return APIJSONResponse(
+            status_code=starlette.status.HTTP_200_OK,
+            content=RawDataSO(data={"healthcheck": "healthcheck"})
+        )
+
+    @api_router.get(
+        "/arpakitlib",
+        response_model=ErrorSO,
+        status_code=starlette.status.HTTP_200_OK,
+        tags=["arpakitlib"]
+    )
+    async def _():
+        return APIJSONResponse(
+            status_code=starlette.status.HTTP_200_OK,
+            content=RawDataSO(data={"arpakitlib": "arpakitlib"})
+        )

+    app.include_router(router=api_router, prefix="")

-
+    return app
+
+
+class BaseStartupAPIEvent:
     def __init__(self, *args, **kwargs):
         self._logger = logging.getLogger(self.__class__.__name__)

@@ -255,7 +381,38 @@ class BaseAPIStartupEvent:
         self._logger.info("on_startup ends")


-class
+class InitSqlalchemyDBStartupAPIEvent(BaseStartupAPIEvent):
+    def __init__(self, sqlalchemy_db: SQLAlchemyDB):
+        super().__init__()
+        self.sqlalchemy_db = sqlalchemy_db
+
+    def async_on_startup(self, *args, **kwargs):
+        self.sqlalchemy_db.init()
+
+
+class SyncSafeRunExecuteOperationWorkerStartupAPIEvent(BaseStartupAPIEvent):
+    def __init__(self, execute_operation_worker: ExecuteOperationWorker):
+        super().__init__()
+        self.execute_operation_worker = execute_operation_worker
+
+    def async_on_startup(self, *args, **kwargs):
+        thread = threading.Thread(
+            target=self.execute_operation_worker.sync_safe_run,
+            daemon=True
+        )
+        thread.start()
+
+
+class AsyncSafeRunExecuteOperationWorkerStartupAPIEvent(BaseStartupAPIEvent):
+    def __init__(self, execute_operation_worker: ExecuteOperationWorker):
+        super().__init__()
+        self.execute_operation_worker = execute_operation_worker
+
+    def async_on_startup(self, *args, **kwargs):
+        _ = asyncio.create_task(self.execute_operation_worker.async_safe_run())
+
+
+class BaseShutdownAPIEvent:
     def __init__(self, *args, **kwargs):
         self._logger = logging.getLogger(self.__class__.__name__)

@@ -264,28 +421,70 @@ class BaseAPIShutdownEvent:
         self._logger.info("on_shutdown ends")


-class
-
-        super().__init__()
-        self.easy_sqlalchemy_db = easy_sqlalchemy_db
+class BaseTransmittedAPIData(BaseModel):
+    model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True, from_attributes=True)

-
-
+
+def get_transmitted_api_data(request: starlette.requests.Request) -> BaseTransmittedAPIData:
+    return request.app.state.transmitted_api_data
+
+
+def simple_api_router_for_testing():
+    router = APIRouter(tags=["Testing"])
+
+    @router.get(
+        "/raise_fake_exception_1",
+        response_model=ErrorSO
+    )
+    async def _():
+        raise fastapi.HTTPException(status_code=starlette.status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @router.get(
+        "/raise_fake_exception_2",
+        response_model=ErrorSO
+    )
+    async def _():
+        raise APIException(
+            error_code="raise_fake_exception_2",
+            error_code_specification="raise_fake_exception_2",
+            error_description="raise_fake_exception_2"
+        )
+
+    @router.get(
+        "/raise_fake_exception_3",
+        response_model=ErrorSO
+    )
+    async def _():
+        raise Exception("raise_fake_exception_3")
+
+    @router.get(
+        "/check_params",
+        response_model=ErrorSO
+    )
+    async def _(name: int = Query()):
+        return RawDataSO(data={"name": name})
+
+    return router


 def create_fastapi_app(
         *,
         title: str = "ARPAKITLIB FastAPI",
-        description: str | None =
-
-
-
+        description: str | None = "ARPAKITLIB FastAPI",
+        log_filepath: str | None = "./story.log",
+        handle_exception_: Callable | None = create_handle_exception(),
+        startup_api_events: list[BaseStartupAPIEvent] | None = None,
+        shutdown_api_events: list[BaseStartupAPIEvent] | None = None,
+        transmitted_api_data: BaseTransmittedAPIData = BaseTransmittedAPIData(),
+        api_router: APIRouter = simple_api_router_for_testing()
 ):
-
-
+    setup_normal_logging(log_filepath=log_filepath)
+
+    if not startup_api_events:
+        startup_api_events = [BaseStartupAPIEvent()]

-    if
-
+    if not shutdown_api_events:
+        shutdown_api_events = [BaseShutdownAPIEvent()]

     app = FastAPI(
         title=title,
@@ -293,25 +492,31 @@ def create_fastapi_app(
         docs_url=None,
         redoc_url=None,
         openapi_url="/openapi",
-        on_startup=[api_startup_event.async_on_startup for api_startup_event in
-        on_shutdown=[api_shutdown_event.async_on_shutdown for api_shutdown_event in
+        on_startup=[api_startup_event.async_on_startup for api_startup_event in startup_api_events],
+        on_shutdown=[api_shutdown_event.async_on_shutdown for api_shutdown_event in shutdown_api_events]
     )

-
+    app.state.transmitted_api_data = transmitted_api_data

-
+    add_cors_to_app(app=app)

-
-
-
-
+    add_swagger_to_app(app=app)
+
+    if handle_exception_:
+        add_exception_handler_to_app(
+            app=app,
+            handle_exception=handle_exception_
         )
     else:
-
-
-
+        add_exception_handler_to_app(
+            app=app,
+            handle_exception=create_handle_exception()
         )

+    add_needed_api_router_to_app(app=app)
+
+    app.include_router(router=api_router)
+
     return app

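Taken together, the additions to ar_fastapi_util.py replace the old per-feature helpers with a single create_fastapi_app() entry point plus pluggable exception handling and startup events. A short usage sketch based only on the signatures visible above; the title and database URL are placeholders:

# Illustrative sketch; values marked as placeholders are not part of the diff.
from arpakitlib.ar_fastapi_util import (
    create_fastapi_app, create_handle_exception, create_handle_exception_creating_story_log,
    InitSqlalchemyDBStartupAPIEvent
)
from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB

sqlalchemy_db = SQLAlchemyDB(db_url="postgresql://user:password@localhost:5432/app")  # placeholder URL

app = create_fastapi_app(
    title="My API",  # placeholder
    # create_handle_exception() builds the error handler; the story-log hook runs before the response
    handle_exception_=create_handle_exception(
        funcs_before_response=[create_handle_exception_creating_story_log(sqlalchemy_db=sqlalchemy_db)]
    ),
    startup_api_events=[InitSqlalchemyDBStartupAPIEvent(sqlalchemy_db=sqlalchemy_db)],
)
# The returned app already carries CORS, /docs and /redoc (served from the bundled static files),
# and the /healthcheck and /arpakitlib routes added by add_needed_api_router_to_app().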
arpakitlib/ar_need_type_util.py
CHANGED
@@ -3,12 +3,12 @@
 import json
 from typing import Any

-from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_enumeration_util import Enumeration

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"


-class NeedTypes(EasyEnumeration):
+class NeedTypes(Enumeration):
     str_ = "str"
     int_ = "int"
     bool_ = "bool"
arpakitlib/{ar_operation_util.py → ar_operation_execution_util.py}
CHANGED
@@ -4,100 +4,29 @@ from __future__ import annotations

 import logging
 import traceback
-from datetime import
+from datetime import timedelta
 from typing import Any

-from sqlalchemy import
-from sqlalchemy.
-from sqlalchemy.orm import Mapped, mapped_column, Session
+from sqlalchemy import asc
+from sqlalchemy.orm import Session

 from arpakitlib.ar_base_worker import BaseWorker
 from arpakitlib.ar_datetime_util import now_utc_dt
 from arpakitlib.ar_dict_util import combine_dicts
-from arpakitlib.
-from arpakitlib.
-from arpakitlib.ar_fastapi_util import BaseAPISimpleSO
-from arpakitlib.ar_sqlalchemy_model_util import SimpleDBM
-from arpakitlib.ar_story_log_util import StoryLogDBM
+from arpakitlib.ar_sqlalchemy_model_util import OperationDBM, StoryLogDBM
+from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"

 _logger = logging.getLogger(__name__)


-class OperationDBM(SimpleDBM):
-    __tablename__ = "operation"
-
-    class Statuses(EasyEnumeration):
-        waiting_for_execution = "waiting_for_execution"
-        executing = "executing"
-        executed_without_error = "executed_without_error"
-        executed_with_error = "executed_with_error"
-
-    class Types(EasyEnumeration):
-        healthcheck_ = "healthcheck"
-        raise_fake_exception = "raise_fake_exception"
-
-    status: Mapped[str] = mapped_column(
-        TEXT, index=True, insert_default=Statuses.waiting_for_execution,
-        server_default=Statuses.waiting_for_execution, nullable=False
-    )
-    type: Mapped[str] = mapped_column(
-        TEXT, index=True, insert_default=Types.healthcheck_, nullable=False
-    )
-    execution_start_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
-    execution_finish_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
-    input_data: Mapped[dict[str, Any]] = mapped_column(
-        JSONB,
-        insert_default={},
-        server_default="{}",
-        nullable=False
-    )
-    output_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
-    error_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
-
-    def raise_if_executed_with_error(self):
-        if self.status == self.Statuses.executed_with_error:
-            raise Exception(
-                f"Operation (id={self.id}, type={self.type}) executed with error, error_data={self.error_data}"
-            )
-
-    def raise_if_error_data(self):
-        if self.status == self.Statuses.executed_with_error:
-            raise Exception(
-                f"Operation (id={self.id}, type={self.type}) has error_data, error_data={self.error_data}"
-            )
-
-    @property
-    def duration(self) -> timedelta | None:
-        if self.execution_start_dt is None or self.execution_finish_dt is None:
-            return None
-        return self.execution_finish_dt - self.execution_start_dt
-
-    @property
-    def duration_total_seconds(self) -> float | None:
-        if self.duration is None:
-            return None
-        return self.duration.total_seconds()
-
-
-class OperationSO(BaseAPISimpleSO):
-    execution_start_dt: datetime | None
-    execution_finish_dt: datetime | None
-    status: str
-    type: str
-    input_data: dict[str, Any]
-    output_data: dict[str, Any]
-    error_data: dict[str, Any]
-    duration_total_seconds: float | None
-
-
 def get_operation_for_execution(
         *,
-
+        sqlalchemy_db: SQLAlchemyDB,
         filter_operation_type: str | None = None
 ) -> OperationDBM | None:
-    with
+    with sqlalchemy_db.new_session() as session:
         query = (
             session
             .query(OperationDBM)
@@ -114,32 +43,30 @@ def get_operation_by_id(
         *,
         session: Session,
         filter_operation_id: int,
-
+        raise_if_not_found: bool = False
 ) -> OperationDBM | None:
     query = (
         session
         .query(OperationDBM)
         .filter(OperationDBM.id == filter_operation_id)
     )
-    if
+    if raise_if_not_found:
         return query.one()
     else:
         return query.one_or_none()


 class BaseOperationExecutor:
-    def __init__(self, *,
+    def __init__(self, *, sqlalchemy_db: SQLAlchemyDB):
         self._logger = logging.getLogger(self.__class__.__name__)
-        self.
+        self.sql_alchemy_db = sqlalchemy_db

     async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
         if operation_dbm.type == OperationDBM.Types.healthcheck_:
             self._logger.info("healthcheck")
-        elif operation_dbm.type == OperationDBM.Types.
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception_:
             self._logger.info("raise_fake_exception")
             raise Exception("raise_fake_exception")
-        else:
-            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
         return operation_dbm

     async def async_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
@@ -149,9 +76,9 @@ class BaseOperationExecutor:
             f", operation_dbm.type={operation_dbm.type}"
         )

-        with self.
+        with self.sql_alchemy_db.new_session() as session:
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id,
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
             )
             operation_dbm.execution_start_dt = now_utc_dt()
             operation_dbm.status = OperationDBM.Statuses.executing
@@ -168,9 +95,10 @@ class BaseOperationExecutor:
             exception = exception_
             traceback_str = traceback.format_exc()

-        with self.
+        with self.sql_alchemy_db.new_session() as session:
+
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id,
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
             )
             operation_dbm.execution_finish_dt = now_utc_dt()
             if exception:
@@ -182,7 +110,21 @@ class BaseOperationExecutor:
             else:
                 operation_dbm.status = OperationDBM.Statuses.executed_without_error
             session.commit()
+
+            story_log_dbm = StoryLogDBM(
+                level=StoryLogDBM.Levels.error,
+                title="Error in async_execute_operation",
+                data={
+                    "operation_id": operation_dbm.id,
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                }
+            )
+            session.add(story_log_dbm)
+            session.commit()
+
             session.refresh(operation_dbm)
+            session.refresh(story_log_dbm)

         self._logger.info(
             f"finish async_safe_execute_operation"
@@ -195,11 +137,9 @@ class BaseOperationExecutor:
     def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
         if operation_dbm.type == OperationDBM.Types.healthcheck_:
             self._logger.info("healthcheck")
-        elif operation_dbm.type == OperationDBM.Types.
+        elif operation_dbm.type == OperationDBM.Types.raise_fake_exception_:
             self._logger.info("raise_fake_exception")
             raise Exception("raise_fake_exception")
-        else:
-            raise ValueError(f"unknown operation.type = {operation_dbm.type}")
         return operation_dbm

     def sync_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
@@ -209,9 +149,9 @@ class BaseOperationExecutor:
             f", operation_dbm.type={operation_dbm.type}"
         )

-        with self.
+        with self.sql_alchemy_db.new_session() as session:
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id,
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
             )
             operation_dbm.execution_start_dt = now_utc_dt()
             operation_dbm.status = OperationDBM.Statuses.executing
@@ -228,10 +168,10 @@ class BaseOperationExecutor:
             exception = exception_
             traceback_str = traceback.format_exc()

-        with self.
+        with self.sql_alchemy_db.new_session() as session:

             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id,
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
             )
             operation_dbm.execution_finish_dt = now_utc_dt()
             if exception:
@@ -249,7 +189,7 @@ class BaseOperationExecutor:
                 title="Error in sync_execute_operation",
                 data={
                     "operation_id": operation_dbm.id,
-                    "
+                    "exception_str": str(exception),
                     "traceback_str": traceback_str
                 }
             )
@@ -274,27 +214,29 @@ class ExecuteOperationWorker(BaseWorker):
     def __init__(
             self,
             *,
-
-            operation_executor: BaseOperationExecutor,
-
+            sqlalchemy_db: SQLAlchemyDB,
+            operation_executor: BaseOperationExecutor | None = None,
+            filter_operation_type: str | None = None
     ):
         super().__init__()
-        self.
+        self.sqlalchemy_db = sqlalchemy_db
         self.timeout_after_run = timedelta(seconds=0.1).total_seconds()
         self.timeout_after_err_in_run = timedelta(seconds=1).total_seconds()
+        if operation_executor is None:
+            operation_executor = BaseOperationExecutor(sqlalchemy_db=sqlalchemy_db)
         self.operation_executor = operation_executor
-        self.
+        self.filter_operation_type = filter_operation_type

     async def async_on_startup(self):
-        self.
+        self.sqlalchemy_db.init()

     async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
         return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm)

     async def async_run(self):
         operation_dbm: OperationDBM | None = get_operation_for_execution(
-
-            filter_operation_type=self.
+            sqlalchemy_db=self.sqlalchemy_db,
+            filter_operation_type=self.filter_operation_type
         )

         if not operation_dbm:
@@ -306,15 +248,15 @@ class ExecuteOperationWorker(BaseWorker):
         self._logger.exception(exception)

     def sync_on_startup(self):
-        self.
+        self.sqlalchemy_db.init()

     def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
         return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm)

     def sync_run(self):
         operation_dbm: OperationDBM | None = get_operation_for_execution(
-
-            filter_operation_type=self.
+            sqlalchemy_db=self.sqlalchemy_db,
+            filter_operation_type=self.filter_operation_type
         )

         if not operation_dbm:
@@ -324,7 +266,3 @@ class ExecuteOperationWorker(BaseWorker):

     def sync_run_on_error(self, exception: BaseException, kwargs: dict[str, Any]):
         self._logger.exception(exception)
-
-
-def import_ar_operation_execution_util():
-    _logger.info("import_ar_operation_execution_util")
arpakitlib/ar_sqlalchemy_model_util.py
CHANGED
@@ -1,13 +1,15 @@
 # arpakit

-from datetime import datetime
+from datetime import datetime, timedelta
 from typing import Any
 from uuid import uuid4

 from sqlalchemy import inspect, INTEGER, TEXT, TIMESTAMP
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

 from arpakitlib.ar_datetime_util import now_utc_dt
+from arpakitlib.ar_enumeration_util import Enumeration
 from arpakitlib.ar_json_util import safely_transfer_to_json_str

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"
@@ -45,3 +47,76 @@ class SimpleDBM(BaseDBM):

     def __repr__(self):
         return f"{self.__class__.__name__.removesuffix('DBM')} (id={self.id})"
+
+
+class StoryLogDBM(SimpleDBM):
+    __tablename__ = "story_log"
+
+    class Levels(Enumeration):
+        info = "info"
+        warning = "warning"
+        error = "error"
+
+    level: Mapped[str] = mapped_column(
+        TEXT, insert_default=Levels.info, server_default=Levels.info, index=True, nullable=False
+    )
+    title: Mapped[str | None] = mapped_column(TEXT, index=True, default=None, nullable=True)
+    data: Mapped[dict[str, Any]] = mapped_column(
+        JSONB, insert_default={}, server_default="{}", index=True, nullable=False
+    )
+
+
+class OperationDBM(SimpleDBM):
+    __tablename__ = "operation"
+
+    class Statuses(Enumeration):
+        waiting_for_execution = "waiting_for_execution"
+        executing = "executing"
+        executed_without_error = "executed_without_error"
+        executed_with_error = "executed_with_error"
+
+    class Types(Enumeration):
+        healthcheck_ = "healthcheck"
+        raise_fake_exception_ = "raise_fake_exception"
+
+    status: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Statuses.waiting_for_execution,
+        server_default=Statuses.waiting_for_execution, nullable=False
+    )
+    type: Mapped[str] = mapped_column(
+        TEXT, index=True, insert_default=Types.healthcheck_, nullable=False
+    )
+    execution_start_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    execution_finish_dt: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
+    input_data: Mapped[dict[str, Any]] = mapped_column(
+        JSONB,
+        insert_default={},
+        server_default="{}",
+        nullable=False
+    )
+    output_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+    error_data: Mapped[dict[str, Any]] = mapped_column(JSONB, insert_default={}, server_default="{}", nullable=False)
+
+    def raise_if_executed_with_error(self):
+        if self.status == self.Statuses.executed_with_error:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) executed with error, error_data={self.error_data}"
+            )
+
+    def raise_if_error_data(self):
+        if self.error_data:
+            raise Exception(
+                f"Operation (id={self.id}, type={self.type}) has error_data, error_data={self.error_data}"
+            )
+
+    @property
+    def duration(self) -> timedelta | None:
+        if self.execution_start_dt is None or self.execution_finish_dt is None:
+            return None
+        return self.execution_finish_dt - self.execution_start_dt
+
+    @property
+    def duration_total_seconds(self) -> float | None:
+        if self.duration is None:
+            return None
+        return self.duration.total_seconds()
arpakitlib/{ar_easy_sqlalchemy_util.py → ar_sqlalchemy_util.py}
CHANGED
@@ -12,11 +12,11 @@ from sqlalchemy.orm.session import Session
 _ARPAKIT_LIB_MODULE_VERSION = "3.0"


-class EasySQLAlchemyDB:
+class SQLAlchemyDB:
     def __init__(
             self,
             *,
-            db_url: str,
+            db_url: str = "postgresql://arpakitlib:arpakitlib@localhost:50629/arpakitlib",
             echo: bool = False,
             need_include_operation_dbm: bool = False,
             need_include_story_dbm: bool = False
@@ -37,16 +37,6 @@ class EasySQLAlchemyDB:
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.func_new_session_counter = 0

-    def include_operation_dbm(self):
-        if self.need_include_operation_dbm:
-            from arpakitlib.ar_operation_util import import_ar_operation_execution_util
-            import_ar_operation_execution_util()
-
-    def include_story_dbm(self):
-        if self.need_include_story_dbm or self.need_include_operation_dbm:
-            from arpakitlib.ar_story_log_util import import_ar_story_util
-            import_ar_story_util()
-
     def drop_celery_tables(self):
         with self.engine.connect() as connection:
             connection.execute(text("DROP TABLE IF EXISTS celery_tasksetmeta CASCADE;"))
@@ -62,8 +52,6 @@ class EasySQLAlchemyDB:
         self._logger.info("celery tables data were removed")

     def init(self):
-        self.include_operation_dbm()
-        self.include_story_dbm()
         from arpakitlib.ar_sqlalchemy_model_util import BaseDBM
         BaseDBM.metadata.create_all(bind=self.engine, checkfirst=True)
         self._logger.info("db was inited")
@@ -74,8 +62,6 @@ class EasySQLAlchemyDB:
         self._logger.info("db was dropped")

     def reinit(self):
-        self.include_operation_dbm()
-        self.include_story_dbm()
         from arpakitlib.ar_sqlalchemy_model_util import BaseDBM
         BaseDBM.metadata.drop_all(bind=self.engine, checkfirst=True)
         BaseDBM.metadata.create_all(bind=self.engine, checkfirst=True)
arpakitlib/ar_type_util.py
CHANGED
@@ -1,13 +1,10 @@
 # arpakit
-
+import inspect
 from typing import Optional, Any

 _ARPAKIT_LIB_MODULE_VERSION = "3.0"


-# ---
-
-
 class NotSet:
     pass

@@ -44,9 +41,6 @@ def make_none_if_not_set(v: Any) -> Any:
     return v


-# ---
-
-
 def raise_for_type(comparable, need_type, comment_for_error: Optional[str] = None):
     if comparable is need_type:
         return
@@ -73,7 +67,9 @@ def raise_for_types(comparable, need_types, comment_for_error: Optional[str] = N
     raise TypeError(err)


-
+def raise_if_not_async_func(func: Any):
+    if not inspect.iscoroutinefunction(func):
+        raise TypeError(f"The provided function '{func.__name__}' is not an async function")


 def __example():
arpakitlib/ar_yookassa_api_client.py
CHANGED
@@ -12,7 +12,7 @@ import aiohttp
 import requests

 from arpakitlib.ar_dict_util import combine_dicts
-from arpakitlib.ar_enumeration import EasyEnumeration
+from arpakitlib.ar_enumeration_util import Enumeration
 from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep
 from arpakitlib.ar_type_util import raise_for_type

@@ -23,7 +23,7 @@ https://yookassa.ru/developers/api
 """


-class YookassaPaymentStatuses(EasyEnumeration):
+class YookassaPaymentStatuses(Enumeration):
     pending = "pending"
     waiting_for_capture = "waiting_for_capture"
     succeeded = "succeeded"
{arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/RECORD
CHANGED
@@ -6,7 +6,7 @@ arpakitlib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arpakitlib/ar_additional_model_util.py,sha256=Eq7pvVUgO2L3gYBocm-pP9TrztTb8VNCp7LdRMml-F8,237
 arpakitlib/ar_aiogram_util.py,sha256=IA48PRMIJrPLMhFA0Eb2vQpLcqm98o9tKfC3pDy8qsI,12022
 arpakitlib/ar_arpakit_lib_module_util.py,sha256=YzobxRG8-QJ1L5r_8wBdL668CwXoQRIM1Cpec1o2WBc,5447
-arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=
+arpakitlib/ar_arpakit_schedule_uust_api_client.py,sha256=SYWWQDohPnw0qpBIu2hEvGZRVdaI4NUUQdEjnMnseo4,18237
 arpakitlib/ar_arpakitlib_info.py,sha256=cvgrLnEznmYkCAg1adbY46ATjD6GJd-Yk8PTgOPjpKM,248
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
 arpakitlib/ar_base_worker.py,sha256=Y6yRFp1nhhTyv-TeGDao-3q4ICVVJ2zVsKdIlHdiGSI,2897
@@ -14,9 +14,8 @@ arpakitlib/ar_cache_file.py,sha256=m73_vU6bMjXsIurSPO9VCLcHsiHk8ITFS0LNjfI_8Uw,3
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419
 arpakitlib/ar_dream_ai_api_client.py,sha256=hDPL9wbG4MjIuhn2ed6qepueogANIkt-NddhhiPUv0Y,4029
-arpakitlib/ar_easy_sqlalchemy_util.py,sha256=tHVL5bimXzY7u7TXEH--W7LSebSpN0Y5WN3iWiD8OR8,4180
 arpakitlib/ar_encrypt_and_decrypt_util.py,sha256=GhWnp7HHkbhwFVVCzO1H07m-5gryr4yjWsXjOaNQm1Y,520
-arpakitlib/
+arpakitlib/ar_enumeration_util.py,sha256=0DN46uyI0Gu9JPDgso3XPbnre7hZZefYTZwmmE1iYH4,2250
 arpakitlib/ar_fastapi_static/redoc/redoc.standalone.js,sha256=WCuodUNv1qVh0oW5fjnJDwb5AwOue73jKHdI9z8iGKU,909365
 arpakitlib/ar_fastapi_static/swagger-ui/favicon-16x16.png,sha256=ryStYE3Xs7zaj5dauXMHX0ovcKQIeUShL474tjo-B8I,665
 arpakitlib/ar_fastapi_static/swagger-ui/favicon-32x32.png,sha256=PtYS9B4FDKXnAAytbxy-fn2jn2X8qZwC6Z5lkQVuWDc,628
@@ -36,7 +35,7 @@ arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css,sha256=jzPZlgJTFwSdSphk9C
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map,sha256=5wq8eXMLU6Zxb45orZPL1zAsBFJReFw6GjYqGpUX3hg,262650
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js,sha256=ffrLZHHEQ_g84A-ul3yWa10Kk09waOAxHcQXPuZuavg,339292
 arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map,sha256=9UhIW7MqCOZPAz1Sl1IKfZUuhWU0p-LJqrnjjJD9Xhc,1159454
-arpakitlib/ar_fastapi_util.py,sha256=
+arpakitlib/ar_fastapi_util.py,sha256=7YUx9WL4hc_TAiNjFDqxdcpAutvv0UtI5IEICbBZdVQ,16847
 arpakitlib/ar_file_storage_in_dir.py,sha256=D3e3rGuHoI6xqAA5mVvEpVVpOWY1jyjNsjj2UhyHRbE,3674
 arpakitlib/ar_generate_env_example.py,sha256=WseNlk_So6mTVQ2amMuigWYV4ZVmd940POvXtodoYj0,325
 arpakitlib/ar_hash_util.py,sha256=Iqy6KBAOLBQMFLWv676boI5sV7atT2B-fb7aCdHOmIQ,340
@@ -49,23 +48,23 @@ arpakitlib/ar_list_of_dicts_to_xlsx.py,sha256=MyjEl4Jl4beLVZqLVQMMv0-XDtBD3Xh4Z_
 arpakitlib/ar_list_util.py,sha256=2woOAHAU8oTIiVjZ8GLnx15odEaoQUq3Q0JPxlufFF0,457
 arpakitlib/ar_logging_util.py,sha256=c5wX2FLqCzb4aLckLVhIJ7go52rJQ4GN9dIkJ6KMc3o,1500
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
-arpakitlib/ar_need_type_util.py,sha256=
-arpakitlib/ar_openai_util.py,sha256=
-arpakitlib/
+arpakitlib/ar_need_type_util.py,sha256=n2kBETxzOSVhSVoy7qUtHtuQzgrrxzgi1_iVQimPb9o,1615
+arpakitlib/ar_openai_util.py,sha256=dHUbfg1sVVCjsNl_fra3iCMEz1bR-Hk9fE-DdYbu7Wc,1215
+arpakitlib/ar_operation_execution_util.py,sha256=Vxuu6MoYsmR2GS5UlW_fIDwNf62Gsw0UtjeSnh4Evi8,9928
 arpakitlib/ar_parse_command.py,sha256=qpr2OwG3Bf7DFiL9S3iWgtbvtE80RSC35E5zFJvjG1I,2714
 arpakitlib/ar_postgresql_util.py,sha256=SAHEmAyMkZe516uk2gS830v_Wn2kRUZUYNcTNwmgXJk,1160
 arpakitlib/ar_run_cmd.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072
 arpakitlib/ar_schedule_uust_api_client.py,sha256=1JGUy6rrjAXdWjeAqiAOQlCAEV3xuc5FUDWfXODKB-A,5770
 arpakitlib/ar_sleep_util.py,sha256=9ZN4Qo4eZ_q3hjM7vNBQjFRcH-9-sqv3QLSjnxVJE90,1405
-arpakitlib/ar_sqlalchemy_model_util.py,sha256=
-arpakitlib/
-arpakitlib/
+arpakitlib/ar_sqlalchemy_model_util.py,sha256=tKz6n9zuebo2J9yTr6IQUHoXZ9KF340MAo4UiDXaX_4,4251
+arpakitlib/ar_sqlalchemy_util.py,sha256=VH23Xld3k5wITkwR0JcaBkO77gmxWn9jlK4eyxKuz_0,3665
+arpakitlib/ar_ssh_util.py,sha256=jlnss4V4pziBN1rBzoK_lDiWm6nMOqGXfa6NFJSKH-Y,6796
 arpakitlib/ar_str_util.py,sha256=xSEzmsDvRiZVaxyqFFjcgzpphktCbXg2FHcvsd1DYpA,1885
-arpakitlib/ar_type_util.py,sha256
-arpakitlib/ar_yookassa_api_client.py,sha256=
+arpakitlib/ar_type_util.py,sha256=I6jbTz7_dxR1lkhz1JfUb5ZyLLdXVhG_-hzjdgT6N6s,1932
+arpakitlib/ar_yookassa_api_client.py,sha256=WZoTd10d2lOmT0lCdlQFTB0LEVARubqLEiFtPNQLvi8,6477
 arpakitlib/ar_zabbix_util.py,sha256=MTQbmS0QpNCKNOGONNQHf6j7KTZsKGlIbd5rCH0R0WI,6313
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
-arpakitlib-1.5.
+arpakitlib-1.5.37.dist-info/LICENSE,sha256=1jqWIkbnMxDfs_i0SXP5qbV6PHjBr1g8506oW7uPjfg,11347
+arpakitlib-1.5.37.dist-info/METADATA,sha256=mYUQJqE6gMICk_29Mn7VcJJcqQtkQMm-DhGce-Dd4YU,2330
+arpakitlib-1.5.37.dist-info/NOTICE,sha256=wHwmiq3wExfFfgMsE5U5TOBP9_l72ocIG82KurEels0,43
+arpakitlib-1.5.37.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.5.37.dist-info/RECORD,,
arpakitlib/ar_story_log_util.py
DELETED
@@ -1,40 +0,0 @@
-import logging
-from typing import Any
-
-from sqlalchemy import TEXT
-from sqlalchemy.dialects.postgresql import JSONB
-from sqlalchemy.orm import Mapped, mapped_column
-
-from arpakitlib.ar_enumeration import EasyEnumeration
-from arpakitlib.ar_fastapi_util import BaseAPISimpleSO
-from arpakitlib.ar_sqlalchemy_model_util import SimpleDBM
-
-_ARPAKIT_LIB_MODULE_VERSION = "3.0"
-_logger = logging.getLogger(__name__)
-
-
-class StoryLogDBM(SimpleDBM):
-    __tablename__ = "story_log"
-
-    class Levels(EasyEnumeration):
-        info = "info"
-        warning = "warning"
-        error = "error"
-
-    level: Mapped[str] = mapped_column(
-        TEXT, insert_default=Levels.info, server_default=Levels.info, index=True, nullable=False
-    )
-    title: Mapped[str | None] = mapped_column(TEXT, index=True, default=None, nullable=True)
-    data: Mapped[dict[str, Any]] = mapped_column(
-        JSONB, insert_default={}, server_default="{}", index=True, nullable=False
-    )
-
-
-class StoryLogSO(BaseAPISimpleSO):
-    level: str
-    title: str | None
-    data: dict[str, Any]
-
-
-def import_ar_story_util():
-    _logger.info("import_ar_operation_execution_util")
/arpakitlib/{ar_ssh_runner.py → ar_ssh_util.py}
File without changes
{arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/LICENSE
File without changes
{arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/NOTICE
File without changes
{arpakitlib-1.5.34.dist-info → arpakitlib-1.5.37.dist-info}/WHEEL
File without changes