arpakitlib-1.7.250-py3-none-any.whl → arpakitlib-1.7.252-py3-none-any.whl
This diff compares the contents of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
- arpakitlib/_arpakit_project_template/alembic/README +1 -0
- arpakitlib/_arpakit_project_template/alembic/env.py +83 -0
- arpakitlib/_arpakit_project_template/alembic/script.py.mako +26 -0
- arpakitlib/_arpakit_project_template/alembic.ini +119 -0
- arpakitlib/_arpakit_project_template/example.env +5 -14
- arpakitlib/_arpakit_project_template/manage/docker_run_postgres.sh +4 -3
- arpakitlib/_arpakit_project_template/manage/docker_start_postgres.sh +2 -1
- arpakitlib/_arpakit_project_template/manage/docker_stop_postgres.sh +2 -1
- arpakitlib/_arpakit_project_template/manage/git_set_arpakit_company_origin.sh +4 -2
- arpakitlib/_arpakit_project_template/manage/git_set_arpakit_origin.sh +4 -2
- arpakitlib/_arpakit_project_template/src/additional_model/additional_model.py +0 -7
- arpakitlib/_arpakit_project_template/src/api/auth.py +52 -0
- arpakitlib/_arpakit_project_template/src/api/create_api_app.py +21 -14
- arpakitlib/_arpakit_project_template/src/api/create_handle_exception_.py +13 -13
- arpakitlib/_arpakit_project_template/src/api/event.py +24 -2
- arpakitlib/_arpakit_project_template/src/api/transmitted_api_data.py +3 -11
- arpakitlib/_arpakit_project_template/src/core/settings.py +3 -111
- arpakitlib/_arpakit_project_template/src/db/util.py +1 -3
- arpakitlib/_arpakit_project_template/src/just_script/__init__.py +0 -0
- arpakitlib/_arpakit_project_template/src/just_script/example.py +16 -0
- arpakitlib/ar_arpakit_project_template_util.py +8 -18
- arpakitlib/ar_arpakit_schedule_uust_api_client_util.py +5 -4
- arpakitlib/ar_arpakitlib_cli_util.py +10 -22
- arpakitlib/ar_class_util.py +0 -1
- arpakitlib/ar_cryptomus_api_client_util.py +21 -0
- arpakitlib/ar_fastapi_util.py +101 -70
- arpakitlib/ar_schedule_uust_api_client_util.py +24 -24
- arpakitlib/ar_settings_util.py +166 -14
- arpakitlib/ar_sqlalchemy_model_util.py +1 -1
- arpakitlib/ar_steam_payment_api_client_util.py +21 -0
- arpakitlib/ar_wata_api_client.py +21 -0
- {arpakitlib-1.7.250.dist-info → arpakitlib-1.7.252.dist-info}/METADATA +1 -1
- {arpakitlib-1.7.250.dist-info → arpakitlib-1.7.252.dist-info}/RECORD +37 -28
- /arpakitlib/_arpakit_project_template/src/core/{_check_settings.py → _show_settings.py} +0 -0
- {arpakitlib-1.7.250.dist-info → arpakitlib-1.7.252.dist-info}/LICENSE +0 -0
- {arpakitlib-1.7.250.dist-info → arpakitlib-1.7.252.dist-info}/WHEEL +0 -0
- {arpakitlib-1.7.250.dist-info → arpakitlib-1.7.252.dist-info}/entry_points.txt +0 -0

--- /dev/null
+++ b/arpakitlib/_arpakit_project_template/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.

--- /dev/null
+++ b/arpakitlib/_arpakit_project_template/alembic/env.py
@@ -0,0 +1,83 @@
+from logging.config import fileConfig
+
+from alembic import context
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from src.core.settings import get_cached_settings
+from src.db.util import get_base_dbm
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+config.set_section_option("alembic", "sqlalchemy.url", get_cached_settings().sync_sql_db_url)
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = get_base_dbm().metadata
+
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()

--- /dev/null
+++ b/arpakitlib/_arpakit_project_template/alembic/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}

--- /dev/null
+++ b/arpakitlib/_arpakit_project_template/alembic.ini
@@ -0,0 +1,119 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# Use forward slashes (/) also on windows to provide an os agnostic path
+script_location = alembic
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+# version_path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+version_path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S

--- a/arpakitlib/_arpakit_project_template/example.env
+++ b/arpakitlib/_arpakit_project_template/example.env
@@ -4,29 +4,20 @@
 # sql_db_password=
 # sql_db_port=
 # sql_db_database=
-#
+# sync_sql_db_url=
 # async_sql_db_url=
 # sql_db_echo=
-# api_init_sql_db_at_start=
-# api_title=
-# api_description=
-# api_logging_func_before_response=
-# api_story_log_func_before_response=
-# api_start_operation_executor_worker=
-# api_start_scheduled_operation_creator_worker=
 # api_port=
+# api_init_sql_db_at_start=
+# api_logging__api_func_before_in_handle_exception=
+# api_story_log__api_func_before_in_handle_exception=
 # api_correct_api_key=
 # api_correct_token=
 # api_enable_admin1=
-#
+# admin1_secret_key=
 # var_dirpath=
-# log_filename=
 # log_filepath=
-# cache_dirname=
 # cache_dirpath=
-# media_dirname=
 # media_dirpath=
-# dump_dirname=
 # dump_dirpath=
 # local_timezone=
-# admin1_secret_key=

--- a/arpakitlib/_arpakit_project_template/manage/docker_run_postgres.sh
+++ b/arpakitlib/_arpakit_project_template/manage/docker_run_postgres.sh
@@ -1,4 +1,5 @@
 cd ..
-
-docker
-docker
+source .env
+docker rm ${PROJECT_NAME}_postgres
+docker run --name ${PROJECT_NAME}_postgres -d -p ${SQL_DB_PORT}:5432 -e POSTGRES_USER=${SQL_DB_USER} -e POSTGRES_PASSWORD=${SQL_DB_PASSWORD} -e POSTGRES_DB=${SQL_DB_DATABASE} postgres:16 -c max_connections=100
+docker start ${PROJECT_NAME}_postgres

--- a/arpakitlib/_arpakit_project_template/manage/git_set_arpakit_company_origin.sh
+++ b/arpakitlib/_arpakit_project_template/manage/git_set_arpakit_company_origin.sh
@@ -1,7 +1,9 @@
 cd ..
 
+source .env
+
 git remote remove arpakit_company_github_1
-git remote add arpakit_company_github_1 git@github.com:ARPAKIT-Company
+git remote add arpakit_company_github_1 git@github.com:ARPAKIT-Company/${PROJECT_NAME}.git
 
 git remote remove arpakit_company_gitlab_1
-git remote add arpakit_company_gitlab_1 git@gitlab.com:ARPAKIT-Company
+git remote add arpakit_company_gitlab_1 git@gitlab.com:ARPAKIT-Company/${PROJECT_NAME}.git

--- a/arpakitlib/_arpakit_project_template/manage/git_set_arpakit_origin.sh
+++ b/arpakitlib/_arpakit_project_template/manage/git_set_arpakit_origin.sh
@@ -1,7 +1,9 @@
 cd ..
 
+source .env
+
 git remote remove arpakit_github_1
-git remote add arpakit_github_1 git@github.com:arpakit
+git remote add arpakit_github_1 git@github.com:arpakit/${PROJECT_NAME}.git
 
 git remote remove arpakit_gitlab_1
-git remote add arpakit_gitlab_1 git@gitlab.com:arpakit
+git remote add arpakit_gitlab_1 git@gitlab.com:arpakit/${PROJECT_NAME}.git

--- a/arpakitlib/_arpakit_project_template/src/api/auth.py
+++ b/arpakitlib/_arpakit_project_template/src/api/auth.py
@@ -1 +1,53 @@
 # ...
+from typing import Callable
+
+import starlette.requests
+
+from arpakitlib.ar_fastapi_util import BaseAPIAuthData
+from src.api.transmitted_api_data import TransmittedAPIData
+
+
+def correct_api_key_from_settings__validate_api_key_func(
+        *args, **kwargs
+) -> Callable:
+    async def func(
+            *,
+            api_key_string: str | None,
+            token_string: str | None,
+            base_api_auth_data: BaseAPIAuthData,
+            transmitted_api_data: TransmittedAPIData,
+            request: starlette.requests.Request,
+            **kwargs_
+    ):
+        if transmitted_api_data.settings.api_correct_api_key is None:
+            return True
+        if not api_key_string:
+            return False
+        if api_key_string.strip() != transmitted_api_data.settings.api_correct_api_key.strip():
+            return False
+        return True
+
+    return func
+
+
+def correct_token_from_settings__validate_api_key_func(
+        *args, **kwargs
+) -> Callable:
+    async def func(
+            *,
+            api_key_string: str | None,
+            token_string: str | None,
+            base_api_auth_data: BaseAPIAuthData,
+            transmitted_api_data: TransmittedAPIData,
+            request: starlette.requests.Request,
+            **kwargs_
+    ):
+        if transmitted_api_data.settings.api_correct_token is None:
+            return True
+        if not token_string:
+            return False
+        if token_string.strip() != transmitted_api_data.settings.api_correct_token.strip():
+            return False
+        return True
+
+    return func
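
The factories above return async validators that arpakitlib's FastAPI wiring is expected to call with keyword arguments. A hypothetical, self-contained sketch of exercising one directly; the SimpleNamespace stand-ins replace real TransmittedAPIData and request objects and are not part of the template:

```python
# Hypothetical usage sketch: call the validator returned by the factory.
import asyncio
from types import SimpleNamespace

from src.api.auth import correct_api_key_from_settings__validate_api_key_func

validate_api_key = correct_api_key_from_settings__validate_api_key_func()

# Stand-in for TransmittedAPIData: only .settings.api_correct_api_key is read.
fake_transmitted = SimpleNamespace(
    settings=SimpleNamespace(api_correct_api_key="my-key")
)

allowed = asyncio.run(validate_api_key(
    api_key_string="my-key",        # matches the configured key -> True
    token_string=None,
    base_api_auth_data=None,        # not inspected by this validator
    transmitted_api_data=fake_transmitted,
    request=None,                   # not inspected by this validator
))
print(allowed)  # True
```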

--- a/arpakitlib/_arpakit_project_template/src/api/create_api_app.py
+++ b/arpakitlib/_arpakit_project_template/src/api/create_api_app.py
@@ -2,7 +2,7 @@ from fastapi import FastAPI
 
 from arpakitlib.ar_fastapi_util import create_fastapi_app
 from src.api.create_handle_exception_ import create_handle_exception_
-from src.api.event import
+from src.api.event import get_startup_api_events, get_shutdown_api_events
 from src.api.router.main_router import main_api_router
 from src.api.transmitted_api_data import TransmittedAPIData
 from src.core.const import ProjectPaths
@@ -19,28 +19,35 @@ def create_api_app() -> FastAPI:
 
     sqlalchemy_db = get_cached_sqlalchemy_db() if settings.sql_db_url is not None else None
 
+    media_file_storage_in_dir = (
+        get_cached_media_file_storage_in_dir() if settings.media_dirpath is not None else None
+    )
+
+    cache_file_storage_in_dir = (
+        get_cached_cache_file_storage_in_dir() if settings.cache_dirpath is not None else None
+    )
+
+    dump_file_storage_in_dir = (
+        get_cached_dump_file_storage_in_dir() if settings.dump_dirpath is not None else None
+    )
+
     transmitted_api_data = TransmittedAPIData(
         settings=settings,
         sqlalchemy_db=sqlalchemy_db,
-        media_file_storage_in_dir=
-        cache_file_storage_in_dir=
-        dump_file_storage_in_dir=
+        media_file_storage_in_dir=media_file_storage_in_dir,
+        cache_file_storage_in_dir=cache_file_storage_in_dir,
+        dump_file_storage_in_dir=dump_file_storage_in_dir
     )
 
-
-
-    startup_api_events.append(StartupAPIEvent(transmitted_api_data=transmitted_api_data))
-
-    shutdown_api_events = []
+    handle_exception_ = create_handle_exception_(transmitted_api_data=transmitted_api_data)
 
-
+    startup_api_events = get_startup_api_events(transmitted_api_data=transmitted_api_data)
 
-
+    shutdown_api_events = get_shutdown_api_events(transmitted_api_data=transmitted_api_data)
 
     api_app = create_fastapi_app(
-        title=settings.
-        description=settings.
-        log_filepath=settings.log_filepath,
+        title=settings.project_name.strip(),
+        description=settings.project_name.strip(),
         handle_exception_=handle_exception_,
         startup_api_events=startup_api_events,
         shutdown_api_events=shutdown_api_events,

--- a/arpakitlib/_arpakit_project_template/src/api/create_handle_exception_.py
+++ b/arpakitlib/_arpakit_project_template/src/api/create_handle_exception_.py
@@ -2,18 +2,18 @@ import fastapi.exceptions
 import starlette.exceptions
 import starlette.status
 
-from arpakitlib.ar_fastapi_util import create_handle_exception,
-
+from arpakitlib.ar_fastapi_util import create_handle_exception, story_log__api_func_before_in_handle_exception, \
+    logging__api_func_before_in_handle_exception
 from src.api.const import APIErrorCodes
 from src.api.transmitted_api_data import TransmittedAPIData
 
 
-def create_handle_exception_(*, transmitted_api_data: TransmittedAPIData):
-
+def create_handle_exception_(*, transmitted_api_data: TransmittedAPIData, **kwargs):
+    funcs_before = []
 
-    if transmitted_api_data.settings.
-
-
+    if transmitted_api_data.settings.api_logging__api_func_before_in_handle_exception:
+        funcs_before.append(
+            logging__api_func_before_in_handle_exception(
                 ignore_api_error_codes=[
                     APIErrorCodes.cannot_authorize,
                     APIErrorCodes.error_in_request,
@@ -31,9 +31,9 @@ def create_handle_exception_(*, transmitted_api_data: TransmittedAPIData):
             )
         )
 
-    if transmitted_api_data.settings.
-
-
+    if transmitted_api_data.settings.api_story_log__api_func_before_in_handle_exception:
+        funcs_before.append(
+            story_log__api_func_before_in_handle_exception(
                 sqlalchemy_db=transmitted_api_data.sqlalchemy_db,
                 ignore_api_error_codes=[
                     APIErrorCodes.cannot_authorize,
@@ -51,9 +51,9 @@ def create_handle_exception_(*, transmitted_api_data: TransmittedAPIData):
             )
         )
 
-
+    async_funcs_after = []
 
     return create_handle_exception(
-
-
+        funcs_before=funcs_before,
+        async_funcs_after=async_funcs_after
     )

--- a/arpakitlib/_arpakit_project_template/src/api/event.py
+++ b/arpakitlib/_arpakit_project_template/src/api/event.py
@@ -8,6 +8,9 @@ from src.operation_execution.operation_executor import OperationExecutor
 from src.operation_execution.scheduled_operations import SCHEDULED_OPERATIONS
 
 
+# STARTUP API EVENTS
+
+
 class StartupAPIEvent(BaseStartupAPIEvent):
     def __init__(self, transmitted_api_data: TransmittedAPIData, **kwargs):
         super().__init__(**kwargs)
@@ -29,7 +32,7 @@ class StartupAPIEvent(BaseStartupAPIEvent):
             raise_for_type(self.transmitted_api_data.sqlalchemy_db, SQLAlchemyDB)
             self.transmitted_api_data.sqlalchemy_db.init()
 
-        if self.transmitted_api_data.settings.api_start_operation_executor_worker:
+        if self.transmitted_api_data.settings.api_start_operation_executor_worker:  # TODO
             raise_for_type(self.transmitted_api_data.sqlalchemy_db, SQLAlchemyDB)
             _ = safe_run_worker_in_background(
                 worker=OperationExecutorWorker(
@@ -40,7 +43,7 @@ class StartupAPIEvent(BaseStartupAPIEvent):
                 mode=SafeRunInBackgroundModes.thread
             )
 
-        if self.transmitted_api_data.settings.api_start_scheduled_operation_creator_worker:
+        if self.transmitted_api_data.settings.api_start_scheduled_operation_creator_worker:  # TODO
             raise_for_type(self.transmitted_api_data.sqlalchemy_db, SQLAlchemyDB)
             _ = safe_run_worker_in_background(
                 worker=ScheduledOperationCreatorWorker(
@@ -53,6 +56,17 @@ class StartupAPIEvent(BaseStartupAPIEvent):
         self._logger.info("finish")
 
 
+def get_startup_api_events(
+        *, transmitted_api_data: TransmittedAPIData, **kwargs
+) -> list[BaseStartupAPIEvent]:
+    res = []
+    res.append(StartupAPIEvent(transmitted_api_data=transmitted_api_data))
+    return res
+
+
+# SHUTDOWN API EVENTS
+
+
 class ShutdownAPIEvent(BaseShutdownAPIEvent):
     def __init__(self, transmitted_api_data: TransmittedAPIData, **kwargs):
         super().__init__(**kwargs)
@@ -61,3 +75,11 @@ class ShutdownAPIEvent(BaseShutdownAPIEvent):
     async def async_on_shutdown(self, *args, **kwargs):
         self._logger.info("start")
         self._logger.info("finish")
+
+
+def get_shutdown_api_events(
+        *, transmitted_api_data: TransmittedAPIData, **kwargs
+) -> list[BaseShutdownAPIEvent]:
+    res = []
+    res.append(ShutdownAPIEvent(transmitted_api_data=transmitted_api_data))
+    return res

--- a/arpakitlib/_arpakit_project_template/src/api/transmitted_api_data.py
+++ b/arpakitlib/_arpakit_project_template/src/api/transmitted_api_data.py
@@ -1,15 +1,7 @@
-from arpakitlib.ar_fastapi_util import
-from arpakitlib.ar_file_storage_in_dir_util import FileStorageInDir
-from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB
+from arpakitlib.ar_fastapi_util import AdvancedTransmittedAPIData
 
 from src.core.settings import Settings
 
 
-class TransmittedAPIData(
-    settings: Settings
-    sqlalchemy_db: SQLAlchemyDB | None = None
-    media_file_storage_in_dir: FileStorageInDir | None = None
-    cache_file_storage_in_dir: FileStorageInDir | None = None
-    dump_file_storage_in_dir: FileStorageInDir | None = None
-
-    # ...
+class TransmittedAPIData(AdvancedTransmittedAPIData):
+    settings: Settings | None = None