arpakitlib 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arpakitlib might be problematic; see the registry's release page for more details.

Files changed (68)
  1. arpakitlib/AUTHOR.md +6 -0
  2. arpakitlib/LICENSE +201 -0
  3. arpakitlib/NOTICE +2 -0
  4. arpakitlib/README.md +6 -0
  5. arpakitlib/__init__.py +0 -0
  6. arpakitlib/ar_additional_model_util.py +8 -0
  7. arpakitlib/ar_aiogram_util.py +363 -0
  8. arpakitlib/ar_arpakit_lib_module_util.py +150 -0
  9. arpakitlib/ar_arpakit_schedule_uust_api_client.py +527 -0
  10. arpakitlib/ar_arpakitlib_info.py +11 -0
  11. arpakitlib/ar_base64_util.py +30 -0
  12. arpakitlib/ar_base_worker.py +77 -0
  13. arpakitlib/ar_cache_file.py +124 -0
  14. arpakitlib/ar_datetime_util.py +38 -0
  15. arpakitlib/ar_dict_util.py +24 -0
  16. arpakitlib/ar_dream_ai_api_client.py +120 -0
  17. arpakitlib/ar_encrypt_and_decrypt_util.py +23 -0
  18. arpakitlib/ar_enumeration.py +76 -0
  19. arpakitlib/ar_fastapi_static/redoc/redoc.standalone.js +1826 -0
  20. arpakitlib/ar_fastapi_static/swagger-ui/favicon-16x16.png +0 -0
  21. arpakitlib/ar_fastapi_static/swagger-ui/favicon-32x32.png +0 -0
  22. arpakitlib/ar_fastapi_static/swagger-ui/index.css +16 -0
  23. arpakitlib/ar_fastapi_static/swagger-ui/index.html +19 -0
  24. arpakitlib/ar_fastapi_static/swagger-ui/oauth2-redirect.html +79 -0
  25. arpakitlib/ar_fastapi_static/swagger-ui/swagger-initializer.js +20 -0
  26. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-bundle.js +2 -0
  27. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-bundle.js.map +1 -0
  28. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle-core.js +3 -0
  29. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle-core.js.map +1 -0
  30. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle.js +2 -0
  31. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle.js.map +1 -0
  32. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-standalone-preset.js +2 -0
  33. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-standalone-preset.js.map +1 -0
  34. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css +3 -0
  35. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map +1 -0
  36. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js +2 -0
  37. arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map +1 -0
  38. arpakitlib/ar_fastapi_util.py +294 -0
  39. arpakitlib/ar_file_storage_in_dir.py +127 -0
  40. arpakitlib/ar_generate_env_example.py +16 -0
  41. arpakitlib/ar_hash_util.py +19 -0
  42. arpakitlib/ar_http_request_util.py +75 -0
  43. arpakitlib/ar_ip_util.py +50 -0
  44. arpakitlib/ar_json_db.py +231 -0
  45. arpakitlib/ar_json_util.py +28 -0
  46. arpakitlib/ar_jwt_util.py +38 -0
  47. arpakitlib/ar_list_of_dicts_to_xlsx.py +32 -0
  48. arpakitlib/ar_list_util.py +26 -0
  49. arpakitlib/ar_logging_util.py +45 -0
  50. arpakitlib/ar_mongodb_util.py +143 -0
  51. arpakitlib/ar_need_type_util.py +58 -0
  52. arpakitlib/ar_openai_util.py +59 -0
  53. arpakitlib/ar_parse_command.py +102 -0
  54. arpakitlib/ar_postgresql_util.py +45 -0
  55. arpakitlib/ar_run_cmd.py +48 -0
  56. arpakitlib/ar_safe_sleep.py +23 -0
  57. arpakitlib/ar_schedule_uust_api_client.py +216 -0
  58. arpakitlib/ar_sqlalchemy_util.py +124 -0
  59. arpakitlib/ar_ssh_runner.py +260 -0
  60. arpakitlib/ar_str_util.py +79 -0
  61. arpakitlib/ar_type_util.py +82 -0
  62. arpakitlib/ar_yookassa_api_client.py +224 -0
  63. arpakitlib/ar_zabbix_util.py +190 -0
  64. arpakitlib-1.4.0.dist-info/LICENSE +201 -0
  65. arpakitlib-1.4.0.dist-info/METADATA +327 -0
  66. arpakitlib-1.4.0.dist-info/NOTICE +2 -0
  67. arpakitlib-1.4.0.dist-info/RECORD +68 -0
  68. arpakitlib-1.4.0.dist-info/WHEEL +4 -0
@@ -0,0 +1,102 @@
1
+ import shlex
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+
6
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
7
+
8
+
9
class BadCommandFormat(Exception):
    """Raised when a command string cannot be parsed into command/keys/values."""
11
+
12
+
13
class ParsedCommand(BaseModel):
    """Structured result of parsing a command line.

    Holds the command word, the -k/--key options (mapped to their value, or
    None when given as a bare flag) and the positional values without a key.
    """

    command: str
    key_to_value: dict[str, Optional[str]] = {}
    values_without_key: list[str] = []

    @property
    def keys(self) -> list[str]:
        # Keys that carry a value; bare flags are excluded.
        return [key for key, value in self.key_to_value.items() if value is not None]

    @property
    def flags(self) -> list[str]:
        # Keys that were supplied without a value.
        return [key for key, value in self.key_to_value.items() if value is None]

    @property
    def values(self) -> list[str]:
        # Values in the same order as `keys`.
        result = []
        for key in self.keys:
            result.append(self.key_to_value[key])
        return result

    def get_value_by_key(self, key: str) -> Optional[str]:
        return self.key_to_value.get(key)

    def key_exists(self, key: str) -> bool:
        # Counts both valued keys and bare flags.
        return key in self.key_to_value

    def keys_exists(self, keys: list[str]) -> bool:
        # True if at least one of *keys* has a value (flags are not considered).
        return any(key in self.keys for key in keys)

    def has_flag(self, flag: str) -> bool:
        return flag in self.flags

    def get_value_by_index(self, index: int) -> Optional[str]:
        # Out-of-range indexes yield None instead of raising.
        if index >= len(self.values_without_key):
            return None
        return self.values_without_key[index]
49
+
50
+
51
def parse_command(text: str) -> ParsedCommand:
    """Parse a command line like ``cmd -a 1 --flag value`` into a ParsedCommand.

    Tokens starting with one or two dashes are keys; a dashless token directly
    following a key becomes that key's value, any other dashless token is
    collected as a positional value.

    :raises BadCommandFormat: on empty input, a one-character command name,
        a bare ``-``/``--`` token, three or more leading dashes, or a duplicated key.
    """
    # Collapse runs of spaces so shlex sees a normalized line.
    text = " ".join([text_.strip() for text_ in text.split(" ") if text_.strip()]).strip()

    parts = shlex.split(text)
    if not parts:
        raise BadCommandFormat("not parts")
    if len(parts[0]) == 1:
        raise BadCommandFormat("len(parts[0]) == 1")

    res = ParsedCommand(command=parts[0])

    last_key: Optional[str] = None
    for part in parts[1:]:
        part = part.strip()

        if not part:
            raise BadCommandFormat("not part")
        if part == "-" or part == "--":
            raise BadCommandFormat("part == '-' or part == '--'")

        if part.startswith("-"):  # key or flag ("--"-prefixed tokens match too)
            # Strip at most two leading dashes; a third one is malformed input.
            part = part.removeprefix("-").removeprefix("-")
            if part.startswith("-"):
                raise BadCommandFormat("too many '-'")

            if part in res.key_to_value:
                raise BadCommandFormat(f"{part} in {res.key_to_value}")

            # Register as a flag for now; a following bare token may attach a value.
            res.key_to_value[part] = None
            last_key = part
            continue

        if last_key is not None:  # value for the preceding key
            res.key_to_value[last_key] = part
            last_key = None
            continue

        res.values_without_key.append(part)  # positional value

    return res
95
+
96
+
97
def __example():
    """Usage example placeholder; intentionally empty."""


if __name__ == '__main__':
    __example()
@@ -0,0 +1,45 @@
1
+ from arpakitlib.ar_logging_util import setup_normal_logging
2
+ from arpakitlib.ar_run_cmd import run_cmd
3
+ from arpakitlib.ar_type_util import raise_for_type
4
+
5
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
6
+
7
+
8
def make_postgresql_db_dump(
        *,
        user: str,
        host: str = "127.0.0.1",
        db_name: str,
        port: int = 5432,
        out_filepath: str = "db_dump.sql",
        password: str | None = None
) -> str:
    """Dump a PostgreSQL database with ``pg_dump`` into *out_filepath*.

    :param user: database role to connect as.
    :param host: server host (default local).
    :param db_name: database to dump.
    :param port: server port.
    :param out_filepath: file the SQL dump is redirected into.
    :param password: optional password, passed via the PGPASSWORD env variable.
    :return: *out_filepath*.
    :raises: run_cmd's error type if pg_dump exits with a non-zero code.
    """
    import shlex

    raise_for_type(user, str)
    raise_for_type(host, str)
    raise_for_type(db_name, str)
    raise_for_type(port, int)

    # Quote every interpolated value — the command string goes through a shell.
    command = (
        f"pg_dump -U {shlex.quote(user)} -h {shlex.quote(host)}"
        f" {shlex.quote(db_name)} -p {port} > {shlex.quote(out_filepath)}"
    )
    if password:
        # pg_dump does not read the password from stdin, so the previous
        # "echo password | pg_dump" never supplied it; the supported
        # non-interactive mechanism is the PGPASSWORD environment variable.
        # NOTE(review): the password is still visible in the shell command line;
        # consider ~/.pgpass for stricter setups.
        command = f"PGPASSWORD={shlex.quote(password)} {command}"

    run_cmd_res = run_cmd(command=command)
    run_cmd_res.raise_for_bad_return_code()

    return out_filepath
32
+
33
+
34
def __example():
    """Demo: dump a local test database (requires a running PostgreSQL)."""
    setup_normal_logging()
    make_postgresql_db_dump(
        user="arpakitlib",
        password="arpakitlib",
        db_name="arpakitlib",
        port=50629,
    )


if __name__ == '__main__':
    __example()
@@ -0,0 +1,48 @@
1
+ from __future__ import annotations
2
+
3
+ import subprocess
4
+
5
+ from pydantic import BaseModel
6
+
7
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
8
+
9
+
10
class RunCmdResHasErr(Exception):
    """Raised when a finished shell command has a non-zero return code."""
12
+
13
+
14
class RunCmdRes(BaseModel):
    """Captured result of a shell command: stdout, stderr and the exit code."""

    out: str
    err: str
    return_code: int

    @property
    def has_bad_return_code(self) -> bool:
        # Non-zero exit codes signal failure by shell convention.
        return self.return_code != 0

    def raise_for_bad_return_code(self):
        """Raise RunCmdResHasErr if the command failed; no-op otherwise."""
        if not self.has_bad_return_code:
            return
        raise RunCmdResHasErr(f"return_code={self.return_code}, err={self.err}")
27
+
28
+
29
def run_cmd(command: str, raise_for_bad_return_code: bool = False) -> RunCmdRes:
    """Run *command* through the shell, capturing stdout, stderr and the exit code.

    :param command: shell command line (executed with shell=True).
    :param raise_for_bad_return_code: when True, raise RunCmdResHasErr on a
        non-zero exit code.
    :return: RunCmdRes with decoded output streams and the return code.
    """
    completed = subprocess.run(command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE, check=False)

    res = RunCmdRes(
        out=completed.stdout.decode(),
        err=completed.stderr.decode(),
        return_code=completed.returncode
    )

    if raise_for_bad_return_code:
        res.raise_for_bad_return_code()
    return res
41
+
42
+
43
def __example():
    """Usage example placeholder; intentionally empty."""


if __name__ == '__main__':
    __example()
@@ -0,0 +1,23 @@
1
+ import logging
2
+ import math
3
+ from time import sleep
4
+
5
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
6
+
7
+ _logger = logging.getLogger(__name__)
8
+
9
+
10
+ def safe_sleep(sleep_time: float | int):
11
+ _logger.info(f"sleep_time={sleep_time}")
12
+ frac, int_part = math.modf(sleep_time)
13
+ for i in range(int(int_part)):
14
+ sleep(1)
15
+ sleep(frac)
16
+
17
+
18
def __example():
    """Usage example placeholder; intentionally empty."""


if __name__ == '__main__':
    __example()
@@ -0,0 +1,216 @@
1
+ import asyncio
2
+ import hashlib
3
+ import logging
4
+ from datetime import timedelta, datetime
5
+ from typing import Optional, Any
6
+
7
+ import aiohttp
8
+ import pytz
9
+ from aiohttp import ClientTimeout
10
+ from aiohttp_socks import ProxyConnector
11
+
12
+ from arpakitlib.ar_dict_util import combine_dicts
13
+
14
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
15
+
16
+
17
class ScheduleUUSTAPIClient:
    """Async client for the UUST schedule API (isu.uust.ru/api/schedule_v2).

    Authenticates either with a full password or with a password "first part"
    that is combined with the current date into a daily token
    (see generate_v2_token). All endpoints are plain GETs with auth passed in
    the query string; requests are retried on failure.
    """

    def __init__(
            self,
            *,
            api_login: str,
            api_password: str | None = None,
            api_password_first_part: str | None = None,
            api_url: str = "https://isu.uust.ru/api/schedule_v2",
            api_proxy_url: str | None = None
    ):
        self._logger = logging.getLogger(self.__class__.__name__)
        self.api_login = api_login
        self.api_password = api_password
        # Used to derive a daily token when no full password is given.
        self.api_password_first_part = api_password_first_part
        self.api_url = api_url
        # Optional SOCKS/HTTP proxy URL consumed by aiohttp_socks.ProxyConnector.
        self.api_proxy_url = api_proxy_url
        # Browser-like headers.
        # NOTE(review): self.headers is built here but never attached to the
        # requests in _async_get_request — confirm whether session.get should
        # send them.
        self.headers = {
            "Accept": (
                "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;"
                "q=0.8,application/signed-exchange;v=b3;q=0.7"
            ),
            "Accept-Encoding": "gzip, deflate, br, zstd",
            "Accept-Language": "en-US,en;q=0.9,ru-RU;q=0.8,ru;q=0.7",
            "User-Agent": (
                "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36"
            )
        }

    def auth_params(self) -> dict[str, Any]:
        """Build the login/pass query parameters.

        Prefers the full password; otherwise derives today's token from the
        password first part; returns an empty dict when no credentials are set.
        """
        if self.api_password:
            return {
                "login": self.api_login,
                "pass": self.api_password
            }
        elif self.api_password_first_part:
            return {
                "login": self.api_login,
                "pass": self.generate_v2_token(password_first_part=self.api_password_first_part)
            }
        else:
            return {}

    @classmethod
    def hash_token(cls, token: str) -> str:
        """Return the hex SHA-256 digest of *token*."""
        sha256 = hashlib.sha256()
        sha256.update(token.encode('utf-8'))
        return sha256.hexdigest()

    @classmethod
    def generate_v2_token(cls, password_first_part: str) -> str:
        """SHA-256 of the password prefix + today's date (Asia/Yekaterinburg, YYYY-MM-DD).

        The token therefore changes once per day in that timezone.
        """
        return cls.hash_token(
            password_first_part + datetime.now(tz=pytz.timezone("Asia/Yekaterinburg")).strftime("%Y-%m-%d")
        )

    async def _async_get_request(
            self,
            *,
            url: str,
            params: Optional[dict] = None
    ) -> dict[str, Any]:
        """GET *url* with *params* and return the parsed JSON body.

        Retries up to 7 times with a 1-second pause between attempts and
        re-raises the last error. A fresh ClientSession (and proxy connector,
        if configured) is created per attempt; each request has a 15s total timeout.
        """
        max_tries = 7
        tries = 0

        while True:
            self._logger.info(f"GET {url} {params} proxy={self.api_proxy_url}")

            tries += 1

            # ProxyConnector cannot be reused across sessions, so build it per attempt.
            connector = (
                ProxyConnector.from_url(self.api_proxy_url)
                if self.api_proxy_url is not None
                else None
            )

            try:
                async with aiohttp.ClientSession(connector=connector) as session:
                    async with session.get(
                            url=url,
                            params=params,
                            timeout=ClientTimeout(total=timedelta(seconds=15).total_seconds())
                    ) as response:
                        response.raise_for_status()
                        return await response.json()
            except Exception as err:
                self._logger.warning(f"{tries}/{max_tries} {err} GET {url} {params} proxy={self.api_proxy_url}")
                if tries >= max_tries:
                    raise err
                await asyncio.sleep(timedelta(seconds=1).total_seconds())
                self._logger.warning(f"{tries}/{max_tries} AGAIN GET {url} {params} proxy={self.api_proxy_url}")
                continue

    async def get_current_week(self) -> int:
        """Return the current week number.

        response.json example
        {
            'data': [15]
        }
        """

        params = combine_dicts(self.auth_params(), {"ask": "get_current_week"})
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        return json_data["data"][0]

    async def get_current_semester(self) -> str:
        """Return the current semester name.

        response.json example
        {
            'data': ['Осенний семестр 2023/2024']
        }
        """

        params = combine_dicts(self.auth_params(), {"ask": "get_current_semestr"})
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        return json_data["data"][0]

    async def get_groups(self) -> list[dict[str, Any]]:
        """Return all study groups as a list of dicts.

        response.json example
        {
            "data": {
                "4438": {
                    "group_id": 4438,
                    "group_title": "АРКТ-101А",
                    "faculty": "",
                    "course": 1
                }
            }
        }
        """

        params = combine_dicts(self.auth_params(), {"ask": "get_group_list"})
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        # The API keys groups by id; callers want just the group dicts.
        return list(json_data["data"].values())

    async def get_group_lessons(self, group_id: int, semester: str | None = None) -> list[dict[str, Any]]:
        """Return the schedule for one group, optionally restricted to *semester*."""
        params = combine_dicts(
            self.auth_params(),
            {
                "ask": "get_group_schedule",
                "id": group_id
            }
        )
        if semester is not None:
            params["semester"] = semester
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        return json_data["data"]

    async def get_teachers(self) -> list[dict[str, Any]]:
        """Return all teachers as a list of dicts (API keys them by id)."""
        params = combine_dicts(self.auth_params(), {"ask": "get_teacher_list"})
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        return list(json_data["data"].values())

    async def get_teacher_lessons(self, teacher_id: int, semester: str | None = None) -> list[dict[str, Any]]:
        """Return the schedule for one teacher, optionally restricted to *semester*."""
        params = combine_dicts(self.auth_params(), {"ask": "get_teacher_schedule", "id": teacher_id})
        if semester is not None:
            params["semester"] = semester
        json_data = await self._async_get_request(
            url=self.api_url,
            params=params
        )
        return json_data["data"]

    async def check_conn(self):
        """Probe the API (raises on failure) by requesting the current week."""
        await self.get_current_week()

    async def is_conn_good(self):
        """Like check_conn, but returns True/False instead of raising."""
        try:
            await self.check_conn()
        except Exception as e:
            self._logger.error(e)
            return False
        return True
204
+
205
+
206
def __example():
    """Synchronous usage example placeholder; intentionally empty."""


async def __async_example():
    """Asynchronous usage example placeholder; intentionally empty."""


if __name__ == '__main__':
    __example()
    asyncio.run(__async_example())
@@ -0,0 +1,124 @@
1
+ import logging
2
+ from datetime import timedelta, datetime
3
+ from typing import Any
4
+ from uuid import uuid4
5
+
6
+ from sqlalchemy import create_engine, QueuePool, text, func, inspect, INTEGER, TEXT, TIMESTAMP
7
+ from sqlalchemy.orm import sessionmaker, DeclarativeBase, Mapped, mapped_column
8
+ from sqlalchemy.orm.session import Session
9
+
10
+ from arpakitlib.ar_datetime_util import now_utc_dt
11
+ from arpakitlib.ar_json_util import safely_transfer_to_json_str
12
+
13
+ _ARPAKIT_LIB_MODULE_VERSION = "3.0"
14
+
15
+
16
class BaseDBM(DeclarativeBase):
    """Declarative base for all models; adds ad-hoc `bus_data` storage and simple serialization."""

    __abstract__ = True
    # Lazily created per-instance scratch dict (plain attribute, not a mapped column).
    _bus_data: dict[str, Any] | None = None

    @property
    def bus_data(self) -> dict[str, Any]:
        # Create the dict on first access so fresh instances stay cheap.
        if self._bus_data is None:
            self._bus_data = {}
        return self._bus_data

    def simple_dict(self) -> dict[str, Any]:
        """Return {column_name: value} for every mapped column attribute."""
        mapper = inspect(self).mapper
        return {column_attr.key: getattr(self, column_attr.key) for column_attr in mapper.column_attrs}

    def simple_json(self) -> str:
        """Return simple_dict() serialized to a JSON string."""
        return safely_transfer_to_json_str(self.simple_dict())
31
+
32
+
33
class SimpleDBM(BaseDBM):
    """Abstract base model with a standard trio of columns: surrogate int id,
    unique UUID string `long_id`, and indexed creation timestamp."""

    __abstract__ = True

    # Auto-incrementing integer primary key.
    id: Mapped[int] = mapped_column(
        INTEGER, primary_key=True, autoincrement=True, nullable=False
    )
    # Unique UUID4 string generated at insert time.
    long_id: Mapped[str] = mapped_column(
        TEXT, insert_default=uuid4, unique=True, nullable=False
    )
    # Row creation time, defaulted to now_utc_dt at insert.
    # NOTE(review): plain TIMESTAMP (no timezone) storing a UTC-derived value —
    # confirm the column should not be TIMESTAMP(timezone=True).
    creation_dt: Mapped[datetime] = mapped_column(
        TIMESTAMP, insert_default=now_utc_dt, index=True, nullable=False
    )
45
+
46
+
47
class SQLAlchemyDB:
    """Small facade over a SQLAlchemy engine: schema management, sessions,
    health checks and unique-id generation for models based on BaseDBM."""

    def __init__(self, *, db_url: str, echo: bool = False):
        self._logger = logging.getLogger(self.__class__.__name__)
        self.engine = create_engine(
            url=db_url,
            echo=echo,
            pool_size=5,
            max_overflow=10,
            poolclass=QueuePool,
            pool_timeout=timedelta(seconds=30).total_seconds()
        )
        self.sessionmaker = sessionmaker(bind=self.engine)
        # Counts sessions handed out via new_session (diagnostics only).
        self.func_new_session_counter = 0

    def drop_celery_tables(self):
        """Drop Celery's result tables if they exist."""
        with self.engine.connect() as connection:
            connection.execute(text("DROP TABLE IF EXISTS celery_tasksetmeta CASCADE;"))
            connection.execute(text("DROP TABLE IF EXISTS celery_taskmeta CASCADE;"))
            connection.commit()
        self._logger.info("celery tables were dropped")

    def remove_celery_tables_data(self):
        """Delete all rows from Celery's result tables."""
        with self.engine.connect() as connection:
            connection.execute(text("DELETE FROM celery_tasksetmeta;"))
            connection.execute(text("DELETE FROM celery_taskmeta;"))
            connection.commit()
        self._logger.info("celery tables data were removed")

    def init(self):
        """Create all tables known to BaseDBM metadata (existing tables untouched)."""
        BaseDBM.metadata.create_all(bind=self.engine, checkfirst=True)
        self._logger.info("db was inited")

    def drop(self):
        """Drop all tables known to BaseDBM metadata."""
        BaseDBM.metadata.drop_all(bind=self.engine, checkfirst=True)
        self._logger.info("db was dropped")

    def reinit(self):
        """Drop and re-create all tables known to BaseDBM metadata."""
        BaseDBM.metadata.drop_all(bind=self.engine, checkfirst=True)
        BaseDBM.metadata.create_all(bind=self.engine, checkfirst=True)
        self._logger.info("db was reinited")

    def check_conn(self):
        """Open and immediately close a probe connection; raises if the DB is unreachable."""
        # Use the context manager so the probe connection is returned to the
        # pool instead of being leaked (previously it was never closed).
        with self.engine.connect():
            pass
        self._logger.info("db conn is good")

    def new_session(self) -> Session:
        """Create a new ORM Session bound to this engine."""
        self.func_new_session_counter += 1
        return self.sessionmaker(bind=self.engine)

    def is_conn_good(self) -> bool:
        """Like check_conn, but returns True/False instead of raising."""
        try:
            self.check_conn()
        except Exception as e:
            self._logger.error(e)
            return False
        return True

    def generate_unique_id(self, *, class_dbm: type[BaseDBM]) -> int:
        """Return an integer id not currently used by *class_dbm* rows.

        Starts just above the current maximum id. Previously an empty table
        made max() return None and the method returned None; now it falls
        back to 0 so the first generated id is 1.
        """
        with self.new_session() as session:
            res = (session.query(func.max(class_dbm.id)).scalar() or 0) + 1
            while session.query(class_dbm).filter(class_dbm.id == res).first() is not None:
                res += 1
            return res

    def generate_unique_long_id(self, *, class_dbm: type[BaseDBM]) -> str:
        """Return a UUID4 string not currently used as long_id by *class_dbm* rows."""
        with self.new_session() as session:
            res = str(uuid4())
            while session.query(class_dbm).filter(class_dbm.long_id == res).first() is not None:
                res = str(uuid4())
            return res
117
+
118
+
119
def __example():
    """Usage example placeholder; intentionally empty."""


if __name__ == '__main__':
    __example()