arpakitlib 1.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arpakitlib might be problematic; see the registry's release notes for details.
- arpakitlib/AUTHOR.md +6 -0
- arpakitlib/LICENSE +201 -0
- arpakitlib/NOTICE +2 -0
- arpakitlib/README.md +6 -0
- arpakitlib/__init__.py +0 -0
- arpakitlib/ar_additional_model_util.py +8 -0
- arpakitlib/ar_aiogram_util.py +363 -0
- arpakitlib/ar_arpakit_lib_module_util.py +150 -0
- arpakitlib/ar_arpakit_schedule_uust_api_client.py +527 -0
- arpakitlib/ar_arpakitlib_info.py +11 -0
- arpakitlib/ar_base64_util.py +30 -0
- arpakitlib/ar_base_worker.py +77 -0
- arpakitlib/ar_cache_file.py +124 -0
- arpakitlib/ar_datetime_util.py +38 -0
- arpakitlib/ar_dict_util.py +24 -0
- arpakitlib/ar_dream_ai_api_client.py +120 -0
- arpakitlib/ar_encrypt_and_decrypt_util.py +23 -0
- arpakitlib/ar_enumeration.py +76 -0
- arpakitlib/ar_fastapi_static/redoc/redoc.standalone.js +1826 -0
- arpakitlib/ar_fastapi_static/swagger-ui/favicon-16x16.png +0 -0
- arpakitlib/ar_fastapi_static/swagger-ui/favicon-32x32.png +0 -0
- arpakitlib/ar_fastapi_static/swagger-ui/index.css +16 -0
- arpakitlib/ar_fastapi_static/swagger-ui/index.html +19 -0
- arpakitlib/ar_fastapi_static/swagger-ui/oauth2-redirect.html +79 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-initializer.js +20 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-bundle.js +2 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-bundle.js.map +1 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle-core.js +3 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle-core.js.map +1 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle.js +2 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-es-bundle.js.map +1 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-standalone-preset.js +2 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui-standalone-preset.js.map +1 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css +3 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.css.map +1 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js +2 -0
- arpakitlib/ar_fastapi_static/swagger-ui/swagger-ui.js.map +1 -0
- arpakitlib/ar_fastapi_util.py +294 -0
- arpakitlib/ar_file_storage_in_dir.py +127 -0
- arpakitlib/ar_generate_env_example.py +16 -0
- arpakitlib/ar_hash_util.py +19 -0
- arpakitlib/ar_http_request_util.py +75 -0
- arpakitlib/ar_ip_util.py +50 -0
- arpakitlib/ar_json_db.py +231 -0
- arpakitlib/ar_json_util.py +28 -0
- arpakitlib/ar_jwt_util.py +38 -0
- arpakitlib/ar_list_of_dicts_to_xlsx.py +32 -0
- arpakitlib/ar_list_util.py +26 -0
- arpakitlib/ar_logging_util.py +45 -0
- arpakitlib/ar_mongodb_util.py +143 -0
- arpakitlib/ar_need_type_util.py +58 -0
- arpakitlib/ar_openai_util.py +59 -0
- arpakitlib/ar_parse_command.py +102 -0
- arpakitlib/ar_postgresql_util.py +45 -0
- arpakitlib/ar_run_cmd.py +48 -0
- arpakitlib/ar_safe_sleep.py +23 -0
- arpakitlib/ar_schedule_uust_api_client.py +216 -0
- arpakitlib/ar_sqlalchemy_util.py +124 -0
- arpakitlib/ar_ssh_runner.py +260 -0
- arpakitlib/ar_str_util.py +79 -0
- arpakitlib/ar_type_util.py +82 -0
- arpakitlib/ar_yookassa_api_client.py +224 -0
- arpakitlib/ar_zabbix_util.py +190 -0
- arpakitlib-1.4.0.dist-info/LICENSE +201 -0
- arpakitlib-1.4.0.dist-info/METADATA +327 -0
- arpakitlib-1.4.0.dist-info/NOTICE +2 -0
- arpakitlib-1.4.0.dist-info/RECORD +68 -0
- arpakitlib-1.4.0.dist-info/WHEEL +4 -0
arpakitlib/ar_json_db.py
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import shutil
|
|
5
|
+
from typing import Any, Optional, Self
|
|
6
|
+
|
|
7
|
+
from arpakitlib.ar_type_util import raise_for_type
|
|
8
|
+
|
|
9
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class JSONDbFile:
    """A single JSON file used as a tiny key/value record store.

    Records are stored as one flat JSON object: {record_id: record_dict}.
    When ``use_memory`` is True the parsed JSON is cached on the instance so
    repeated reads avoid touching the filesystem.
    """

    def __init__(self, *, filepath: str, use_memory: bool = True, beautify_json: bool = True):
        self._logger = logging.getLogger(self.__class__.__name__)
        raise_for_type(filepath, str)
        # Validate BEFORE abspath: os.path.abspath("") resolves to the cwd,
        # so the original post-abspath emptiness check could never fire.
        filepath = filepath.strip()
        if not filepath:
            raise ValueError("not filepath")
        filepath = os.path.abspath(filepath)

        self.filepath = filepath
        self.use_memory = use_memory
        self.beautify_json = beautify_json
        # In-memory cache of the file content (only used when use_memory is True).
        self.saved_json_data: Optional[dict[str, Any]] = None

    def __str__(self) -> str:
        return f"JSONDbFile ({self.filepath}) ({self.count_records()})"

    def __repr__(self) -> str:
        return f"JSONDbFile ({self.filepath}) ({self.count_records()})"

    def __len__(self) -> int:
        return self.count_records()

    @property
    def filename(self) -> str:
        """Basename of the backing file."""
        return os.path.split(self.filepath)[1]

    @property
    def dirpath(self) -> str:
        """Directory containing the backing file."""
        return os.path.split(self.filepath)[0]

    def write_json_data(self, json_data: dict[str, Any]):
        """Replace the whole file content with ``json_data`` and refresh the cache."""
        raise_for_type(json_data, dict)

        if self.dirpath and not os.path.exists(self.dirpath):
            os.makedirs(self.dirpath, exist_ok=True)

        with open(self.filepath, mode="w", encoding="utf-8") as f:
            f.write(json.dumps(json_data, ensure_ascii=False, indent=2 if self.beautify_json else None))

        if self.use_memory is True:
            self.saved_json_data = json_data

    def read_json_data(self) -> dict[str, Any]:
        """Return the whole file content, creating an empty store on first use."""
        if self.use_memory is True and self.saved_json_data is not None:
            return self.saved_json_data

        if not self.check_exists():
            self.write_json_data({})

        with open(self.filepath, mode="r", encoding="utf-8") as f:
            json_data = json.loads(f.read())

        if self.use_memory is True:
            self.saved_json_data = json_data

        return json_data

    def init(self):
        """Create the backing file (as an empty store) if it does not exist."""
        if not self.check_exists():
            self.write_json_data({})

    def reinit(self):
        """Delete the backing file and recreate it empty."""
        self.drop()
        self.init()

    def refresh_saved_json_data(self):
        """Reload the in-memory cache from disk.

        Fixed: the original assigned ``self.read_json_data()`` directly, but
        that method returns the existing cache when one is present, making the
        refresh a no-op. Dropping the cache first forces a real disk read.
        """
        if self.use_memory is True:
            self.saved_json_data = None  # drop stale cache so read_json_data hits disk
            self.saved_json_data = self.read_json_data()

    def get_records(self) -> list[tuple[str, dict[str, Any]]]:
        """Return all (record_id, record) pairs.

        Annotation fixed: ``list[(str, ...)]`` is not a valid type expression;
        ``list[tuple[str, dict[str, Any]]]`` is what the method returns.
        """
        return list(self.read_json_data().items())

    def get_record_ids(self) -> list[str]:
        """Return every record id currently stored."""
        return list(self.read_json_data().keys())

    def check_exists(self) -> bool:
        """True if the backing file exists on disk."""
        return os.path.exists(self.filepath)

    def check_record_id_exists(self, record_id: str) -> bool:
        """True if a record is stored under ``record_id``."""
        return self.get_record(record_id=record_id) is not None

    def count_records(self) -> int:
        """Number of records in the store."""
        return len(self.read_json_data())

    def get_record(self, record_id: str) -> Optional[dict[str, Any]]:
        """Return the record for ``record_id``, or None when absent."""
        raise_for_type(record_id, str)
        return self.read_json_data().get(record_id)

    def generate_record_id(self) -> str:
        """Return an unused numeric-string id (record count, bumped past collisions)."""
        record_ids = set(self.get_record_ids())
        res = len(record_ids)
        while str(res) in record_ids:
            res += 1
        return str(res)

    def create_record(
            self,
            record: dict[str, Any],
            record_id: Optional[str] = None,
    ) -> tuple[str, dict[str, Any]]:
        """Insert ``record`` under ``record_id`` (auto-generated when None).

        Raises KeyError when ``record_id`` is already present.
        (Return annotation fixed: ``tuple[str, dict]``, not ``(str, dict)``.)
        """
        if record_id is None:
            record_id = self.generate_record_id()
        raise_for_type(record_id, str)

        json_data = self.read_json_data()
        if record_id in json_data:
            raise KeyError(f"record with record_id={record_id} already exists")

        json_data[record_id] = record
        self.write_json_data(json_data=json_data)

        return record_id, record

    def update_record(
            self,
            *,
            record_id: str,
            record: dict[str, Any]
    ) -> dict[str, Any]:
        """Overwrite an existing record; raises ValueError when ``record_id`` is absent."""
        raise_for_type(record_id, str)
        raise_for_type(record, dict)

        json_data = self.read_json_data()
        if record_id not in json_data:
            raise ValueError(f"record with record_id='{record_id}' not exists")

        json_data[record_id] = record
        self.write_json_data(json_data)

        return record

    def rm_record(self, record_id: str):
        """Delete one record; unknown ids are silently ignored."""
        json_data = self.read_json_data()
        if record_id not in json_data:
            return
        del json_data[record_id]
        self.write_json_data(json_data)

    def rm_records(self, record_ids: list[str]):
        """Delete every record whose id appears in ``record_ids``.

        Fixed: the original ran ``del json_data[record_id]`` while iterating
        ``json_data.items()``, which raises RuntimeError ("dictionary changed
        size during iteration") whenever at least one id matched. Rebuilding
        the dict also avoids mutating the shared in-memory cache in place.
        """
        ids_to_remove = set(record_ids)
        json_data = {
            record_id: record
            for record_id, record in self.read_json_data().items()
            if record_id not in ids_to_remove
        }
        self.write_json_data(json_data)

    def rm_all_records(self):
        """Reset the store to empty."""
        self.write_json_data({})

    def copy(self, to_filepath: str):
        """Copy the backing file to ``to_filepath``; return a JSONDbFile over the copy."""
        self.init()
        shutil.copy(self.filepath, to_filepath)
        return JSONDbFile(filepath=to_filepath, use_memory=self.use_memory, beautify_json=self.beautify_json)

    def drop(self):
        """Delete the backing file if it exists."""
        if self.check_exists():
            os.remove(self.filepath)
|
172
|
+
|
|
173
|
+
|
|
174
|
+
class JSONDb:
    """A small collection of JSONDbFile instances managed as one unit."""

    def __init__(self, json_db_files: Optional[list[JSONDbFile]] = None):
        self._logger = logging.getLogger(self.__class__.__name__)
        # Default to a fresh list to avoid a shared mutable default.
        self.json_db_files: list[JSONDbFile] = [] if json_db_files is None else json_db_files

    def __str__(self) -> str:
        return f"JSONDbFiles ({len(self.json_db_files)})"

    def __repr__(self) -> str:
        return f"JSONDbFiles ({len(self.json_db_files)})"

    def __len__(self) -> int:
        return len(self.json_db_files)

    def create_json_db_file(self, filepath: str, use_memory: bool = False, beautify_json: bool = False) -> JSONDbFile:
        """Construct a new JSONDbFile, register it, and return it."""
        new_file = JSONDbFile(filepath=filepath, use_memory=use_memory, beautify_json=beautify_json)
        self.json_db_files.append(new_file)
        return new_file

    def add_json_db_file(self, json_db_file: JSONDbFile):
        """Register an already-constructed JSONDbFile."""
        self.json_db_files.append(json_db_file)

    def init(self):
        """Initialize every registered file."""
        for json_db_file in self.json_db_files:
            json_db_file.init()

    def reinit(self):
        """Drop and recreate every registered file."""
        for json_db_file in self.json_db_files:
            json_db_file.reinit()

    def drop(self):
        """Delete every registered file from disk."""
        for json_db_file in self.json_db_files:
            json_db_file.drop()

    def rm_all_records(self):
        """Empty every registered file."""
        for json_db_file in self.json_db_files:
            json_db_file.rm_all_records()

    def copy_files_to_dir(self, to_dirpath: str) -> Self:
        """Copy every registered file into ``to_dirpath``; return a JSONDb over the copies."""
        copied_db = JSONDb()
        for json_db_file in self.json_db_files:
            target_filepath = os.path.join(to_dirpath, json_db_file.filename)
            json_db_file.copy(target_filepath)
            copied_db.create_json_db_file(
                filepath=target_filepath, use_memory=json_db_file.use_memory, beautify_json=json_db_file.beautify_json
            )
        return copied_db
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Union, Any
|
|
3
|
+
|
|
4
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def safely_transfer_to_json_obj(data: str) -> Union[dict, list]:
    """Parse a JSON string into a dict or list, rejecting non-string input."""
    if isinstance(data, str):
        return json.loads(data)
    raise ValueError("not isinstance(data, str)")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def safely_transfer_to_json_str(data: Union[dict[str, Any], list[Any]]) -> str:
    """Serialize a dict or list to pretty-printed JSON.

    Non-JSON-native values are stringified via ``default=str``; anything that
    is not a dict or list is rejected up front.
    """
    if not (isinstance(data, dict) or isinstance(data, list)):
        raise ValueError("not isinstance(data, dict) and not isinstance(data, list)")
    return json.dumps(data, ensure_ascii=False, indent=2, default=str)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def safely_transfer_to_json_str_to_json_obj(data: Union[dict[str, Any], list[Any]]) -> Union[dict, list]:
    """Round-trip ``data`` through a JSON string, normalizing it to plain JSON types."""
    json_str = safely_transfer_to_json_str(data)
    return safely_transfer_to_json_obj(json_str)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
from typing import Any, Optional
|
|
2
|
+
|
|
3
|
+
import jwt
|
|
4
|
+
from jwt import PyJWTError
|
|
5
|
+
|
|
6
|
+
from arpakitlib.ar_type_util import raise_for_type
|
|
7
|
+
|
|
8
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def encode_jwt_token(
        *,
        jwt_payload: dict[str, Any],
        jwt_secret: str
) -> str:
    """Sign ``jwt_payload`` with ``jwt_secret`` using HS256 and return the token."""
    raise_for_type(jwt_secret, str)
    token = jwt.encode(jwt_payload, jwt_secret, algorithm="HS256")
    return token
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def decode_jwt_token(
        *,
        jwt_token: str,
        jwt_secret: str
) -> Optional[dict[str, Any]]:
    """Verify and decode an HS256 token; return its payload, or None when invalid."""
    raise_for_type(jwt_token, str)
    raise_for_type(jwt_secret, str)
    try:
        payload = jwt.decode(jwt_token, jwt_secret, algorithms=["HS256"])
    except PyJWTError:
        # Any verification failure (bad signature, expiry, malformed token) maps to None.
        return None
    return payload
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
import pandas
|
|
4
|
+
|
|
5
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def list_of_dicts_to_xlsx(
        list_of_dicts: list[dict[str, Any]],
        out_filepath: str = "out.xlsx",
        sheet_name: str = "Sheet 1"
) -> str:
    """Write a list of dicts to an XLSX file with auto-sized columns.

    :param list_of_dicts: rows; dict keys become column headers
    :param out_filepath: destination path of the workbook
    :param sheet_name: name of the single sheet written
    :return: ``out_filepath``
    """
    data_frame = pandas.DataFrame(list_of_dicts)

    # Fixed: the original called the private ``writer._save()``; the public,
    # stable API is the ExcelWriter context manager (equivalent to .close()),
    # which also guarantees the file handle is released on error.
    with pandas.ExcelWriter(out_filepath, engine="xlsxwriter") as writer:
        data_frame.to_excel(writer, index=False, sheet_name=sheet_name)

        # Widen each column to its longest cell (or header), plus 10% padding.
        for i, col in enumerate(data_frame.columns):
            width = max(data_frame[col].apply(lambda x: len(str(x))).max(), len(col))
            writer.sheets[sheet_name].set_column(i, i, width * 1.1)

    return out_filepath
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def iter_group_list(list_: list[Any], n: int):
    """Yield consecutive chunks of ``list_`` with at most ``n`` items each.

    Fixes over the original:
    - empty input now yields nothing (the original yielded one empty chunk);
    - ``n < 1`` now raises ValueError (the original silently produced
      garbage chunking such as empty and one-element groups).

    :param list_: items to group
    :param n: maximum chunk size, must be >= 1
    """
    if n < 1:
        raise ValueError(f"n must be >= 1, got {n}")
    part = []
    for v in list_:
        if len(part) < n:
            part.append(v)
        else:
            yield part
            part = [v]
    # Trailing partial chunk — only when there is actually something in it.
    if part:
        yield part
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def group_list(list_: list[Any], n: int):
    """Eagerly materialize the chunks produced by iter_group_list."""
    chunks = iter_group_list(list_=list_, n=n)
    return list(chunks)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def init_log_file(*, log_filepath: str):
    """Create an empty log file at ``log_filepath`` unless one already exists."""
    if os.path.exists(path=log_filepath):
        return
    with open(file=log_filepath, mode="w") as file:
        file.write("")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def setup_normal_logging(log_filepath: Optional[str] = None):
    """Configure the root logger: INFO to the console, WARNING+ to an optional file.

    :param log_filepath: when given, warnings and errors are also appended here
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(logging.Formatter(
        "%(asctime)s | %(levelname)s | %(name)s - %(message)s",
        datefmt="%d.%m.%Y %I:%M:%S%p"
    ))
    root_logger.addHandler(console_handler)

    if log_filepath:
        file_handler = logging.FileHandler(log_filepath)
        file_handler.setLevel(logging.WARNING)
        file_handler.setFormatter(logging.Formatter(
            "%(asctime)s | %(levelname)s | %(name)s | %(filename)s:%(lineno)d - %(message)s",
            datefmt="%d.%m.%Y %I:%M:%S%p"
        ))
        root_logger.addHandler(file_handler)

    root_logger.info("normal logging was setup")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
from abc import abstractmethod
|
|
4
|
+
from random import randint
|
|
5
|
+
from typing import Optional
|
|
6
|
+
from urllib.parse import quote
|
|
7
|
+
|
|
8
|
+
from pymongo import MongoClient
|
|
9
|
+
from pymongo.collection import Collection
|
|
10
|
+
from pymongo.database import Database
|
|
11
|
+
|
|
12
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def generate_mongo_uri(
        *,
        mongo_user: Optional[str] = None,
        mongo_password: Optional[str] = None,
        mongo_hostname: str = "localhost",
        mongo_port: int = 27017,
        mongo_auth_db: Optional[str] = None
) -> str:
    """Build a ``mongodb://`` connection URI.

    Fixed: the MongoDB connection-string format requires BOTH username and
    password to be percent-encoded; the original only encoded the password,
    so a username containing ``@``, ``:`` or ``/`` produced a broken URI.

    :param mongo_user: optional username (percent-encoded into the URI)
    :param mongo_password: optional password (only used when a user is given)
    :param mongo_hostname: server host, default "localhost"
    :param mongo_port: server port, default 27017
    :param mongo_auth_db: optional authSource database name
    :return: the assembled URI string
    """
    res = "mongodb://"
    if mongo_user:
        res += quote(mongo_user)
        if mongo_password:
            res += f":{quote(mongo_password)}"
        res += "@"
    res += f"{mongo_hostname}:{mongo_port}"
    if mongo_auth_db is not None:
        res += f"/?authSource={mongo_auth_db}"

    return res
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class EasyMongoDb:
    """Convenience wrapper around pymongo for a single named database.

    Subclasses implement ``ensure_indexes`` and may register collections in
    ``used_collections`` so ``drop_used_collections`` can target them.
    """

    def __init__(
            self,
            *,
            db_name: str,
            username: str | None = None,
            password: str | None = None,
            hostname: str = "127.0.0.1",
            port: int = 27017,
            auth_source: str | None = None,
    ):
        self._logger = logging.getLogger(self.__class__.__name__)
        self.hostname = hostname
        self.username = username
        self.password = password
        self.auth_source = auth_source
        self.port = port
        self.db_name = db_name
        # Collections registered by subclasses; drop_used_collections() uses these.
        self.used_collections: list[Collection] = []

    def init(self):
        """Prepare the database (index creation is delegated to subclasses)."""
        self.ensure_indexes()

    def reinit(self):
        """Drop every collection and prepare the database again."""
        self.drop_all_collections()
        self.init()

    def get_pymongo_client(self) -> MongoClient:
        """Build a new MongoClient with conservative 5-second timeouts."""
        kwargs = {
            "host": self.hostname,
            "port": self.port,
            "tz_aware": True
        }
        if self.username is not None:
            kwargs["username"] = self.username
        if self.password is not None:
            kwargs["password"] = self.password
        if self.auth_source is not None:
            kwargs["authSource"] = self.auth_source
        kwargs["timeoutMS"] = 5000
        kwargs["connectTimeoutMS"] = 5000
        kwargs["socketTimeoutMS"] = 5000
        kwargs["serverSelectionTimeoutMS"] = 5000
        return MongoClient(**kwargs)

    def check_conn(self):
        """Raise if the MongoDB server cannot be reached."""
        self.get_pymongo_client().server_info()

    def is_db_conn_good(self) -> bool:
        """Like check_conn, but logs the failure and returns a bool instead of raising."""
        try:
            self.get_pymongo_client().server_info()
        except Exception as e:
            self._logger.error(e)
            return False
        return True

    def get_pymongo_db(self) -> Database:
        """Return a Database handle for ``db_name``."""
        return self.get_pymongo_client().get_database(self.db_name)

    def drop_all_collections(self):
        """Drop every collection currently present in the database."""
        for collection in self.get_pymongo_db().list_collections():
            self.get_pymongo_db().get_collection(collection["name"]).drop()

    def drop_used_collections(self):
        """Drop only the collections registered in ``used_collections``."""
        for collection in self.used_collections:
            collection.drop()

    def generate_collection_int_id(self, collection: Collection) -> int:
        """Return max existing integer ``id`` + 1, or 1 for an empty collection."""
        existing_ids = set(
            doc["id"] for doc in collection.find({}, {"id": True}) if "id" in doc.keys()
        )
        # max(existing_ids) + 1 can never collide with an existing id, so the
        # original's `while res in existing_ids` probing loop was dead code.
        return max(existing_ids) + 1 if existing_ids else 1

    def generate_collection_rand_int_id(self, collection: Collection, max_rand_int: int = 30) -> int:
        """Return a fresh integer ``id`` with a random offset past the current maximum."""
        existing_ids = set(
            doc["id"] for doc in collection.find({}, {"id": True}) if "id" in doc.keys()
        )

        id_ = self.generate_collection_int_id(collection=collection)
        res = id_ + randint(1, max_rand_int)
        while res in existing_ids:
            id_ += 1
            res = id_ + randint(1, max_rand_int)

        return res

    @abstractmethod
    def ensure_indexes(self):
        """Create the indexes this database needs; must be overridden by subclasses."""
        # Fixed: the original `raise NotImplemented()` raises TypeError
        # ('NotImplementedType' object is not callable) — NotImplemented is a
        # sentinel value, not an exception; NotImplementedError is correct.
        raise NotImplementedError()
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def __example():
    # Placeholder for a synchronous usage example; intentionally empty.
    pass


async def __async_example():
    # Placeholder for an asynchronous usage example; intentionally empty.
    pass


if __name__ == '__main__':
    # Run both example stubs when the module is executed directly.
    __example()
    asyncio.run(__async_example())
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
from arpakitlib.ar_enumeration import EasyEnumeration
|
|
5
|
+
|
|
6
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class NeedTypes(EasyEnumeration):
    # Names of the target types supported by parse_need_type below.
    # Trailing underscores avoid shadowing the Python builtins of the same name.
    str_ = "str"
    int_ = "int"
    bool_ = "bool"
    float_ = "float"
    list_of_int = "list_of_int"
    list_of_str = "list_of_str"
    list_of_float = "list_of_float"
    json = "json"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def parse_need_type(value: Any, need_type: str) -> Any:
    """Convert ``value`` (typically a string) to the type named by ``need_type``.

    ``need_type`` must be one of the NeedTypes values; list variants accept a
    comma-separated string, optionally wrapped in square brackets.
    """
    NeedTypes.parse_and_validate_values(need_type)

    def _split_items(raw: str) -> list[str]:
        # Strip optional surrounding brackets, then split on commas.
        return raw.removeprefix("[").removesuffix("]").split(",")

    if need_type == NeedTypes.str_:
        return value
    if need_type == NeedTypes.int_:
        return int(value)
    if need_type == NeedTypes.bool_:
        lowered = value.lower()
        if lowered in ["true", "1"]:
            return True
        if lowered in ["false", "0"]:
            return False
        raise ValueError(f"value {value} is not bool type")
    if need_type == NeedTypes.float_:
        return float(value)
    if need_type == NeedTypes.list_of_int:
        return [int(item.strip()) for item in _split_items(value)]
    if need_type == NeedTypes.list_of_str:
        return [item.strip() for item in _split_items(value)]
    if need_type == NeedTypes.list_of_float:
        return [float(item.strip()) for item in _split_items(value)]
    if need_type == NeedTypes.json:
        return json.loads(value)

    raise ValueError(f"bad need_type {need_type}")
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def __example():
    # Placeholder usage example for this module; intentionally does nothing yet.
    pass


if __name__ == '__main__':
    # Allow running the module directly to exercise the example stub.
    __example()
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from openai import OpenAI, AsyncOpenAI
|
|
6
|
+
|
|
7
|
+
_ARPAKIT_LIB_MODULE_VERSION = "3.0"
|
|
8
|
+
|
|
9
|
+
"""
|
|
10
|
+
https://platform.openai.com/docs/
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class EasyOpenAI:
    """Thin convenience wrapper holding optional sync and async OpenAI clients.

    Connection health is probed by listing models on the respective client.
    """

    def __init__(
            self,
            *,
            open_ai: Optional[OpenAI] = None,
            async_open_ai: Optional[AsyncOpenAI] = None
    ):
        self._logger = logging.getLogger(self.__class__.__name__)

        self.open_ai = open_ai
        self.async_open_ai = async_open_ai

    def check_conn(self):
        """Raise if the sync client cannot list models."""
        self.open_ai.models.list()

    def is_conn_good(self) -> bool:
        """Like check_conn, but logs the failure and returns a bool instead of raising."""
        try:
            self.check_conn()
        except Exception as exception:
            self._logger.error(exception)
            return False
        return True

    async def async_check_conn(self):
        """Raise if the async client cannot list models."""
        await self.async_open_ai.models.list()

    async def async_is_conn_good(self) -> bool:
        """Async variant of is_conn_good."""
        try:
            await self.async_check_conn()
        except Exception as exception:
            self._logger.error(exception)
            return False
        return True
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def __example():
    # Placeholder for a synchronous usage example; intentionally empty.
    pass


async def __async_example():
    # Placeholder for an asynchronous usage example; intentionally empty.
    pass


if __name__ == '__main__':
    # Run both example stubs when the module is executed directly.
    __example()
    asyncio.run(__async_example())
|