re-common 10.0.39-py3-none-any.whl → 10.0.41-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- re_common/baselibrary/__init__.py +4 -4
- re_common/baselibrary/baseabs/__init__.py +6 -6
- re_common/baselibrary/baseabs/baseabs.py +26 -26
- re_common/baselibrary/database/mbuilder.py +132 -132
- re_common/baselibrary/database/moudle.py +93 -93
- re_common/baselibrary/database/msqlite3.py +194 -194
- re_common/baselibrary/database/mysql.py +169 -169
- re_common/baselibrary/database/sql_factory.py +26 -26
- re_common/baselibrary/mthread/MThreadingRun.py +486 -486
- re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -349
- re_common/baselibrary/mthread/__init__.py +2 -2
- re_common/baselibrary/mthread/mythreading.py +695 -695
- re_common/baselibrary/pakge_other/socks.py +404 -404
- re_common/baselibrary/readconfig/config_factory.py +18 -18
- re_common/baselibrary/readconfig/ini_config.py +317 -317
- re_common/baselibrary/readconfig/toml_config.py +49 -49
- re_common/baselibrary/temporary/envdata.py +36 -36
- re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -118
- re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -102
- re_common/baselibrary/tools/all_requests/mrequest.py +412 -412
- re_common/baselibrary/tools/all_requests/requests_request.py +81 -81
- re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -31
- re_common/baselibrary/tools/contrast_db3.py +123 -123
- re_common/baselibrary/tools/copy_file.py +39 -39
- re_common/baselibrary/tools/db3_2_sizedb3.py +102 -102
- re_common/baselibrary/tools/foreachgz.py +39 -39
- re_common/baselibrary/tools/get_attr.py +10 -10
- re_common/baselibrary/tools/image_to_pdf.py +61 -61
- re_common/baselibrary/tools/java_code_deal.py +139 -139
- re_common/baselibrary/tools/javacode.py +79 -79
- re_common/baselibrary/tools/mdb_db3.py +48 -48
- re_common/baselibrary/tools/merge_file.py +171 -171
- re_common/baselibrary/tools/merge_gz_file.py +165 -165
- re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -42
- re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -42
- re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -38
- re_common/baselibrary/tools/mongo_tools.py +50 -50
- re_common/baselibrary/tools/move_file.py +170 -170
- re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -63
- re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -354
- re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -18
- re_common/baselibrary/tools/move_mongo/use_mv.py +93 -93
- re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -125
- re_common/baselibrary/tools/mpandas/pandas_visualization.py +7 -7
- re_common/baselibrary/tools/myparsel.py +104 -104
- re_common/baselibrary/tools/rename_dir_file.py +37 -37
- re_common/baselibrary/tools/sequoiadb_utils.py +398 -398
- re_common/baselibrary/tools/split_line_to_many.py +25 -25
- re_common/baselibrary/tools/stringtodicts.py +33 -33
- re_common/baselibrary/tools/workwechant_bot.py +84 -84
- re_common/baselibrary/utils/baseaiohttp.py +296 -296
- re_common/baselibrary/utils/baseaiomysql.py +87 -87
- re_common/baselibrary/utils/baseallstep.py +191 -191
- re_common/baselibrary/utils/baseavro.py +19 -19
- re_common/baselibrary/utils/baseboto3.py +291 -291
- re_common/baselibrary/utils/basecsv.py +32 -32
- re_common/baselibrary/utils/basedict.py +133 -133
- re_common/baselibrary/utils/basedir.py +241 -241
- re_common/baselibrary/utils/baseencode.py +351 -351
- re_common/baselibrary/utils/baseencoding.py +28 -28
- re_common/baselibrary/utils/baseesdsl.py +86 -86
- re_common/baselibrary/utils/baseexcel.py +264 -264
- re_common/baselibrary/utils/baseexcept.py +109 -109
- re_common/baselibrary/utils/basefile.py +654 -654
- re_common/baselibrary/utils/baseftp.py +214 -214
- re_common/baselibrary/utils/basegzip.py +60 -60
- re_common/baselibrary/utils/basehdfs.py +135 -135
- re_common/baselibrary/utils/basehttpx.py +268 -268
- re_common/baselibrary/utils/baseip.py +87 -87
- re_common/baselibrary/utils/basejson.py +2 -2
- re_common/baselibrary/utils/baselist.py +32 -32
- re_common/baselibrary/utils/basemotor.py +190 -190
- re_common/baselibrary/utils/basemssql.py +98 -98
- re_common/baselibrary/utils/baseodbc.py +113 -113
- re_common/baselibrary/utils/basepandas.py +302 -302
- re_common/baselibrary/utils/basepeewee.py +11 -11
- re_common/baselibrary/utils/basepika.py +180 -180
- re_common/baselibrary/utils/basepydash.py +143 -143
- re_common/baselibrary/utils/basepymongo.py +230 -230
- re_common/baselibrary/utils/basequeue.py +22 -22
- re_common/baselibrary/utils/baserar.py +57 -57
- re_common/baselibrary/utils/baserequest.py +279 -279
- re_common/baselibrary/utils/baseset.py +8 -8
- re_common/baselibrary/utils/basesmb.py +403 -403
- re_common/baselibrary/utils/basestring.py +382 -382
- re_common/baselibrary/utils/basetime.py +320 -320
- re_common/baselibrary/utils/baseurl.py +121 -121
- re_common/baselibrary/utils/basezip.py +57 -57
- re_common/baselibrary/utils/core/__init__.py +7 -7
- re_common/baselibrary/utils/core/bottomutils.py +18 -18
- re_common/baselibrary/utils/core/mdeprecated.py +327 -327
- re_common/baselibrary/utils/core/mlamada.py +16 -16
- re_common/baselibrary/utils/core/msginfo.py +25 -25
- re_common/baselibrary/utils/core/requests_core.py +103 -103
- re_common/baselibrary/utils/fateadm.py +429 -429
- re_common/baselibrary/utils/importfun.py +123 -123
- re_common/baselibrary/utils/mfaker.py +57 -57
- re_common/baselibrary/utils/my_abc/__init__.py +3 -3
- re_common/baselibrary/utils/my_abc/better_abc.py +32 -32
- re_common/baselibrary/utils/mylogger.py +414 -414
- re_common/baselibrary/utils/myredisclient.py +861 -861
- re_common/baselibrary/utils/pipupgrade.py +21 -21
- re_common/baselibrary/utils/ringlist.py +85 -85
- re_common/baselibrary/utils/version_compare.py +36 -36
- re_common/baselibrary/utils/ydmhttp.py +126 -126
- re_common/facade/lazy_import.py +11 -11
- re_common/facade/loggerfacade.py +25 -25
- re_common/facade/mysqlfacade.py +467 -467
- re_common/facade/now.py +31 -31
- re_common/facade/sqlite3facade.py +257 -257
- re_common/facade/use/mq_use_facade.py +83 -83
- re_common/facade/use/proxy_use_facade.py +19 -19
- re_common/libtest/base_dict_test.py +19 -19
- re_common/libtest/baseavro_test.py +13 -13
- re_common/libtest/basefile_test.py +14 -14
- re_common/libtest/basemssql_test.py +77 -77
- re_common/libtest/baseodbc_test.py +7 -7
- re_common/libtest/basepandas_test.py +38 -38
- re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -14
- re_common/libtest/get_attr_test/settings.py +54 -54
- re_common/libtest/idencode_test.py +53 -53
- re_common/libtest/iniconfig_test.py +35 -35
- re_common/libtest/ip_test.py +34 -34
- re_common/libtest/merge_file_test.py +20 -20
- re_common/libtest/mfaker_test.py +8 -8
- re_common/libtest/mm3_test.py +31 -31
- re_common/libtest/mylogger_test.py +88 -88
- re_common/libtest/myparsel_test.py +27 -27
- re_common/libtest/mysql_test.py +151 -151
- re_common/libtest/pymongo_test.py +21 -21
- re_common/libtest/split_test.py +11 -11
- re_common/libtest/sqlite3_merge_test.py +5 -5
- re_common/libtest/sqlite3_test.py +34 -34
- re_common/libtest/tomlconfig_test.py +30 -30
- re_common/libtest/use_tools_test/__init__.py +2 -2
- re_common/libtest/user/__init__.py +4 -4
- re_common/studio/__init__.py +4 -4
- re_common/studio/assignment_expressions.py +36 -36
- re_common/studio/mydash/test1.py +18 -18
- re_common/studio/pydashstudio/first.py +9 -9
- re_common/studio/streamlitstudio/first_app.py +65 -65
- re_common/studio/streamlitstudio/uber_pickups.py +23 -23
- re_common/studio/test.py +18 -18
- re_common/v2/baselibrary/business_utils/BusinessStringUtil.py +235 -220
- re_common/v2/baselibrary/business_utils/baseencodeid.py +100 -100
- re_common/v2/baselibrary/business_utils/full_doi_path.py +116 -116
- re_common/v2/baselibrary/business_utils/rel_tools.py +6 -6
- re_common/v2/baselibrary/decorators/utils.py +59 -59
- re_common/v2/baselibrary/helpers/search_packge/NearestNeighbors_test.py +105 -105
- re_common/v2/baselibrary/helpers/search_packge/fit_text_match.py +253 -253
- re_common/v2/baselibrary/helpers/search_packge/scikit_learn_text_matcher.py +260 -260
- re_common/v2/baselibrary/helpers/search_packge/test.py +1 -1
- re_common/v2/baselibrary/s3object/baseboto3.py +230 -230
- re_common/v2/baselibrary/tools/WeChatRobot.py +95 -95
- re_common/v2/baselibrary/tools/ac_ahocorasick.py +75 -75
- re_common/v2/baselibrary/tools/concurrency.py +35 -35
- re_common/v2/baselibrary/tools/data_processer/base.py +53 -53
- re_common/v2/baselibrary/tools/data_processer/data_processer.py +497 -508
- re_common/v2/baselibrary/tools/data_processer/data_reader.py +187 -187
- re_common/v2/baselibrary/tools/data_processer/data_writer.py +38 -38
- re_common/v2/baselibrary/tools/dict_tools.py +44 -44
- re_common/v2/baselibrary/tools/dolphinscheduler.py +187 -187
- re_common/v2/baselibrary/tools/hdfs_base_processor.py +204 -204
- re_common/v2/baselibrary/tools/hdfs_bulk_processor.py +67 -67
- re_common/v2/baselibrary/tools/hdfs_data_processer.py +338 -338
- re_common/v2/baselibrary/tools/hdfs_line_processor.py +74 -74
- re_common/v2/baselibrary/tools/list_tools.py +69 -69
- re_common/v2/baselibrary/tools/resume_tracker.py +94 -94
- re_common/v2/baselibrary/tools/search_hash_tools.py +54 -54
- re_common/v2/baselibrary/tools/text_matcher.py +326 -326
- re_common/v2/baselibrary/tools/tree_processor/__init__.py +0 -0
- re_common/v2/baselibrary/tools/tree_processor/builder.py +25 -0
- re_common/v2/baselibrary/tools/tree_processor/node.py +13 -0
- re_common/v2/baselibrary/tools/unionfind_tools.py +60 -60
- re_common/v2/baselibrary/utils/BusinessStringUtil.py +196 -196
- re_common/v2/baselibrary/utils/api_net_utils.py +270 -270
- re_common/v2/baselibrary/utils/author_smi.py +361 -361
- re_common/v2/baselibrary/utils/base_string_similarity.py +158 -158
- re_common/v2/baselibrary/utils/basedict.py +37 -37
- re_common/v2/baselibrary/utils/basehdfs.py +163 -163
- re_common/v2/baselibrary/utils/basepika.py +180 -180
- re_common/v2/baselibrary/utils/basetime.py +94 -77
- re_common/v2/baselibrary/utils/db.py +174 -156
- re_common/v2/baselibrary/utils/elasticsearch.py +46 -0
- re_common/v2/baselibrary/utils/json_cls.py +16 -16
- re_common/v2/baselibrary/utils/mq.py +83 -83
- re_common/v2/baselibrary/utils/n_ary_expression_tree.py +243 -243
- re_common/v2/baselibrary/utils/string_bool.py +187 -186
- re_common/v2/baselibrary/utils/string_clear.py +246 -246
- re_common/v2/baselibrary/utils/string_smi.py +18 -18
- re_common/v2/baselibrary/utils/stringutils.py +312 -271
- re_common/vip/base_step_process.py +11 -11
- re_common/vip/baseencodeid.py +90 -90
- re_common/vip/changetaskname.py +28 -28
- re_common/vip/core_var.py +24 -24
- re_common/vip/mmh3Hash.py +89 -89
- re_common/vip/proxy/allproxys.py +127 -127
- re_common/vip/proxy/allproxys_thread.py +159 -159
- re_common/vip/proxy/cnki_proxy.py +153 -153
- re_common/vip/proxy/kuaidaili.py +87 -87
- re_common/vip/proxy/proxy_all.py +113 -113
- re_common/vip/proxy/update_kuaidaili_0.py +42 -42
- re_common/vip/proxy/wanfang_proxy.py +152 -152
- re_common/vip/proxy/wp_proxy_all.py +181 -181
- re_common/vip/read_rawid_to_txt.py +91 -91
- re_common/vip/title/__init__.py +5 -5
- re_common/vip/title/transform/TransformBookTitleToZt.py +125 -125
- re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -139
- re_common/vip/title/transform/TransformCstadTitleToZt.py +195 -195
- re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -203
- re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -132
- re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -114
- re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -135
- re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -135
- re_common/vip/title/transform/__init__.py +10 -10
- {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/LICENSE +201 -201
- {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/METADATA +16 -16
- re_common-10.0.41.dist-info/RECORD +252 -0
- {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/WHEEL +1 -1
- re_common-10.0.39.dist-info/RECORD +0 -248
- {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/top_level.txt +0 -0
@@ -1,156 +1,174 @@
-import os
-import time
-
-import aiomysql
-import asyncio
-from contextlib import asynccontextmanager
-from typing import AsyncGenerator, Tuple
-from collections import namedtuple
-
-from aiomysql import Pool, Connection, Cursor
-
-DB_CONFIG = {
-    "host": "192.168.98.64",
-    "port": 4000,
-    "user": "dataware_house_baseUser",
-    "password": "FF19AF831AEBD580B450B16BF9264200",
-    "db": "dataware_house_base",
-    "charset": "utf8mb4",
-    "minsize": 16,  # minimum pool size
-    "maxsize": 128,  # maximum pool size
-    "autocommit": False,  # autocommit transactions
-    "pool_recycle": 3600,  # per-connection recycle time (seconds); older connections are closed and recreated to avoid stale connections
-    "echo": False,  # print SQL statements
-}
-
-DB_CONFIG1 = {
-    "host": "192.168.98.64",
-    "port": 4000,
-    "user": "foreign_fulltextUser",
-    "password": "i4hIeasw1qpmhGN2nwL7",
-    "db": "foreign_fulltext",
-    "charset": "utf8mb4",
-    "minsize": 16,  # minimum pool size
-    "maxsize": 128,  # maximum pool size
-    "autocommit": False,  # autocommit transactions
-    "pool_recycle": 3600,  # per-connection recycle time (seconds); older connections are closed and recreated to avoid stale connections
-    "echo": False,  # print SQL statements
-}
-
-
-async def get_pool_only(_DB_CONFIG: dict = None):
-    global DB_CONFIG
-    if _DB_CONFIG is not None:
-        DB_CONFIG = _DB_CONFIG
-    pool: Pool = await aiomysql.create_pool(**DB_CONFIG)
-    return pool
-
-
-@asynccontextmanager
-async def get_db_pool(_DB_CONFIG: dict = None):
-    """Async database connection pool manager."""
-    global DB_CONFIG
-    if _DB_CONFIG is not None:
-        DB_CONFIG = _DB_CONFIG
-    pool: Pool = await aiomysql.create_pool(**DB_CONFIG)
-    try:
-        yield pool
-    finally:
-        pool.close()
-        await pool.wait_closed()
-
-
-@asynccontextmanager
-async def get_session(pool: Pool) -> AsyncGenerator[Tuple[Connection, Cursor], None]:
-    """Acquire a database session."""
-    async with pool.acquire() as conn:
-        async with conn.cursor() as cursor:
-            yield conn, cursor
-
-
-async def dictfetchall(cursor: Cursor):
-    """
-    Return all rows from a cursor as a dict.
-    Assume the column names are unique.
-    """
-    columns = [col[0] for col in cursor.description]
-    return [dict(zip(columns, row)) for row in await cursor.fetchall()]
-
-
-async def namedtuplefetchall(cursor: Cursor):
-    """
-    Return all rows from a cursor as a namedtuple.
-    Assume the column names are unique.
-    """
-    desc = cursor.description
-    nt_result = namedtuple("Result", [col[0] for col in desc])
-    return [nt_result(*row) for row in await cursor.fetchall()]
-
-
-# main.py
-
-
-aiomysql_pool = None
-pool_lock = asyncio.Lock()  # global async lock
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-async def check_connection(client):
-    try:
-        print("check mongodb client ping")
-        await client.admin.command("ping")
-        return True
-    except Exception:
-        return False
-
-
-async def init_motor_async(uri, db_name, bucket_name, is_reload=False):
-    global motor_fs, client, _loop_id_mongo
-    is_ping = True
-
-    if _loop_id_mongo is not None:
-        loop_id = id(asyncio.get_running_loop())
-        if loop_id != _loop_id_mongo:
-            is_reload = True
-
-    # Avoid checking on every call: only verify the connection when is_reload is set
-    if is_reload:
-        is_ping = await check_connection(client)
-    if motor_fs is None or not is_ping:
-        async with motor_fs_lock:
-            if motor_fs is None or not is_ping:
-                print(f"[{os.getpid()}] Initializing motor_fs...")
-                from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorGridFSBucket
-                client = AsyncIOMotorClient(uri)
-                db = client[db_name]
-                motor_fs = AsyncIOMotorGridFSBucket(database=db, bucket_name=bucket_name)
-    _loop_id_mongo = id(asyncio.get_running_loop())
-    return motor_fs, client
-
-
-#
-#
-#
-#
-#
-#
-#
-#
-#
-#
-#
+import os
+import time
+
+import aiomysql
+import asyncio
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator, Tuple
+from collections import namedtuple
+
+from aiomysql import Pool, Connection, Cursor
+
+DB_CONFIG = {
+    "host": "192.168.98.64",
+    "port": 4000,
+    "user": "dataware_house_baseUser",
+    "password": "FF19AF831AEBD580B450B16BF9264200",
+    "db": "dataware_house_base",
+    "charset": "utf8mb4",
+    "minsize": 16,  # minimum pool size
+    "maxsize": 128,  # maximum pool size
+    "autocommit": False,  # autocommit transactions
+    "pool_recycle": 3600,  # per-connection recycle time (seconds); older connections are closed and recreated to avoid stale connections
+    "echo": False,  # print SQL statements
+}
+
+DB_CONFIG1 = {
+    "host": "192.168.98.64",
+    "port": 4000,
+    "user": "foreign_fulltextUser",
+    "password": "i4hIeasw1qpmhGN2nwL7",
+    "db": "foreign_fulltext",
+    "charset": "utf8mb4",
+    "minsize": 16,  # minimum pool size
+    "maxsize": 128,  # maximum pool size
+    "autocommit": False,  # autocommit transactions
+    "pool_recycle": 3600,  # per-connection recycle time (seconds); older connections are closed and recreated to avoid stale connections
+    "echo": False,  # print SQL statements
+}
+
+
+async def get_pool_only(_DB_CONFIG: dict = None):
+    global DB_CONFIG
+    if _DB_CONFIG is not None:
+        DB_CONFIG = _DB_CONFIG
+    pool: Pool = await aiomysql.create_pool(**DB_CONFIG)
+    return pool
+
+
+@asynccontextmanager
+async def get_db_pool(_DB_CONFIG: dict = None):
+    """Async database connection pool manager."""
+    global DB_CONFIG
+    if _DB_CONFIG is not None:
+        DB_CONFIG = _DB_CONFIG
+    pool: Pool = await aiomysql.create_pool(**DB_CONFIG)
+    try:
+        yield pool
+    finally:
+        pool.close()
+        await pool.wait_closed()
+
+
+@asynccontextmanager
+async def get_session(pool: Pool) -> AsyncGenerator[Tuple[Connection, Cursor], None]:
+    """Acquire a database session."""
+    async with pool.acquire() as conn:
+        async with conn.cursor() as cursor:
+            yield conn, cursor
+
+
+async def dictfetchall(cursor: Cursor):
+    """
+    Return all rows from a cursor as a dict.
+    Assume the column names are unique.
+    """
+    columns = [col[0] for col in cursor.description]
+    return [dict(zip(columns, row)) for row in await cursor.fetchall()]
+
+
+async def namedtuplefetchall(cursor: Cursor):
+    """
+    Return all rows from a cursor as a namedtuple.
+    Assume the column names are unique.
+    """
+    desc = cursor.description
+    nt_result = namedtuple("Result", [col[0] for col in desc])
+    return [nt_result(*row) for row in await cursor.fetchall()]
+
+
+# main.py
+
+
+aiomysql_pool = None
+pool_lock = asyncio.Lock()  # global async lock
+
+
+async def init_aiomysql_pool_async():
+    global aiomysql_pool
+    if aiomysql_pool is None:
+        async with pool_lock:
+            if aiomysql_pool is None:
+                print(f"[{os.getpid()}] Initializing aiomysql pool...")
+                aiomysql_pool = await aiomysql.create_pool(**DB_CONFIG)
+    return aiomysql_pool
+
+
+motor_fs = None
+client = None
+motor_fs_lock = asyncio.Lock()  # global async lock
+_loop_id_mongo = None
+
+
+async def check_connection(client):
+    try:
+        print("check mongodb client ping")
+        await client.admin.command("ping")
+        return True
+    except Exception:
+        return False
+
+
+async def init_motor_async(uri, db_name, bucket_name, is_reload=False):
+    global motor_fs, client, _loop_id_mongo
+    is_ping = True
+
+    if _loop_id_mongo is not None:
+        loop_id = id(asyncio.get_running_loop())
+        if loop_id != _loop_id_mongo:
+            is_reload = True
+
+    # Avoid checking on every call: only verify the connection when is_reload is set
+    if is_reload:
+        is_ping = await check_connection(client)
+    if motor_fs is None or not is_ping:
+        async with motor_fs_lock:
+            if motor_fs is None or not is_ping:
+                print(f"[{os.getpid()}] Initializing motor_fs...")
+                from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorGridFSBucket
+                client = AsyncIOMotorClient(uri)
+                db = client[db_name]
+                motor_fs = AsyncIOMotorGridFSBucket(database=db, bucket_name=bucket_name)
+    _loop_id_mongo = id(asyncio.get_running_loop())
+    return motor_fs, client
+
+
+# async def run_main():
+#     while True:
+#         uri = "mongodb://192.168.98.80:27001/wpdc"
+#         db_name = "wpdc"
+#         bucket_name = "sci_doc"
+#         motor_fs, client = await init_motor_async(uri, db_name, bucket_name,is_reload=True)
+#         # print(await check_connection(client))
+#         time.sleep(3)
+#
+#
+# if __name__ == "__main__":
+#     asyncio.run(run_main())
+
+
+def get_connection(autocommit: bool = True) -> Connection:
+    from pymysql import Connection
+    from pymysql.cursors import DictCursor
+    import pymysql
+    db_conf = {
+        "host": "192.168.98.55",
+        "port": 4000,
+        "user": "dataware_house_baseUser",
+        "password": "FF19AF831AEBD580B450B16BF9264200",
+        "database": "dataware_house_base",
+        "autocommit": autocommit,
+        "cursorclass": DictCursor,
+    }
+    conn: Connection = pymysql.connect(**db_conf)
+    return conn
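The hunk above (which, going by the +174/-156 entry in the file list, corresponds to re_common/v2/baselibrary/utils/db.py) adds a lazily initialized shared aiomysql pool guarded by an asyncio lock, plus a synchronous pymysql get_connection helper. Below is a minimal usage sketch of the pool/session helpers, assuming the module is importable under that path and the DB_CONFIG host is reachable; the query is a placeholder and not part of the package.

import asyncio

# Hedged sketch: the import path and query are assumptions, not confirmed by the diff itself.
from re_common.v2.baselibrary.utils.db import get_db_pool, get_session, dictfetchall


async def main():
    # get_db_pool() builds the pool from DB_CONFIG and closes it on exit
    async with get_db_pool() as pool:
        # get_session() yields a (connection, cursor) pair acquired from the pool
        async with get_session(pool) as (conn, cursor):
            await cursor.execute("SELECT 1 AS ok")  # placeholder query
            rows = await dictfetchall(cursor)       # -> [{"ok": 1}]
            print(rows)
            # DB_CONFIG sets autocommit=False, so writes would also need: await conn.commit()


if __name__ == "__main__":
    asyncio.run(main())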
@@ -0,0 +1,46 @@
+# elasticsearch[async]~=8.11.1
+from typing import Any, Dict, List
+from elasticsearch import AsyncElasticsearch, helpers
+
+DEFAULT_CONFIG = {
+    "hosts": [
+        "http://192.168.32.97:9200",
+        "http://192.168.32.99:9200",
+        "http://192.168.32.101:9200",
+        "http://192.168.32.103:9200",
+    ],
+    "basic_auth": ("elastic", "Zl2bWsOuvqi0IUwpvGhK"),
+    "timeout": 60,
+}
+
+
+def get_es(config=DEFAULT_CONFIG):
+    es = AsyncElasticsearch(**config)
+    return es
+
+
+async def update(es: AsyncElasticsearch, index: str, doc_id: str, doc: Dict[str, Any], doc_as_upsert: bool = False):
+    return await es.update(index=index, id=doc_id, doc=doc, doc_as_upsert=doc_as_upsert)
+
+
+async def bulk_update(es: AsyncElasticsearch, index: str, docs: List[Dict[str, Any]], doc_as_upsert: bool = False):
+    """
+    Bulk-update ES documents.
+
+    Example format for docs:
+    [
+        {"_id": "1", "doc": {"field1": "value1"}},
+        {"_id": "2", "doc": {"field2": "value2"}},
+    ]
+    """
+    actions = []
+    for item in docs:
+        action = {
+            "_op_type": "update",
+            "_index": index,
+            "_id": item["_id"],
+            "doc": item["doc"],
+            "doc_as_upsert": doc_as_upsert,
+        }
+        actions.append(action)
+    return await helpers.async_bulk(es, actions=actions)
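The new Elasticsearch helper above (the +46-line re_common/v2/baselibrary/utils/elasticsearch.py in the file list) wraps each partial document in an "update" bulk action so helpers.async_bulk applies them in one round trip, optionally as upserts. A minimal calling sketch follows, assuming the cluster in DEFAULT_CONFIG is reachable; the index name and fields are hypothetical.

import asyncio

# Hedged sketch: index name and document fields are invented for illustration.
from re_common.v2.baselibrary.utils.elasticsearch import get_es, bulk_update


async def main():
    es = get_es()  # AsyncElasticsearch built from DEFAULT_CONFIG
    docs = [
        {"_id": "doc-1", "doc": {"status": "done"}},
        {"_id": "doc-2", "doc": {"status": "pending"}},
    ]
    # doc_as_upsert=True creates missing documents instead of failing the update
    success, errors = await bulk_update(es, index="my_index", docs=docs, doc_as_upsert=True)
    print(success, errors)
    await es.close()


if __name__ == "__main__":
    asyncio.run(main())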
@@ -1,16 +1,16 @@
-import base64
-import functools
-import json
-
-json_dumps = functools.partial(json.dumps, ensure_ascii=False)
-
-
-class BytesEncoder(json.JSONEncoder):
-    def default(self, obj):
-        if isinstance(obj, bytes):
-            return base64.b64encode(obj).decode('utf-8')
-        return super().default(obj)
-
-
-def base64_to_bytes(base64_str, encoding="utf-8") -> bytes:
-    return base64.b64decode(base64_str.encode(encoding))
+import base64
+import functools
+import json
+
+json_dumps = functools.partial(json.dumps, ensure_ascii=False)
+
+
+class BytesEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, bytes):
+            return base64.b64encode(obj).decode('utf-8')
+        return super().default(obj)
+
+
+def base64_to_bytes(base64_str, encoding="utf-8") -> bytes:
+    return base64.b64decode(base64_str.encode(encoding))
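The two helpers in this hunk are complementary: BytesEncoder serializes bytes values as base64 strings during json.dumps, and base64_to_bytes reverses that on read. A short round-trip sketch (the payload is made up; the module path is taken from the file list entry re_common/v2/baselibrary/utils/json_cls.py):

import json

# Hedged sketch: payload values are hypothetical.
from re_common.v2.baselibrary.utils.json_cls import BytesEncoder, base64_to_bytes, json_dumps

payload = {"name": "样例", "blob": b"\x00\x01binary"}

encoded = json_dumps(payload, cls=BytesEncoder)  # bytes -> base64 string, non-ASCII kept as-is
decoded = json.loads(encoded)
assert base64_to_bytes(decoded["blob"]) == payload["blob"]  # round trip restores the bytes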
@@ -1,83 +1,83 @@
-import logging
-import traceback
-
-from re_common.v2.baselibrary.utils.basepika import BasePika
-from retry import retry
-
-logging_logger = logging.getLogger(__name__)
-
-
-class UseMq(object):
-
-    def __init__(self, queue, qos=1):
-        self.queue = queue
-        self.qos = qos
-        self.basepika = BasePika()
-        self.basepika.set_default()
-        self.basepika.connect()
-        self.basepika.create_channel()
-        self.basepika.queue_declare(queue=queue, durable=True)
-        self.basepika.basic_qos(qos)
-        self.properties = self.basepika.get_properties()
-
-    def re_conn(self):
-        """
-        Reconnect.
-        :return:
-        """
-        self.basepika.connect()
-        self.basepika.create_channel()
-        self.basepika.queue_declare(queue=self.queue, durable=True)
-        self.basepika.basic_qos(self.qos)
-
-    @retry(delay=5, backoff=2, max_delay=60 * 3, logger=logging_logger)
-    def get_mq(self):
-        try:
-            if self.basepika.channel.is_closed:
-                logging_logger.info("reconnecting......")
-                self.re_conn()
-                logging_logger.info("reconnect finished......")
-            self.basepika.set_get_msg_callback(routing_key=self.queue, callback=self.callback, auto_ack=False)
-            self.basepika.start_get_msg()
-        except:
-            traceback.print_exc()
-            logging_logger.info("reconnecting......")
-            self.re_conn()
-
-    def callback(self, ch, method, properties, body):
-        # print(type(body))
-        # print(" [x] Received %r" % body)
-        # body = body.decode()
-        self.callback2(ch, method, properties, body)
-        if self.basepika.auto_ack is False:
-            self.basepika.basic_ack(ch, method)
-
-    def callback2(self, ch, method, properties, body):
-        pass
-
-    @retry(delay=5, backoff=2, max_delay=60 * 3, logger=logging_logger)
-    def send_mq(self, body, num=100):
-        try:
-            if self.basepika.get_queue_size(self.queue) < num:
-                self.basepika.easy_send_msg(routing_key=self.queue,
-                                            body=body,
-                                            properties=self.properties)
-                return True
-            else:
-                return False
-        except:
-            traceback.print_exc()
-            logging_logger.info("reconnecting......")
-            self.re_conn()
-            return False
-
-    def get_server_mq_num(self, num=100):
-        if self.basepika.get_queue_size(self.queue) < num:
-            return True
-        else:
-            return False
-
-    def easy_send_mq(self, body):
-        self.basepika.easy_send_msg(routing_key=self.queue,
-                                    body=body,
-                                    properties=self.properties)
+import logging
+import traceback
+
+from re_common.v2.baselibrary.utils.basepika import BasePika
+from retry import retry
+
+logging_logger = logging.getLogger(__name__)
+
+
+class UseMq(object):
+
+    def __init__(self, queue, qos=1):
+        self.queue = queue
+        self.qos = qos
+        self.basepika = BasePika()
+        self.basepika.set_default()
+        self.basepika.connect()
+        self.basepika.create_channel()
+        self.basepika.queue_declare(queue=queue, durable=True)
+        self.basepika.basic_qos(qos)
+        self.properties = self.basepika.get_properties()
+
+    def re_conn(self):
+        """
+        Reconnect.
+        :return:
+        """
+        self.basepika.connect()
+        self.basepika.create_channel()
+        self.basepika.queue_declare(queue=self.queue, durable=True)
+        self.basepika.basic_qos(self.qos)
+
+    @retry(delay=5, backoff=2, max_delay=60 * 3, logger=logging_logger)
+    def get_mq(self):
+        try:
+            if self.basepika.channel.is_closed:
+                logging_logger.info("reconnecting......")
+                self.re_conn()
+                logging_logger.info("reconnect finished......")
+            self.basepika.set_get_msg_callback(routing_key=self.queue, callback=self.callback, auto_ack=False)
+            self.basepika.start_get_msg()
+        except:
+            traceback.print_exc()
+            logging_logger.info("reconnecting......")
+            self.re_conn()
+
+    def callback(self, ch, method, properties, body):
+        # print(type(body))
+        # print(" [x] Received %r" % body)
+        # body = body.decode()
+        self.callback2(ch, method, properties, body)
+        if self.basepika.auto_ack is False:
+            self.basepika.basic_ack(ch, method)
+
+    def callback2(self, ch, method, properties, body):
+        pass
+
+    @retry(delay=5, backoff=2, max_delay=60 * 3, logger=logging_logger)
+    def send_mq(self, body, num=100):
+        try:
+            if self.basepika.get_queue_size(self.queue) < num:
+                self.basepika.easy_send_msg(routing_key=self.queue,
+                                            body=body,
+                                            properties=self.properties)
+                return True
+            else:
+                return False
+        except:
+            traceback.print_exc()
+            logging_logger.info("reconnecting......")
+            self.re_conn()
+            return False
+
+    def get_server_mq_num(self, num=100):
+        if self.basepika.get_queue_size(self.queue) < num:
+            return True
+        else:
+            return False
+
+    def easy_send_mq(self, body):
+        self.basepika.easy_send_msg(routing_key=self.queue,
+                                    body=body,
+                                    properties=self.properties)
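UseMq above is meant to be subclassed: get_mq blocks and consumes, handing each message to callback2 and acking it afterwards, while send_mq only publishes when the queue backlog is below num. A minimal consumer/producer sketch follows; the import path is assumed to be re_common.v2.baselibrary.utils.mq (both mq.py and mq_use_facade.py appear as +83/-83 in the file list), and the queue name and handler are hypothetical. It also assumes BasePika's set_default() points at a reachable RabbitMQ instance.

# Hedged sketch: module path, queue name, and message handling are assumptions.
from re_common.v2.baselibrary.utils.mq import UseMq


class MyWorker(UseMq):
    def callback2(self, ch, method, properties, body):
        # body arrives as raw bytes; callback() acks after this returns
        print("received:", body.decode("utf-8"))


if __name__ == "__main__":
    worker = MyWorker(queue="demo_queue", qos=1)
    worker.send_mq(b"hello", num=100)  # publish only if the queue holds fewer than 100 messages
    worker.get_mq()                    # blocks and consumes; @retry reconnects on failure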