xtn-tools-pro 1.0.0.0.3__tar.gz → 1.0.0.0.5__tar.gz
- {xtn-tools-pro-1.0.0.0.3/xtn_tools_pro.egg-info → xtn-tools-pro-1.0.0.0.5}/PKG-INFO +1 -1
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/setup.py +4 -1
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro/db/MysqlDB.py +397 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/tools.py +72 -4
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/tools_flie.py +1 -1
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro/utils/__init__.py +10 -0
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro/utils/log.py +193 -0
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro/utils/retry.py +57 -0
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro/utils/sql.py +159 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5/xtn_tools_pro.egg-info}/PKG-INFO +1 -1
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro.egg-info/SOURCES.txt +6 -1
- xtn-tools-pro-1.0.0.0.5/xtn_tools_pro.egg-info/requires.txt +6 -0
- xtn-tools-pro-1.0.0.0.3/xtn_tools_pro.egg-info/requires.txt +0 -3
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/LICENSE +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/README.md +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/setup.cfg +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/__init__.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/db/MongoDB.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/db/RedisDB.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/db/__init__.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/proxy/XiaoXiangProxy.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/proxy/__init__.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro/tools_time.py +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro.egg-info/dependency_links.txt +0 -0
- {xtn-tools-pro-1.0.0.0.3 → xtn-tools-pro-1.0.0.0.5}/xtn_tools_pro.egg-info/top_level.txt +0 -0
@@ -15,7 +15,7 @@ with open("README.md", "r") as f:
 
 setuptools.setup(
     name="xtn-tools-pro",  # module name
-    version="1.0.0.0.3",  # version
+    version="1.0.0.0.5",  # version
     author="xtn",  # author
     author_email="czw011122@163.com",  # author email
     description="xtn 开发工具",  # module description
@@ -30,6 +30,9 @@ setuptools.setup(
     install_requires=[
         "pymongo",
         "redis",
+        "pymysql",
+        "dbutils",
+        "colorlog",
         "requests"
     ],
     python_requires='>=3',
@@ -0,0 +1,397 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Description:
+# Program description xxxxxxxxxxxxxxxxxxx
+# History:
+# Date         Author     Version       Modification
+# --------------------------------------------------------------------------------------------------
+# 2024/5/11    xiatn     V00.01.000      Created
+# --------------------------------------------------------------------------------------------------
+import json
+import pymysql
+import datetime
+from pymysql import err
+from urllib import parse
+from pymysql import cursors
+from typing import List, Dict
+from dbutils.pooled_db import PooledDB
+from xtn_tools_pro.utils.log import Log
+from xtn_tools_pro.utils.sql import get_insert_sql, get_insert_batch_sql, get_update_sql
+
+log = Log(name="MysqlDB", color=True)
+
+
+def auto_retry(func):
+    def wapper(*args, **kwargs):
+        for i in range(3):
+            try:
+                return func(*args, **kwargs)
+            except (err.InterfaceError, err.OperationalError) as e:
+                log.error(
+                    """
+                    error:%s
+                    sql: %s
+                    """
+                    % (e, kwargs.get("sql") or args[1])
+                )
+
+    return wapper
+
+
+class MysqlDBPro:
+    def __init__(self, ip, port, db, user_name, user_pass, **kwargs):
+        try:
+            self.connect_pool = PooledDB(
+                creator=pymysql,  # module used to create connections; pymysql here
+                mincached=1,  # initial number of idle connections in the pool
+                maxcached=100,  # maximum number of idle connections in the pool
+                maxconnections=100,  # maximum number of connections allowed by the pool
+                blocking=True,  # whether to block and wait when the pool has reached its maximum number of connections
+                ping=7,  # whether connections are pinged to verify they are still valid before reuse; the 7 is a time interval, i.e. a ping is issued every 7 seconds
+                host=ip,  # database host IP address
+                port=port,  # database port
+                user=user_name,  # database username
+                passwd=user_pass,  # database password
+                db=db,  # database name
+                charset="utf8mb4",  # character encoding used for the connection
+                cursorclass=cursors.SSCursor,  # cursor class to use; cursors.SSCursor reduces memory usage during large multi-threaded batch inserts
+            )  # cursorclass uses a server-side cursor; the default one keeps growing memory during large multi-threaded batch inserts
+
+        except Exception as e:
+            log.error(
+                """
+                连接失败:
+                ip: {}
+                port: {}
+                db: {}
+                user_name: {}
+                user_pass: {}
+                exception: {}
+                """.format(
+                    ip, port, db, user_name, user_pass, e
+                )
+            )
+        else:
+            log.debug("连接到mysql数据库 %s : %s" % (ip, db))
+
+    @classmethod
+    def from_url(cls, url, **kwargs):
+        """
+
+        Args:
+            url: mysql://username:password@ip:port/db?charset=utf8mb4
+            url: mysql://username:password@127.0.0.1:port/db?charset=utf8mb4
+            **kwargs:
+
+        Returns:
+
+        """
+        url_parsed = parse.urlparse(url)
+
+        db_type = url_parsed.scheme.strip()
+        if db_type != "mysql":
+            raise Exception(
+                "url error, expect mysql://username:ip:port/db?charset=utf8mb4, but get {}".format(
+                    url
+                )
+            )
+
+        connect_params = {
+            "ip": url_parsed.hostname.strip(),
+            "port": url_parsed.port,
+            "user_name": url_parsed.username.strip(),
+            "user_pass": url_parsed.password.strip(),
+            "db": url_parsed.path.strip("/").strip(),
+        }
+
+        connect_params.update(kwargs)
+
+        return cls(**connect_params)
+
+    @staticmethod
+    def unescape_string(value):
+        if not isinstance(value, str):
+            return value
+        value = value.replace("\\0", "\0")
+        value = value.replace("\\\\", "\\")
+        value = value.replace("\\n", "\n")
+        value = value.replace("\\r", "\r")
+        value = value.replace("\\Z", "\032")
+        value = value.replace('\\"', '"')
+        value = value.replace("\\'", "'")
+        return value
+
+    def get_connection(self):
+        conn = self.connect_pool.connection(shareable=False)
+        # cursor = conn.cursor(cursors.SSCursor)
+        cursor = conn.cursor()
+
+        return conn, cursor
+
+    def close_connection(self, conn, cursor):
+        """
+        Close the database connection and cursor object
+        :param conn:
+        :param cursor:
+        :return:
+        """
+        if conn:
+            conn.close()
+        if cursor:
+            cursor.close()
+
+    def execute(self, sql):
+        """
+        Execute a SQL statement
+        :param sql:
+        :return:
+        """
+        conn, cursor = None, None
+        try:
+            conn, cursor = self.get_connection()
+            cursor.execute(sql)
+            conn.commit()
+        except Exception as e:
+            log.error(
+                """
+                error:%s
+                sql: %s
+                """
+                % (e, sql)
+            )
+            return False
+        else:
+            return True
+        finally:
+            self.close_connection(conn, cursor)
+
+    def add(self, sql, exception_callfunc=None):
+        """
+        Single row: execute the given insert SQL
+        :param sql: sql
+        :param exception_callfunc: exception callback function
+        :return: number of rows added
+        """
+        affect_count = None
+        conn, cursor = None, None
+
+        try:
+            conn, cursor = self.get_connection()
+            affect_count = cursor.execute(sql)
+            conn.commit()
+
+        except Exception as e:
+            log.error(
+                """
+                error:%s
+                sql: %s
+                """
+                % (e, sql)
+            )
+            if exception_callfunc:
+                exception_callfunc(e)
+        finally:
+            self.close_connection(conn, cursor)
+
+        return affect_count
+
+    def add_smart(self, table, data: Dict, **kwargs):
+        """
+        Single row: add data by passing a dict directly, no need to build the SQL yourself
+        :param table: table name
+        :param data: dict, e.g. {"xxx": "xxx"}
+        :param kwargs:
+        :return: number of rows added
+        """
+        sql = get_insert_sql(table, data, **kwargs)
+        return self.add(sql)
+
+    def add_batch(self, sql, datas: List[Dict]):
+        """
+        Batch: add data
+        Recommended to generate the SQL with get_insert_batch_sql()
+        get_insert_batch_sql("user_copy1", [{"auth": 2, "id": "9", "email": "999"}]
+        :param sql:
+            insert ignore into `table` (col1,col2) values (%s, %s)
+            insert into `table` (`col1`,`col2`,`col3`) values (%s, %s, %s)
+            there must be as many %s after values as there are columns
+        :param datas: list, e.g. [{}, {}, {}]
+        :return:
+        """
+        affect_count = None
+        conn, cursor = None, None
+        try:
+            conn, cursor = self.get_connection()
+            affect_count = cursor.executemany(sql, datas)
+            conn.commit()
+
+        except Exception as e:
+            log.error(
+                """
+                error:%s
+                sql: %s
+                """
+                % (e, sql)
+            )
+        finally:
+            self.close_connection(conn, cursor)
+
+        return affect_count
+
+    def add_batch_smart(self, table, datas: List[Dict], **kwargs):
+        """
+        Batch: pass a list of dicts directly, no need to build the SQL yourself
+        :param table: table name
+        :param datas: list, e.g. [{}, {}, {}]
+        :param kwargs:
+        :return: number of rows added
+        """
+        sql, datas = get_insert_batch_sql(table, datas, **kwargs)
+        return self.add_batch(sql, datas)
+
+    def update(self, sql):
+        """
+        Update
+        :param sql:
+        :return:
+        """
+        conn, cursor = None, None
+
+        try:
+            conn, cursor = self.get_connection()
+            cursor.execute(sql)
+            conn.commit()
+        except Exception as e:
+            log.error(
+                """
+                error:%s
+                sql: %s
+                """
+                % (e, sql)
+            )
+            return False
+        else:
+            return True
+        finally:
+            self.close_connection(conn, cursor)
+
+    def update_smart(self, table, data: Dict, condition):
+        """
+        Update without building the SQL yourself
+        :param table: table name
+        :param data: data, e.g. {"xxx": "xxx"}
+        :param condition: update condition, i.e. the part after WHERE, e.g. condition='status=1'
+        :return:
+        """
+        sql = get_update_sql(table, data, condition)
+        return self.update(sql)
+
+    def delete(self, sql):
+        """
+        Delete
+        :param sql:
+        :return:
+        """
+        conn, cursor = None, None
+        try:
+            conn, cursor = self.get_connection()
+            cursor.execute(sql)
+            conn.commit()
+        except Exception as e:
+            log.error(
+                """
+                error:%s
+                sql: %s
+                """
+                % (e, sql)
+            )
+            return False
+        else:
+            return True
+        finally:
+            self.close_connection(conn, cursor)
+
+    @auto_retry
+    def find(self, sql, limit=0, to_json=False, conver_col=True):
+        """
+        Query
+        No data: returns None or [], depending on limit
+        With data: if limit=1 returns a single row (col1, col2); otherwise returns [(col1, col2), (col1, col2)]
+        :param sql:
+        :param limit:
+        :param to_json: whether to convert the result to JSON
+        :param conver_col: whether to post-process the result, e.g. date types to strings and JSON strings to objects; only takes effect when to_json=True
+        :return:
+        """
+        conn, cursor = self.get_connection()
+
+        cursor.execute(sql)
+
+        if limit == 1:
+            result = cursor.fetchone()  # fetches everything then slices; not recommended
+        elif limit > 1:
+            result = cursor.fetchmany(limit)  # fetches everything then slices; not recommended
+        else:
+            result = cursor.fetchall()
+
+        if result is None:
+            return result
+
+        if to_json:
+            columns = [i[0] for i in cursor.description]
+
+            # post-process the data
+            def convert(col):
+                if isinstance(col, (datetime.date, datetime.time)):
+                    return str(col)
+                elif isinstance(col, str) and (
+                    col.startswith("{") or col.startswith("[")
+                ):
+                    try:
+                        # col = self.unescape_string(col)
+                        return json.loads(col)
+                    except:
+                        return col
+                else:
+                    # col = self.unescape_string(col)
+                    return col
+
+            if limit == 1:
+                if conver_col:
+                    result = [convert(col) for col in result]
+                result = dict(zip(columns, result))
+            else:
+                if conver_col:
+                    result = [[convert(col) for col in row] for row in result]
+                result = [dict(zip(columns, r)) for r in result]
+
+        self.close_connection(conn, cursor)
+
+        return result
+
+
+if __name__ == '__main__':
+    pass
+    # mysql_db = MysqlDBPro(ip="127.0.0.1", port=3306, db="xtn_home", user_name="root", user_pass="xtn-kk")
+    # sql = """insert into `user_copy1` (`id`, `email`, `auth`) values (8, '888', 2)"""
+    # print(mysql_db.add(sql))
+    # print(mysql_db.add_smart("user_copy1", {"id": "9", "email": "999"}))
+    # sql = "insert ignore into `user_copy1` (`id`,`email`) values (%s, %s)"
+    # sql, datas = get_insert_batch_sql("user_copy1", [{"auth": 2, "id": "9", "email": "999"}])
+    # print(mysql_db.add_batch(sql, datas))
+
+    # print(mysql_db.add_batch_smart("user_copy1", [{"auth": 2, "id": "9", "email": "999"},
+    #                                               {"auth": 2, "id": "10", "email": "10"},
+    #                                               {"id": "11", "auth": 1, "email": "11"},
+    #                                               {"auth": 2, "id": "12", "email": "12"}]))
+
+    # update example
+    # sql = "UPDATE user_copy1 SET status = '2', auth = 1 WHERE id = 2;"
+    # print(mysql_db.update(sql))
+
+    # update example without building the SQL
+    # print(mysql_db.update_smart("user_copy1", {"email": "123", "status": 4}, "id=22"))
+
+    # query example
+    # print(mysql_db.find("select * from user_copy1 where id=11",1,True))
@@ -8,7 +8,8 @@
 # --------------------------------------------------------------------------------------------------
 # 2024/4/17    xiatn     V00.01.000      Created
 # --------------------------------------------------------------------------------------------------
-import hashlib, json, math
+import hashlib, json, math,re
+from pprint import pformat
 from urllib.parse import urlencode
 
 
@@ -37,25 +38,54 @@ def get_md5_16(s, is_upper=False):
     return result[8:24]
 
 
-def get_file_md5_32(file_path):
+def get_binary_content_md5_32(content, is_upper=False):
+    """
+    MD5 of binary content, e.g. an image
+    :param content: binary content
+    :param is_upper: whether to return uppercase, default False
+    :return:
+    """
+    md5_hash = hashlib.md5(content)
+    md5_hexdigest = md5_hash.hexdigest()
+    if is_upper:
+        return md5_hexdigest.upper()
+    return md5_hexdigest
+
+
+def get_binary_content_md5_16(content, is_upper=False):
+    """
+    MD5 of binary content, e.g. an image
+    :param content: binary content
+    :param is_upper: whether to return uppercase, default False
+    :return:
+    """
+    result = get_binary_content_md5_32(content, is_upper)
+    return result[8:24]
+
+
+def get_file_md5_32(file_path, is_upper=False):
     """
     Get the MD5 of a file
     :param file_path: file path
+    :param is_upper: whether to return uppercase, default False
     :return:
     """
     with open(file_path, 'rb') as file:
         data = file.read()
         md5_hash = hashlib.md5(data).hexdigest()
+        if is_upper:
+            return md5_hash.upper()
     return md5_hash
 
 
-def get_file_md5_16(file_path):
+def get_file_md5_16(file_path, is_upper=False):
     """
     Get the MD5 of a file
     :param file_path: file path
+    :param is_upper: whether to return uppercase, default False
     :return:
     """
-    result = get_file_md5_32(file_path)
+    result = get_file_md5_32(file_path, is_upper)
     return result[8:24]
 
 
@@ -99,10 +129,48 @@ def get_calculate_total_page(total, limit):
     total_pages = math.ceil(total / limit)
     return total_pages
 
+def list_to_strtuple(datas):
+    """
+    Convert a list to a tuple-style string
+    :param datas: datas: [1, 2]
+    :return: "(1, 2)" as a string
+    """
+    data_str = str(tuple(datas))
+    data_str = re.sub(",\)$", ")", data_str)
+    return data_str
+
+
+
 
+def dumps_json(data,indent=4,sort_keys=False):
+    """
+    Format JSON data into a printable string
+    :param data:
+    :param indent: each nesting level is indented with 4 spaces
+    :param sort_keys: whether to sort keys
+    :return:
+    """
+    try:
+        if isinstance(data, str):
+            data = get_str_to_json(data)
+
+        data = json.dumps(
+            data,
+            ensure_ascii=False,
+            indent=indent,
+            skipkeys=True,
+            sort_keys=sort_keys,
+            default=str,
+        )
 
+    except Exception as e:
+        data = pformat(data)
+
+    return data
 
 
+def split_image(img):
+    pass
 
 if __name__ == '__main__':
     pass
@@ -38,4 +38,4 @@ def get_file_check_filename(file_name):
 if __name__ == '__main__':
     pass
     print(get_file_extension('file/2024-04-19/BOSCH GEX 125-1A/125-1AE砂磨机操作说明书:[1]_jingyan.txt'))
-    print(
+    print(get_file_check_filename('file/2024-04-19/BOSCH GEX 125-1A/125-1AE砂磨机操作说明书:[1]_jingyan.txt'))
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Description:
+# Program description xxxxxxxxxxxxxxxxxxx
+# History:
+# Date         Author     Version       Modification
+# --------------------------------------------------------------------------------------------------
+# 2024/5/12    xiatn     V00.01.000      Created
+# --------------------------------------------------------------------------------------------------
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Description:
+# Logging
+# History:
+# Date         Author     Version       Modification
+# --------------------------------------------------------------------------------------------------
+# 2024/5/12    xiatn     V00.01.000      Created
+# --------------------------------------------------------------------------------------------------
+import os
+import sys
+import time
+import inspect
+import logging
+from xtn_tools_pro.tools_time import get_time_timestamp_to_datestr
+from logging.handlers import BaseRotatingHandler
+
+
+class RotatingFileHandler(BaseRotatingHandler):
+    def __init__(
+        self, filename, mode="a", max_bytes=0, backup_count=0, encoding=None, delay=0
+    ):
+        BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
+        self.max_bytes = max_bytes
+        self.backup_count = backup_count
+        self.placeholder = str(len(str(backup_count)))
+
+    def doRollover(self):
+        if self.stream:
+            self.stream.close()
+            self.stream = None
+        if self.backup_count > 0:
+            for i in range(self.backup_count - 1, 0, -1):
+                sfn = ("%0" + self.placeholder + "d.") % i  # '%2d.'%i -> 02
+                sfn = sfn.join(self.baseFilename.split("."))
+                # sfn = "%d_%s" % (i, self.baseFilename)
+                # dfn = "%d_%s" % (i + 1, self.baseFilename)
+                dfn = ("%0" + self.placeholder + "d.") % (i + 1)
+                dfn = dfn.join(self.baseFilename.split("."))
+                if os.path.exists(sfn):
+                    # print "%s -> %s" % (sfn, dfn)
+                    if os.path.exists(dfn):
+                        os.remove(dfn)
+                    os.rename(sfn, dfn)
+            dfn = (("%0" + self.placeholder + "d.") % 1).join(
+                self.baseFilename.split(".")
+            )
+            if os.path.exists(dfn):
+                os.remove(dfn)
+            # Issue 18940: A file may not have been created if delay is True.
+            if os.path.exists(self.baseFilename):
+                os.rename(self.baseFilename, dfn)
+        if not self.delay:
+            self.stream = self._open()
+
+    def shouldRollover(self, record):
+
+        if self.stream is None:  # delay was set...
+            self.stream = self._open()
+        if self.max_bytes > 0:  # are we rolling over?
+            msg = "%s\n" % self.format(record)
+            self.stream.seek(0, 2)  # due to non-posix-compliant Windows feature
+            if self.stream.tell() + len(msg) >= self.max_bytes:
+                return 1
+        return 0
+
+
+class BoldFormatter(logging.Formatter):
+    def format(self, record):
+        result = super().format(record)
+        return "\033[1m" + result + "\033[0m"
+
+
+class Log:
+    def __init__(self, name, path=None, log_level='DEBUG',
+                 is_write_to_console=True,
+                 is_write_to_file=False,
+                 color=True,
+                 mode='a',
+                 max_bytes=0, backup_count=0, encoding=None):
+        """
+        :param name: logger name
+        :param path: path where the log file is stored, e.g. D://xxx.log
+        :param log_level: log level CRITICAL/ERROR/WARNING/INFO/DEBUG
+        :param is_write_to_console: whether to write to the console
+        :param is_write_to_file: whether to write to a file, default False
+        :param color: whether to colorize output
+        :param mode: file write mode
+        :param max_bytes: maximum size of each log file in bytes
+        :param backup_count: number of log files to keep
+        :param encoding: log file encoding
+
+        """
+        # create the logger object
+        self.logger = logging.getLogger(name)
+        # set the log level
+        self.logger.setLevel(log_level.upper())
+
+        # create the log formatter
+        # formatter = logging.Formatter('[%(now_datestr)s] [%(levelname)s] [%(func_name)s] - %(message)s')  # original
+        # formatter = logging.Formatter('\033[1m%(now_datestr)s] [%(levelname)s] [%(func_name)s] - %(message)s\033[0m')  # bold
+        formatter = logging.Formatter(
+            '\033[1m[%(now_datestr)s] | %(levelname)-8s | [%(func_name)s] - %(message)s\033[0m')  # bold, aligned
+
+        # formatter = BoldFormatter('[%(now_datestr)s] [%(levelname)s] [%(func_name)s] - %(message)s')  # bold
+
+        # decide whether to write to the console
+        if is_write_to_console:
+            # create the console handler
+            console_handler = logging.StreamHandler(sys.stdout)
+            # set the console handler's formatter
+            console_handler.setFormatter(formatter)
+            # add the console handler to the logger
+            self.logger.addHandler(console_handler)
+
+        # decide whether to write to a file
+        if is_write_to_file:
+            # create the file handler
+            file_handler = RotatingFileHandler(path, mode=mode, max_bytes=max_bytes,
+                                               backup_count=backup_count, encoding=encoding)
+            # set the file handler's formatter
+            file_handler.setFormatter(formatter)
+            # add the file handler to the logger
+            self.logger.addHandler(file_handler)
+
+        # decide whether to colorize
+        if color:
+            try:
+                from colorlog import ColoredFormatter
+                # create a colored log formatter
+                # color_formatter = ColoredFormatter('%(log_color)s[%(now_datestr)s] [%(levelname)s] [%(func_name)s] - %(message)s')  # original
+                # color_formatter = ColoredFormatter('\033[1m%(log_color)s[%(now_datestr)s] [%(levelname)s] [%(func_name)s] - %(message)s\033[0m')  # bold
+                # create the color mapping
+                log_colors = {
+                    'DEBUG': 'bold_blue',
+                    'INFO': 'bold_cyan',
+                    'WARNING': 'bold_yellow',
+                    'ERROR': 'bold_red',
+                    'CRITICAL': 'bold_red',
+                }
+                color_formatter = ColoredFormatter(
+                    '\033[1m%(log_color)s[%(now_datestr)s] | %(levelname)-8s | [%(func_name)s] - %(message)s\033[0m',
+                    log_colors=log_colors)  # bold, aligned
+                # set the console handler's formatter to the colored formatter
+                console_handler.setFormatter(color_formatter)
+            except ImportError:
+                pass
+
+    def debug(self, message):
+        # log a DEBUG-level message
+        self.logger.debug(message, extra=self._get_caller_name_extra())
+
+    def info(self, message):
+        # log an INFO-level message
+        self.logger.info(message, extra=self._get_caller_name_extra())
+
+    def warning(self, message):
+        # log a WARNING-level message
+        self.logger.warning(message, extra=self._get_caller_name_extra())
+
+    def error(self, message):
+        # log an ERROR-level message
+        self.logger.error(message, extra=self._get_caller_name_extra())
+
+    def critical(self, message):
+        # log a CRITICAL-level message
+        self.logger.critical(message, extra=self._get_caller_name_extra())
+
+    def _get_caller_name_extra(self):
+        """
+        Get the name of the function that called the logging method
+        """
+        # get the current stack frame
+        frame = inspect.currentframe()
+        # get the caller's stack frame
+        caller_frame = frame.f_back.f_back
+        # get the code object from the stack frame
+        code_obj = caller_frame.f_code
+        # get the caller's name
+        caller_name = code_obj.co_name
+        return {"func_name": caller_name,
+                "now_datestr": get_time_timestamp_to_datestr()}
+
+
+if __name__ == '__main__':
+    pass
+    logger = Log('mylogger', './xxx.log', log_level='DEBUG', is_write_to_console=True, is_write_to_file=True,
+                 color=True, mode='a', max_bytes=1024, backup_count=3)
+    logger.debug("debug message")
+    logger.info("info level message")
+    logger.warning("warning level message")
+    logger.critical("critical level message")
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Description:
+# Retry
+# History:
+# Date         Author     Version       Modification
+# --------------------------------------------------------------------------------------------------
+# 2024/5/12    xiatn     V00.01.000      Created
+# --------------------------------------------------------------------------------------------------
+import time
+from xtn_tools_pro.utils.log import Log
+
+log = Log(name="retry", color=True)
+
+
+def retry(max_attempts=3, delay=0, exception_callfunc=None, *args_callfunc, **kwargs_callfunc):
+    """
+    Retry
+    :param max_attempts: maximum number of attempts
+    :param delay: interval between retries
+    :param exception_callfunc: callback function invoked on failure
+    :return:
+    """
+
+    def decorator(func):
+        def wrapper(*args, **kwargs):
+            attempts = 0
+            while attempts < max_attempts:
+                try:
+                    return func(*args, **kwargs)
+                except Exception as e:
+                    log.debug(f"重试第 {attempts + 1} 次,failed: {e}")
+                    if exception_callfunc:
+                        exception_callfunc(*args_callfunc, **kwargs_callfunc)
+                    attempts += 1
+                    time.sleep(delay)
+
+        return wrapper
+
+    return decorator
+
+
+if __name__ == '__main__':
+
+    def test1(*args, **kwargs):
+        print("test1", args, kwargs)
+
+
+    @retry(3, 5)
+    def test(a, b):
+        import random
+        if random.random() < 0.5:
+            raise ValueError("Random value too small")
+        print("Success!")
+
+    test(1, 1)
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Description:
+# SQL helpers
+# History:
+# Date         Author     Version       Modification
+# --------------------------------------------------------------------------------------------------
+# 2024/5/12    xiatn     V00.01.000      Created
+# --------------------------------------------------------------------------------------------------
+import datetime
+from xtn_tools_pro.tools import list_to_strtuple, dumps_json
+
+
+def format_sql_value(value):
+    if isinstance(value, str):
+        value = value.strip()
+
+    elif isinstance(value, (list, dict)):
+        value = dumps_json(value, indent=None)
+
+    elif isinstance(value, (datetime.date, datetime.time)):
+        value = str(value)
+
+    elif isinstance(value, bool):
+        value = int(value)
+
+    return value
+
+
+def get_insert_sql(table, data, auto_update=False, update_columns=(), insert_ignore=False):
+    """
+    Generate an insert SQL statement
+    :param table: table name
+    :param data: row data as a dict
+    :param auto_update: use replace into, completely overwriting existing rows
+    :param update_columns: columns to update, all by default; when set, auto_update is ignored and the given columns are updated on duplicate key conflicts
+    :param insert_ignore: ignore existing rows; if True the insert is skipped instead of raising a conflict error
+    :return:
+    """
+    keys = ["`{}`".format(key) for key in data.keys()]
+    keys = list_to_strtuple(keys).replace("'", "")
+    values = [format_sql_value(value) for value in data.values()]
+    values = list_to_strtuple(values)
+    if update_columns:
+        if not isinstance(update_columns, (tuple, list)):
+            update_columns = [update_columns]
+        update_columns_ = ", ".join(
+            ["{key}=values({key})".format(key=key) for key in update_columns]
+        )
+        sql = (
+            "insert%s into `{table}` {keys} values {values} on duplicate key update %s"
+            % (" ignore" if insert_ignore else "", update_columns_)
+        )
+
+    elif auto_update:
+        sql = "replace into `{table}` {keys} values {values}"
+    else:
+        sql = "insert%s into `{table}` {keys} values {values}" % (
+            " ignore" if insert_ignore else ""
+        )
+
+    sql = sql.format(table=table, keys=keys, values=values).replace("None", "null")
+    return sql
+
+def get_insert_batch_sql(table, datas, auto_update=False, update_columns=(), update_columns_value=()):
+    """
+    Generate a batch insert SQL statement
+    :param table: table name
+    :param datas: row data, e.g. [{...}]
+    :param auto_update: use replace into, completely overwriting existing rows
+    :param update_columns: columns to update, all by default; when set, auto_update is ignored and the given columns are updated on duplicate key conflicts
+    :param update_columns_value: values for the columns to update; defaults to the corresponding values in datas; note that string values must be wrapped in single quotes explicitly, e.g. update_columns_value=("'test'",)
+    :return:
+    """
+    if not datas:
+        return
+    keys = list(set([key for data in datas for key in data]))
+    values_placeholder = ["%s"] * len(keys)
+
+    values = []
+    for data in datas:
+        value = []
+        for key in keys:
+            current_data = data.get(key)
+            current_data = format_sql_value(current_data)
+
+            value.append(current_data)
+
+        values.append(value)
+
+    keys = ["`{}`".format(key) for key in keys]
+    keys = list_to_strtuple(keys).replace("'", "")
+
+    values_placeholder = list_to_strtuple(values_placeholder).replace("'", "")
+
+    if update_columns:
+        if not isinstance(update_columns, (tuple, list)):
+            update_columns = [update_columns]
+        if update_columns_value:
+            update_columns_ = ", ".join(
+                [
+                    "`{key}`={value}".format(key=key, value=value)
+                    for key, value in zip(update_columns, update_columns_value)
+                ]
+            )
+        else:
+            update_columns_ = ", ".join(
+                ["`{key}`=values(`{key}`)".format(key=key) for key in update_columns]
+            )
+        sql = "insert into `{table}` {keys} values {values_placeholder} on duplicate key update {update_columns}".format(
+            table=table,
+            keys=keys,
+            values_placeholder=values_placeholder,
+            update_columns=update_columns_,
+        )
+    elif auto_update:
+        sql = "replace into `{table}` {keys} values {values_placeholder}".format(
+            table=table, keys=keys, values_placeholder=values_placeholder
+        )
+    else:
+        sql = "insert ignore into `{table}` {keys} values {values_placeholder}".format(
+            table=table, keys=keys, values_placeholder=values_placeholder
+        )
+
+    return sql, values
+
+def get_update_sql(table, data, condition):
+    """
+    Generate an update SQL statement
+    :param table: table name
+    :param data: row data as a dict
+    :param condition: WHERE condition
+    :return:
+    """
+    key_values = []
+
+    for key, value in data.items():
+        value = format_sql_value(value)
+        if isinstance(value, str):
+            key_values.append("`{}`={}".format(key, repr(value)))
+        elif value is None:
+            key_values.append("`{}`={}".format(key, "null"))
+        else:
+            key_values.append("`{}`={}".format(key, value))
+
+    key_values = ", ".join(key_values)
+
+    sql = "update `{table}` set {key_values} where {condition}"
+    sql = sql.format(table=table, key_values=key_values, condition=condition)
+    return sql
+
+if __name__ == '__main__':
+    print(get_insert_sql("user_copy1", {"id": 5, "nickname": "1212", "email": "121212", "auth": 2},insert_ignore=True))
+    print(get_insert_batch_sql("user_copy1", [{"id": 5, "nickname": "555", "email": "555", "auth": 1},
+                                              {"id": 6, "nickname": "666", "email": "666", "auth": 2},
+                                              {"id": 7, "nickname": "777", "email": "777", "auth": 1}],
+                               ))
+    print(get_update_sql("user_copy1",{"email":"123","status":4},"id=2"))
@@ -11,7 +11,12 @@ xtn_tools_pro.egg-info/dependency_links.txt
 xtn_tools_pro.egg-info/requires.txt
 xtn_tools_pro.egg-info/top_level.txt
 xtn_tools_pro/db/MongoDB.py
+xtn_tools_pro/db/MysqlDB.py
 xtn_tools_pro/db/RedisDB.py
 xtn_tools_pro/db/__init__.py
 xtn_tools_pro/proxy/XiaoXiangProxy.py
-xtn_tools_pro/proxy/__init__.py
+xtn_tools_pro/proxy/__init__.py
+xtn_tools_pro/utils/__init__.py
+xtn_tools_pro/utils/log.py
+xtn_tools_pro/utils/retry.py
+xtn_tools_pro/utils/sql.py