mdbq 3.6.14__py3-none-any.whl → 3.6.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mdbq/mysql/mysql.py +37 -64
- mdbq/mysql/s_query.py +2 -2
- mdbq/redis/getredis.py +4 -4
- {mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/METADATA +1 -1
- {mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/RECORD +7 -7
- {mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/WHEEL +0 -0
- {mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/top_level.txt +0 -0
mdbq/mysql/mysql.py
CHANGED
@@ -121,14 +121,14 @@ class MysqlUpload:
             try:
                 return func(*args, **kwargs)
             except Exception as e:
-                logger.
+                logger.error(f'{func.__name__}, {e}')  # 将异常信息返回
                 with open(error_file, 'a') as f:
                     now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                     f.write(f'\n{now} \n')
                     f.write(f'函数注释内容(用于定位函数): {func.__doc__} \n')
                     # f.write(f'报错的文件:\n{e.__traceback__.tb_frame.f_globals["__file__"]}\n')  # 发生异常所在的文件
                     traceback.print_exc(file=open(error_file, 'a'))  # 返回完整的堆栈信息
-                logger.
+                logger.error(f'更多信息请查看日志文件: {error_file}')

         return wrapper

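Note: the hunk above only switches the wrapper's logging calls to `logger.error`. As a point of reference, a minimal standalone sketch of this kind of error-logging decorator might look like the following (the `error_file` path and logger setup are illustrative assumptions, not values taken from the package):

```python
import datetime
import functools
import logging
import traceback

logger = logging.getLogger(__name__)
error_file = 'error.log'  # illustrative path; the package manages its own log location

def try_except(func):
    """Run the wrapped function, logging any exception and appending a traceback to a file."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.error(f'{func.__name__}, {e}')  # surface the failure in the application log
            with open(error_file, 'a') as f:
                f.write(f"\n{datetime.datetime.now():%Y-%m-%d %H:%M:%S}\n")
                traceback.print_exc(file=f)  # keep the full stack trace for later inspection
            logger.error(f'更多信息请查看日志文件: {error_file}')
    return wrapper
```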
@@ -139,10 +139,10 @@ class MysqlUpload:
                 connection = pymysql.connect(**_config)  # 连接数据库
                 return connection
             except Exception as e:
-                logger.
+                logger.error(f'连接失败,正在重试: {attempts}/{max_try} {e}')
                 attempts += 1
                 time.sleep(30)
-        logger.
+        logger.error(f'{_db_name}: 连接失败,重试次数超限,当前设定次数: {max_try}')
         return None

     def cover_doc_dtypes(self, dict_data):
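Note: `keep_connect` (the same pattern appears again for `OptimizeDatas` further down) wraps `pymysql.connect` in a bounded retry loop. A hedged sketch of that shape, written outside the class, might look like this (connection parameters are placeholders):

```python
import logging
import time

import pymysql

logger = logging.getLogger(__name__)

def keep_connect(_db_name: str, _config: dict, max_try: int = 10):
    """Open a MySQL connection, retrying with a fixed delay before giving up."""
    attempts = 1
    while attempts <= max_try:
        try:
            return pymysql.connect(**_config)  # _config holds host/port/user/password, etc.
        except Exception as e:
            logger.error(f'连接失败,正在重试: {attempts}/{max_try} {e}')
            attempts += 1
            time.sleep(30)  # fixed pause between attempts, mirroring the diff
    logger.error(f'{_db_name}: 连接失败,重试次数超限,当前设定次数: {max_try}')
    return None
```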
@@ -302,8 +302,7 @@ class MysqlUpload:

             # 插入数据到数据库
             # 有数据格式错误问题,所以分开处理,将数据主体移到最后面用占位符
-
-            logger.info(f'{now} 正在更新: mysql ({self.host}:{self.port}) {db_name}/{table_name} -> {filename}')
+            logger.info(f'正在更新: mysql ({self.host}:{self.port}) {db_name}/{table_name} -> {filename}')
             if new_dict:
                 cols = ', '.join(f"`{item}`" for item in new_dict.keys())  # 列名需要转义
                 values = ', '.join([f'"{item}"' for item in new_dict.values()])  # 值要加引号
@@ -344,7 +343,7 @@ class MysqlUpload:
                     else:
                         logger.info(f'{table_name} 存在复合主键: 存在复合主键: {[item['PrimaryKey'] for item in result]}, 无法重置自增id')
                 except Exception as e:
-                    logger.
+                    logger.error(f'333 {table_name} {e}')
                     connection.rollback()
             connection.commit()

@@ -387,7 +386,7 @@ class MysqlUpload:
             else:
                 logger.info(f'参数不正确,cut_data应为 year 或 month ')
         except Exception as e:
-            logger.
+            logger.error(f'{table_name} 将数据按年/月保存(cut_data),但在转换日期时报错 -> {e}')

         # connection = pymysql.connect(**self.config)  # 连接数据库
         connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
@@ -598,7 +597,7 @@ class MysqlUpload:
             else:
                 logger.info(f'参数不正确,cut_data应为 year 或 month ')
         except Exception as e:
-            logger.
+            logger.error(f'{table_name} 将数据按年/月保存(cut_data),但在转换日期时报错 -> {e}')

         # connection = pymysql.connect(**self.config)  # 连接数据库
         connection = self.keep_connect(_db_name=db_name, _config=self.config, max_try=10)
@@ -960,7 +959,7 @@ class MysqlUpload:
             else:
                 logger.info(f'参数不正确,cut_data应为 year 或 month ')
         except Exception as e:
-            logger.
+            logger.error(f'{table_name} 将数据按年/月保存(cut_data),但在转换日期时报错 -> {e}')
         # 清理 dataframe 非法值,并转换获取数据类型
         dtypes, df = self.convert_df_dtypes(df)
         if set_typ:
@@ -1031,9 +1030,7 @@ class MysqlUpload:
         connection.commit()  # 提交事务

         if df_sql:
-
-            logger.info(
-                f'{now} 正在更新: mysql ({self.host}:{self.port}) {db_name}/{table_name}, {count}, {self.filename}')
+            logger.info(f'正在更新: mysql ({self.host}:{self.port}) {db_name}/{table_name}, {count}, {self.filename}')
             engine = create_engine(
                 f"mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{db_name}")  # 创建数据库引擎
             # df.to_csv('/Users/xigua/Downloads/mysql.csv', index=False, header=True, encoding='utf-8_sig')
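Note: the `df_sql` branch builds a SQLAlchemy engine with the `mysql+pymysql` driver string; the usual next step is a `DataFrame.to_sql` call. A minimal sketch under assumed placeholder credentials (the exact write call is not shown in this hunk):

```python
import pandas as pd
from sqlalchemy import create_engine

# Placeholder credentials; the package assembles these from its own configuration.
username, password, host, port, db_name = 'user', 'password', '127.0.0.1', 3306, 'demo_db'

engine = create_engine(
    f"mysql+pymysql://{username}:{password}@{host}:{port}/{db_name}")

df = pd.DataFrame({'日期': ['2025-01-01'], '销售额': [100]})
# Appends rows to an existing (or newly created) table; requires pymysql to be installed.
df.to_sql('demo_table', con=engine, if_exists='append', index=False)
```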
@@ -1073,7 +1070,7 @@ class MysqlUpload:
                     else:
                         logger.info(f'{table_name} 存在复合主键: {[item['PrimaryKey'] for item in result]}, 无法重置自增id')
                 except Exception as e:
-                    logger.
+                    logger.error(f'333 {table_name} {e}')
                     connection.rollback()
             connection.commit()  # 提交事务
         connection.close()
@@ -1129,7 +1126,7 @@ class MysqlUpload:
                     else:
                         logger.info(f'{table_name} 存在复合主键: {[item['PrimaryKey'] for item in result]}, 无法重置自增id')
                 except Exception as e:
-                    logger.
+                    logger.error(f'333 {table_name} {e}')
                     connection.rollback()
             connection.close()
             return
@@ -1219,10 +1216,6 @@ class MysqlUpload:
                         cursor.execute(sql)
                     except Exception as e:
                         pass
-                        # logger.info(data)
-                        # logger.info(values)
-                        # logger.info(f'mysql -> df_to_mysql 报错: {e}, {self.filename}')
-                        # breakpoint()

             if reset_id:
                 # 6. 重置自增列
@@ -1252,7 +1245,7 @@ class MysqlUpload:
                     else:
                         logger.info(f'{table_name} 存在复合主键: {[item['PrimaryKey'] for item in result]}, 无法重置自增id')
                 except Exception as e:
-                    logger.
+                    logger.error(f'333 {table_name} {e}')
                     connection.rollback()
             connection.commit()  # 提交事务
         connection.close()
@@ -1303,8 +1296,7 @@ class MysqlUpload:
         # 将二进制数据写入到文件
         with open(os.path.join(save_path, filename), 'wb') as f:
             f.write(result['数据主体'])
-
-        logger.info(f'{now} 写入本地文件: ({self.host}:{self.port}) {db_name}/{table_name} -> {os.path.join(save_path, filename)}')
+        logger.info(f'写入本地文件: ({self.host}:{self.port}) {db_name}/{table_name} -> {os.path.join(save_path, filename)}')
         connection.close()

     def read_mysql(self, table_name, start_date, end_date, db_name='远程数据源', date_name='日期'):
@@ -1325,8 +1317,7 @@ class MysqlUpload:
                 logger.info(f"Database {db_name} 数据库不存在")
                 return df
             else:
-
-                logger.info(f'{now} mysql 正在查询表: {table_name}, 范围: {start_date}~{end_date}')
+                logger.info(f'mysql 正在查询表: {table_name}, 范围: {start_date}~{end_date}')
         except:
             return df
         finally:
@@ -1348,7 +1339,7 @@ class MysqlUpload:
                 columns = [desc[0] for desc in cursor.description]
                 df = pd.DataFrame(rows, columns=columns)  # 转为 df
         except Exception as e:
-            logger.
+            logger.error(f'{e} {db_name} -> {table_name} 表不存在')
             return df
         finally:
             connection.close()
@@ -1356,11 +1347,10 @@ class MysqlUpload:
         if len(df) == 0:
             logger.info(f'database: {db_name}, table: {table_name} 查询的数据为空')
         else:
-            now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
             cost_time = int(time.time() - before_time)
             if cost_time < 1:
                 cost_time = round(time.time() - before_time, 2)
-            logger.info(f'
+            logger.info(f'mysql ({self.host}) 表: {table_name} 获取数据长度: {len(df)}, 用时: {cost_time} 秒')
         return df

     def upload_pandas(self, update_path, db_name, days=None):
@@ -1387,10 +1377,6 @@ class MysqlUpload:
             for name in files:
                 if name.endswith('.csv') and 'baidu' not in name:
                     df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
-                    # if '日期' not in df.columns.tolist():
-                    # now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-                    # logger.info(f'{now} {root_file} 缺少日期列, 不支持上传 mysql')
-                    # continue
                     if '日期' in df.columns.tolist():
                         df['日期'] = df['日期'].apply(lambda x: pd.to_datetime(x) if x else x)
                         df = df[df['日期'] >= start_date]
@@ -1400,10 +1386,6 @@ class MysqlUpload:
         elif os.path.isfile(f_path):
             if f_path.endswith('.csv') and 'baidu' not in f_path:
                 df = pd.read_csv(f_path, encoding='utf-8_sig', header=0, na_filter=False)
-                # if '日期' not in df.columns.tolist():
-                # now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-                # logger.info(f'{now} {root_file} 缺少日期列, 不支持上传 mysql')
-                # continue
                 if '日期' not in df.columns.tolist():
                     df['日期'] = df['日期'].apply(lambda x: pd.to_datetime(x) if x else x)
                     df = df[df['日期'] >= start_date]
@@ -1451,14 +1433,14 @@ class OptimizeDatas:
             try:
                 return func(*args, **kwargs)
             except Exception as e:
-                logger.
+                logger.error(f'{func.__name__}, {e}')  # 将异常信息返回
                 with open(error_file, 'a') as f:
                     now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                     f.write(f'\n{now} \n')
                     f.write(f'函数注释内容(用于定位函数): {func.__doc__} \n')
                     # f.write(f'报错的文件:\n{e.__traceback__.tb_frame.f_globals["__file__"]}\n')  # 发生异常所在的文件
                     traceback.print_exc(file=open(error_file, 'a'))  # 返回完整的堆栈信息
-                logger.
+                logger.error(f'更多信息请查看日志文件: {error_file}')

         return wrapper

@@ -1469,10 +1451,10 @@ class OptimizeDatas:
                 connection = pymysql.connect(**_config)  # 连接数据库
                 return connection
             except Exception as e:
-                logger.
+                logger.error(f'连接失败,正在重试: {attempts}/{max_try} {e}')
                 attempts += 1
                 time.sleep(30)
-        logger.
+        logger.error(f'{_db_name}: 连接失败,重试次数超限,当前设定次数: {max_try}')
         return None

     def optimize_list(self):
@@ -1481,8 +1463,7 @@ class OptimizeDatas:
         需要设置 self.db_name_lists
         """
         if not self.db_name_lists:
-
-            logger.info(f'{now} 尚未设置参数: self.db_name_lists')
+            logger.info(f'尚未设置参数: self.db_name_lists')
             return
         for db_name in self.db_name_lists:
             self.db_name = db_name
@@ -1491,13 +1472,11 @@ class OptimizeDatas:
     def optimize(self, except_key=['更新时间']):
         """ 更新一个数据库 移除冗余数据 """
         if not self.db_name:
-
-            logger.info(f'{now} 尚未设置参数: self.db_name')
+            logger.info(f'尚未设置参数: self.db_name')
             return
         tables = self.table_list(db_name=self.db_name)
         if not tables:
-
-            logger.info(f'{now} {self.db_name} -> 数据表不存在')
+            logger.info(f'{self.db_name} -> 数据表不存在')
             return

         # 日期初始化
@@ -1514,8 +1493,7 @@ class OptimizeDatas:
         start_date_before = self.start_date
         end_date_before = self.end_date

-
-        logger.info(f'{now} mysql({self.host}: {self.port}) {self.db_name} 数据库优化中(日期长度: {self.days} 天)...')
+        logger.info(f'mysql({self.host}: {self.port}) {self.db_name} 数据库优化中(日期长度: {self.days} 天)...')
         for table_dict in tables:
             for key, table_name in table_dict.items():
                 # if '店铺指标' not in table_name:
@@ -1531,8 +1509,7 @@ class OptimizeDatas:
                 cursor.execute(sql)
                 result = cursor.fetchone()
                 if not result:
-
-                    logger.info(f'{now} 数据表: {table_name}, 数据长度为 0')
+                    logger.info(f'数据表: {table_name}, 数据长度为 0')
                     continue  # 检查数据表是否为空

                 cursor.execute(f"SHOW FULL COLUMNS FROM `{table_name}`")  # 查询数据表的列信息
@@ -1591,11 +1568,10 @@ class OptimizeDatas:
                     else:
                         logger.info(f'{table_name} 存在复合主键: {[item['PrimaryKey'] for item in result]}, 无法重置自增id')
                 except Exception as e:
-                    logger.
+                    logger.error(f'333 {table_name} {e}')
                     self.connection.rollback()
         self.connection.close()
-
-        logger.info(f'{now} mysql({self.host}: {self.port}) {self.db_name} 数据库优化完成!')
+        logger.info(f'mysql({self.host}: {self.port}) {self.db_name} 数据库优化完成!')

     def delete_duplicate(self, table_name, date, except_key=['更新时间']):
         datas = self.table_datas(db_name=self.db_name, table_name=str(table_name), date=date)
@@ -1616,7 +1592,7 @@ class OptimizeDatas:
                     continue
                 all_datas.append(data)  # 数据没有重复
         except Exception as e:
-            logger.
+            logger.error(f'{table_name} 函数: mysql - > OptimizeDatas -> delete_duplicate -> {e}')
         del all_datas

         if not duplicate_id:  # 如果没有重复数据,则跳过该数据表
@@ -1628,11 +1604,10 @@ class OptimizeDatas:
                 # 移除冗余数据
                 sql = f"DELETE FROM `{table_name}` WHERE id IN ({placeholders})"
                 cursor.execute(sql, duplicate_id)
-
-                logger.info(f"{now} {table_name} -> {date.strftime('%Y-%m-%d')} before: {len(datas)}, remove: {cursor.rowcount}")
+                logger.debug(f"{table_name} -> {date.strftime('%Y-%m-%d')} before: {len(datas)}, remove: {cursor.rowcount}")
             self.connection.commit()  # 提交事务
         except Exception as e:
-            logger.
+            logger.error(f'{self.db_name}/{table_name}, {e}')
             self.connection.rollback()  # 异常则回滚

     def delete_duplicate2(self, table_name, except_key=['更新时间']):
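Note: the removal logic above relies on a parameterized `IN` clause, one `%s` placeholder per id, so the driver escapes the values. A self-contained sketch of that pattern with pymysql (connection settings, table name, and ids are illustrative):

```python
import pymysql

# Illustrative connection; the package builds its own from stored config.
connection = pymysql.connect(host='127.0.0.1', port=3306, user='user',
                             password='password', database='demo_db')
table_name = 'demo_table'
duplicate_id = [3, 7, 12]  # ids of rows judged redundant

try:
    with connection.cursor() as cursor:
        placeholders = ', '.join(['%s'] * len(duplicate_id))
        sql = f"DELETE FROM `{table_name}` WHERE id IN ({placeholders})"
        cursor.execute(sql, duplicate_id)  # values are passed separately, not formatted in
    connection.commit()
except Exception:
    connection.rollback()  # undo the partial delete on error
finally:
    connection.close()
```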
@@ -1666,12 +1641,11 @@ class OptimizeDatas:
                 # 移除冗余数据
                 sql = f"DELETE FROM `{table_name}` WHERE id IN ({placeholders})"
                 cursor.execute(sql, duplicate_id)
-
-                logger.info(f"{now} {table_name} -> before: {len(datas)}, "
+                logger.info(f"{table_name} -> before: {len(datas)}, "
                             f"remove: {cursor.rowcount}")
             self.connection.commit()  # 提交事务
         except Exception as e:
-            logger.
+            logger.error(f'{self.db_name}/{table_name}, {e}')
             self.connection.rollback()  # 异常则回滚

     def database_list(self):
@@ -1697,11 +1671,10 @@ class OptimizeDatas:
                 cursor.execute(f"SHOW DATABASES LIKE '{db_name}'")  # 检查数据库是否存在
                 database_exists = cursor.fetchone()
                 if not database_exists:
-
-                    logger.info(f'{now} {db_name}: 数据表不存在!')
+                    logger.info(f'{db_name}: 数据表不存在!')
                     return
         except Exception as e:
-            logger.
+            logger.error(f'002 {e}')
             return
         finally:
             connection.close()  # 断开连接
@@ -1732,7 +1705,7 @@ class OptimizeDatas:
                 cursor.execute(sql)
                 results = cursor.fetchall()
         except Exception as e:
-            logger.
+            logger.error(f'001 {e}')
         finally:
             connection.close()
         return results
@@ -1786,7 +1759,7 @@ def year_month_day_bak(start_date, end_date):
     try:
         start_date = f'{pd.to_datetime(start_date).year}-{pd.to_datetime(start_date).month}-01'
     except Exception as e:
-        logger.
+        logger.error(e)
         return []
     # 使用pandas的date_range创建一个日期范围,频率为'MS'代表每月开始
     date_range = pd.date_range(start=start_date, end=end_date, freq='MS')
mdbq/mysql/s_query.py
CHANGED
@@ -117,7 +117,7 @@ class QueryDatas:
                 df[decimal_cols] = df[decimal_cols].astype(float)

         except Exception as e:
-            logger.
+            logger.error(f"Database operation failed: {str(e)}")
         finally:
             if connection:
                 connection.close()
@@ -195,7 +195,7 @@ class QueryDatas:
                     return False
             return True
         except Exception as e:
-            logger.
+            logger.error(e)
             return False
         finally:
             connection.close()  # 断开连接
mdbq/redis/getredis.py
CHANGED
@@ -123,7 +123,7 @@ class RedisData(object):
             ttl = self.redis_engine.ttl(cache_key)
             cache_data = self._fetch_redis_data(cache_key)
         except Exception as e:
-            logger.
+            logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
             return self.get_from_mysql(db_name, table_name, set_year, start_date, end_date)

         # 缓存失效处理逻辑
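Note: this hunk shows a cache-aside fallback: if Redis is unreachable, the query goes straight to MySQL. A rough sketch of that control flow (the Redis client setup, key, and MySQL fetch function below are placeholders, not the package's real API):

```python
import json
import logging

import pandas as pd
import redis

logger = logging.getLogger(__name__)
redis_engine = redis.Redis(host='127.0.0.1', port=6379, db=0)  # illustrative connection

def get_from_mysql(db_name: str, table_name: str) -> pd.DataFrame:
    """Stand-in for the real MySQL query path."""
    return pd.DataFrame()

def get_data(db_name: str, table_name: str, cache_key: str) -> pd.DataFrame:
    try:
        raw = redis_engine.get(cache_key)  # cached JSON payload, if any
    except Exception as e:
        logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
        return get_from_mysql(db_name, table_name)  # fall back when the cache is unreachable
    if not raw:
        return get_from_mysql(db_name, table_name)  # cache miss
    return pd.DataFrame(json.loads(raw.decode('utf-8')))
```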
@@ -175,7 +175,7 @@ class RedisData(object):
             return combined_data

         except Exception as e:
-            logger.
+            logger.error(f"缓存更新失败: {cache_key} - {str(e)}")
             return pd.DataFrame()

     # Helper Methods ------------------------------------------------
@@ -197,7 +197,7 @@ class RedisData(object):
                 projection={}
             )
         except Exception as e:
-            logger.
+            logger.error(f"MySQL 查询异常 {db_name}.{table_name}: {e}")
             return pd.DataFrame()

     def _fetch_redis_data(self, cache_key: str) -> pd.DataFrame:
@@ -210,7 +210,7 @@ class RedisData(object):
             df = pd.DataFrame(json.loads(data.decode("utf-8")))
             return self._convert_date_columns(df)
         except Exception as e:
-            logger.
+            logger.error(f"Redis 数据解析失败 {cache_key}: {e}")
             return pd.DataFrame()

     def _convert_date_columns(self, df: pd.DataFrame) -> pd.DataFrame:
{mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/RECORD
CHANGED
@@ -18,10 +18,10 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
 mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
 mdbq/mongo/mongo.py,sha256=M9DUeUCMPDngkwn9-ui0uTiFrvfNU1kLs22s5SmoNm0,31899
 mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
-mdbq/mysql/mysql.py,sha256=
+mdbq/mysql/mysql.py,sha256=v9nr-tb2L1VN0mKUVqEkfcWP1qXQRPVHIOHs3q89Cos,98531
 mdbq/mysql/mysql_bak.py,sha256=_jFo2_OC1BNm5wEmoYiBG_TcuNNA2xUWKNhMBfgDiAM,99699
 mdbq/mysql/recheck_mysql.py,sha256=ppBTfBLgkRWirMVZ31e_ZPULiGPJU7K3PP9G6QBZ3QI,8605
-mdbq/mysql/s_query.py,sha256=
+mdbq/mysql/s_query.py,sha256=pj5ioJfUT81Su9S-km9G49gF5F2MmXXfw_oAIUzhN28,8794
 mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
 mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
 mdbq/other/download_sku_picture.py,sha256=GdphR7Q3psXXVuZoyJ4u_6OWn_rWlcbT0iJ-1zPT6O0,45368
@@ -34,11 +34,11 @@ mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,239
 mdbq/pbix/refresh_all.py,sha256=OBT9EewSZ0aRS9vL_FflVn74d4l2G00wzHiikCC4TC0,5926
 mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,7192
 mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
-mdbq/redis/getredis.py,sha256=
+mdbq/redis/getredis.py,sha256=k4fpC-2ziRA_3HvcjX4jBeHCFIRaf3_f4oHMFlW-xjk,26642
 mdbq/redis/getredis_优化hash.py,sha256=q7omKJCPw_6Zr_r6WwTv4RGSXzZzpLPkIaqJ22svJhE,29104
 mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
 mdbq/spider/aikucun.py,sha256=v7VO5gtEXR6_4Q6ujbTyu1FHu7TXHcwSQ6hIO249YH0,22208
-mdbq-3.6.
-mdbq-3.6.
-mdbq-3.6.
-mdbq-3.6.
+mdbq-3.6.16.dist-info/METADATA,sha256=QpjJk7yttYLxg6QzdLaZGeGSNCdqTfxBWd0mVwUbtAY,244
+mdbq-3.6.16.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
+mdbq-3.6.16.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+mdbq-3.6.16.dist-info/RECORD,,
{mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/WHEEL
File without changes
{mdbq-3.6.14.dist-info → mdbq-3.6.16.dist-info}/top_level.txt
File without changes