mdbq 4.0.34__py3-none-any.whl → 4.0.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/__version__.py CHANGED
@@ -1 +1 @@
- VERSION = '4.0.34'
+ VERSION = '4.0.36'
mdbq/aggregation/query_data.py CHANGED
@@ -4,6 +4,7 @@ from mdbq.mysql import uploader
  from mdbq.mysql import s_query
  from mdbq.myconf import myconf
  from mdbq.log import mylogger
+ from mdbq.other import error_handler
  import datetime
  from dateutil.relativedelta import relativedelta
  import pandas as pd
@@ -218,18 +219,7 @@ class MysqlDatasQuery:
  self.download_manager = download_manager
  self.pf_datas = []

- @staticmethod
- def try_except(func):
- @wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except Exception as e:
- logger.info('函数执行错误', {'函数': func.__name__, '错误': str(e), 'args': args, 'kwargs': kwargs})
-
- return wrapper
-
- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  def tg_wxt(self, db_name='聚合数据', table_name='天猫_主体报表', is_maximize=True):
  start_date, end_date = self.months_data(num=self.months)
  projection = {
@@ -716,7 +706,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '推广渠道', '店铺名称', '营销场景', '商品id', '花费', '展现量', '点击量', '自然流量曝光量']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def syj(self, db_name='聚合数据', table_name='生意经_宝贝指标'):
  start_date, end_date = self.months_data(num=self.months)
@@ -791,7 +781,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '宝贝id']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def tg_rqbb(self, db_name='聚合数据', table_name='天猫_人群报表', is_maximize=True):
  start_date, end_date = self.months_data(num=self.months)
@@ -998,7 +988,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '推广渠道', '店铺名称', '营销场景', '商品id', '人群名字']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def tg_gjc(self, db_name='聚合数据', table_name='天猫_关键词报表', is_maximize=True):
  start_date, end_date = self.months_data(num=self.months)
@@ -1160,7 +1150,7 @@ class MysqlDatasQuery:
  return pd.DataFrame()
  return df

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def tg_cjzb(self, db_name='聚合数据', table_name='天猫_超级直播', is_maximize=True):
  start_date, end_date = self.months_data(num=self.months)
@@ -1297,7 +1287,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '推广渠道', '店铺名称', '营销场景', '人群名字', '计划名字']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  def pxb_zh(self, db_name='聚合数据', table_name='天猫_品销宝账户报表', is_maximize=True):
  start_date, end_date = self.months_data(num=self.months)
  projection = {
@@ -1406,7 +1396,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '推广渠道', '店铺名称', '营销场景', '报表类型', '花费', '展现量']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  def idbm(self, db_name='聚合数据', table_name='商品id编码表'):
  """ 用生意经日数据制作商品 id 和编码对照表 """
  projection = {
@@ -1455,7 +1445,7 @@ class MysqlDatasQuery:
  'unique_keys': [['宝贝id']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def sp_picture(self, db_name='聚合数据', table_name='商品id图片对照表'):
  """ """
@@ -1690,7 +1680,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '商品id']], # 唯一约束列表
  }

- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def dplyd(self, db_name='聚合数据', table_name='店铺流量来源构成'):
  """ 新旧版取的字段是一样的 """
@@ -1795,7 +1785,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '类别', '来源构成', '一级来源', '二级来源', '三级来源']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def sp_cost(self, db_name='聚合数据', table_name='商品成本'):
  """ 电商定价 """
@@ -1832,7 +1822,7 @@ class MysqlDatasQuery:
  'unique_keys': [['款号']], # 唯一约束列表
  }

- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def jdjzt(self, db_name='聚合数据', table_name='京东_京准通'):
  start_date, end_date = self.months_data(num=self.months)
@@ -1936,7 +1926,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '产品线', '触发sku_id', '跟单sku_id', '花费']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def jdqzyx(self, db_name='聚合数据', table_name='京东_京准通_全站营销'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2003,7 +1993,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '产品线']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def jd_gjc(self, db_name='聚合数据', table_name='京东_关键词报表'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2102,7 +2092,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '产品线', '计划id', '搜索词', '关键词']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def sku_sales(self, db_name='聚合数据', table_name='京东_sku_商品明细'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2164,7 +2154,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '商品id']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def spu_sales(self, db_name='聚合数据', table_name='京东_spu_商品明细'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2235,7 +2225,7 @@ class MysqlDatasQuery:
  start_date = f'{start_date.year}-{start_date.month}-01' # 替换为 n 月以前的第一天
  return pd.to_datetime(start_date), pd.to_datetime(end_date)

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def se_search(self, db_name='聚合数据', table_name='天猫店铺来源_手淘搜索'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2299,7 +2289,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '词类型', '搜索词']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def zb_ccfx(self, db_name='聚合数据', table_name='生意参谋_直播场次分析'):
  start_date, end_date = self.months_data(num=self.months)
@@ -2437,7 +2427,7 @@ class MysqlDatasQuery:
  'unique_keys': [['场次id']], # 唯一约束列表
  }

- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def tg_by_day(self, db_name='聚合数据', table_name='多店推广场景_按日聚合'):
  """
@@ -2896,7 +2886,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '营销场景']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def aikucun_bd_spu(self, db_name='聚合数据', table_name='爱库存_商品spu榜单'):
  start_date, end_date = self.months_data(num=self.months)
@@ -3109,7 +3099,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '场景id', '父渠道id']], # 唯一约束列表
  }

- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def dmp_crowd(self, db_name='聚合数据', table_name='达摩盘_人群报表'):
  start_date, end_date = self.months_data(num=self.months)
@@ -3215,7 +3205,7 @@ class MysqlDatasQuery:
  'unique_keys': [['日期', '店铺名称', '人群id', '营销渠道', '计划基础信息', '推广单元信息']], # 唯一约束列表
  }

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  def ret_keyword(self, keyword, as_file=False):
  """ 推广关键词报表,关键词分类, """
  datas = [
@@ -3379,7 +3369,7 @@ class MysqlDatasQuery:
  break
  return result

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  def set_crowd(self, keyword, as_file=False):
  """ 推广人群报表,人群分类, """
  result_a = re.findall('_a$|_a_|_ai|^a_', str(keyword), re.IGNORECASE)
@@ -3413,7 +3403,7 @@ class MysqlDatasQuery:
  if not is_res:
  return ''

- @try_except
+ @error_handler.log_on_exception(logger=logger)
  def set_crowd2(self, keyword, as_file=False):
  """ 推广人群报表,人群分类, """
  datas = [
@@ -3508,7 +3498,7 @@ class MysqlDatasQuery:
  break
  return result

- # @try_except
+ # @error_handler.log_on_exception(logger=logger)
  @upload_data_decorator()
  def performance_concat(self, db_name, table_name, bb_tg=True):
  tg = [item['数据主体'] for item in self.pf_datas if item['集合名称'] == '天猫汇总表调用'][0]
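Migration note: MysqlDatasQuery no longer defines its own try_except decorator; the hunks above swap every @try_except call site (including the commented-out ones) for @error_handler.log_on_exception(logger=logger) from the new mdbq.other.error_handler module. A minimal usage sketch follows; the task function and the plain logging.Logger are illustrative assumptions, not code from the package:

import logging
from mdbq.other import error_handler

logger = logging.getLogger(__name__)  # log_on_exception accepts a logging.Logger instance or None

@error_handler.log_on_exception(logger=logger)
def hypothetical_report_task():
    raise ValueError('boom')  # any exception is caught, logged through logger.error, and swallowed

result = hypothetical_report_task()  # returns default_return (None by default) instead of raising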
mdbq/log/mylogger.py CHANGED
@@ -12,6 +12,7 @@ import atexit
  import traceback
  import inspect
  import psutil
+ import multiprocessing


  def get_caller_filename(default='mylogger'):
@@ -70,7 +71,8 @@ class MyLogger:
  enable_metrics: bool = False,
  metrics_interval: int = 300,
  message_limited: int = 1000,
- flush_interval: int = 5
+ flush_interval: int = 5,
+ enable_multiprocess: bool = False
  ):
  """
  初始化日志器
@@ -90,6 +92,7 @@ class MyLogger:
  :param metrics_interval: 指标采集间隔(秒)
  :param message_limited: 简化日志内容,避免过长
  :param flush_interval: 定时刷新日志器间隔(秒)
+ :param enable_multiprocess: 是否启用多进程安全日志
  """
  log_path = os.path.join(os.path.expanduser("~"), 'logfile')
  if name is None:
@@ -113,8 +116,14 @@ class MyLogger:
  self.filters = filters or []
  self.enable_metrics = enable_metrics
  self.metrics_interval = metrics_interval
- self.message_limited = message_limited
- self.flush_interval = flush_interval
+ self.message_limited = max(1, int(message_limited))
+ self.flush_interval = max(1, int(flush_interval))
+ self.enable_multiprocess = enable_multiprocess
+ self._mp_queue = None
+ self._mp_writer_process = None
+ self._is_main_process = multiprocessing.current_process().name == 'MainProcess'
+ self._stop_event = threading.Event()
+ self._flush_thread = None

  # 上下文相关
  self._context = threading.local()
@@ -133,7 +142,9 @@ class MyLogger:
  self.logger = logging.getLogger(name)
  self._init_logging()

- if self.enable_async:
+ if self.enable_multiprocess:
+ self._setup_multiprocess_logging()
+ elif self.enable_async:
  self._setup_async_logging()

  atexit.register(self.shutdown)
@@ -184,14 +195,18 @@ class MyLogger:
  class SimpleFormatter(logging.Formatter):
  def format(self, record):
  msg = super().format(record)
- if hasattr(record, 'extra_data') and record.extra_data:
- context_data = record.extra_data.get('context_data', {})
+ # 统一处理 extra_data 字段
+ extra_data = getattr(record, 'extra_data', None)
+ if not extra_data and hasattr(record, 'extra'):
+ extra_data = getattr(record, 'extra', None)
+ if extra_data:
+ context_data = extra_data.get('context_data', {})
  if context_data:
  msg += f" | Context: {context_data}"
- metrics = record.extra_data.get('性能指标', {})
+ metrics = extra_data.get('性能指标', {})
  if metrics:
  msg += f" | Metrics: {metrics}"
- extra = {k: v for k, v in record.extra_data.items()
+ extra = {k: v for k, v in extra_data.items()
  if k not in ('context_data', '性能指标')}
  if extra:
  msg += f" | Extra: {extra}"
@@ -254,6 +269,39 @@ class MyLogger:
  )
  self._queue_listener.start()

+ def _setup_multiprocess_logging(self):
+ """多进程安全日志:主进程写日志,子进程投递消息"""
+ self._mp_queue = multiprocessing.Queue(self.buffer_size)
+ if self._is_main_process:
+ # 主进程:启动写入进程
+ self._mp_writer_process = multiprocessing.Process(
+ target=self._mp_writer_worker,
+ args=(self._mp_queue,),
+ name=f"{self.name}_mp_writer",
+ daemon=True
+ )
+ self._mp_writer_process.start()
+ else:
+ # 子进程:不需要写入进程
+ pass
+
+ def _mp_writer_worker(self, log_queue):
+ """日志写入进程,消费队列并写日志"""
+ # 重新初始化logger和handlers(避免多进程fork后锁混乱)
+ self._init_logging()
+ while True:
+ try:
+ record = log_queue.get()
+ if record is None:
+ break
+ level, message, extra = record
+ self._sync_log(level, message, extra)
+ except Exception as e:
+ try:
+ self.logger.error(f"多进程日志写入异常: {e}", extra={'extra_data': {'mp_writer_error': str(e)}})
+ except:
+ pass
+
  def _get_system_metrics(self) -> Dict[str, Any]:
  """获取系统资源使用指标"""
  if not self.enable_metrics:
@@ -337,7 +385,17 @@ class MyLogger:

  @log_error_handler(retry_times=1, fallback_level='warning')
  def _sync_log(self, level: str, message: str, extra: Optional[Dict] = None):
- """同步日志记录(兼容异步,直接走logger)"""
+ if self.enable_multiprocess and not self._is_main_process:
+ # 子进程:只投递消息
+ try:
+ self._mp_queue.put((level, message, extra), block=False)
+ except Exception as e:
+ # 投递失败降级本地输出
+ logging.basicConfig()
+ fallback_logger = logging.getLogger(f"{getattr(self, 'name', 'mylogger')}_mp_fallback")
+ fallback_logger.warning(f"[多进程投递失败] {message} {e}")
+ return
+ # 主进程/普通模式:正常写日志
  if not hasattr(self.logger, level.lower()):
  return
  if not isinstance(message, str):
@@ -462,7 +520,7 @@ class MyLogger:
  def _format_traceback(self, exc_info):
  """格式化异常堆栈"""
  if exc_info is None:
- return ""
+ return "No traceback available"
  return ''.join(traceback.format_exception(type(exc_info), exc_info, exc_info.__traceback__))

  def timeit(self, message: str = "Execution time"):
@@ -481,8 +539,7 @@ class MyLogger:

  def __exit__(self, exc_type, exc_val, exc_tb):
  elapsed = time.time() - self.start_time
- self.logger.info(f"{self.message}: {elapsed:.3f}s",
- extra={'elapsed_seconds': elapsed})
+ self.logger.info(f"{self.message}: {elapsed:.3f}s", extra={'elapsed_seconds': f"{elapsed:.3f}"})
  return False

  def _start_flush_thread(self):
@@ -522,13 +579,27 @@ class MyLogger:

  def shutdown(self):
  """关闭日志记录器,确保所有日志被刷新"""
+ if self.enable_multiprocess and self._is_main_process and self._mp_writer_process:
+ try:
+ self._mp_queue.put(None)
+ self._mp_writer_process.join(timeout=5)
+ except:
+ pass
+ if self.enable_multiprocess and self._mp_queue is not None:
+ try:
+ self._mp_queue.close()
+ self._mp_queue.join_thread()
+ except:
+ pass
+ self._mp_queue = None
  if self.enable_async and self._queue_listener:
  self._queue_listener.stop()
- for handler in self.logger.handlers:
+ for handler in self.logger.handlers[:]:
  try:
  handler.close()
  except:
  pass
+ self.logger.removeHandler(handler)
  for handler in getattr(self, '_handlers', []):
  try:
  handler.close()
@@ -549,7 +620,7 @@ def main():
  sensitive_fields=[], # 敏感字段列表
  enable_metrics=False, # 是否启用性能指标
  )
- logger.info('123')
+ logger.info('123', extra={'extra_data': {'test': 'test'}})
  logger.shutdown()


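Usage note for the logging changes above: with enable_multiprocess=True, child processes only enqueue (level, message, extra) tuples onto a multiprocessing.Queue, while a daemon writer process started by the main process performs the actual writes; shutdown() pushes a None sentinel and joins that writer. A hedged sketch, assuming the remaining constructor arguments keep the defaults shown in the diff:

from mdbq.log import mylogger

logger = mylogger.MyLogger(enable_multiprocess=True)  # assumption: other arguments left at their defaults
logger.info('123', extra={'extra_data': {'test': 'test'}})  # same call pattern as main() above
logger.shutdown()  # sends the None sentinel and joins the writer process (timeout=5)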
mdbq/other/error_handler.py ADDED
@@ -0,0 +1,267 @@
+ import traceback
+ import sys
+ from functools import wraps
+ import inspect
+ import asyncio
+ from typing import Callable, Optional, Any, List
+ import logging
+ import json
+
+
+ class _ErrorHandlerHelper:
+ @staticmethod
+ def get_default_logger():
+ default_logger = logging.getLogger("mdbq.error_handler.default")
+ handler_exists = any(isinstance(h, logging.StreamHandler) for h in default_logger.handlers)
+ if not handler_exists:
+ handler = logging.StreamHandler()
+ handler.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+ handler.setFormatter(formatter)
+ default_logger.addHandler(handler)
+ default_logger.setLevel(logging.INFO)
+ default_logger.propagate = False
+ return default_logger
+
+ @staticmethod
+ def filter_fields(info: dict, log_fields):
+ if not log_fields:
+ return info
+ return {k: info[k] for k in log_fields if k in info}
+
+ @staticmethod
+ def build_error_info(func, e, args, kwargs, stack_summary):
+ tb = traceback.extract_tb(sys.exc_info()[2])
+ last_tb = tb[-1] if tb else None
+ return {
+ '函数': func.__name__,
+ '模块': func.__module__,
+ '类型': type(e).__name__,
+ '消息': str(e),
+ '签名': str(inspect.signature(func)),
+ 'args': [str(arg) for arg in args] if args else [],
+ 'kwargs': {k: str(v) for k, v in kwargs.items()} if kwargs else {},
+ '函数文件': func.__code__.co_filename,
+ '函数行号': func.__code__.co_firstlineno,
+ '异常行号': last_tb.lineno if last_tb else None,
+ '异常文件': last_tb.filename if last_tb else None,
+ '堆栈': stack_summary,
+ }
+
+ @staticmethod
+ def build_final_error_info(func, last_exception, max_retries):
+ tb = traceback.extract_tb(sys.exc_info()[2])
+ last_tb = tb[-1] if tb else None
+ return {
+ '函数': func.__name__,
+ '最终错误类型': type(last_exception).__name__,
+ '最终错误消息': str(last_exception),
+ '总尝试次数': max_retries,
+ '堆栈跟踪': traceback.format_exc(),
+ '异常行号': last_tb.lineno if last_tb else None,
+ '异常文件': last_tb.filename if last_tb else None,
+ }
+
+ @staticmethod
+ def get_stack_summary():
+ stack_lines = traceback.format_exc().splitlines(keepends=True)
+ if len(stack_lines) > 40:
+ return ''.join(stack_lines[:20]) + '\n...\n' + ''.join(stack_lines[-20:])
+ else:
+ return ''.join(stack_lines)
+
+
+ def log_on_exception(
+ logger=None,
+ *,
+ on_exception: Optional[Callable[[dict], None]] = None,
+ default_return: Any = None,
+ log_fields: Optional[List[str]] = None,
+ ):
+ """
+ :param logger: 日志对象
+ :param on_exception: 异常回调,参数为 error_info 字典
+ :param default_return: 异常时返回的默认值
+ :param log_fields: 只记录 error_info 的部分字段
+ """
+ if logger is not None and not isinstance(logger, logging.Logger):
+ raise TypeError(f"logger 参数必须为 logging.Logger 实例或 None,当前类型为: {type(logger)}")
+ def decorator(func):
+ is_async = asyncio.iscoroutinefunction(func)
+ @wraps(func)
+ async def async_wrapper(*args, **kwargs):
+ try:
+ return await func(*args, **kwargs)
+ except Exception as e:
+ stack_summary = _ErrorHandlerHelper.get_stack_summary()
+ error_info = _ErrorHandlerHelper.build_error_info(func, e, args, kwargs, stack_summary)
+ error_info = _ErrorHandlerHelper.filter_fields(error_info, log_fields)
+ use_logger = logger if logger is not None else _ErrorHandlerHelper.get_default_logger()
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"执行失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error("执行失败", {'details': error_info})
+ if on_exception:
+ try:
+ on_exception(error_info)
+ except Exception:
+ pass
+ return default_return
+ @wraps(func)
+ def sync_wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception as e:
+ stack_summary = _ErrorHandlerHelper.get_stack_summary()
+ error_info = _ErrorHandlerHelper.build_error_info(func, e, args, kwargs, stack_summary)
+ error_info = _ErrorHandlerHelper.filter_fields(error_info, log_fields)
+ use_logger = logger if logger is not None else _ErrorHandlerHelper.get_default_logger()
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"执行失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error("执行失败", {'details': error_info})
+ if on_exception:
+ try:
+ on_exception(error_info)
+ except Exception:
+ pass
+ return default_return
+ return async_wrapper if is_async else sync_wrapper
+ return decorator
+
+
+ def log_on_exception_with_retry(
+ max_retries=3,
+ delay=1,
+ logger=None,
+ *,
+ on_exception: Optional[Callable[[dict], None]] = None,
+ default_return: Any = None,
+ log_fields: Optional[List[str]] = None,
+ ):
+ """
+ :param logger: 日志对象
+ :param on_exception: 异常回调,参数为 error_info 字典
+ :param default_return: 异常时返回的默认值
+ :param log_fields: 只记录 error_info 的部分字段
+ """
+ if logger is not None and not isinstance(logger, logging.Logger):
+ raise TypeError(f"logger 参数必须为 logging.Logger 实例或 None,当前类型为: {type(logger)}")
+ def decorator(func):
+ is_async = asyncio.iscoroutinefunction(func)
+ @wraps(func)
+ async def async_wrapper(*args, **kwargs):
+ last_exception = None
+ import time
+ for attempt in range(max_retries):
+ try:
+ return await func(*args, **kwargs)
+ except Exception as e:
+ last_exception = e
+ error_info = {
+ '函数': func.__name__,
+ '重试': attempt + 1,
+ '最大重试': max_retries,
+ '类型': type(e).__name__,
+ '消息': str(e),
+ }
+ error_info = _ErrorHandlerHelper.filter_fields(error_info, log_fields)
+ use_logger = logger if logger is not None else _ErrorHandlerHelper.get_default_logger()
+ if use_logger:
+ if logger is None:
+ use_logger.warning(f"函数 {func.__name__} 第 {attempt + 1} 次尝试失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.warning(f"函数 {func.__name__} 第 {attempt + 1} 次尝试失败", {'details': error_info})
+ if on_exception:
+ try:
+ on_exception(error_info)
+ except Exception:
+ pass
+ if attempt < max_retries - 1:
+ await asyncio.sleep(delay)
+ if use_logger:
+ use_logger.info(f"第 {attempt + 1} 次尝试失败,{delay}秒后重试...")
+ else:
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"函数 {func.__name__} 在 {max_retries} 次尝试后仍然失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error(f"函数 {func.__name__} 在 {max_retries} 次尝试后仍然失败", {'details': error_info})
+ final_error_info = _ErrorHandlerHelper.build_final_error_info(func, last_exception, max_retries)
+ final_error_info = _ErrorHandlerHelper.filter_fields(final_error_info, log_fields)
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"最终执行失败\n详细信息: {json.dumps(final_error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error("最终执行失败", {'details': final_error_info})
+ if on_exception:
+ try:
+ on_exception(final_error_info)
+ except Exception:
+ pass
+ return default_return
+ @wraps(func)
+ def sync_wrapper(*args, **kwargs):
+ last_exception = None
+ import time
+ for attempt in range(max_retries):
+ try:
+ return func(*args, **kwargs)
+ except Exception as e:
+ last_exception = e
+ error_info = {
+ '函数': func.__name__,
+ '重试': attempt + 1,
+ '最大重试': max_retries,
+ '类型': type(e).__name__,
+ '消息': str(e),
+ }
+ error_info = _ErrorHandlerHelper.filter_fields(error_info, log_fields)
+ use_logger = logger if logger is not None else _ErrorHandlerHelper.get_default_logger()
+ if use_logger:
+ if logger is None:
+ use_logger.warning(f"函数 {func.__name__} 第 {attempt + 1} 次尝试失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.warning(f"函数 {func.__name__} 第 {attempt + 1} 次尝试失败", {'details': error_info})
+ if on_exception:
+ try:
+ on_exception(error_info)
+ except Exception:
+ pass
+ if attempt < max_retries - 1:
+ time.sleep(delay)
+ if use_logger:
+ use_logger.info(f"第 {attempt + 1} 次尝试失败,{delay}秒后重试...")
+ else:
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"函数 {func.__name__} 在 {max_retries} 次尝试后仍然失败\n详细信息: {json.dumps(error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error(f"函数 {func.__name__} 在 {max_retries} 次尝试后仍然失败", {'details': error_info})
+ final_error_info = _ErrorHandlerHelper.build_final_error_info(func, last_exception, max_retries)
+ final_error_info = _ErrorHandlerHelper.filter_fields(final_error_info, log_fields)
+ if use_logger:
+ if logger is None:
+ use_logger.error(f"最终执行失败\n详细信息: {json.dumps(final_error_info, ensure_ascii=False, indent=2)}")
+ else:
+ use_logger.error("最终执行失败", {'details': final_error_info})
+ if on_exception:
+ try:
+ on_exception(final_error_info)
+ except Exception:
+ pass
+ return default_return
+ return async_wrapper if is_async else sync_wrapper
+ return decorator
+
+
+ if __name__ == "__main__":
+ @log_on_exception(logger=None)
+ def divide_numbers(a, b):
+ """测试函数:除法运算"""
+ return a / b
+
+ result1 = divide_numbers(10, 0)
+
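The new module also exposes log_on_exception_with_retry, which the __main__ block above does not exercise. A short sketch under the signature shown in the diff; the flaky function and logger name are hypothetical:

import logging
from mdbq.other import error_handler

log = logging.getLogger('retry_demo')  # must be a logging.Logger instance or None

@error_handler.log_on_exception_with_retry(max_retries=3, delay=1, logger=log, default_return={})
def flaky_fetch():
    raise TimeoutError('upstream timeout')  # hypothetical failure on every attempt

data = flaky_fetch()  # each attempt logs a warning, the final failure an error, then {} is returned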
mdbq-4.0.34.dist-info/METADATA → mdbq-4.0.36.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: mdbq
- Version: 4.0.34
+ Version: 4.0.36
  Home-page: https://pypi.org/project/mdbq
  Author: xigua,
  Author-email: 2587125111@qq.com
mdbq-4.0.34.dist-info/RECORD → mdbq-4.0.36.dist-info/RECORD
@@ -1,9 +1,9 @@
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
- mdbq/__version__.py,sha256=D_wa3H06nsA6Arfalq9RVMZHoFDp3ZG4xsE24b2Lu8E,18
+ mdbq/__version__.py,sha256=zkcTzC4btf-Gh2O0sS9u1Pl3m_UGMa4rkyfG91EfdD0,18
  mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
- mdbq/aggregation/query_data.py,sha256=VICC4R0yNktmfWHItn7X0769DyRBa2hBXhJOTp3Zh2w,169282
+ mdbq/aggregation/query_data.py,sha256=WtTFMN78jn43Y-nBTPAXhAK56w3wDuv_cj4YtzzGbZk,169797
  mdbq/log/__init__.py,sha256=Mpbrav0s0ifLL7lVDAuePEi1hJKiSHhxcv1byBKDl5E,15
- mdbq/log/mylogger.py,sha256=9w_o5mYB3FooIxobq_lSa6oCYTKIhPxDFox-jeLtUHI,21714
+ mdbq/log/mylogger.py,sha256=iDhWkTY6I9T3IJuERWqiXKq1sNf0VuraSEq33ZxLqdw,24930
  mdbq/myconf/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
  mdbq/myconf/myconf.py,sha256=rHvQCnQRKhQ49AZBke-Z4v28hyOLmHt4MylIuB0H6yA,33516
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
@@ -14,6 +14,7 @@ mdbq/mysql/unique_.py,sha256=MaztT-WIyEQUs-OOYY4pFulgHVcXR1BfCy3QUz0XM_U,21127
  mdbq/mysql/uploader.py,sha256=SVlrLxoYBEpTu_I771wAehJQVFWOCqXp-lNk2JNYFOE,81881
  mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
  mdbq/other/download_sku_picture.py,sha256=X66sVdvVgzoNzmgVJyPtd7bjEvctEKtLPblEPF65EWc,46940
+ mdbq/other/error_handler.py,sha256=XiygzLiOKy-pYE4xcMbF0cEFxKorHHAhSeVZIDbQvhY,12313
  mdbq/other/otk.py,sha256=iclBIFbQbhlqzUbcMMoePXBpcP1eZ06ZtjnhcA_EbmE,7241
  mdbq/other/pov_city.py,sha256=AEOmCOzOwyjHi9LLZWPKi6DUuSC-_M163664I52u9qw,21050
  mdbq/other/ua_sj.py,sha256=JuVYzc_5QZ9s_oQSrTHVKkQv4S_7-CWx4oIKOARn_9U,22178
@@ -24,7 +25,7 @@ mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
  mdbq/redis/getredis.py,sha256=vpBuNc22uj9Vr-_Dh25_wpwWM1e-072EAAIBdB_IpL0,23494
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
  mdbq/spider/aikucun.py,sha256=XptHjGzbout9IYzWAOQUpMMV5qEgLTU8pL1ZGt8oNEA,21868
- mdbq-4.0.34.dist-info/METADATA,sha256=m5KXyEfQlxynSXm8ht6Owk0UZ-zi8MOo2AKy9lmYODg,364
- mdbq-4.0.34.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- mdbq-4.0.34.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-4.0.34.dist-info/RECORD,,
+ mdbq-4.0.36.dist-info/METADATA,sha256=eyN-znN-wNE2_i3A5WsrJI7WnKXN6kUenvnQH2kqBHU,364
+ mdbq-4.0.36.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ mdbq-4.0.36.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-4.0.36.dist-info/RECORD,,