mdbq 3.12.7__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/__version__.py CHANGED
@@ -1 +1 @@
1
- VERSION = '3.12.7'
1
+ VERSION = '4.0.0'
mdbq/log/mylogger.py CHANGED
@@ -18,6 +18,14 @@ try:
18
18
  except ImportError:
19
19
  HAS_PSUTIL = False
20
20
 
21
+ def get_caller_filename(default='mylogger'):
22
+ stack = inspect.stack()
23
+ for frame_info in stack:
24
+ filename = frame_info.filename
25
+ # 跳过本日志库自身
26
+ if not filename.endswith('mylogger.py'):
27
+ return os.path.splitext(os.path.basename(filename))[0]
28
+ return default
21
29
 
22
30
  class MyLogger:
23
31
  """
@@ -52,10 +60,10 @@ class MyLogger:
52
60
 
53
61
  def __init__(
54
62
  self,
55
- name: str = 'mylogger.log',
63
+ name: Optional[str] = None,
56
64
  logging_mode: str = 'console', # 'both', 'console', 'file', 'none'
57
65
  log_level: str = 'INFO', # 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
58
- log_file: str = 'm_app.log',
66
+ log_file: Optional[str] = None,
59
67
  log_format: str = 'json', # 默认json格式,可选'simple'
60
68
  max_log_size: int = 50, # MB
61
69
  backup_count: int = 5,
@@ -89,10 +97,16 @@ class MyLogger:
89
97
  :param flush_interval: 定时刷新日志器间隔(秒)
90
98
  """
91
99
  log_path = os.path.join(os.path.expanduser("~"), 'logfile')
100
+ if name is None:
101
+ name = get_caller_filename()
92
102
  self.name = name
93
103
  self.logging_mode = logging_mode.lower()
94
104
  self.log_level = log_level.upper()
95
- self.log_file = os.path.join(log_path, log_file)
105
+ # log_file 自动为 name+'.log'
106
+ if log_file is None:
107
+ self.log_file = os.path.join(log_path, f"{self.name}.log")
108
+ else:
109
+ self.log_file = os.path.join(log_path, log_file)
96
110
  if not os.path.isdir(os.path.dirname(self.log_file)):
97
111
  os.makedirs(os.path.dirname(self.log_file))
98
112
  self.log_format = log_format
@@ -116,22 +130,17 @@ class MyLogger:
116
130
  self._last_metrics_time = 0
117
131
  self._metrics_cache = {}
118
132
 
119
- # 异步日志相关
120
- self._log_queue = queue.Queue(maxsize=buffer_size)
121
- self._async_thread = None
122
- self._stop_event = threading.Event()
123
-
124
- # 定时刷新相关
125
- self._flush_thread = None
126
- self._last_flush_time = 0
127
- self._start_flush_thread()
133
+ # 异步日志相关(标准库实现)
134
+ self._log_queue = None
135
+ self._queue_listener = None
136
+ self._handlers = []
128
137
 
129
138
  # 创建日志记录器
130
139
  self.logger = logging.getLogger(name)
131
140
  self._init_logging()
132
141
 
133
142
  if self.enable_async:
134
- self._start_async_logging()
143
+ self._setup_async_logging()
135
144
 
136
145
  atexit.register(self.shutdown)
137
146
 
@@ -186,59 +195,37 @@ class MyLogger:
186
195
 
187
196
  # 定义日志格式
188
197
  if self.log_format.lower() == 'simple':
189
- # 简单文本格式
190
198
  class SimpleFormatter(logging.Formatter):
191
199
  def format(self, record):
192
- # 基础日志信息
193
200
  msg = super().format(record)
194
-
195
- # 添加上下文信息
196
201
  if hasattr(record, 'extra_data') and record.extra_data:
197
202
  context_data = record.extra_data.get('context_data', {})
198
203
  if context_data:
199
204
  msg += f" | Context: {context_data}"
200
-
201
- # 添加性能指标
202
205
  metrics = record.extra_data.get('性能指标', {})
203
206
  if metrics:
204
207
  msg += f" | Metrics: {metrics}"
205
-
206
- # 添加其他额外数据
207
208
  extra = {k: v for k, v in record.extra_data.items()
208
209
  if k not in ('context_data', '性能指标')}
209
210
  if extra:
210
211
  msg += f" | Extra: {extra}"
211
-
212
212
  return msg
213
-
214
213
  formatter = SimpleFormatter('%(asctime)s - %(levelname)s - %(message)s')
215
214
  formatter.datefmt = '%Y-%m-%d %H:%M:%S'
216
215
  else:
217
- # 结构化JSON格式
218
216
  class StructuredFormatter(logging.Formatter):
219
217
  def format(self, record):
220
218
  log_data = {
221
- 'timestamp': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
219
+ 'time': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
222
220
  'level': record.levelname,
223
- 'message': record.getMessage(),
224
- # 'name': record.name,
225
- # 'module': record.module,
226
- # 'function': record.funcName,
221
+ 'msg': record.getMessage(),
227
222
  }
228
-
229
- # 添加额外字段
230
223
  if hasattr(record, 'extra_data'):
231
224
  log_data.update(record.extra_data)
232
-
233
- # 添加上下文信息
234
225
  if hasattr(record, 'context_data'):
235
226
  log_data.update(record.context_data)
236
-
237
- # 添加异常信息
238
227
  if record.exc_info:
239
228
  log_data['异常'] = self.formatException(record.exc_info)
240
-
241
- # 过滤敏感信息
242
229
  if hasattr(record, 'extra_data') and '过滤' in record.extra_data:
243
230
  sensitive_fields = record.extra_data['过滤']
244
231
  for field in sensitive_fields:
@@ -246,23 +233,19 @@ class MyLogger:
246
233
  log_data[field] = '***'
247
234
  if isinstance(log_data.get('message'), str):
248
235
  log_data['message'] = log_data['message'].replace(field, '***')
249
-
250
236
  return json.dumps(log_data, ensure_ascii=False, default=str)
251
-
252
237
  formatter = StructuredFormatter()
253
238
 
254
- # 根据模式添加相应的handler
239
+ # 只创建handlers,不加到logger上(异步时由QueueListener管理)
240
+ self._handlers = []
255
241
  if self.logging_mode in ('both', 'console'):
256
242
  console_handler = logging.StreamHandler()
257
243
  console_handler.setFormatter(formatter)
258
- self.logger.addHandler(console_handler)
259
-
244
+ self._handlers.append(console_handler)
260
245
  if self.logging_mode in ('both', 'file'):
261
- # 确保日志目录存在
262
246
  log_dir = os.path.dirname(self.log_file)
263
247
  if log_dir and not os.path.exists(log_dir):
264
248
  os.makedirs(log_dir)
265
-
266
249
  file_handler = logging.handlers.RotatingFileHandler(
267
250
  filename=self.log_file,
268
251
  maxBytes=self.max_log_size * 1024 * 1024,
@@ -271,7 +254,21 @@ class MyLogger:
271
254
  delay=False
272
255
  )
273
256
  file_handler.setFormatter(formatter)
274
- self.logger.addHandler(file_handler)
257
+ self._handlers.append(file_handler)
258
+
259
+ if not self.enable_async:
260
+ for handler in self._handlers:
261
+ self.logger.addHandler(handler)
262
+
263
+ def _setup_async_logging(self):
264
+ # 标准库异步日志实现
265
+ self._log_queue = queue.Queue(maxsize=self.buffer_size) # 有界队列(maxsize=buffer_size),队列满时 put 会阻塞
266
+ queue_handler = logging.handlers.QueueHandler(self._log_queue)
267
+ self.logger.addHandler(queue_handler)
268
+ self._queue_listener = logging.handlers.QueueListener(
269
+ self._log_queue, *self._handlers, respect_handler_level=True
270
+ )
271
+ self._queue_listener.start()
275
272
 
276
273
  def _get_system_metrics(self) -> Dict[str, Any]:
277
274
  """获取系统资源使用指标"""
@@ -319,34 +316,6 @@ class MyLogger:
319
316
  extra={'extra_data': {'filter_error': str(e)}})
320
317
  return True # 所有过滤器都返回 True,则记录该日志
321
318
 
322
- def _async_log_worker(self):
323
- """异步日志工作线程"""
324
- while not self._stop_event.is_set() or not self._log_queue.empty():
325
- try:
326
- log_args = self._log_queue.get(timeout=0.1)
327
- if log_args:
328
- level, message, extra = log_args
329
- self._sync_log(level, message, extra)
330
- except queue.Empty:
331
- continue
332
- except Exception as e:
333
- # 防止日志线程崩溃
334
- try:
335
- self.logger.error(f"日志线程崩溃: {e}",
336
- extra={'extra_data': {'async_error': str(e)}})
337
- except:
338
- pass
339
-
340
- def _start_async_logging(self):
341
- """启动异步日志线程"""
342
- self._stop_event.clear()
343
- self._async_thread = threading.Thread(
344
- target=self._async_log_worker,
345
- name=f"{self.name}_async_logger",
346
- daemon=True
347
- )
348
- self._async_thread.start()
349
-
350
319
  def log_error_handler(retry_times=0, fallback_level='error'):
351
320
  """
352
321
  日志错误处理装饰器
@@ -387,7 +356,7 @@ class MyLogger:
387
356
 
388
357
  @log_error_handler(retry_times=1, fallback_level='warning')
389
358
  def _sync_log(self, level: str, message: str, extra: Optional[Dict] = None):
390
- """同步日志记录"""
359
+ """同步日志记录(兼容异步,直接走logger)"""
391
360
  if not hasattr(self.logger, level.lower()):
392
361
  return
393
362
 
@@ -430,7 +399,7 @@ class MyLogger:
430
399
  if not self._apply_filters(level, message, log_extra):
431
400
  return
432
401
 
433
- # 记录日志
402
+ # 记录日志(直接走logger,异步/同步由handler决定)
434
403
  getattr(self.logger, level.lower())(message, extra={'extra_data': log_extra})
435
404
 
436
405
  def log(self, level: str, message: str, extra: Optional[Dict] = None):
@@ -443,18 +412,7 @@ class MyLogger:
443
412
  """
444
413
  if not hasattr(self.logger, level.lower()):
445
414
  return
446
-
447
- if self.enable_async:
448
- try:
449
- self._log_queue.put((level, message, extra), timeout=0.1)
450
- except queue.Full:
451
- # 队列满时降级为同步日志,添加队列状态信息到extra
452
- if extra is None:
453
- extra = {}
454
- extra['queue_status'] = 'full'
455
- self._sync_log(level, message, extra)
456
- else:
457
- self._sync_log(level, message, extra)
415
+ self._sync_log(level, message, extra)
458
416
 
459
417
  def set_level(self, level: str):
460
418
  """动态设置日志级别"""
@@ -596,26 +554,19 @@ class MyLogger:
596
554
 
597
555
  def shutdown(self):
598
556
  """关闭日志记录器,确保所有日志被刷新"""
599
- if self.enable_async:
600
- self._stop_event.set()
601
- # 等待队列清空
602
- while not self._log_queue.empty():
603
- time.sleep(0.1)
604
- if self._async_thread and self._async_thread.is_alive():
605
- self._async_thread.join(timeout=0.5)
606
-
607
- # 确保所有handler被刷新
608
- if self._flush_thread:
609
- self._flush_handlers()
610
- if self._flush_thread.is_alive():
611
- self._flush_thread.join(timeout=0.5)
612
-
557
+ if self.enable_async and self._queue_listener:
558
+ self._queue_listener.stop()
613
559
  # 关闭所有handler
614
560
  for handler in self.logger.handlers:
615
561
  try:
616
562
  handler.close()
617
563
  except:
618
564
  pass
565
+ for handler in getattr(self, '_handlers', []):
566
+ try:
567
+ handler.close()
568
+ except:
569
+ pass
619
570
 
620
571
  def main():
621
572
  # 创建日志记录器
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: mdbq
3
- Version: 3.12.7
3
+ Version: 4.0.0
4
4
  Home-page: https://pypi.org/project/mdbq
5
5
  Author: xigua,
6
6
  Author-email: 2587125111@qq.com
@@ -1,11 +1,11 @@
1
1
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
2
- mdbq/__version__.py,sha256=R4aIh4bYjUTB7AEjcPn5iJfp5jMY5s481CDtN5JhL_k,18
2
+ mdbq/__version__.py,sha256=rGzKotWztr8AKlJYeZkowEC43bIAfiOqR7lVganyrJE,17
3
3
  mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
4
4
  mdbq/aggregation/query_data.py,sha256=nxL8hSy8yI1QLlqnkTNHHQSxRfo-6WKL5OA-N4xLB7c,179832
5
5
  mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
6
6
  mdbq/config/config.py,sha256=eaTfrfXQ65xLqjr5I8-HkZd_jEY1JkGinEgv3TSLeoQ,3170
7
7
  mdbq/log/__init__.py,sha256=Mpbrav0s0ifLL7lVDAuePEi1hJKiSHhxcv1byBKDl5E,15
8
- mdbq/log/mylogger.py,sha256=Crw6LwVo3I3IUbzIETu8f46Quza3CTCh-qYf4edbBPo,24139
8
+ mdbq/log/mylogger.py,sha256=qBOHJK_h6R_SpfQ1yC5fAlJIEm6uro810i-47uA9C_U,22872
9
9
  mdbq/log/spider_logging.py,sha256=-ozWWEGm3HVv604ozs_OOvVwumjokmUPwbaodesUrPY,1664
10
10
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
11
11
  mdbq/mysql/deduplicator.py,sha256=iMloLJz4i0w5UGyIwya-GiQS1iMq-DMmA7XPU8PP5k4,73138
@@ -25,7 +25,7 @@ mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
25
25
  mdbq/redis/getredis.py,sha256=YHgCKO8mEsslwet33K5tGss-nrDDwPnOSlhA9iBu0jY,24078
26
26
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
27
27
  mdbq/spider/aikucun.py,sha256=GaekqY55pDEgVxbeQzHHshnQMC2YDv3v4mA7cQwjli4,21019
28
- mdbq-3.12.7.dist-info/METADATA,sha256=-8Uu-vIm1_S6-2fqFV9AnzLDD9xnDBySgXXqZKDPIqY,364
29
- mdbq-3.12.7.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
30
- mdbq-3.12.7.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
31
- mdbq-3.12.7.dist-info/RECORD,,
28
+ mdbq-4.0.0.dist-info/METADATA,sha256=oGG1nyu37HUrUS5Tes3s5NeleIiE_neE2-rCvcceMaU,363
29
+ mdbq-4.0.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
30
+ mdbq-4.0.0.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
31
+ mdbq-4.0.0.dist-info/RECORD,,
File without changes