xmi-logger 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- xmi_logger/advanced_features.py +667 -0
- xmi_logger/xmi_logger.py +571 -159
- xmi_logger-0.0.5.dist-info/METADATA +480 -0
- xmi_logger-0.0.5.dist-info/RECORD +7 -0
- {xmi_logger-0.0.3.dist-info → xmi_logger-0.0.5.dist-info}/WHEEL +1 -1
- xmi_logger-0.0.3.dist-info/METADATA +0 -212
- xmi_logger-0.0.3.dist-info/RECORD +0 -6
- {xmi_logger-0.0.3.dist-info → xmi_logger-0.0.5.dist-info}/top_level.txt +0 -0
xmi_logger/xmi_logger.py
CHANGED
@@ -31,14 +31,14 @@ class XmiLogger:
     基于 Loguru 的增强日志记录器,具有以下功能:
     - 自定义日志格式
     - 日志轮转和保留策略
-
-
+    - 上下文信息管理(如 request_id)
+    - 远程日志收集(使用线程池防止阻塞)
     - 装饰器用于记录函数调用和执行时间,支持同步/异步函数
-
+    - 自定义日志级别(避免与 Loguru 预定义的冲突)
     - 统一异常处理

     新增:
-
+    - 可指定语言(中文/英文),默认中文
     - 支持按时间轮转日志
     - 支持自定义日志格式
     - 支持日志级别过滤
@@ -80,35 +80,41 @@ class XmiLogger:
         }
     }

-    #
+    # 添加类级别的缓存,使用 LRU 缓存提高性能
     _format_cache: Dict[str, str] = {}
     _message_cache: Dict[str, str] = {}
+    _location_cache: Dict[str, str] = {}
+    _stats_cache: Dict[str, Any] = {}
+    _stats_cache_time = 0
+    _stats_cache_ttl = 5  # 统计缓存TTL(秒)

     def __init__(
         self,
         file_name: str,
         log_dir: str = 'logs',
-        max_size: int = 14,
+        max_size: int = 14,  # 单位:MB
         retention: str = '7 days',
         remote_log_url: Optional[str] = None,
         max_workers: int = 3,
         work_type: bool = False,
-        language: str = 'zh',
-        rotation_time: Optional[str] = None,
-        custom_format: Optional[str] = None,
-        filter_level: str = "DEBUG",
-        compression: str = "zip",
-        enable_stats: bool = False,
-        categories: Optional[list] = None,
-        cache_size: int = 128,
+        language: str = 'zh',  # 语言选项,默认为中文
+        rotation_time: Optional[str] = None,  # 新增:按时间轮转,如 "1 day", "1 week"
+        custom_format: Optional[str] = None,  # 新增:自定义日志格式
+        filter_level: str = "DEBUG",  # 新增:日志过滤级别
+        compression: str = "zip",  # 新增:压缩格式,支持 zip, gz, tar
+        enable_stats: bool = False,  # 新增:是否启用日志统计
+        categories: Optional[list] = None,  # 新增:日志分类列表
+        cache_size: int = 128,  # 新增:缓存大小配置
+        adaptive_level: bool = False,  # 新增:自适应日志级别
+        performance_mode: bool = False,  # 新增:性能模式
     ) -> None:
         """
         初始化日志记录器。

         Args:
-            file_name (str):
+            file_name (str): 日志文件名称(主日志文件前缀)。
             log_dir (str): 日志文件目录。
-            max_size (int):
+            max_size (int): 日志文件大小(MB)超过时进行轮转。
             retention (str): 日志保留策略。
             remote_log_url (str, optional): 远程日志收集的URL。如果提供,将启用远程日志收集。
             max_workers (int): 线程池的最大工作线程数。
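A minimal usage sketch for the constructor options introduced above. Parameter names and defaults come from the diff; the `from xmi_logger import XmiLogger` import path is an assumption, not taken from the package's documentation.

    from xmi_logger import XmiLogger  # import path assumed

    log = XmiLogger(
        "app",
        log_dir="logs",
        max_size=14,                # rotate when a file exceeds 14 MB
        rotation_time="1 day",      # or rotate on a schedule
        language="zh",
        filter_level="DEBUG",
        compression="zip",
        enable_stats=True,
        categories=["api", "db", "ui"],
        cache_size=256,
        adaptive_level=True,        # new in 0.0.5
        performance_mode=False,     # new in 0.0.5
    )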
@@ -129,6 +135,8 @@ class XmiLogger:
         self.enable_stats = enable_stats
         self.categories = categories or []
         self._cache_size = cache_size
+        self.adaptive_level = adaptive_level
+        self.performance_mode = performance_mode
         self._async_queue = asyncio.Queue() if remote_log_url else None
         self._remote_task = None
         if self._async_queue:
@@ -177,12 +185,24 @@ class XmiLogger:
         self._stats_start_time = datetime.now()

     def _msg(self, key: str, **kwargs) -> str:
-        """
+        """消息格式化处理,优化性能"""
         try:
+            # 使用缓存键
+            cache_key = f"{self.language}:{key}:{hash(frozenset(kwargs.items()))}"
+
+            # 检查缓存
+            if cache_key in self._message_cache:
+                return self._message_cache[cache_key]
+
             # 获取消息模板
             text = self._LANG_MAP.get(self.language, {}).get(key, key)

-            #
+            # 如果没有参数,直接返回模板
+            if not kwargs:
+                self._message_cache[cache_key] = text
+                return text
+
+            # 优化参数转换
             str_kwargs = {}
             for k, v in kwargs.items():
                 try:
@@ -196,11 +216,24 @@ class XmiLogger:
                     str_kwargs[k] = f"<{type(v).__name__}>"

             # 格式化消息
-
+            result = text.format(**str_kwargs)
+
+            # 缓存结果
+            self._message_cache[cache_key] = result
+
+            # 限制缓存大小
+            if len(self._message_cache) > self._cache_size:
+                # 清除最旧的缓存项
+                oldest_key = next(iter(self._message_cache))
+                del self._message_cache[oldest_key]
+
+            return result

         except KeyError as e:
+            text = self._LANG_MAP.get(self.language, {}).get(key, key)
             return f"{text} (格式化错误: 缺少参数 {e})"
         except Exception as e:
+            text = self._LANG_MAP.get(self.language, {}).get(key, key)
             return f"{text} (格式化错误: {str(e)})"

     def configure_logger(self) -> None:
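The rewritten `_msg` above is essentially a small bounded memo cache keyed by language, message key, and arguments. A standalone sketch of the same pattern with generic names (not package code; it assumes the keyword values are hashable, as the diff's cache key does):

    cache: dict = {}
    CACHE_LIMIT = 128

    def cached_format(template: str, **kwargs) -> str:
        key = f"{template}:{hash(frozenset(kwargs.items()))}"
        if key in cache:
            return cache[key]
        result = template.format(**kwargs)
        cache[key] = result
        if len(cache) > CACHE_LIMIT:
            # dicts preserve insertion order, so the first key is the oldest entry
            del cache[next(iter(cache))]
        return result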
@@ -224,7 +257,7 @@ class XmiLogger:
         # 添加文件处理器
         self._add_file_handlers(log_format)

-        #
+        # 配置远程日志(如果启用)
         if self.remote_log_url:
             self._configure_remote_logging()

@@ -271,10 +304,11 @@ class XmiLogger:
             return self.custom_format

         return (
-            "<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
+            "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
             "<level>{level: <8}</level> | "
             "ReqID:{extra[request_id]} | "
-            "<cyan>{
+            "<cyan>{file}</cyan>:<cyan>{line}</cyan>:<cyan>{function}</cyan> | "
+            "<magenta>{process}</magenta> | "
             "<level>{message}</level>"
         )

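The new default format adds milliseconds, the source file/line/function, and the process id. A minimal sketch applying the same format string to a bare Loguru logger (assumes loguru is installed; in the package this wiring happens inside configure_logger):

    import sys
    from loguru import logger

    fmt = (
        "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
        "<level>{level: <8}</level> | "
        "ReqID:{extra[request_id]} | "
        "<cyan>{file}</cyan>:<cyan>{line}</cyan>:<cyan>{function}</cyan> | "
        "<magenta>{process}</magenta> | "
        "<level>{message}</level>"
    )
    logger.remove()
    logger.add(sys.stderr, format=fmt)
    logger.bind(request_id="demo").info("rendered with the new default format")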
@@ -376,17 +410,43 @@ class XmiLogger:
             return

         try:
+            # 获取调用栈信息
+            import traceback
+            tb = traceback.extract_tb(exc_traceback)
+
             # 安全地格式化异常信息
             error_msg = self._msg('UNHANDLED_EXCEPTION') if 'UNHANDLED_EXCEPTION' in self._LANG_MAP[self.language] else "未处理的异常"

             # 安全地格式化异常值
             exc_value_str = str(exc_value) if exc_value is not None else "None"

-            #
-
+            # 获取错误发生的具体位置
+            if tb:
+                # 获取最后一个调用帧(通常是错误发生的地方)
+                last_frame = tb[-1]
+                error_location = f"{last_frame.filename}:{last_frame.lineno}:{last_frame.name}"
+                line_content = last_frame.line.strip() if last_frame.line else "未知代码行"
+            else:
+                error_location = "未知位置"
+                line_content = "未知代码行"
+
+            # 组合详细的错误消息
+            full_error_msg = (
+                f"{error_msg}: {exc_type.__name__}: {exc_value_str} | "
+                f"位置: {error_location} | "
+                f"代码: {line_content}"
+            )

-            #
+            # 记录详细错误信息
             self.logger.opt(exception=True).error(full_error_msg)
+
+            # 记录调用链信息
+            if len(tb) > 1:
+                call_chain = []
+                for frame in tb[-3:]:  # 只显示最后3层调用
+                    call_chain.append(f"{frame.filename}:{frame.lineno}:{frame.name}")
+                self.logger.error(f"调用链: {' -> '.join(call_chain)}")
+
         except Exception as e:
             # 如果格式化失败,使用最基本的错误记录
             self.logger.opt(exception=True).error(f"未处理的异常: {exc_type.__name__}")
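Both this handler and `_log_exception` further down locate the failing frame with `traceback.extract_tb`. A standalone sketch of that location logic (a plain function, not the class method):

    import traceback

    def locate(exc: BaseException) -> str:
        tb = traceback.extract_tb(exc.__traceback__)
        if not tb:
            return "<no traceback>"
        last = tb[-1]  # innermost frame, where the error was raised
        code = last.line.strip() if last.line else "<unknown>"
        return f"{last.filename}:{last.lineno}:{last.name} | {code}"

    try:
        1 / 0
    except ZeroDivisionError as e:
        print(locate(e))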
@@ -422,25 +482,43 @@ class XmiLogger:
             self._remote_task = asyncio.create_task(remote_worker())

     async def _send_to_remote_async(self, message: Any) -> None:
-        """
+        """异步发送日志到远程服务器,优化性能"""
         log_entry = message.record
+
+        # 预构建常用字段,减少重复计算
+        time_str = log_entry["time"].strftime("%Y-%m-%d %H:%M:%S")
+        level_name = log_entry["level"].name
+        request_id = log_entry["extra"].get("request_id", "no-request-id")
+
+        # 优化文件路径处理
+        file_path = ""
+        if log_entry["file"]:
+            try:
+                file_path = os.path.basename(log_entry["file"].path)
+            except (AttributeError, OSError):
+                file_path = str(log_entry["file"])
+
         payload = {
-            "time":
-            "level":
+            "time": time_str,
+            "level": level_name,
             "message": log_entry["message"],
-            "file":
+            "file": file_path,
             "line": log_entry["line"],
             "function": log_entry["function"],
-            "request_id":
+            "request_id": request_id
         }

-
+        # 使用连接池优化网络请求
+        connector = aiohttp.TCPConnector(limit=10, limit_per_host=5)
+        timeout = aiohttp.ClientTimeout(total=5)
+
+        async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
             for attempt in range(3):
                 try:
                     async with session.post(
                         self.remote_log_url,
                         json=payload,
-
+                        headers={"Content-Type": "application/json"}
                     ) as response:
                         response.raise_for_status()
                         return
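A reduced sketch of the pooled, retrying POST introduced above. The URL and the backoff policy are placeholders; the real method builds its payload from a Loguru record instead of taking it as an argument:

    import asyncio
    import aiohttp

    async def post_with_retry(url: str, payload: dict, attempts: int = 3) -> bool:
        connector = aiohttp.TCPConnector(limit=10, limit_per_host=5)  # bounded connection pool
        timeout = aiohttp.ClientTimeout(total=5)
        async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
            for attempt in range(attempts):
                try:
                    async with session.post(
                        url, json=payload,
                        headers={"Content-Type": "application/json"},
                    ) as resp:
                        resp.raise_for_status()
                        return True
                except (aiohttp.ClientError, asyncio.TimeoutError):
                    await asyncio.sleep(2 ** attempt)  # simple exponential backoff
        return False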
@@ -534,9 +612,9 @@ class XmiLogger:
         增强版日志装饰器,支持自定义日志级别和跟踪配置

         Args:
-            msg (str): 支持多语言的异常提示信息key
-            level (str):
-            trace (bool):
+            msg (str): 支持多语言的异常提示信息key(使用_LANG_MAP中的键)
+            level (str): 记录异常的日志级别(默认ERROR)
+            trace (bool): 是否记录完整堆栈跟踪(默认True)
         """
         def decorator(func):
             _msg_key = msg or 'UNHANDLED_EXCEPTION'
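Usage of the decorator documented above, mirroring the demo code that 0.0.5 removes from the bottom of this module (see the deleted lines in the last hunk). The import path is assumed:

    import time
    from xmi_logger import XmiLogger  # import path assumed

    log = XmiLogger("test_log")

    @log.log_decorator("除零错误", level="ERROR")
    def test_zero_division_error(a, b):
        return a / b

    @log.log_decorator("耗时操作", level="INFO", trace=False)  # trace=False: no stack dump
    def compute_something_sync():
        time.sleep(1)
        return "同步计算完成"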
@@ -578,10 +656,14 @@ class XmiLogger:

     def _log_exception(self, func_name: str, error: Exception, msg_key: str,
                        level: str, trace: bool, is_async: bool):
-        """
+        """统一的异常记录处理,增强错误信息显示"""
         try:
             log_method = getattr(self.logger, level.lower(), self.logger.error)

+            # 获取调用栈信息
+            import traceback
+            tb = traceback.extract_tb(error.__traceback__)
+
             # 安全地获取消息
             error_msg = self._msg(msg_key) if msg_key in self._LANG_MAP[self.language] else f"发生异常: {msg_key}"

@@ -589,14 +671,35 @@ class XmiLogger:
             error_type = type(error).__name__
             error_value = str(error) if error is not None else "None"

-            #
-
+            # 获取错误发生的具体位置
+            if tb:
+                # 获取最后一个调用帧(通常是错误发生的地方)
+                last_frame = tb[-1]
+                error_location = f"{last_frame.filename}:{last_frame.lineno}:{last_frame.name}"
+                line_content = last_frame.line.strip() if last_frame.line else "未知代码行"
+            else:
+                error_location = "未知位置"
+                line_content = "未知代码行"
+
+            # 组合详细的错误消息
+            full_error_msg = (
+                f"{error_msg} [{error_type}]: {error_value} | "
+                f"位置: {error_location} | "
+                f"代码: {line_content}"
+            )

             if trace:
-                #
+                # 记录详细错误消息
                 log_method(full_error_msg)
-                #
-                self.logger.opt(exception=True).error("
+                # 记录完整的异常堆栈
+                self.logger.opt(exception=True).error("完整异常堆栈:")
+
+                # 记录调用链信息
+                if len(tb) > 1:
+                    call_chain = []
+                    for frame in tb[-3:]:  # 只显示最后3层调用
+                        call_chain.append(f"{frame.filename}:{frame.lineno}:{frame.name}")
+                    self.logger.error(f"调用链: {' -> '.join(call_chain)}")
             else:
                 log_method(full_error_msg)

@@ -615,8 +718,16 @@ class XmiLogger:
         记录函数调用开始的公共逻辑。
         """
         def format_arg(arg):
+            """优化的参数格式化函数"""
             try:
-
+                if isinstance(arg, (str, int, float, bool)):
+                    return str(arg)
+                elif isinstance(arg, (list, tuple)):
+                    return f"[{len(arg)} items]"
+                elif isinstance(arg, dict):
+                    return f"{{{len(arg)} items}}"
+                else:
+                    return str(arg)
             except Exception:
                 return f"<{type(arg).__name__}>"

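The helper above keeps call logs short by summarizing containers rather than repr-ing them. For illustration, the same rules as a standalone function with example output (not package code):

    def summarize(value) -> str:
        if isinstance(value, (str, int, float, bool)):
            return str(value)
        if isinstance(value, (list, tuple)):
            return f"[{len(value)} items]"
        if isinstance(value, dict):
            return f"{{{len(value)} items}}"
        return str(value)

    print(summarize([1, 2, 3]))   # "[3 items]"
    print(summarize({"a": 1}))    # "{1 items}"
    print(summarize("hello"))     # "hello"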
@@ -646,8 +757,16 @@ class XmiLogger:
         记录函数调用结束的公共逻辑。
         """
         def format_result(res):
+            """优化的结果格式化函数"""
             try:
-
+                if isinstance(res, (str, int, float, bool)):
+                    return str(res)
+                elif isinstance(res, (list, tuple)):
+                    return f"[{len(res)} items]"
+                elif isinstance(res, dict):
+                    return f"{{{len(res)} items}}"
+                else:
+                    return str(res)
             except Exception:
                 return f"<{type(res).__name__}>"

@@ -700,7 +819,13 @@ class XmiLogger:
             self._stats['error_rate'] = self._stats['error'] / total_time

     def get_stats(self) -> Dict[str, Any]:
-        """
+        """获取详细的日志统计信息,优化性能"""
+        current_time = datetime.now()
+
+        # 检查缓存是否有效
+        if (current_time.timestamp() - self._stats_cache_time) < self._stats_cache_ttl:
+            return self._stats_cache.copy()
+
         with self._stats_lock:
             stats = {
                 'total': self._stats['total'],
@@ -708,11 +833,11 @@ class XmiLogger:
                 'warning': self._stats['warning'],
                 'info': self._stats['info'],
                 'debug': self._stats['debug'],
-                'duration': str(
+                'duration': str(current_time - self._stats_start_time),
                 'by_category': dict(self._stats['by_category']),
                 'by_hour': dict(self._stats['by_hour']),
                 'error_rate': float(self._stats['error_rate']),
-                'time_since_last_error': str(
+                'time_since_last_error': str(current_time - self._stats['last_error_time']) if self._stats['last_error_time'] else None
             }

         # 计算每小时的平均日志数
@@ -729,6 +854,10 @@ class XmiLogger:
             for error in self._stats['errors'][-10:]
         ]

+        # 更新缓存
+        self._stats_cache = stats.copy()
+        self._stats_cache_time = current_time.timestamp()
+
         return stats

     def get_stats_summary(self) -> str:
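get_stats now sits behind a small time-to-live cache (_stats_cache_ttl, 5 seconds by default). The generic pattern, sketched outside the class with placeholder names:

    import time

    _cache: dict = {}
    _cache_time = 0.0
    TTL_SECONDS = 5.0

    def cached(compute):
        """Return a cached copy of compute() unless the cache is older than TTL_SECONDS."""
        global _cache, _cache_time
        now = time.time()
        if _cache and (now - _cache_time) < TTL_SECONDS:
            return _cache.copy()
        _cache = compute()
        _cache_time = now
        return _cache.copy()

    stats = cached(lambda: {"total": 42, "error": 1})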
@@ -771,129 +900,412 @@ class XmiLogger:
         }
         self._stats_start_time = datetime.now()

+    def get_current_location(self) -> str:
+        """获取当前调用位置信息,优化性能"""
+        import inspect
+        import threading
+
+        # 使用线程本地缓存
+        thread_id = threading.get_ident()
+        cache_key = f"location_{thread_id}"
+
+        # 检查缓存
+        if cache_key in self._location_cache:
+            return self._location_cache[cache_key]
+
+        frame = inspect.currentframe()
+        try:
+            # 获取调用栈
+            stack = inspect.stack()
+            if len(stack) > 1:
+                # 获取调用者的信息
+                caller = stack[1]
+                filename = caller.filename
+                lineno = caller.lineno
+                function = caller.function
+                location = f"{filename}:{lineno}:{function}"
+
+                # 缓存结果
+                self._location_cache[cache_key] = location
+
+                # 限制缓存大小
+                if len(self._location_cache) > self._cache_size:
+                    # 清除最旧的缓存项
+                    oldest_key = next(iter(self._location_cache))
+                    del self._location_cache[oldest_key]
+
+                return location
+            else:
+                return "未知位置"
+        finally:
+            # 清理frame引用
+            del frame
+
+    def log_with_location(self, level: str, message: str, include_location: bool = True):
+        """带位置信息的日志记录"""
+        if include_location:
+            location = self.get_current_location()
+            full_message = f"[{location}] {message}"
+        else:
+            full_message = message
+
+        log_method = getattr(self.logger, level.lower(), self.logger.info)
+        log_method(full_message)
+
+    def get_performance_stats(self) -> Dict[str, Any]:
+        """获取性能统计信息"""
+        return {
+            'cache_sizes': {
+                'message_cache': len(self._message_cache),
+                'format_cache': len(self._format_cache),
+                'location_cache': len(self._location_cache),
+                'stats_cache': len(self._stats_cache)
+            },
+            'cache_hit_rates': {
+                'message_cache_hits': getattr(self, '_message_cache_hits', 0),
+                'location_cache_hits': getattr(self, '_location_cache_hits', 0),
+                'stats_cache_hits': getattr(self, '_stats_cache_hits', 0)
+            },
+            'memory_usage': {
+                'total_cache_size': (
+                    len(self._message_cache) +
+                    len(self._format_cache) +
+                    len(self._location_cache) +
+                    len(self._stats_cache)
+                )
+            },
+            'config': {
+                'cache_size': self._cache_size,
+                'stats_cache_ttl': self._stats_cache_ttl
+            }
+        }

+    def clear_caches(self) -> None:
+        """清除所有缓存"""
+        self._message_cache.clear()
+        self._format_cache.clear()
+        self._location_cache.clear()
+        self._stats_cache.clear()
+        self._stats_cache_time = 0

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
-        "<magenta>{process}</magenta> - "
-        "<level>{message}</level>"
-    )
-
-    # 初始化日志记录器,使用新功能
-    log = XmiLogger(
-        "test_log",
-        rotation_time="1 day",  # 每天轮转
-        custom_format=custom_format,  # 自定义格式
-        filter_level="DEBUG",  # 日志级别
-        compression="zip",  # 压缩格式
-        enable_stats=True,  # 启用统计
-        categories=["api", "db", "ui"]  # 日志分类
-    )
-
-    # 添加自定义日志级别
-    log.add_custom_level("IMPORTANT", no=25, color="<yellow>", icon="⚠️")
-
-    @log.log_decorator("除零错误", level="ERROR")
-    def test_zero_division_error(a, b):
-        return a / b
-
-    @log.log_decorator("JSON解析错误", level="WARNING")
-    def test_error():
-        json.loads("invalid_json")
-
-    @log.log_decorator("耗时操作", level="INFO", trace=False)
-    def compute_something_sync():
-        time.sleep(1)
-        return "同步计算完成"
-
-    @log.log_decorator("异步耗时操作")
-    async def compute_something_async():
-        await asyncio.sleep(1)
-        return "异步计算完成"
-
-    @log.log_decorator("生成随机数", level="INFO", trace=False)
-    def generate_random_number(min_val=1, max_val=100):
-        return random.randint(min_val, max_val)
-
-    # 设置请求ID
-    token = log.request_id_var.set("🦉")
-
-    try:
-        # 基本日志测试
-        xxx = "X"
-        log.info(f'这是一条信息日志{xxx}')
-        log.debug(f'这是一条调试日志{xxx}')
-        log.warning(f'这是一条警告日志{xxx}')
-        log.error(f'这是一条错误日志{xxx}')
-        log.critical(f'这是一条严重错误日志{xxx}')
-
-        # 使用自定义日志级别
-        log.log("IMPORTANT", "这是一条重要日志消息")
-
-        # 使用标签功能
-        log.log_with_tag("INFO", "这是带标签的日志", "FEATURE")
-        log.log_with_tag("WARNING", "这是带标签的警告", "DEPRECATED")
-
-        # 使用分类功能
-        log.log_with_category("INFO", "数据库连接成功", "db")
-        log.log_with_category("ERROR", "API请求失败", "api")
-        log.log_with_category("DEBUG", "UI组件渲染", "ui")
-
-        # 测试异常处理
-        try:
-            result = test_zero_division_error(1, 0)
-        except ZeroDivisionError:
-            log.exception("捕获到除零错误")
+    def batch_log(self, logs: List[Dict[str, Any]]) -> None:
+        """批量记录日志,提高性能"""
+        for log_entry in logs:
+            level = log_entry.get('level', 'INFO')
+            message = log_entry.get('message', '')
+            tag = log_entry.get('tag')
+            category = log_entry.get('category')
+
+            if tag:
+                self.log_with_tag(level, message, tag)
+            elif category:
+                self.log_with_category(level, message, category)
+            else:
+                log_method = getattr(self.logger, level.lower(), self.logger.info)
+                log_method(message)

+    async def async_batch_log(self, logs: List[Dict[str, Any]]) -> None:
+        """异步批量记录日志"""
+        for log_entry in logs:
+            level = log_entry.get('level', 'INFO')
+            message = log_entry.get('message', '')
+            tag = log_entry.get('tag')
+            category = log_entry.get('category')
+
+            if tag:
+                self.log_with_tag(level, message, tag)
+            elif category:
+                self.log_with_category(level, message, category)
+            else:
+                log_method = getattr(self.logger, level.lower(), self.logger.info)
+                log_method(message)
+
+            # 小延迟避免阻塞
+            await asyncio.sleep(0.001)
+
+    def log_with_context(self, level: str, message: str, context: Dict[str, Any] = None):
+        """带上下文的日志记录"""
+        if context:
+            context_str = " | ".join([f"{k}={v}" for k, v in context.items()])
+            full_message = f"{message} | {context_str}"
+        else:
+            full_message = message
+
+        log_method = getattr(self.logger, level.lower(), self.logger.info)
+        log_method(full_message)
+
+    def log_with_timing(self, level: str, message: str, timing_data: Dict[str, float]):
+        """带计时信息的日志记录"""
+        timing_str = " | ".join([f"{k}={v:.3f}s" for k, v in timing_data.items()])
+        full_message = f"{message} | {timing_str}"
+
+        log_method = getattr(self.logger, level.lower(), self.logger.info)
+        log_method(full_message)
+
+    def set_adaptive_level(self, error_rate_threshold: float = 0.1,
+                           log_rate_threshold: int = 1000) -> None:
+        """设置自适应日志级别"""
+        if not self.adaptive_level:
+            return
+
+        # 获取当前统计信息
+        stats = self.get_stats()
+        current_error_rate = stats.get('error_rate', 0.0)
+        current_log_rate = stats.get('total', 0) / max(1, (datetime.now() - self._stats_start_time).total_seconds())
+
+        # 根据错误率和日志频率调整级别
+        if current_error_rate > error_rate_threshold or current_log_rate > log_rate_threshold:
+            # 提高日志级别,减少日志输出
+            if self.filter_level == "DEBUG":
+                self.filter_level = "INFO"
+                self._update_logger_level()
+            elif self.filter_level == "INFO":
+                self.filter_level = "WARNING"
+                self._update_logger_level()
+        else:
+            # 降低日志级别,增加日志输出
+            if self.filter_level == "WARNING":
+                self.filter_level = "INFO"
+                self._update_logger_level()
+            elif self.filter_level == "INFO":
+                self.filter_level = "DEBUG"
+                self._update_logger_level()
+
+    def _update_logger_level(self) -> None:
+        """更新日志记录器级别"""
         try:
-
-
-
+            # 移除现有处理器
+            self.logger.remove()
+            # 重新配置日志记录器
+            self.configure_logger()
+        except Exception as e:
+            self.logger.warning(f"更新日志级别失败: {e}")

-
-
-
+    def enable_performance_mode(self) -> None:
+        """启用性能模式"""
+        if self.performance_mode:
+            # 减少日志输出
+            self.filter_level = "WARNING"
+            self._update_logger_level()
+            # 增加缓存大小
+            self._cache_size = min(self._cache_size * 2, 2048)
+            # 禁用详细统计
+            self.enable_stats = False
+
+    def disable_performance_mode(self) -> None:
+        """禁用性能模式"""
+        if self.performance_mode:
+            # 恢复日志级别
+            self.filter_level = "INFO"
+            self._update_logger_level()
+            # 恢复缓存大小
+            self._cache_size = max(self._cache_size // 2, 128)
+            # 恢复统计功能
+            self.enable_stats = True
+
+    def compress_logs(self, days_old: int = 7) -> None:
+        """压缩指定天数之前的日志文件"""
+        import gzip
+        import shutil
+        from pathlib import Path

-
-
-
-
+        log_path = Path(self.log_dir)
+        current_time = datetime.now()
+
+        for log_file in log_path.glob(f"{self.file_name}*.log"):
+            try:
+                # 检查文件修改时间
+                file_time = datetime.fromtimestamp(log_file.stat().st_mtime)
+                days_diff = (current_time - file_time).days
+
+                if days_diff >= days_old and not log_file.name.endswith('.gz'):
+                    # 压缩文件
+                    with open(log_file, 'rb') as f_in:
+                        gz_file = log_file.with_suffix('.log.gz')
+                        with gzip.open(gz_file, 'wb') as f_out:
+                            shutil.copyfileobj(f_in, f_out)
+
+                    # 删除原文件
+                    log_file.unlink()
+                    self.logger.info(f"已压缩日志文件: {log_file.name}")
+
+            except Exception as e:
+                self.logger.error(f"压缩日志文件失败 {log_file.name}: {e}")

-
-
-
-
-
-
-
-
-
-
+    def archive_logs(self, archive_dir: str = None) -> None:
+        """归档日志文件"""
+        import shutil
+        from pathlib import Path
+
+        if archive_dir is None:
+            archive_dir = os.path.join(self.log_dir, "archive")
+
+        os.makedirs(archive_dir, exist_ok=True)
+        log_path = Path(self.log_dir)
+
+        for log_file in log_path.glob(f"{self.file_name}*.log"):
+            try:
+                # 移动文件到归档目录
+                archive_file = Path(archive_dir) / log_file.name
+                shutil.move(str(log_file), str(archive_file))
+                self.logger.info(f"已归档日志文件: {log_file.name}")
+
+            except Exception as e:
+                self.logger.error(f"归档日志文件失败 {log_file.name}: {e}")
+
+    def cleanup_old_logs(self, max_days: int = 30) -> None:
+        """清理旧日志文件"""
+        from pathlib import Path
+
+        log_path = Path(self.log_dir)
+        current_time = datetime.now()
+
+        for log_file in log_path.glob(f"{self.file_name}*.log*"):
+            try:
+                # 检查文件修改时间
+                file_time = datetime.fromtimestamp(log_file.stat().st_mtime)
+                days_diff = (current_time - file_time).days
+
+                if days_diff > max_days:
+                    log_file.unlink()
+                    self.logger.info(f"已删除旧日志文件: {log_file.name}")
+
+            except Exception as e:
+                self.logger.error(f"删除旧日志文件失败 {log_file.name}: {e}")

-
+    def analyze_logs(self, hours: int = 24) -> Dict[str, Any]:
+        """分析指定时间范围内的日志"""
+        from pathlib import Path
+        import re
+
+        log_path = Path(self.log_dir)
+        current_time = datetime.now()
+        start_time = current_time - timedelta(hours=hours)
+
+        analysis = {
+            'total_logs': 0,
+            'error_count': 0,
+            'warning_count': 0,
+            'info_count': 0,
+            'debug_count': 0,
+            'error_rate': 0.0,
+            'top_errors': [],
+            'top_warnings': [],
+            'hourly_distribution': defaultdict(int),
+            'file_distribution': defaultdict(int),
+            'function_distribution': defaultdict(int)
+        }
+
+        error_pattern = re.compile(r'ERROR.*?(\w+Error|Exception)', re.IGNORECASE)
+        warning_pattern = re.compile(r'WARNING.*?(\w+Warning)', re.IGNORECASE)
+
+        for log_file in log_path.glob(f"{self.file_name}*.log"):
+            try:
+                with open(log_file, 'r', encoding='utf-8') as f:
+                    for line in f:
+                        # 解析日志行
+                        if 'ERROR' in line:
+                            analysis['error_count'] += 1
+                            # 提取错误类型
+                            error_match = error_pattern.search(line)
+                            if error_match:
+                                error_type = error_match.group(1)
+                                analysis['top_errors'].append(error_type)
+                        elif 'WARNING' in line:
+                            analysis['warning_count'] += 1
+                            # 提取警告类型
+                            warning_match = warning_pattern.search(line)
+                            if warning_match:
+                                warning_type = warning_match.group(1)
+                                analysis['top_warnings'].append(warning_type)
+                        elif 'INFO' in line:
+                            analysis['info_count'] += 1
+                        elif 'DEBUG' in line:
+                            analysis['debug_count'] += 1
+
+                        analysis['total_logs'] += 1
+
+            except Exception as e:
+                self.logger.error(f"分析日志文件失败 {log_file.name}: {e}")
+
+        # 计算错误率
+        if analysis['total_logs'] > 0:
+            analysis['error_rate'] = analysis['error_count'] / analysis['total_logs']
+
+        # 统计最常见的错误和警告
+        from collections import Counter
+        analysis['top_errors'] = Counter(analysis['top_errors']).most_common(10)
+        analysis['top_warnings'] = Counter(analysis['top_warnings']).most_common(10)

-
-        print("\n日志统计信息:")
-        print(json.dumps(log.get_stats(), indent=2, ensure_ascii=False))
+        return analysis

-
-
-
-
+    def generate_log_report(self, hours: int = 24) -> str:
+        """生成日志报告"""
+        analysis = self.analyze_logs(hours)
+
+        report = f"""
+=== 日志分析报告 ({hours}小时) ===
+总日志数: {analysis['total_logs']}
+错误数: {analysis['error_count']}
+警告数: {analysis['warning_count']}
+信息数: {analysis['info_count']}
+调试数: {analysis['debug_count']}
+错误率: {analysis['error_rate']:.2%}

+最常见的错误类型:
 """
+
+        for error_type, count in analysis['top_errors']:
+            report += f"  {error_type}: {count}次\n"
+
+        report += "\n最常见的警告类型:\n"
+        for warning_type, count in analysis['top_warnings']:
+            report += f"  {warning_type}: {count}次\n"
+
+        return report
+
+    def export_logs_to_json(self, output_file: str, hours: int = 24) -> None:
+        """导出日志到JSON文件"""
+        import json
+        from pathlib import Path
+
+        log_path = Path(self.log_dir)
+        current_time = datetime.now()
+        start_time = current_time - timedelta(hours=hours)
+
+        logs_data = []
+
+        for log_file in log_path.glob(f"{self.file_name}*.log"):
+            try:
+                with open(log_file, 'r', encoding='utf-8') as f:
+                    for line_num, line in enumerate(f, 1):
+                        log_entry = {
+                            'file': log_file.name,
+                            'line_number': line_num,
+                            'content': line.strip(),
+                            'timestamp': current_time.isoformat()
+                        }
+                        logs_data.append(log_entry)
+
+            except Exception as e:
+                self.logger.error(f"导出日志文件失败 {log_file.name}: {e}")
+
+        try:
+            with open(output_file, 'w', encoding='utf-8') as f:
+                json.dump(logs_data, f, ensure_ascii=False, indent=2)
+            self.logger.info(f"日志已导出到: {output_file}")
+        except Exception as e:
+            self.logger.error(f"导出JSON文件失败: {e}")
+
+    def cleanup(self) -> None:
+        """清理资源"""
+        # 如果有聚合器,停止
+        if hasattr(self, 'aggregator'):
+            self.aggregator.stop()
+        # 清理缓存
+        self.clear_caches()
+        # 强制垃圾回收
+        import gc
+        gc.collect()
+        print("XmiLogger 资源清理完成")

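A combined usage sketch for the maintenance and analysis helpers added in this last hunk. Method names and parameters come from the diff; the import path, file layout, and output path are assumptions:

    from xmi_logger import XmiLogger  # import path assumed

    log = XmiLogger("app", log_dir="logs", enable_stats=True)

    # batch logging
    log.batch_log([
        {"level": "INFO", "message": "服务启动", "category": "api"},
        {"level": "WARNING", "message": "缓存未命中", "tag": "CACHE"},
    ])

    # housekeeping: gzip files older than 7 days, delete anything older than 30
    log.compress_logs(days_old=7)
    log.cleanup_old_logs(max_days=30)

    # offline analysis of the last 24 hours of log files
    print(log.generate_log_report(hours=24))
    log.export_logs_to_json("logs_export.json", hours=24)
    log.cleanup()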