xmi-logger 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,667 @@
+ #!/usr/bin/env python
+ # -*- coding:utf-8 -*-
+
+ """
+ XmiLogger advanced features module.
+ Provides smart log filtering, aggregation, monitoring, distributed logging support, and more.
+ """
+
+ import asyncio
+ import json
+ import time
+ import os
+ import sys
+ import threading
+ from datetime import datetime, timedelta
+ from typing import Dict, Any, List, Optional, Union, Callable
+ from functools import wraps
+ from collections import defaultdict, deque
+ import logging
+ import hashlib
+ import pickle
+ import zlib
+ import socket
+ import struct
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ import queue
+ import weakref
+ import gc
+ import psutil
+ import signal
+ from contextlib import contextmanager
+ import uuid
+ import inspect
+ import traceback
+ from dataclasses import dataclass, field
+ from enum import Enum
+ import re
+ import sqlite3
+ from pathlib import Path
+ import tempfile
+ import shutil
+ import gzip
+ import tarfile
+ import zipfile
+ import base64
+ import hmac
+ import secrets
+ import ssl
+ import certifi
+ import urllib3
+ from urllib3.util.retry import Retry
+ from urllib3.util import Timeout
+
+ # New: smart log filtering and aggregation
+ class LogFilter(Enum):
+     """Log filter types."""
+     NONE = "none"
+     REGEX = "regex"
+     KEYWORD = "keyword"
+     PATTERN = "pattern"
+     CUSTOM = "custom"
+
+ class LogAggregator:
+     """Log aggregator."""
+     def __init__(self, window_size: int = 100, flush_interval: float = 5.0):
+         self.window_size = window_size
+         self.flush_interval = flush_interval
+         self.buffer = deque(maxlen=window_size)
+         self.last_flush = time.time()
+         self.lock = threading.Lock()
+         self._running = True
+         self._flush_thread = threading.Thread(target=self._flush_worker, daemon=True)
+         self._flush_thread.start()
+
+     def add_log(self, log_entry: Dict[str, Any]) -> None:
+         """Add a log entry to the buffer."""
+         with self.lock:
+             self.buffer.append(log_entry)
+             if len(self.buffer) >= self.window_size:
+                 self._flush_buffer()
+
+     def _flush_buffer(self) -> None:
+         """Flush the buffer."""
+         if not self.buffer:
+             return
+
+         # Aggregate the buffered entries
+         aggregated = self._aggregate_logs()
+         # The aggregated entries could be forwarded to an external system or storage here
+         print(f"Aggregated logs: {len(self.buffer)} entries -> {len(aggregated)} entries")
+         self.buffer.clear()
+         self.last_flush = time.time()
+
+     def _aggregate_logs(self) -> List[Dict[str, Any]]:
+         """Aggregate buffered logs."""
+         if not self.buffer:
+             return []
+
+         # Group by level and the first 50 characters of the message
+         groups = defaultdict(list)
+         for log in self.buffer:
+             key = f"{log.get('level', 'INFO')}:{log.get('message', '')[:50]}"
+             groups[key].append(log)
+
+         aggregated = []
+         for key, logs in groups.items():
+             if len(logs) == 1:
+                 aggregated.append(logs[0])
+             else:
+                 # Build a single aggregated entry for repeated messages
+                 first_log = logs[0]
+                 aggregated_log = {
+                     'level': first_log.get('level', 'INFO'),
+                     'message': f"[aggregated] {first_log.get('message', '')} (repeated {len(logs)} times)",
+                     'timestamp': first_log.get('timestamp'),
+                     'count': len(logs),
+                     'original_logs': logs
+                 }
+                 aggregated.append(aggregated_log)
+
+         return aggregated
+
+     def _flush_worker(self) -> None:
+         """Background flush worker thread."""
+         while self._running:
+             time.sleep(self.flush_interval)
+             with self.lock:
+                 if self.buffer and time.time() - self.last_flush > self.flush_interval:
+                     self._flush_buffer()
+
+     def stop(self) -> None:
+         """Stop the aggregator and flush any remaining entries."""
+         self._running = False
+         self._flush_buffer()
+
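A minimal usage sketch for LogAggregator, assuming the classes defined above are in scope; the window size, interval, and sample entry are illustrative:

    aggregator = LogAggregator(window_size=50, flush_interval=2.0)
    for _ in range(120):
        # Entries with the same level and message prefix inside one window are merged.
        aggregator.add_log({'level': 'INFO', 'message': 'heartbeat ok', 'timestamp': time.time()})
    aggregator.stop()  # flushes whatever is still buffered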
+ # New: real-time monitoring and performance analysis
+ class PerformanceMonitor:
+     """Performance monitor."""
+     def __init__(self):
+         self.metrics = {
+             'log_count': 0,
+             'error_count': 0,
+             'avg_processing_time': 0.0,
+             'memory_usage': 0.0,
+             'cpu_usage': 0.0,
+             'throughput': 0.0
+         }
+         self.processing_times = deque(maxlen=1000)
+         self.start_time = time.time()
+         self.lock = threading.Lock()
+         self._monitor_thread = threading.Thread(target=self._monitor_worker, daemon=True)
+         self._monitor_thread.start()
+
+     def record_log(self, level: str, processing_time: float) -> None:
+         """Record a processed log entry."""
+         with self.lock:
+             self.metrics['log_count'] += 1
+             if level.upper() == 'ERROR':
+                 self.metrics['error_count'] += 1
+
+             self.processing_times.append(processing_time)
+             if self.processing_times:
+                 self.metrics['avg_processing_time'] = sum(self.processing_times) / len(self.processing_times)
+
+     def _monitor_worker(self) -> None:
+         """Monitoring worker thread."""
+         while True:
+             try:
+                 # Sample process resource usage
+                 process = psutil.Process()
+                 self.metrics['memory_usage'] = process.memory_info().rss / 1024 / 1024  # MB
+                 self.metrics['cpu_usage'] = process.cpu_percent()
+
+                 # Compute throughput
+                 elapsed = time.time() - self.start_time
+                 if elapsed > 0:
+                     self.metrics['throughput'] = self.metrics['log_count'] / elapsed
+
+                 time.sleep(5)  # refresh every 5 seconds
+             except Exception:
+                 time.sleep(5)
+
+     def get_metrics(self) -> Dict[str, Any]:
+         """Return a copy of the current performance metrics."""
+         with self.lock:
+             return self.metrics.copy()
+
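A short monitoring sketch for PerformanceMonitor; the timing around a hypothetical log call is illustrative:

    monitor = PerformanceMonitor()
    start = time.time()
    # ... emit a log entry here ...
    monitor.record_log('ERROR', processing_time=time.time() - start)
    print(monitor.get_metrics())  # log_count, error_count, avg_processing_time, throughput, ...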
+ # New: distributed logging support
+ class DistributedLogger:
+     """Distributed logger."""
+     def __init__(self, node_id: str, cluster_nodes: List[str] = None):
+         self.node_id = node_id
+         self.cluster_nodes = cluster_nodes or []
+         self.sequence_number = 0
+         self.lock = threading.Lock()
+         self._sequence_file = f"sequence_{node_id}.dat"
+         self._load_sequence()
+
+     def _load_sequence(self) -> None:
+         """Load the sequence number from disk."""
+         try:
+             if os.path.exists(self._sequence_file):
+                 with open(self._sequence_file, 'r') as f:
+                     self.sequence_number = int(f.read().strip())
+         except Exception:
+             self.sequence_number = 0
+
+     def _save_sequence(self) -> None:
+         """Persist the sequence number to disk."""
+         try:
+             with open(self._sequence_file, 'w') as f:
+                 f.write(str(self.sequence_number))
+         except Exception:
+             pass
+
+     def get_log_id(self) -> str:
+         """Return a unique log ID."""
+         with self.lock:
+             self.sequence_number += 1
+             self._save_sequence()
+             timestamp = int(time.time() * 1000)
+             return f"{self.node_id}_{timestamp}_{self.sequence_number}"
+
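A sketch of generating node-scoped log IDs with DistributedLogger; the node name is illustrative:

    dlog = DistributedLogger(node_id="node-a")
    entry_id = dlog.get_log_id()  # e.g. "node-a_1718000000000_1"
    entry = {'id': entry_id, 'level': 'INFO', 'message': 'service started'}
    # The counter is persisted to sequence_node-a.dat, so IDs keep increasing across restarts.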
+ # New: memory optimization and garbage collection
+ class MemoryOptimizer:
+     """Memory optimizer."""
+     def __init__(self, max_memory_mb: int = 512):
+         self.max_memory_mb = max_memory_mb
+         self.last_gc_time = time.time()
+         self.gc_interval = 60  # run GC every 60 seconds
+         self._gc_thread = threading.Thread(target=self._gc_worker, daemon=True)
+         self._gc_thread.start()
+
+     def check_memory(self) -> bool:
+         """Return True if memory usage exceeds the configured limit."""
+         process = psutil.Process()
+         memory_mb = process.memory_info().rss / 1024 / 1024
+         return memory_mb > self.max_memory_mb
+
+     def optimize_memory(self) -> None:
+         """Optimize memory usage."""
+         if self.check_memory():
+             # Force a garbage collection pass
+             collected = gc.collect()
+             print(f"Memory optimization: collected {collected} objects")
+
+             # Clear caches if a hook is provided
+             if hasattr(self, '_clear_caches'):
+                 self._clear_caches()
+
+     def _gc_worker(self) -> None:
+         """Garbage collection worker thread."""
+         while True:
+             time.sleep(self.gc_interval)
+             self.optimize_memory()
+
+ # New: smart log routing
+ class LogRouter:
+     """Smart log router."""
+     def __init__(self):
+         self.routes = {}
+         self.default_route = None
+         self.lock = threading.Lock()
+
+     def add_route(self, condition: Callable, handler: Callable) -> None:
+         """Add a routing rule."""
+         with self.lock:
+             route_id = len(self.routes)
+             self.routes[route_id] = (condition, handler)
+
+     def set_default_route(self, handler: Callable) -> None:
+         """Set the default route."""
+         self.default_route = handler
+
+     def route_log(self, log_entry: Dict[str, Any]) -> None:
+         """Route a log entry to the first matching handler."""
+         with self.lock:
+             for route_id, (condition, handler) in self.routes.items():
+                 if condition(log_entry):
+                     handler(log_entry)
+                     return
+
+             if self.default_route:
+                 self.default_route(log_entry)
+
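A minimal routing sketch for LogRouter; the condition and handler callables are illustrative:

    router = LogRouter()
    router.add_route(
        lambda entry: entry.get('level') == 'ERROR',        # condition
        lambda entry: print('ERROR ->', entry['message'])   # handler for matching entries
    )
    router.set_default_route(lambda entry: print('default ->', entry['message']))

    router.route_log({'level': 'ERROR', 'message': 'disk full'})  # handled by the ERROR route
    router.route_log({'level': 'INFO', 'message': 'started'})     # falls through to the default route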
+ # New: log encryption and security
+ class LogSecurity:
+     """Log security module."""
+     def __init__(self, encryption_key: str = None):
+         try:
+             from cryptography.fernet import Fernet
+             self.encryption_key = encryption_key or Fernet.generate_key()
+             self.cipher = Fernet(self.encryption_key)
+         except ImportError:
+             print("Warning: cryptography is not installed; encryption will be unavailable")
+             self.cipher = None
+
+         # Capture the key and operator (group 1) so the quoted value can be masked
+         self.sensitive_patterns = [
+             r'(password["\']?\s*[:=]\s*)["\'][^"\']*["\']',
+             r'(api_key["\']?\s*[:=]\s*)["\'][^"\']*["\']',
+             r'(token["\']?\s*[:=]\s*)["\'][^"\']*["\']',
+             r'(secret["\']?\s*[:=]\s*)["\'][^"\']*["\']'
+         ]
+         self.compiled_patterns = [re.compile(pattern, re.IGNORECASE) for pattern in self.sensitive_patterns]
+
+     def sanitize_message(self, message: str) -> str:
+         """Mask sensitive values such as passwords, API keys, and tokens."""
+         sanitized = message
+         for pattern in self.compiled_patterns:
+             # Keep the key and operator, replace the quoted value with a mask
+             sanitized = pattern.sub(r'\1"***"', sanitized)
+         return sanitized
+
+     def encrypt_log(self, log_data: bytes) -> bytes:
+         """Encrypt log data."""
+         if self.cipher is None:
+             return log_data
+         return self.cipher.encrypt(log_data)
+
+     def decrypt_log(self, encrypted_data: bytes) -> bytes:
+         """Decrypt log data."""
+         if self.cipher is None:
+             return encrypted_data
+         return self.cipher.decrypt(encrypted_data)
+
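A sanitization and encryption sketch for LogSecurity; the sample message is illustrative, and the encrypt/decrypt part only runs when the optional cryptography dependency is installed:

    security = LogSecurity()
    raw = 'login failed, password="hunter2", token="abc123"'
    print(security.sanitize_message(raw))  # quoted values are replaced with "***"

    if security.cipher is not None:
        blob = security.encrypt_log(b'sensitive log line')
        assert security.decrypt_log(blob) == b'sensitive log line'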
+ # New: log compression and archiving
+ class LogArchiver:
+     """Log archiver."""
+     def __init__(self, archive_dir: str = "archives"):
+         self.archive_dir = archive_dir
+         os.makedirs(archive_dir, exist_ok=True)
+
+     def compress_file(self, file_path: str, compression_type: str = "gzip") -> str:
+         """Compress a file and return the archive path."""
+         if compression_type == "gzip":
+             archive_path = f"{file_path}.gz"
+             with open(file_path, 'rb') as f_in:
+                 with gzip.open(archive_path, 'wb') as f_out:
+                     shutil.copyfileobj(f_in, f_out)
+         elif compression_type == "zip":
+             archive_path = f"{file_path}.zip"
+             with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+                 zipf.write(file_path, os.path.basename(file_path))
+         elif compression_type == "tar":
+             archive_path = f"{file_path}.tar.gz"
+             with tarfile.open(archive_path, 'w:gz') as tar:
+                 tar.add(file_path, arcname=os.path.basename(file_path))
+         else:
+             raise ValueError(f"Unsupported compression type: {compression_type}")
+
+         return archive_path
+
+     def archive_logs(self, log_dir: str, days_old: int = 7) -> List[str]:
+         """Archive log files older than the given number of days."""
+         archived_files = []
+         current_time = datetime.now()
+
+         for file_path in Path(log_dir).glob("*.log"):
+             file_time = datetime.fromtimestamp(file_path.stat().st_mtime)
+             if (current_time - file_time).days >= days_old:
+                 try:
+                     archive_path = self.compress_file(str(file_path))
+                     os.remove(file_path)
+                     archived_files.append(archive_path)
+                 except Exception as e:
+                     print(f"Failed to archive {file_path}: {e}")
+
+         return archived_files
+
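A compression and archiving sketch for LogArchiver; the paths and retention period are illustrative:

    archiver = LogArchiver(archive_dir="archives")
    gz_path = archiver.compress_file("logs/app.log")       # gzip by default -> logs/app.log.gz
    archived = archiver.archive_logs("logs", days_old=7)   # compress and remove logs older than a week
    print(gz_path, archived)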
+ # New: log database support
+ class LogDatabase:
+     """SQLite-backed log storage."""
+     def __init__(self, db_path: str = "logs.db"):
+         self.db_path = db_path
+         self._init_database()
+
+     def _init_database(self) -> None:
+         """Initialize the database schema."""
+         with sqlite3.connect(self.db_path) as conn:
+             conn.execute("""
+                 CREATE TABLE IF NOT EXISTS logs (
+                     id INTEGER PRIMARY KEY AUTOINCREMENT,
+                     timestamp TEXT NOT NULL,
+                     level TEXT NOT NULL,
+                     message TEXT NOT NULL,
+                     file TEXT,
+                     line INTEGER,
+                     function TEXT,
+                     process_id INTEGER,
+                     thread_id INTEGER,
+                     extra_data TEXT
+                 )
+             """)
+
+             conn.execute("""
+                 CREATE INDEX IF NOT EXISTS idx_timestamp ON logs(timestamp)
+             """)
+
+             conn.execute("""
+                 CREATE INDEX IF NOT EXISTS idx_level ON logs(level)
+             """)
+
+     def insert_log(self, log_entry: Dict[str, Any]) -> None:
+         """Insert a log record."""
+         with sqlite3.connect(self.db_path) as conn:
+             conn.execute("""
+                 INSERT INTO logs (timestamp, level, message, file, line, function, process_id, thread_id, extra_data)
+                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+             """, (
+                 log_entry.get('timestamp'),
+                 log_entry.get('level'),
+                 log_entry.get('message'),
+                 log_entry.get('file'),
+                 log_entry.get('line'),
+                 log_entry.get('function'),
+                 log_entry.get('process_id'),
+                 log_entry.get('thread_id'),
+                 json.dumps(log_entry.get('extra_data', {}))
+             ))
+
+     def query_logs(self, conditions: Dict[str, Any] = None, limit: int = 1000) -> List[Dict[str, Any]]:
+         """Query logs, optionally filtered by column equality conditions."""
+         query = "SELECT * FROM logs"
+         params = []
+
+         if conditions:
+             where_clauses = []
+             for key, value in conditions.items():
+                 where_clauses.append(f"{key} = ?")
+                 params.append(value)
+             query += " WHERE " + " AND ".join(where_clauses)
+
+         query += " ORDER BY timestamp DESC LIMIT ?"
+         params.append(limit)
+
+         with sqlite3.connect(self.db_path) as conn:
+             cursor = conn.execute(query, params)
+             columns = [description[0] for description in cursor.description]
+             return [dict(zip(columns, row)) for row in cursor.fetchall()]
+
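A write-then-query sketch for LogDatabase; the path and values are illustrative. Note that condition keys are interpolated into the SQL as column names, so they should come from trusted code rather than user input:

    db = LogDatabase(db_path="logs.db")
    db.insert_log({
        'timestamp': datetime.now().isoformat(),
        'level': 'ERROR',
        'message': 'connection refused',
        'file': 'worker.py',
        'line': 42,
        'function': 'connect',
    })
    recent_errors = db.query_logs(conditions={'level': 'ERROR'}, limit=10)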
+ # New: log stream processing
+ class LogStreamProcessor:
+     """Log stream processor."""
+     def __init__(self, processors: List[Callable] = None):
+         self.processors = processors or []
+         self.input_queue = queue.Queue()
+         self.output_queue = queue.Queue()
+         self._running = True
+         self._processor_thread = threading.Thread(target=self._process_worker, daemon=True)
+         self._processor_thread.start()
+
+     def add_processor(self, processor: Callable) -> None:
+         """Add a processor to the pipeline."""
+         self.processors.append(processor)
+
+     def process_log(self, log_entry: Dict[str, Any]) -> None:
+         """Queue a log entry for processing."""
+         self.input_queue.put(log_entry)
+
+     def _process_worker(self) -> None:
+         """Processing worker thread."""
+         while self._running:
+             try:
+                 log_entry = self.input_queue.get(timeout=1)
+                 processed_entry = log_entry
+
+                 for processor in self.processors:
+                     processed_entry = processor(processed_entry)
+
+                 self.output_queue.put(processed_entry)
+             except queue.Empty:
+                 continue
+             except Exception as e:
+                 print(f"Log processing error: {e}")
+
+     def get_processed_log(self) -> Optional[Dict[str, Any]]:
+         """Return the next processed log entry, or None if the output queue is empty."""
+         try:
+             return self.output_queue.get_nowait()
+         except queue.Empty:
+             return None
+
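A pipeline sketch for LogStreamProcessor; the two processor functions are illustrative:

    def add_hostname(entry):
        entry['host'] = socket.gethostname()
        return entry

    def uppercase_level(entry):
        entry['level'] = entry.get('level', 'INFO').upper()
        return entry

    stream = LogStreamProcessor(processors=[add_hostname, uppercase_level])
    stream.process_log({'level': 'info', 'message': 'queued for processing'})
    time.sleep(1.5)                    # give the worker thread a moment
    print(stream.get_processed_log())  # may be None if processing has not finished yet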
+ # New: smart log analysis
+ class LogAnalyzer:
+     """Smart log analyzer."""
+     def __init__(self):
+         self.patterns = {
+             'error_patterns': [
+                 r'Exception|Error|Failed|Timeout|Connection refused',
+                 r'HTTP \d{3}',
+                 r'ORA-\d{5}',
+                 r'MySQL.*error'
+             ],
+             'warning_patterns': [
+                 r'Warning|Deprecated|Deprecation',
+                 r'Slow query|Performance issue',
+                 r'Resource.*low|Memory.*high'
+             ],
+             'security_patterns': [
+                 r'Unauthorized|Forbidden|Authentication failed',
+                 r'SQL injection|XSS|CSRF',
+                 r'Failed login|Invalid credentials'
+             ]
+         }
+         self.compiled_patterns = {}
+         for category, patterns in self.patterns.items():
+             self.compiled_patterns[category] = [re.compile(pattern, re.IGNORECASE) for pattern in patterns]
+
+     def analyze_log(self, log_entry: Dict[str, Any]) -> Dict[str, Any]:
+         """Analyze a log entry and classify its severity."""
+         message = log_entry.get('message', '')
+         level = log_entry.get('level', 'INFO')
+
+         analysis = {
+             'severity': 'normal',
+             'categories': [],
+             'suggestions': [],
+             'patterns_found': []
+         }
+
+         # Check error patterns
+         for pattern in self.compiled_patterns['error_patterns']:
+             if pattern.search(message):
+                 analysis['severity'] = 'high'
+                 analysis['categories'].append('error')
+                 analysis['patterns_found'].append(pattern.pattern)
+
+         # Check warning patterns
+         for pattern in self.compiled_patterns['warning_patterns']:
+             if pattern.search(message):
+                 if analysis['severity'] == 'normal':
+                     analysis['severity'] = 'medium'
+                 analysis['categories'].append('warning')
+                 analysis['patterns_found'].append(pattern.pattern)
+
+         # Check security patterns
+         for pattern in self.compiled_patterns['security_patterns']:
+             if pattern.search(message):
+                 analysis['severity'] = 'critical'
+                 analysis['categories'].append('security')
+                 analysis['patterns_found'].append(pattern.pattern)
+
+         # Generate suggestions
+         if 'error' in analysis['categories']:
+             analysis['suggestions'].append('Check the related services and dependencies')
+         if 'security' in analysis['categories']:
+             analysis['suggestions'].append('Review the security configuration immediately')
+         if 'warning' in analysis['categories']:
+             analysis['suggestions'].append('Monitor system performance')
+
+         return analysis
+
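An analysis sketch for LogAnalyzer; the sample entry is illustrative:

    analyzer = LogAnalyzer()
    result = analyzer.analyze_log({'level': 'ERROR', 'message': 'Authentication failed for user admin'})
    # 'Authentication failed' matches a security pattern, so severity ends up 'critical'
    print(result['severity'], result['categories'], result['suggestions'])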
+ # New: log health checks
+ class LogHealthChecker:
+     """Log health checker."""
+     def __init__(self):
+         self.health_metrics = {
+             'total_logs': 0,
+             'error_rate': 0.0,
+             'avg_response_time': 0.0,
+             'memory_usage': 0.0,
+             'disk_usage': 0.0,
+             'last_check': None
+         }
+
+     def check_health(self, log_dir: str) -> Dict[str, Any]:
+         """Check the health of the logging system."""
+         try:
+             # Check disk usage
+             total, used, free = shutil.disk_usage(log_dir)
+             disk_usage_percent = (used / total) * 100
+
+             # Check memory usage
+             process = psutil.Process()
+             memory_usage = process.memory_info().rss / 1024 / 1024  # MB
+
+             # Check log files
+             log_files = list(Path(log_dir).glob("*.log"))
+             total_size = sum(f.stat().st_size for f in log_files)
+
+             health_status = {
+                 'status': 'healthy',
+                 'disk_usage_percent': disk_usage_percent,
+                 'memory_usage_mb': memory_usage,
+                 'log_files_count': len(log_files),
+                 'total_log_size_mb': total_size / 1024 / 1024,
+                 'last_check': datetime.now().isoformat()
+             }
+
+             # Determine the overall status
+             if disk_usage_percent > 90:
+                 health_status['status'] = 'critical'
+                 health_status['warnings'] = ['Disk usage is critically high']
+             elif disk_usage_percent > 80:
+                 health_status['status'] = 'warning'
+                 health_status['warnings'] = ['Disk usage is elevated']
+
+             if memory_usage > 1024:  # more than 1 GB
+                 # Do not downgrade an existing 'critical' status
+                 if health_status['status'] == 'healthy':
+                     health_status['status'] = 'warning'
+                 if 'warnings' not in health_status:
+                     health_status['warnings'] = []
+                 health_status['warnings'].append('Memory usage is elevated')
+
+             return health_status
+
+         except Exception as e:
+             return {
+                 'status': 'error',
+                 'error': str(e),
+                 'last_check': datetime.now().isoformat()
+             }
+
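A health-check sketch for LogHealthChecker; the directory is illustrative:

    checker = LogHealthChecker()
    report = checker.check_health("logs")
    # 'status' is 'healthy', 'warning', 'critical', or 'error' (when the check itself fails)
    print(report['status'], report.get('warnings', []))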
+ # New: log backup and restore
+ class LogBackupManager:
+     """Log backup manager."""
+     def __init__(self, backup_dir: str = "backups"):
+         self.backup_dir = backup_dir
+         os.makedirs(backup_dir, exist_ok=True)
+
+     def create_backup(self, log_dir: str, backup_name: str = None) -> str:
+         """Create a backup of the log directory."""
+         if backup_name is None:
+             backup_name = f"backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
+
+         backup_path = os.path.join(self.backup_dir, f"{backup_name}.tar.gz")
+
+         with tarfile.open(backup_path, 'w:gz') as tar:
+             for log_file in Path(log_dir).glob("*.log"):
+                 tar.add(log_file, arcname=log_file.name)
+
+         return backup_path
+
+     def restore_backup(self, backup_path: str, restore_dir: str) -> bool:
+         """Restore a log backup."""
+         try:
+             with tarfile.open(backup_path, 'r:gz') as tar:
+                 tar.extractall(restore_dir)
+             return True
+         except Exception as e:
+             print(f"Failed to restore backup: {e}")
+             return False
+
+     def list_backups(self) -> List[Dict[str, Any]]:
+         """List all backups, newest first."""
+         backups = []
+         for backup_file in Path(self.backup_dir).glob("*.tar.gz"):
+             stat = backup_file.stat()
+             backups.append({
+                 'name': backup_file.name,
+                 'size_mb': stat.st_size / 1024 / 1024,
+                 'created': datetime.fromtimestamp(stat.st_mtime).isoformat()
+             })
+         return sorted(backups, key=lambda x: x['created'], reverse=True)
+
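A backup and restore sketch for LogBackupManager; the directory names are illustrative:

    backup_mgr = LogBackupManager(backup_dir="backups")
    archive = backup_mgr.create_backup(log_dir="logs")  # tars every *.log file in logs/
    print(backup_mgr.list_backups())                    # newest backups first
    backup_mgr.restore_backup(archive, restore_dir="restored_logs")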
+ # Export all classes
+ __all__ = [
+     'LogFilter',
+     'LogAggregator',
+     'PerformanceMonitor',
+     'DistributedLogger',
+     'MemoryOptimizer',
+     'LogRouter',
+     'LogSecurity',
+     'LogArchiver',
+     'LogDatabase',
+     'LogStreamProcessor',
+     'LogAnalyzer',
+     'LogHealthChecker',
+     'LogBackupManager'
+ ]