xmi-logger 0.0.7__tar.gz → 0.0.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {xmi_logger-0.0.7/xmi_logger.egg-info → xmi_logger-0.0.8}/PKG-INFO +151 -69
- xmi_logger-0.0.7/PKG-INFO → xmi_logger-0.0.8/README.md +134 -91
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/setup.py +17 -6
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger/__version__.py +2 -2
- xmi_logger-0.0.8/xmi_logger/advanced_features.py +831 -0
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger/xmi_logger.py +253 -171
- xmi_logger-0.0.7/README.md → xmi_logger-0.0.8/xmi_logger.egg-info/PKG-INFO +173 -64
- xmi_logger-0.0.8/xmi_logger.egg-info/requires.txt +7 -0
- xmi_logger-0.0.7/xmi_logger/advanced_features.py +0 -667
- xmi_logger-0.0.7/xmi_logger.egg-info/requires.txt +0 -3
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/setup.cfg +0 -0
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger/__init__.py +0 -0
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger.egg-info/SOURCES.txt +0 -0
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger.egg-info/dependency_links.txt +0 -0
- {xmi_logger-0.0.7 → xmi_logger-0.0.8}/xmi_logger.egg-info/top_level.txt +0 -0
@@ -1,51 +1,60 @@
 Metadata-Version: 2.4
 Name: xmi_logger
-Version: 0.0.7
+Version: 0.0.8
 Summary: An enhanced logger based on Loguru
 Home-page: https://github.com/wang-zhibo/xmi_logger
 Author: gm.zhibo.wang
 Author-email: gm.zhibo.wang@gmail.com
+License: MIT
 Project-URL: Bug Reports, https://github.com/wang-zhibo/xmi_logger/issues
 Project-URL: Source, https://github.com/wang-zhibo/xmi_logger
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.8
 Description-Content-Type: text/markdown
-Requires-Dist: loguru
-Requires-Dist: requests
-Requires-Dist: aiohttp
+Requires-Dist: loguru<1.0,>=0.7
+Requires-Dist: requests>=2.0
+Requires-Dist: aiohttp>=3.8
+Provides-Extra: advanced
+Requires-Dist: psutil>=5.0; extra == "advanced"
+Requires-Dist: cryptography>=3.4; extra == "advanced"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
 Dynamic: description
 Dynamic: description-content-type
 Dynamic: home-page
+Dynamic: license
 Dynamic: project-url
+Dynamic: provides-extra
 Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary
 
 # XmiLogger
 
-Based on Loguru
+An enhanced logger based on Loguru, supporting multilingual output, async operation, remote error reporting, statistics, and log management.
 
 ## Features
 
--
--
--
--
--
--
--
-- 🌍 Remote logging: asynchronous remote log collection
-- 🐛 Enhanced error info: detailed error location, call chain, and source line
-- ⚡ Performance optimizations: smart caching, connection pooling, memory optimization
+- Multilingual output (zh/en)
+- Custom formats, level filtering, size- or time-based rotation, retention policies, compression
+- request_id context injection (ContextVar)
+- Decorators for logging function calls and timing (sync/async)
+- Remote log reporting (ERROR and above by default, sent asynchronously to avoid blocking)
+- Basic statistics (by level/category/hour) and cache performance info
+- Log management (compression, archiving, cleanup), simple analysis and export
 
 ## Installation
 
 ```bash
-pip install
+pip install xmi_logger
 ```
 
 ## Quick Start
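The metadata above introduces an optional `advanced` extra (psutil, cryptography) in 0.0.8. Below is a minimal sketch of guarding against that extra being absent; the assumption that a missing optional dependency surfaces as an `ImportError` is mine, not something stated in the diff:

```python
# Degrade gracefully when `pip install xmi_logger[advanced]` was not used.
# Assumption: importing the advanced module without psutil/cryptography raises ImportError.
try:
    from xmi_logger.advanced_features import LogSecurity
    HAS_ADVANCED = True
except ImportError:
    LogSecurity = None
    HAS_ADVANCED = False

def mask(text: str) -> str:
    """Mask secrets when the advanced extra is available, otherwise pass the text through."""
    return LogSecurity().sanitize_message(text) if HAS_ADVANCED else text

print(mask("password: 123456"))
```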
@@ -62,10 +71,67 @@ logger = XmiLogger(
     language="zh"  # use Chinese output
 )
 
-#
-logger.
-
-logger.
+# Set the request_id (each log line will carry ReqID:xxx)
+token = logger.request_id_var.set("req-001")
+try:
+    logger.info("This is an info log")
+    logger.warning("This is a warning log")
+    logger.error("This is an error log")
+finally:
+    logger.request_id_var.reset(token)
+
+# Clean up before exiting (stop remote sending, restore the excepthook, remove handlers)
+logger.cleanup()
+```
+
+#### request_id usage and concurrency notes
+
+`request_id_var` is a `ContextVar` used to automatically tag all logs of one business flow with `ReqID:...`:
+
+- `set(value)`: sets the request_id of the current context to `value` and returns a token (the previous value before the set)
+- `reset(token)`: restores the current context to the value before the set, so the request_id does not leak into the next request/task
+
+The recommended pattern is `try/finally` to guarantee the reset (as in the example above).
+
+Behavior under concurrency:
+
+- Threads: the `request_id` of one thread does not affect another, but a new thread does not automatically inherit its parent's request_id; it must be passed explicitly.
+- asyncio: the `request_id` of one Task does not affect another; a Task copies the current context when it is created, so call `set` before `create_task()`.
+
+Passing the request_id into a thread / thread pool:
+
+```python
+import contextvars
+import threading
+
+token = logger.request_id_var.set("req-001")
+try:
+    ctx = contextvars.copy_context()
+    t = threading.Thread(target=lambda: ctx.run(logger.info, "Child-thread log also carries the ReqID"))
+    t.start()
+    t.join()
+finally:
+    logger.request_id_var.reset(token)
+```
+
+asyncio concurrency example (independent request_id per request):
+
+```python
+import asyncio
+
+async def handle(req_id: str):
+    token = logger.request_id_var.set(req_id)
+    try:
+        logger.info("Start processing")
+        await asyncio.sleep(0.1)
+        logger.info("Processing finished")
+    finally:
+        logger.request_id_var.reset(token)
+
+async def main():
+    await asyncio.gather(handle("req-1"), handle("req-2"))
+
+asyncio.run(main())
 ```
 
 ### Async function support
@@ -115,6 +181,8 @@ logger.log_with_location("INFO", "This is a log with location info")
 ### Performance monitoring
 
 ```python
+import json
+
 # Get performance statistics
 perf_stats = logger.get_performance_stats()
 print(json.dumps(perf_stats, indent=2))
@@ -133,6 +201,8 @@ logger = XmiLogger(
 ### Batch logging
 
 ```python
+import asyncio
+
 # Log a batch of messages
 batch_logs = [
     {'level': 'INFO', 'message': 'message 1', 'tag': 'BATCH'},
@@ -141,7 +211,7 @@ batch_logs = [
 ]
 
 logger.batch_log(batch_logs)  # synchronous batch logging
-logger.async_batch_log(batch_logs)  # asynchronous batch logging
+asyncio.run(logger.async_batch_log(batch_logs))  # asynchronous batch logging
 ```
 
 ### Contextual logging
@@ -165,10 +235,10 @@ logger.log_with_timing("INFO", "API request finished", {'db_query': 0.125, 'total': 0
 logger = XmiLogger(
     file_name="app",
     adaptive_level=True,  # enable adaptive level
-
+    enable_stats=True  # adaptive level relies on statistics
 )
 
-#
+# Adjust the level automatically from the error rate / log rate (logs must keep flowing so the statistics update)
 logger.set_adaptive_level(error_rate_threshold=0.1)
 ```
 
@@ -200,40 +270,65 @@ print(report)
 logger.export_logs_to_json("logs.json", hours=24)
 ```
 
-###
+### advanced_features (optional)
+
+Encryption depends on cryptography; the system metrics used by performance monitoring depend on psutil:
+
+```bash
+pip install xmi_logger[advanced]
+```
 
 ```python
-
+import hashlib
+import time
+from datetime import datetime
+
+from xmi_logger.advanced_features import (
+    DistributedLogger,
+    LogAggregator,
+    LogAnalyzer,
+    LogArchiver,
+    LogBackupManager,
+    LogDatabase,
+    LogHealthChecker,
+    LogSecurity,
+    LogStreamProcessor,
+    PerformanceMonitor,
+)
 
-#
+# Intelligent log analysis (rule matching)
 analyzer = LogAnalyzer()
 analysis = analyzer.analyze_log({
     'message': 'Database connection failed: Connection refused',
     'level': 'ERROR'
 })
-print(f"Severity: {analysis['severity']}")
-print(f"Categories: {analysis['categories']}")
-print(f"Suggestions: {analysis['suggestions']}")
+print(f"Severity: {analysis['severity']}")
+print(f"Categories: {analysis['categories']}")
+print(f"Suggestions: {analysis['suggestions']}")
 
-#
-dist_logger = DistributedLogger("node-001")
-log_id = dist_logger.get_log_id()
+# Distributed log IDs (keep increasing across process restarts)
+dist_logger = DistributedLogger("node-001", persist_every=100)
+log_id = dist_logger.get_log_id()
 logger.info(f"Distributed log message (ID: {log_id})")
 
-#
+# Log security: masking (supports keys such as password/token/api_key/密码/口令)
 security = LogSecurity()
-original = "User password: 123456"
+original = "User password: 123456, token=abcd"
 sanitized = security.sanitize_message(original)
-print(sanitized)
+print(sanitized)
+
+payload = {"user": "alice", "password": "123456", "nested": {"api_key": "k-xxx"}}
+print(security.sanitize_mapping(payload))
 
 # Performance monitoring
 monitor = PerformanceMonitor()
-monitor.record_log("INFO", 0.05)
+monitor.record_log("INFO", 0.05)
 metrics = monitor.get_metrics()
 print(f"Total logs: {metrics['log_count']}")
-print(f"Average processing time: {metrics['
+print(f"Average processing time: {metrics['avg_processing_time_ms']:.2f}ms")
+monitor.stop()
 
-#
+# Log aggregation (dedupe and merge repeated logs)
 aggregator = LogAggregator(window_size=100, flush_interval=5.0)
 for i in range(20):
     aggregator.add_log({
|
|
|
241
336
|
'message': '重复的日志消息',
|
|
242
337
|
'timestamp': time.time()
|
|
243
338
|
})
|
|
244
|
-
|
|
339
|
+
aggregated = aggregator.flush()
|
|
340
|
+
print(aggregated[0]["message"])
|
|
341
|
+
aggregator.stop()
|
|
245
342
|
|
|
246
|
-
#
|
|
247
|
-
processor = LogStreamProcessor()
|
|
343
|
+
# 流处理(管道式加工日志 entry)
|
|
344
|
+
processor = LogStreamProcessor(max_queue_size=1000)
|
|
248
345
|
|
|
249
346
|
def add_timestamp(log_entry):
|
|
250
347
|
log_entry['processed_timestamp'] = time.time()
|
|
@@ -260,9 +357,10 @@ processor.add_processor(add_checksum)
 
 # Process a log entry
 processor.process_log({'level': 'INFO', 'message': 'Test message'})
-processed_log = processor.get_processed_log()
+processed_log = processor.get_processed_log(timeout=1.0)
+processor.stop()
 
-#
+# SQLite storage (structured persistence + conditional queries)
 db = LogDatabase("logs.db")
 db.insert_log({
     'timestamp': datetime.now().isoformat(),
@@ -275,14 +373,15 @@ db.insert_log({
 
 # Query error logs
 logs = db.query_logs({'level': 'ERROR'}, limit=10)
+db.close()
 
 # Health check
 checker = LogHealthChecker()
 health = checker.check_health("logs")
-print(f"Status: {health['status']}")
+print(f"Status: {health['status']}")
 print(f"Disk usage: {health['disk_usage_percent']:.1f}%")
 
-#
+# Safe backup/restore (guards against tar path traversal)
 backup_mgr = LogBackupManager("backups")
 backup_path = backup_mgr.create_backup("logs", "daily_backup")
 
@@ -291,28 +390,9 @@ backups = backup_mgr.list_backups()
 for backup in backups:
     print(f"{backup['name']} - {backup['size_mb']:.2f}MB")
 
-#
-optimizer = MemoryOptimizer(max_memory_mb=512)
-if optimizer.check_memory():
-    optimizer.optimize_memory()  # free memory automatically
-
-# Smart routing
-router = LogRouter()
-
-def error_handler(log_entry):
-    print(f"🚨 Error log: {log_entry['message']}")
-
-def security_handler(log_entry):
-    print(f"🔒 Security log: {log_entry['message']}")
-
-router.add_route(lambda entry: entry.get('level') == 'ERROR', error_handler)
-router.add_route(lambda entry: 'password' in entry.get('message', ''), security_handler)
-
-router.route_log({'level': 'ERROR', 'message': 'System error'})
-
-# Log archiving
+# Log archiving (compress and move into the archives directory)
 archiver = LogArchiver("archives")
-archived_files = archiver.archive_logs("logs", days_old=7)
+archived_files = archiver.archive_logs("logs", days_old=7, compression_type="gzip")
 print(f"Archived {len(archived_files)} files")
 ```
 
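A combined usage sketch, not taken from the README, chaining two of the classes shown above: mask secrets with `LogSecurity` before persisting with `LogDatabase`. The `insert_log` schema is only partially visible in this diff, so the `level` and `message` keys below are assumptions:

```python
from datetime import datetime

from xmi_logger.advanced_features import LogDatabase, LogSecurity

security = LogSecurity()
db = LogDatabase("logs.db")

# Sanitize the message before it is written to disk; 'timestamp' mirrors the diff above,
# 'level' and 'message' are assumed field names.
db.insert_log({
    'timestamp': datetime.now().isoformat(),
    'level': 'ERROR',
    'message': security.sanitize_message("Login failed, password=123456"),
})

recent_errors = db.query_logs({'level': 'ERROR'}, limit=10)
print(f"Stored and retrieved {len(recent_errors)} sanitized error logs")
db.close()
```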
@@ -366,7 +446,10 @@ logger = XmiLogger(
     compression="zip",  # log compression format
     enable_stats=False,  # enable statistics
     categories=None,  # list of log categories
-    cache_size=128
+    cache_size=128,  # cache size
+    adaptive_level=False,  # adaptive log level
+    performance_mode=False,  # performance mode (used with enable_performance_mode/disable_performance_mode)
+    enable_exception_hook=False  # whether to take over sys.excepthook
 )
 ```
 
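A runtime sketch of the flags added above. The top-level import path and the zero-argument `enable_performance_mode()` / `disable_performance_mode()` calls are assumptions inferred from the comments in this diff, not confirmed API:

```python
from xmi_logger import XmiLogger  # assumed top-level export

logger = XmiLogger(
    file_name="app",
    performance_mode=False,       # start in normal mode
    adaptive_level=False,
    enable_exception_hook=False,  # leave sys.excepthook alone
)

try:
    logger.enable_performance_mode()   # assumed toggle, per the comment in the diff
    for i in range(1000):
        logger.info(f"bulk message {i}")
    logger.disable_performance_mode()
finally:
    logger.cleanup()  # shown earlier in this diff: release handlers and the remote sender
```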
@@ -477,4 +560,3 @@ except RuntimeError as e:
 ## License
 
 MIT License
-