mdbq 4.2.16__py3-none-any.whl → 4.2.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mdbq might be problematic.

mdbq/__version__.py CHANGED
@@ -1 +1 @@
- VERSION = '4.2.16'
+ VERSION = '4.2.18'
mdbq/route/analytics.py ADDED
@@ -0,0 +1,619 @@
+ """
+ Data analysis tool
+
+ Main features:
+ 1. Real-time monitoring data queries
+ 2. Traffic trend analysis
+ 3. Performance analysis reports
+ 4. Anomaly detection and alerting
+ 5. User behavior analysis
+
+ """
+
+ import os
+ import json
+ import pymysql
+ from datetime import datetime, timedelta
+ from typing import Dict, Any, List, Optional
+ from dbutils.pooled_db import PooledDB
+ from mdbq.myconf import myconf
+
+
+ class MonitorAnalytics:
+     """Monitoring data analysis class."""
+
+     def __init__(self, database='api_monitor_logs'):
+         """Initialize the analytics tool."""
+         self.database = database
+         self.init_database_pool()
+
+     def init_database_pool(self):
+         """Initialize the database connection pool."""
+         dir_path = os.path.expanduser("~")
+         config_file = os.path.join(dir_path, 'spd.txt')
+         parser = myconf.ConfigParser()
+
+         host, port, username, password = parser.get_section_values(
+             file_path=config_file,
+             section='mysql',
+             keys=['host', 'port', 'username', 'password'],
+         )
+
+         self.pool = PooledDB(
+             creator=pymysql,
+             maxconnections=5,  # more connections to avoid contention
+             mincached=2,  # raise the minimum number of cached connections
+             maxcached=5,  # raise the maximum number of cached connections
+             blocking=True,
+             host=host,
+             port=int(port),
+             user=username,
+             password=password,
+             database=self.database,
+             ping=1,
+             charset='utf8mb4',
+             cursorclass=pymysql.cursors.DictCursor,
+             # connection timeout settings
+             connect_timeout=10,
+             read_timeout=30,
+             write_timeout=30
+         )
+
+     def get_realtime_metrics(self) -> Dict[str, Any]:
+         """Fetch real-time monitoring metrics."""
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     now = datetime.now()
+                     last_hour = now - timedelta(hours=1)
+                     last_day = now - timedelta(days=1)
+
+                     # Request statistics for the last hour
+                     cursor.execute("""
+                         SELECT
+                             COUNT(*) as requests_last_hour,
+                             COUNT(DISTINCT client_ip) as unique_ips_last_hour,
+                             AVG(process_time) as avg_response_time,
+                             MAX(process_time) as max_response_time,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as error_count,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
+                             SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
+                             SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                     """, (last_hour,))
+
+                     hourly_stats = cursor.fetchone() or {}
+
+                     # Trend comparison over the last 24 hours
+                     cursor.execute("""
+                         SELECT
+                             COUNT(*) as requests_last_day,
+                             COUNT(DISTINCT client_ip) as unique_ips_last_day,
+                             AVG(process_time) as avg_response_time_day
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                     """, (last_day,))
+
+                     daily_stats = cursor.fetchone() or {}
+
+                     # Top endpoints (last hour)
+                     cursor.execute("""
+                         SELECT endpoint, COUNT(*) as request_count,
+                                AVG(process_time) as avg_time
+                         FROM api_request_logs
+                         WHERE timestamp >= %s AND endpoint IS NOT NULL
+                         GROUP BY endpoint
+                         ORDER BY request_count DESC
+                         LIMIT 5
+                     """, (last_hour,))
+
+                     top_endpoints = cursor.fetchall()
+
+                     # Slow requests (last hour)
+                     cursor.execute("""
+                         SELECT endpoint, process_time, client_ip, timestamp
+                         FROM api_request_logs
+                         WHERE timestamp >= %s AND process_time > 5000
+                         ORDER BY process_time DESC
+                         LIMIT 10
+                     """, (last_hour,))
+
+                     slow_requests = cursor.fetchall()
+
+                     # Error requests (last hour)
+                     cursor.execute("""
+                         SELECT endpoint, response_status, COUNT(*) as error_count
+                         FROM api_request_logs
+                         WHERE timestamp >= %s AND response_status >= 400
+                         GROUP BY endpoint, response_status
+                         ORDER BY error_count DESC
+                         LIMIT 10
+                     """, (last_hour,))
+
+                     error_requests = cursor.fetchall()
+
+                     return {
+                         'realtime_metrics': {
+                             'requests_per_hour': hourly_stats.get('requests_last_hour', 0),
+                             'requests_per_day': daily_stats.get('requests_last_day', 0),
+                             'unique_ips_hour': hourly_stats.get('unique_ips_last_hour', 0),
+                             'unique_ips_day': daily_stats.get('unique_ips_last_day', 0),
+                             'avg_response_time': round(hourly_stats.get('avg_response_time', 0) or 0, 2),
+                             'max_response_time': round(hourly_stats.get('max_response_time', 0) or 0, 2),
+                             'error_rate': round(hourly_stats.get('error_rate', 0) or 0, 2),
+                             'error_count': hourly_stats.get('error_count', 0),
+                             'bot_requests': hourly_stats.get('bot_requests', 0),
+                             'mobile_requests': hourly_stats.get('mobile_requests', 0)
+                         },
+                         'top_endpoints': top_endpoints,
+                         'slow_requests': slow_requests,
+                         'error_requests': error_requests,
+                         'timestamp': now.isoformat()
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+     def get_traffic_trend(self, days: int = 7) -> Dict[str, Any]:
+         """Fetch traffic trend analysis."""
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     end_date = datetime.now().date()
+                     start_date = end_date - timedelta(days=days)
+
+                     # Hourly statistics (last 7 days)
+                     cursor.execute("""
+                         SELECT
+                             DATE(timestamp) as date,
+                             HOUR(timestamp) as hour,
+                             COUNT(*) as requests,
+                             COUNT(DISTINCT client_ip) as unique_ips,
+                             AVG(process_time) as avg_response_time,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         GROUP BY DATE(timestamp), HOUR(timestamp)
+                         ORDER BY date, hour
+                     """, (start_date, end_date))
+
+                     hourly_data = cursor.fetchall()
+
+                     # Daily statistics
+                     cursor.execute("""
+                         SELECT
+                             DATE(timestamp) as date,
+                             COUNT(*) as requests,
+                             COUNT(DISTINCT client_ip) as unique_ips,
+                             AVG(process_time) as avg_response_time,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
+                             SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
+                             SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         GROUP BY DATE(timestamp)
+                         ORDER BY date
+                     """, (start_date, end_date))
+
+                     daily_data = cursor.fetchall()
+
+                     # Day-of-week pattern analysis
+                     cursor.execute("""
+                         SELECT
+                             DAYOFWEEK(timestamp) as day_of_week,
+                             DAYNAME(timestamp) as day_name,
+                             COUNT(*) as total_requests,
+                             AVG(process_time) as avg_response_time
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         GROUP BY DAYOFWEEK(timestamp), DAYNAME(timestamp)
+                         ORDER BY day_of_week
+                     """, (start_date, end_date))
+
+                     weekly_pattern = cursor.fetchall()
+
+                     # Hour-of-day pattern analysis
+                     cursor.execute("""
+                         SELECT
+                             HOUR(timestamp) as hour,
+                             COUNT(*) as total_requests,
+                             AVG(process_time) as avg_response_time
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         GROUP BY HOUR(timestamp)
+                         ORDER BY hour
+                     """, (start_date, end_date))
+
+                     hourly_pattern = cursor.fetchall()
+
+                     return {
+                         'period': f'{start_date} to {end_date}',
+                         'hourly_data': hourly_data,
+                         'daily_data': daily_data,
+                         'weekly_pattern': weekly_pattern,
+                         'hourly_pattern': hourly_pattern
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+     def get_endpoint_analysis(self, days: int = 7) -> Dict[str, Any]:
+         """Fetch endpoint performance analysis."""
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     end_date = datetime.now().date()
+                     start_date = end_date - timedelta(days=days)
+
+                     # Endpoint performance statistics
+                     cursor.execute("""
+                         SELECT
+                             endpoint,
+                             COUNT(*) as total_requests,
+                             AVG(process_time) as avg_response_time,
+                             MIN(process_time) as min_response_time,
+                             MAX(process_time) as max_response_time,
+                             STDDEV(process_time) as response_time_stddev,
+                             COUNT(DISTINCT client_ip) as unique_users,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as error_count,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
+                             SUM(request_size) as total_request_size,
+                             SUM(response_size) as total_response_size
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         AND endpoint IS NOT NULL
+                         GROUP BY endpoint
+                         ORDER BY total_requests DESC
+                     """, (start_date, end_date))
+
+                     endpoint_stats = cursor.fetchall()
+
+                     # Slowest endpoints
+                     slowest_endpoints = sorted(
+                         [ep for ep in endpoint_stats if ep['avg_response_time']],
+                         key=lambda x: x['avg_response_time'] or 0,
+                         reverse=True
+                     )[:10]
+
+                     # Endpoints with the highest error rates
+                     error_prone_endpoints = sorted(
+                         [ep for ep in endpoint_stats if (ep['error_rate'] or 0) > 0],
+                         key=lambda x: x['error_rate'] or 0,
+                         reverse=True
+                     )[:10]
+
+                     # Most popular endpoints
+                     popular_endpoints = endpoint_stats[:10]
+
+                     return {
+                         'period': f'{start_date} to {end_date}',
+                         'all_endpoints': endpoint_stats,
+                         'slowest_endpoints': slowest_endpoints,
+                         'error_prone_endpoints': error_prone_endpoints,
+                         'popular_endpoints': popular_endpoints
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+     def get_user_behavior_analysis(self, days: int = 7) -> Dict[str, Any]:
+         """Fetch user behavior analysis."""
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     end_date = datetime.now().date()
+                     start_date = end_date - timedelta(days=days)
+
+                     # IP access pattern analysis
+                     cursor.execute("""
+                         SELECT
+                             client_ip,
+                             COUNT(*) as total_requests,
+                             COUNT(DISTINCT endpoint) as unique_endpoints,
+                             COUNT(DISTINCT DATE(timestamp)) as active_days,
+                             MIN(timestamp) as first_access,
+                             MAX(timestamp) as last_access,
+                             AVG(process_time) as avg_response_time,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
+                             SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
+                             user_agent
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         GROUP BY client_ip, user_agent
+                         HAVING total_requests >= 10
+                         ORDER BY total_requests DESC
+                         LIMIT 50
+                     """, (start_date, end_date))
+
+                     ip_analysis = cursor.fetchall()
+
+                     # Device type statistics
+                     cursor.execute("""
+                         SELECT
+                             browser_name,
+                             os_name,
+                             COUNT(*) as request_count,
+                             COUNT(DISTINCT client_ip) as unique_users
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         AND browser_name != 'Unknown'
+                         GROUP BY browser_name, os_name
+                         ORDER BY request_count DESC
+                     """, (start_date, end_date))
+
+                     device_stats = cursor.fetchall()
+
+                     # Suspicious activity detection
+                     cursor.execute("""
+                         SELECT
+                             client_ip,
+                             COUNT(*) as requests_per_hour,
+                             COUNT(DISTINCT endpoint) as endpoints_accessed,
+                             SUM(CASE WHEN response_status = 404 THEN 1 ELSE 0 END) as not_found_errors,
+                             SUM(CASE WHEN response_status = 403 THEN 1 ELSE 0 END) as forbidden_errors,
+                             MAX(is_bot) as is_bot
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                         GROUP BY client_ip
+                         HAVING requests_per_hour > 100
+                         OR not_found_errors > 10
+                         OR forbidden_errors > 5
+                         ORDER BY requests_per_hour DESC
+                     """, (datetime.now() - timedelta(hours=1),))
+
+                     suspicious_activity = cursor.fetchall()
+
+                     # User session analysis
+                     cursor.execute("""
+                         SELECT
+                             session_id,
+                             COUNT(*) as session_requests,
+                             COUNT(DISTINCT endpoint) as endpoints_in_session,
+                             TIMESTAMPDIFF(MINUTE, MIN(timestamp), MAX(timestamp)) as session_duration,
+                             MIN(timestamp) as session_start,
+                             MAX(timestamp) as session_end
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) BETWEEN %s AND %s
+                         AND session_id IS NOT NULL
+                         GROUP BY session_id
+                         HAVING session_requests >= 5
+                         ORDER BY session_duration DESC
+                         LIMIT 20
+                     """, (start_date, end_date))
+
+                     session_analysis = cursor.fetchall()
+
+                     return {
+                         'period': f'{start_date} to {end_date}',
+                         'ip_analysis': ip_analysis,
+                         'device_statistics': device_stats,
+                         'suspicious_activity': suspicious_activity,
+                         'session_analysis': session_analysis
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+     def get_performance_alerts(self) -> Dict[str, Any]:
+         """Fetch performance alert information."""
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     now = datetime.now()
+                     last_hour = now - timedelta(hours=1)
+
+                     alerts = []
+
+                     # Check for abnormal response times
+                     cursor.execute("""
+                         SELECT endpoint, AVG(process_time) as avg_time
+                         FROM api_request_logs
+                         WHERE timestamp >= %s AND process_time IS NOT NULL
+                         GROUP BY endpoint
+                         HAVING avg_time > 3000
+                         ORDER BY avg_time DESC
+                     """, (last_hour,))
+
+                     slow_endpoints = cursor.fetchall()
+                     for endpoint in slow_endpoints:
+                         alerts.append({
+                             'type': 'SLOW_RESPONSE',
+                             'severity': 'HIGH' if (endpoint['avg_time'] or 0) > 5000 else 'MEDIUM',
+                             'message': f"Endpoint {endpoint['endpoint']} average response time {endpoint['avg_time']:.0f}ms",
+                             'timestamp': now.isoformat()
+                         })
+
+                     # Check for abnormal error rates
+                     cursor.execute("""
+                         SELECT
+                             endpoint,
+                             COUNT(*) as total,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                         GROUP BY endpoint
+                         HAVING total >= 10 AND error_rate > 10
+                         ORDER BY error_rate DESC
+                     """, (last_hour,))
+
+                     error_endpoints = cursor.fetchall()
+                     for endpoint in error_endpoints:
+                         alerts.append({
+                             'type': 'HIGH_ERROR_RATE',
+                             'severity': 'HIGH' if (endpoint['error_rate'] or 0) > 20 else 'MEDIUM',
+                             'message': f"Endpoint {endpoint['endpoint']} error rate {endpoint['error_rate']:.1f}%",
+                             'timestamp': now.isoformat()
+                         })
+
+                     # Check for abnormal traffic
+                     cursor.execute("""
+                         SELECT
+                             client_ip,
+                             COUNT(*) as request_count
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                         GROUP BY client_ip
+                         HAVING request_count > 500
+                         ORDER BY request_count DESC
+                     """, (last_hour,))
+
+                     high_traffic_ips = cursor.fetchall()
+                     for ip_data in high_traffic_ips:
+                         alerts.append({
+                             'type': 'HIGH_TRAFFIC',
+                             'severity': 'MEDIUM',
+                             'message': f"IP {ip_data['client_ip']} abnormal request volume: {ip_data['request_count']} requests/hour",
+                             'timestamp': now.isoformat()
+                         })
+
+                     # Check overall system load
+                     cursor.execute("""
+                         SELECT COUNT(*) as total_requests
+                         FROM api_request_logs
+                         WHERE timestamp >= %s
+                     """, (last_hour,))
+
+                     total_requests = cursor.fetchone()['total_requests']
+                     if total_requests > 10000:  # more than 10,000 requests per hour
+                         alerts.append({
+                             'type': 'HIGH_SYSTEM_LOAD',
+                             'severity': 'HIGH',
+                             'message': f"Abnormal system load: {total_requests} requests/hour",
+                             'timestamp': now.isoformat()
+                         })
+
+                     return {
+                         'alerts': alerts,
+                         'alert_count': len(alerts),
+                         'high_severity_count': len([a for a in alerts if a['severity'] == 'HIGH']),
+                         'timestamp': now.isoformat()
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+     def generate_daily_report(self, target_date: datetime = None) -> Dict[str, Any]:
+         """Generate a daily report."""
+         if target_date is None:
+             target_date = datetime.now().date() - timedelta(days=1)
+
+         try:
+             connection = self.pool.connection()
+             try:
+                 with connection.cursor() as cursor:
+                     # Overall statistics
+                     cursor.execute("""
+                         SELECT
+                             COUNT(*) as total_requests,
+                             COUNT(DISTINCT client_ip) as unique_ips,
+                             COUNT(DISTINCT endpoint) as unique_endpoints,
+                             AVG(process_time) as avg_response_time,
+                             MAX(process_time) as max_response_time,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as total_errors,
+                             SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
+                             SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
+                             SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests,
+                             SUM(request_size) as total_request_size,
+                             SUM(response_size) as total_response_size
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) = %s
+                     """, (target_date,))
+
+                     daily_summary = cursor.fetchone()
+
+                     # Top endpoints
+                     cursor.execute("""
+                         SELECT endpoint, COUNT(*) as requests, AVG(process_time) as avg_time
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) = %s
+                         GROUP BY endpoint
+                         ORDER BY requests DESC
+                         LIMIT 10
+                     """, (target_date,))
+
+                     top_endpoints = cursor.fetchall()
+
+                     # Error statistics
+                     cursor.execute("""
+                         SELECT response_status, COUNT(*) as count
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) = %s AND response_status >= 400
+                         GROUP BY response_status
+                         ORDER BY count DESC
+                     """, (target_date,))
+
+                     error_breakdown = cursor.fetchall()
+
+                     # Traffic distribution (by hour)
+                     cursor.execute("""
+                         SELECT
+                             HOUR(timestamp) as hour,
+                             COUNT(*) as requests,
+                             AVG(process_time) as avg_time
+                         FROM api_request_logs
+                         WHERE DATE(timestamp) = %s
+                         GROUP BY HOUR(timestamp)
+                         ORDER BY hour
+                     """, (target_date,))
+
+                     hourly_distribution = cursor.fetchall()
+
+                     return {
+                         'date': target_date.isoformat(),
+                         'summary': daily_summary,
+                         'top_endpoints': top_endpoints,
+                         'error_breakdown': error_breakdown,
+                         'hourly_distribution': hourly_distribution,
+                         'generated_at': datetime.now().isoformat()
+                     }
+             finally:
+                 connection.close()
+
+         except Exception as e:
+             return {'error': str(e)}
+
+
+ # Global analytics instance
+ analytics = MonitorAnalytics()
+
+ # Exported analysis functions
+ def get_realtime_metrics():
+     """Fetch real-time monitoring metrics."""
+     return analytics.get_realtime_metrics()
+
+ def get_traffic_trend(days: int = 7):
+     """Fetch traffic trends."""
+     return analytics.get_traffic_trend(days)
+
+ def get_endpoint_analysis(days: int = 7):
+     """Fetch endpoint analysis."""
+     return analytics.get_endpoint_analysis(days)
+
+ def get_user_behavior_analysis(days: int = 7):
+     """Fetch user behavior analysis."""
+     return analytics.get_user_behavior_analysis(days)
+
+ def get_performance_alerts():
+     """Fetch performance alerts."""
+     return analytics.get_performance_alerts()
+
+ def generate_daily_report(target_date: datetime = None):
+     """Generate a daily report."""
+     return analytics.generate_daily_report(target_date)
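
For orientation, here is a minimal usage sketch of the new mdbq.route.analytics module. It is not part of the package: it assumes the ~/spd.txt [mysql] config and the api_monitor_logs database that the module reads already exist, and the output file name is only illustrative.

# Illustrative only: exercise the module-level helpers shown above.
import json

from mdbq.route import analytics

# Real-time snapshot of the last hour.
metrics = analytics.get_realtime_metrics()
print(metrics.get('realtime_metrics', metrics))

# Active alerts; each entry carries type, severity, message and timestamp.
for alert in analytics.get_performance_alerts().get('alerts', []):
    print(alert['severity'], alert['message'])

# Yesterday's report, dumped to JSON (default=str handles datetime/Decimal values).
report = analytics.generate_daily_report()
with open('daily_report.json', 'w', encoding='utf-8') as f:
    json.dump(report, f, ensure_ascii=False, indent=2, default=str)

Note that importing the module creates the global MonitorAnalytics() instance, so the MySQL connection pool is opened at import time.
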
mdbq/route/monitor.py CHANGED
@@ -352,8 +352,7 @@ class RouteMonitor:
          timestamp = str(int(time.time() * 1000000))  # microseconds
          random_part = uuid.uuid4().hex
          combined = f"{timestamp}_{random_part}_{os.getpid()}"
-         hash_value = hashlib.sha256(combined.encode()).hexdigest()[:32]
-         return f"req_{hash_value}"
+         return str(hashlib.sha256(combined.encode()).hexdigest()[:32])

      def get_real_ip(self, request) -> str:
          """
@@ -452,11 +451,27 @@ class RouteMonitor:
          user_agent = user_agent[:500]

          # Get the user identifier (if available)
+         # Safely obtain user_id; use a default value when it is empty
          user_id = None
          if hasattr(g, 'current_user_id'):
-             user_id = str(g.current_user_id)
+             user_id = str(g.current_user_id) if g.current_user_id else None
          elif hasattr(g, 'user_id'):
-             user_id = str(g.user_id)
+             user_id = str(g.user_id) if g.user_id else None
+
+         # If still not found, try to read it from request.current_user
+         if not user_id and hasattr(request, 'current_user'):
+             try:
+                 current_user = request.current_user
+                 if isinstance(current_user, dict):
+                     user_id = current_user.get('user_id') or current_user.get('id')
+                     if user_id:
+                         user_id = str(user_id)
+             except Exception:
+                 pass
+
+         # Fallback: use a default value to avoid None
+         if not user_id:
+             user_id = 'anonymous'

          # Collect request parameters (GET params + POST data)
          request_params = None
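
The first hunk above drops the req_ prefix from generated request IDs. A standalone sketch of the new behavior follows; make_request_id is a hypothetical helper mirroring the diff, not the actual RouteMonitor method name.

# Hypothetical helper reproducing the new ID format: 32 hex chars, no "req_" prefix.
import hashlib
import os
import time
import uuid

def make_request_id() -> str:
    timestamp = str(int(time.time() * 1000000))  # microseconds
    random_part = uuid.uuid4().hex
    combined = f"{timestamp}_{random_part}_{os.getpid()}"
    return str(hashlib.sha256(combined.encode()).hexdigest()[:32])

print(make_request_id())  # prints a 32-character hex string
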
mdbq-4.2.16.dist-info/METADATA → mdbq-4.2.18.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.4
+ Metadata-Version: 2.2
  Name: mdbq
- Version: 4.2.16
+ Version: 4.2.18
  Home-page: https://pypi.org/project/mdbq
  Author: xigua,
  Author-email: 2587125111@qq.com
mdbq-4.2.16.dist-info/RECORD → mdbq-4.2.18.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
- mdbq/__version__.py,sha256=a3tWLKo3g9BwH0taQVatgSW6GPG5PQvEy1IctqDZNaE,18
+ mdbq/__version__.py,sha256=2tQxy-1_HWm0n0hYutPndg9tCuA5LXYgZhUUGDKlL_E,18
  mdbq/auth/__init__.py,sha256=pnPMAt63sh1B6kEvmutUuro46zVf2v2YDAG7q-jV_To,24
  mdbq/auth/auth_backend.py,sha256=iLN7AqiSq7fQgFtNtge_TIlVOR1hrCSZXH6oId6uGX4,116924
  mdbq/auth/crypto.py,sha256=M0i4dRljJuE30WH_13ythA2QGKPXZm6TgpnYp6aHOzw,17431
@@ -29,12 +29,13 @@ mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
  mdbq/redis/getredis.py,sha256=vdg7YQEjhoMp5QzxygNGx5DQKRnePrcwPYgUrDypA6g,23672
  mdbq/redis/redis_cache.py,sha256=JWarX_l7LvdKyxtUNPANAqd-y20Jg5uqmllCbT-fyv8,45752
  mdbq/route/__init__.py,sha256=BT_dAY7V-U2o72bevq1B9Mq9QA7GodwtkxyLNdGaoE8,22
- mdbq/route/monitor.py,sha256=CFVGA9NcyMqY2bq8mjMHbOBsGY9IJ5uISG6PEFoxfLQ,40761
+ mdbq/route/analytics.py,sha256=dngj5hVwKddEUy59nSYbOoJ9C7OVrtCmCkvW6Uj9RYM,28097
+ mdbq/route/monitor.py,sha256=GE6lJ1fVP47xkZCsr6kwZv_Fq7RepJubcrOrWTDkXZk,41438
  mdbq/route/routes.py,sha256=QVGfTvDgu0CpcKCvk1ra74H8uojgqTLUav1fnVAqLEA,29433
  mdbq/selenium/__init__.py,sha256=AKzeEceqZyvqn2dEDoJSzDQnbuENkJSHAlbHAD0u0ZI,10
  mdbq/selenium/get_driver.py,sha256=1NTlVUE6QsyjTrVVVqTO2LOnYf578ccFWlWnvIXGtic,20903
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
- mdbq-4.2.16.dist-info/METADATA,sha256=Sw5C9lM-g2nAMhnhKfcBZ6Wzv_xiZpaVVYRZJ6HOLCo,364
- mdbq-4.2.16.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mdbq-4.2.16.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-4.2.16.dist-info/RECORD,,
+ mdbq-4.2.18.dist-info/METADATA,sha256=Q9D0fa9o6ntrfBB3ainng32katjWm5ocyIKBwzu2wd0,364
+ mdbq-4.2.18.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ mdbq-4.2.18.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-4.2.18.dist-info/RECORD,,
mdbq-4.2.16.dist-info/WHEEL → mdbq-4.2.18.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (75.8.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
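
As a cross-check, the RECORD entries above follow the standard wheel convention of urlsafe base64-encoded SHA-256 digests with the padding stripped. A small verification sketch (the local wheel path below is illustrative):

# Recompute a RECORD-style hash for one file inside the downloaded wheel.
import base64
import hashlib
import zipfile

wheel_path = 'mdbq-4.2.18-py3-none-any.whl'  # illustrative local path
with zipfile.ZipFile(wheel_path) as wheel:
    data = wheel.read('mdbq/__version__.py')

digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=').decode()
print(f"mdbq/__version__.py,sha256={digest},{len(data)}")
# Should match the RECORD line:
# mdbq/__version__.py,sha256=2tQxy-1_HWm0n0hYutPndg9tCuA5LXYgZhUUGDKlL_E,18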