mdbq 4.0.80__tar.gz → 4.0.81__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mdbq-4.0.80 → mdbq-4.0.81}/PKG-INFO +1 -1
- mdbq-4.0.81/mdbq/__version__.py +1 -0
- mdbq-4.0.81/mdbq/route/__init__.py +1 -0
- mdbq-4.0.81/mdbq/route/analytics.py +618 -0
- mdbq-4.0.81/mdbq/route/example.py +378 -0
- mdbq-4.0.81/mdbq/route/monitor.py +691 -0
- mdbq-4.0.81/mdbq/route/routes.py +576 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq.egg-info/PKG-INFO +1 -1
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq.egg-info/SOURCES.txt +5 -0
- mdbq-4.0.80/mdbq/__version__.py +0 -1
- {mdbq-4.0.80 → mdbq-4.0.81}/README.txt +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/log/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/log/mylogger.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/myconf/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/myconf/myconf.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/deduplicator.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/mysql.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/s_query.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/unique_.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/mysql/uploader.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/download_sku_picture.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/error_handler.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/otk.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/pov_city.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/other/ua_sj.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/pbix/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/pbix/pbix_refresh.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/pbix/refresh_all.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/redis/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/redis/getredis.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/selenium/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/selenium/get_driver.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq/spider/__init__.py +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq.egg-info/dependency_links.txt +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/mdbq.egg-info/top_level.txt +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/setup.cfg +0 -0
- {mdbq-4.0.80 → mdbq-4.0.81}/setup.py +0 -0
@@ -0,0 +1 @@
|
|
1
|
+
VERSION = '4.0.81'
|
@@ -0,0 +1 @@
|
|
1
|
+
#
|
@@ -0,0 +1,618 @@
|
|
1
|
+
"""
|
2
|
+
路由监控数据分析工具
|
3
|
+
提供专业的监控数据查询、分析和报告功能
|
4
|
+
|
5
|
+
主要功能:
|
6
|
+
1. 实时监控数据查询
|
7
|
+
2. 访问趋势分析
|
8
|
+
3. 性能分析报告
|
9
|
+
4. 异常检测和告警
|
10
|
+
5. 用户行为分析
|
11
|
+
|
12
|
+
"""
|
13
|
+
|
14
|
+
import os
import json
import pymysql
from contextlib import contextmanager
from datetime import datetime, timedelta
from typing import Dict, Any, List, Optional

from dbutils.pooled_db import PooledDB

from mdbq.myconf import myconf
|
21
|
+
|
22
|
+
|
23
|
+
class MonitorAnalytics:
    """Analytics over the ``api_request_logs`` monitoring table.

    Provides real-time metric queries, traffic-trend analysis, endpoint
    performance analysis, user-behaviour analysis, performance alerting
    and a daily-report generator.

    Every public method returns a plain ``dict``.  On any failure the
    method returns ``{'error': <message>}`` instead of raising, so
    callers (typically HTTP handlers) never see an exception.
    """

    def __init__(self):
        """Create the instance and eagerly initialise the DB pool."""
        self.init_database_pool()

    def init_database_pool(self):
        """Initialise the MySQL connection pool.

        Credentials are read from the ``[mysql]`` section of
        ``~/spd.txt`` via :class:`myconf.ConfigParser`.
        """
        dir_path = os.path.expanduser("~")
        config_file = os.path.join(dir_path, 'spd.txt')
        parser = myconf.ConfigParser()

        host, port, username, password = parser.get_section_values(
            file_path=config_file,
            section='mysql',
            keys=['host', 'port', 'username', 'password'],
        )

        self.pool = PooledDB(
            creator=pymysql,
            maxconnections=5,   # enough connections to avoid contention
            mincached=2,        # minimum idle connections kept ready
            maxcached=5,        # maximum idle connections retained
            blocking=True,
            host=host,
            port=int(port),
            user=username,
            password=password,
            ping=1,
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor,  # rows come back as dicts
            # connection timeout settings
            connect_timeout=10,
            read_timeout=30,
            write_timeout=30
        )

    @contextmanager
    def _cursor(self):
        """Yield a pooled dict-cursor and always return the connection.

        Factors out the acquire/release boilerplate that was previously
        duplicated verbatim in every query method.
        """
        connection = self.pool.connection()
        try:
            with connection.cursor() as cursor:
                yield cursor
        finally:
            connection.close()

    def get_realtime_metrics(self) -> Dict[str, Any]:
        """Return real-time monitoring metrics for the last hour/day.

        Returns:
            dict with keys ``realtime_metrics``, ``top_endpoints``,
            ``slow_requests``, ``error_requests`` and ``timestamp``,
            or ``{'error': ...}`` on failure.
        """
        try:
            with self._cursor() as cursor:
                now = datetime.now()
                last_hour = now - timedelta(hours=1)
                last_day = now - timedelta(days=1)

                # Request statistics for the last hour.
                cursor.execute("""
                    SELECT
                        COUNT(*) as requests_last_hour,
                        COUNT(DISTINCT client_ip) as unique_ips_last_hour,
                        AVG(process_time) as avg_response_time,
                        MAX(process_time) as max_response_time,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as error_count,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
                        SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
                        SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests
                    FROM api_request_logs
                    WHERE timestamp >= %s
                """, (last_hour,))

                hourly_stats = cursor.fetchone() or {}

                # 24-hour comparison figures.
                cursor.execute("""
                    SELECT
                        COUNT(*) as requests_last_day,
                        COUNT(DISTINCT client_ip) as unique_ips_last_day,
                        AVG(process_time) as avg_response_time_day
                    FROM api_request_logs
                    WHERE timestamp >= %s
                """, (last_day,))

                daily_stats = cursor.fetchone() or {}

                # Hottest endpoints in the last hour.
                cursor.execute("""
                    SELECT endpoint, COUNT(*) as request_count,
                           AVG(process_time) as avg_time
                    FROM api_request_logs
                    WHERE timestamp >= %s AND endpoint IS NOT NULL
                    GROUP BY endpoint
                    ORDER BY request_count DESC
                    LIMIT 5
                """, (last_hour,))

                top_endpoints = cursor.fetchall()

                # Slow requests (> 5000ms) in the last hour.
                cursor.execute("""
                    SELECT endpoint, process_time, client_ip, timestamp
                    FROM api_request_logs
                    WHERE timestamp >= %s AND process_time > 5000
                    ORDER BY process_time DESC
                    LIMIT 10
                """, (last_hour,))

                slow_requests = cursor.fetchall()

                # Error responses (status >= 400) in the last hour.
                cursor.execute("""
                    SELECT endpoint, response_status, COUNT(*) as error_count
                    FROM api_request_logs
                    WHERE timestamp >= %s AND response_status >= 400
                    GROUP BY endpoint, response_status
                    ORDER BY error_count DESC
                    LIMIT 10
                """, (last_hour,))

                error_requests = cursor.fetchall()

                return {
                    'realtime_metrics': {
                        'requests_per_hour': hourly_stats.get('requests_last_hour', 0),
                        'requests_per_day': daily_stats.get('requests_last_day', 0),
                        'unique_ips_hour': hourly_stats.get('unique_ips_last_hour', 0),
                        'unique_ips_day': daily_stats.get('unique_ips_last_day', 0),
                        # `or 0` guards against NULL aggregates on empty tables
                        'avg_response_time': round(hourly_stats.get('avg_response_time', 0) or 0, 2),
                        'max_response_time': round(hourly_stats.get('max_response_time', 0) or 0, 2),
                        'error_rate': round(hourly_stats.get('error_rate', 0) or 0, 2),
                        'error_count': hourly_stats.get('error_count', 0),
                        'bot_requests': hourly_stats.get('bot_requests', 0),
                        'mobile_requests': hourly_stats.get('mobile_requests', 0)
                    },
                    'top_endpoints': top_endpoints,
                    'slow_requests': slow_requests,
                    'error_requests': error_requests,
                    'timestamp': now.isoformat()
                }

        except Exception as e:
            return {'error': str(e)}

    def get_traffic_trend(self, days: int = 7) -> Dict[str, Any]:
        """Return traffic-trend aggregates over the last *days* days.

        Args:
            days: size of the analysis window, ending today.

        Returns:
            dict with ``period``, ``hourly_data``, ``daily_data``,
            ``weekly_pattern`` and ``hourly_pattern`` keys, or
            ``{'error': ...}`` on failure.
        """
        try:
            with self._cursor() as cursor:
                end_date = datetime.now().date()
                start_date = end_date - timedelta(days=days)

                # Per-hour buckets across the window.
                cursor.execute("""
                    SELECT
                        DATE(timestamp) as date,
                        HOUR(timestamp) as hour,
                        COUNT(*) as requests,
                        COUNT(DISTINCT client_ip) as unique_ips,
                        AVG(process_time) as avg_response_time,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                    GROUP BY DATE(timestamp), HOUR(timestamp)
                    ORDER BY date, hour
                """, (start_date, end_date))

                hourly_data = cursor.fetchall()

                # Per-day buckets.
                cursor.execute("""
                    SELECT
                        DATE(timestamp) as date,
                        COUNT(*) as requests,
                        COUNT(DISTINCT client_ip) as unique_ips,
                        AVG(process_time) as avg_response_time,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
                        SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
                        SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                    GROUP BY DATE(timestamp)
                    ORDER BY date
                """, (start_date, end_date))

                daily_data = cursor.fetchall()

                # Day-of-week pattern.
                cursor.execute("""
                    SELECT
                        DAYOFWEEK(timestamp) as day_of_week,
                        DAYNAME(timestamp) as day_name,
                        COUNT(*) as total_requests,
                        AVG(process_time) as avg_response_time
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                    GROUP BY DAYOFWEEK(timestamp), DAYNAME(timestamp)
                    ORDER BY day_of_week
                """, (start_date, end_date))

                weekly_pattern = cursor.fetchall()

                # Hour-of-day pattern.
                cursor.execute("""
                    SELECT
                        HOUR(timestamp) as hour,
                        COUNT(*) as total_requests,
                        AVG(process_time) as avg_response_time
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                    GROUP BY HOUR(timestamp)
                    ORDER BY hour
                """, (start_date, end_date))

                hourly_pattern = cursor.fetchall()

                return {
                    'period': f'{start_date} to {end_date}',
                    'hourly_data': hourly_data,
                    'daily_data': daily_data,
                    'weekly_pattern': weekly_pattern,
                    'hourly_pattern': hourly_pattern
                }

        except Exception as e:
            return {'error': str(e)}

    def get_endpoint_analysis(self, days: int = 7) -> Dict[str, Any]:
        """Return per-endpoint performance statistics for the window.

        Args:
            days: size of the analysis window, ending today.

        Returns:
            dict with ``period``, ``all_endpoints``,
            ``slowest_endpoints``, ``error_prone_endpoints`` and
            ``popular_endpoints`` keys, or ``{'error': ...}`` on failure.
        """
        try:
            with self._cursor() as cursor:
                end_date = datetime.now().date()
                start_date = end_date - timedelta(days=days)

                # Aggregate performance stats per endpoint.
                cursor.execute("""
                    SELECT
                        endpoint,
                        COUNT(*) as total_requests,
                        AVG(process_time) as avg_response_time,
                        MIN(process_time) as min_response_time,
                        MAX(process_time) as max_response_time,
                        STDDEV(process_time) as response_time_stddev,
                        COUNT(DISTINCT client_ip) as unique_users,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as error_count,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
                        SUM(request_size) as total_request_size,
                        SUM(response_size) as total_response_size
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                        AND endpoint IS NOT NULL
                    GROUP BY endpoint
                    ORDER BY total_requests DESC
                """, (start_date, end_date))

                endpoint_stats = cursor.fetchall()

                # Slowest endpoints (NULL avg times filtered out first).
                slowest_endpoints = sorted(
                    [ep for ep in endpoint_stats if ep['avg_response_time']],
                    key=lambda x: x['avg_response_time'] or 0,
                    reverse=True
                )[:10]

                # Endpoints with the highest error rates.
                error_prone_endpoints = sorted(
                    [ep for ep in endpoint_stats if (ep['error_rate'] or 0) > 0],
                    key=lambda x: x['error_rate'] or 0,
                    reverse=True
                )[:10]

                # Most requested endpoints (stats already sorted by volume).
                popular_endpoints = endpoint_stats[:10]

                return {
                    'period': f'{start_date} to {end_date}',
                    'all_endpoints': endpoint_stats,
                    'slowest_endpoints': slowest_endpoints,
                    'error_prone_endpoints': error_prone_endpoints,
                    'popular_endpoints': popular_endpoints
                }

        except Exception as e:
            return {'error': str(e)}

    def get_user_behavior_analysis(self, days: int = 7) -> Dict[str, Any]:
        """Return user-behaviour aggregates for the window.

        Args:
            days: size of the analysis window, ending today.  Note the
                suspicious-activity scan always covers only the last hour.

        Returns:
            dict with ``period``, ``ip_analysis``, ``device_statistics``,
            ``suspicious_activity`` and ``session_analysis`` keys, or
            ``{'error': ...}`` on failure.
        """
        try:
            with self._cursor() as cursor:
                end_date = datetime.now().date()
                start_date = end_date - timedelta(days=days)

                # Per-IP access patterns (only IPs with >= 10 requests).
                cursor.execute("""
                    SELECT
                        client_ip,
                        COUNT(*) as total_requests,
                        COUNT(DISTINCT endpoint) as unique_endpoints,
                        COUNT(DISTINCT DATE(timestamp)) as active_days,
                        MIN(timestamp) as first_access,
                        MAX(timestamp) as last_access,
                        AVG(process_time) as avg_response_time,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
                        SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
                        user_agent
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                    GROUP BY client_ip, user_agent
                    HAVING total_requests >= 10
                    ORDER BY total_requests DESC
                    LIMIT 50
                """, (start_date, end_date))

                ip_analysis = cursor.fetchall()

                # Browser/OS breakdown.
                cursor.execute("""
                    SELECT
                        browser_name,
                        os_name,
                        COUNT(*) as request_count,
                        COUNT(DISTINCT client_ip) as unique_users
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                        AND browser_name != 'Unknown'
                    GROUP BY browser_name, os_name
                    ORDER BY request_count DESC
                """, (start_date, end_date))

                device_stats = cursor.fetchall()

                # Suspicious-activity detection over the last hour only.
                cursor.execute("""
                    SELECT
                        client_ip,
                        COUNT(*) as requests_per_hour,
                        COUNT(DISTINCT endpoint) as endpoints_accessed,
                        SUM(CASE WHEN response_status = 404 THEN 1 ELSE 0 END) as not_found_errors,
                        SUM(CASE WHEN response_status = 403 THEN 1 ELSE 0 END) as forbidden_errors,
                        MAX(is_bot) as is_bot
                    FROM api_request_logs
                    WHERE timestamp >= %s
                    GROUP BY client_ip
                    HAVING requests_per_hour > 100
                        OR not_found_errors > 10
                        OR forbidden_errors > 5
                    ORDER BY requests_per_hour DESC
                """, (datetime.now() - timedelta(hours=1),))

                suspicious_activity = cursor.fetchall()

                # Session analysis (sessions with >= 5 requests).
                cursor.execute("""
                    SELECT
                        session_id,
                        COUNT(*) as session_requests,
                        COUNT(DISTINCT endpoint) as endpoints_in_session,
                        TIMESTAMPDIFF(MINUTE, MIN(timestamp), MAX(timestamp)) as session_duration,
                        MIN(timestamp) as session_start,
                        MAX(timestamp) as session_end
                    FROM api_request_logs
                    WHERE DATE(timestamp) BETWEEN %s AND %s
                        AND session_id IS NOT NULL
                    GROUP BY session_id
                    HAVING session_requests >= 5
                    ORDER BY session_duration DESC
                    LIMIT 20
                """, (start_date, end_date))

                session_analysis = cursor.fetchall()

                return {
                    'period': f'{start_date} to {end_date}',
                    'ip_analysis': ip_analysis,
                    'device_statistics': device_stats,
                    'suspicious_activity': suspicious_activity,
                    'session_analysis': session_analysis
                }

        except Exception as e:
            return {'error': str(e)}

    def get_performance_alerts(self) -> Dict[str, Any]:
        """Scan the last hour of logs and return triggered alerts.

        Alert types: SLOW_RESPONSE (avg > 3000ms), HIGH_ERROR_RATE
        (> 10% with >= 10 requests), HIGH_TRAFFIC (> 500 requests/hour
        from one IP) and HIGH_SYSTEM_LOAD (> 10000 requests/hour total).

        Returns:
            dict with ``alerts``, ``alert_count``,
            ``high_severity_count`` and ``timestamp`` keys, or
            ``{'error': ...}`` on failure.
        """
        try:
            with self._cursor() as cursor:
                now = datetime.now()
                last_hour = now - timedelta(hours=1)

                alerts = []

                # Abnormal average response times per endpoint.
                cursor.execute("""
                    SELECT endpoint, AVG(process_time) as avg_time
                    FROM api_request_logs
                    WHERE timestamp >= %s AND process_time IS NOT NULL
                    GROUP BY endpoint
                    HAVING avg_time > 3000
                    ORDER BY avg_time DESC
                """, (last_hour,))

                slow_endpoints = cursor.fetchall()
                for endpoint in slow_endpoints:
                    alerts.append({
                        'type': 'SLOW_RESPONSE',
                        'severity': 'HIGH' if (endpoint['avg_time'] or 0) > 5000 else 'MEDIUM',
                        'message': f"端点 {endpoint['endpoint']} 平均响应时间 {endpoint['avg_time']:.0f}ms",
                        'timestamp': now.isoformat()
                    })

                # Abnormal error rates per endpoint.
                cursor.execute("""
                    SELECT
                        endpoint,
                        COUNT(*) as total,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as errors,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate
                    FROM api_request_logs
                    WHERE timestamp >= %s
                    GROUP BY endpoint
                    HAVING total >= 10 AND error_rate > 10
                    ORDER BY error_rate DESC
                """, (last_hour,))

                error_endpoints = cursor.fetchall()
                for endpoint in error_endpoints:
                    alerts.append({
                        'type': 'HIGH_ERROR_RATE',
                        'severity': 'HIGH' if (endpoint['error_rate'] or 0) > 20 else 'MEDIUM',
                        'message': f"端点 {endpoint['endpoint']} 错误率 {endpoint['error_rate']:.1f}%",
                        'timestamp': now.isoformat()
                    })

                # Abnormally high traffic from single IPs.
                cursor.execute("""
                    SELECT
                        client_ip,
                        COUNT(*) as request_count
                    FROM api_request_logs
                    WHERE timestamp >= %s
                    GROUP BY client_ip
                    HAVING request_count > 500
                    ORDER BY request_count DESC
                """, (last_hour,))

                high_traffic_ips = cursor.fetchall()
                for ip_data in high_traffic_ips:
                    alerts.append({
                        'type': 'HIGH_TRAFFIC',
                        'severity': 'MEDIUM',
                        'message': f"IP {ip_data['client_ip']} 请求量异常: {ip_data['request_count']} 次/小时",
                        'timestamp': now.isoformat()
                    })

                # Overall system load check.
                cursor.execute("""
                    SELECT COUNT(*) as total_requests
                    FROM api_request_logs
                    WHERE timestamp >= %s
                """, (last_hour,))

                total_requests = cursor.fetchone()['total_requests']
                if total_requests > 10000:  # more than 10k requests/hour
                    alerts.append({
                        'type': 'HIGH_SYSTEM_LOAD',
                        'severity': 'HIGH',
                        'message': f"系统负载异常: {total_requests} 请求/小时",
                        'timestamp': now.isoformat()
                    })

                return {
                    'alerts': alerts,
                    'alert_count': len(alerts),
                    'high_severity_count': len([a for a in alerts if a['severity'] == 'HIGH']),
                    'timestamp': now.isoformat()
                }

        except Exception as e:
            return {'error': str(e)}

    def generate_daily_report(self, target_date: Optional[datetime] = None) -> Dict[str, Any]:
        """Generate a full report for one calendar day.

        Args:
            target_date: day to report on; defaults to yesterday.

        Returns:
            dict with ``date``, ``summary``, ``top_endpoints``,
            ``error_breakdown``, ``hourly_distribution`` and
            ``generated_at`` keys, or ``{'error': ...}`` on failure.
        """
        if target_date is None:
            target_date = datetime.now().date() - timedelta(days=1)

        try:
            with self._cursor() as cursor:
                # Whole-day summary statistics.
                cursor.execute("""
                    SELECT
                        COUNT(*) as total_requests,
                        COUNT(DISTINCT client_ip) as unique_ips,
                        COUNT(DISTINCT endpoint) as unique_endpoints,
                        AVG(process_time) as avg_response_time,
                        MAX(process_time) as max_response_time,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) as total_errors,
                        SUM(CASE WHEN response_status >= 400 THEN 1 ELSE 0 END) / COUNT(*) * 100 as error_rate,
                        SUM(CASE WHEN is_bot = 1 THEN 1 ELSE 0 END) as bot_requests,
                        SUM(CASE WHEN is_mobile = 1 THEN 1 ELSE 0 END) as mobile_requests,
                        SUM(request_size) as total_request_size,
                        SUM(response_size) as total_response_size
                    FROM api_request_logs
                    WHERE DATE(timestamp) = %s
                """, (target_date,))

                daily_summary = cursor.fetchone()

                # Hottest endpoints of the day.
                cursor.execute("""
                    SELECT endpoint, COUNT(*) as requests, AVG(process_time) as avg_time
                    FROM api_request_logs
                    WHERE DATE(timestamp) = %s
                    GROUP BY endpoint
                    ORDER BY requests DESC
                    LIMIT 10
                """, (target_date,))

                top_endpoints = cursor.fetchall()

                # Error breakdown by status code.
                cursor.execute("""
                    SELECT response_status, COUNT(*) as count
                    FROM api_request_logs
                    WHERE DATE(timestamp) = %s AND response_status >= 400
                    GROUP BY response_status
                    ORDER BY count DESC
                """, (target_date,))

                error_breakdown = cursor.fetchall()

                # Traffic distribution by hour.
                cursor.execute("""
                    SELECT
                        HOUR(timestamp) as hour,
                        COUNT(*) as requests,
                        AVG(process_time) as avg_time
                    FROM api_request_logs
                    WHERE DATE(timestamp) = %s
                    GROUP BY HOUR(timestamp)
                    ORDER BY hour
                """, (target_date,))

                hourly_distribution = cursor.fetchall()

                return {
                    'date': target_date.isoformat(),
                    'summary': daily_summary,
                    'top_endpoints': top_endpoints,
                    'error_breakdown': error_breakdown,
                    'hourly_distribution': hourly_distribution,
                    'generated_at': datetime.now().isoformat()
                }

        except Exception as e:
            return {'error': str(e)}
|
590
|
+
|
591
|
+
|
592
|
+
# Global analytics instance.
# NOTE(review): created at import time, and MonitorAnalytics.__init__ builds
# a DB connection pool — so importing this module opens database connections.
analytics = MonitorAnalytics()

# Exported analysis functions: thin module-level wrappers that delegate to
# the shared ``analytics`` instance above.
def get_realtime_metrics():
    """Get real-time monitoring metrics."""
    return analytics.get_realtime_metrics()

def get_traffic_trend(days: int = 7):
    """Get traffic-trend analysis for the last *days* days."""
    return analytics.get_traffic_trend(days)

def get_endpoint_analysis(days: int = 7):
    """Get endpoint performance analysis for the last *days* days."""
    return analytics.get_endpoint_analysis(days)

def get_user_behavior_analysis(days: int = 7):
    """Get user-behaviour analysis for the last *days* days."""
    return analytics.get_user_behavior_analysis(days)

def get_performance_alerts():
    """Get performance alerts for the last hour."""
    return analytics.get_performance_alerts()

def generate_daily_report(target_date: Optional[datetime] = None):
    """Generate the daily report (defaults to yesterday)."""
    return analytics.generate_daily_report(target_date)
|