setu_trafficmonitor-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- setu_trafficmonitor-2.0.0.dist-info/LICENSE +21 -0
- setu_trafficmonitor-2.0.0.dist-info/METADATA +401 -0
- setu_trafficmonitor-2.0.0.dist-info/RECORD +42 -0
- setu_trafficmonitor-2.0.0.dist-info/WHEEL +5 -0
- setu_trafficmonitor-2.0.0.dist-info/top_level.txt +1 -0
- trafficmonitor/__init__.py +11 -0
- trafficmonitor/admin.py +217 -0
- trafficmonitor/analytics/__init__.py +0 -0
- trafficmonitor/analytics/enhanced_queries.py +286 -0
- trafficmonitor/analytics/serializers.py +238 -0
- trafficmonitor/analytics/tests.py +757 -0
- trafficmonitor/analytics/urls.py +18 -0
- trafficmonitor/analytics/views.py +694 -0
- trafficmonitor/apps.py +7 -0
- trafficmonitor/circuit_breaker.py +63 -0
- trafficmonitor/conf.py +154 -0
- trafficmonitor/dashboard_security.py +111 -0
- trafficmonitor/db_utils.py +37 -0
- trafficmonitor/exceptions.py +93 -0
- trafficmonitor/health.py +66 -0
- trafficmonitor/load_test.py +423 -0
- trafficmonitor/load_test_api.py +307 -0
- trafficmonitor/management/__init__.py +1 -0
- trafficmonitor/management/commands/__init__.py +1 -0
- trafficmonitor/management/commands/cleanup_request_logs.py +77 -0
- trafficmonitor/middleware.py +383 -0
- trafficmonitor/migrations/0001_initial.py +93 -0
- trafficmonitor/migrations/__init__.py +0 -0
- trafficmonitor/models.py +206 -0
- trafficmonitor/monitoring.py +104 -0
- trafficmonitor/permissions.py +64 -0
- trafficmonitor/security.py +180 -0
- trafficmonitor/settings_production.py +105 -0
- trafficmonitor/static/analytics/css/dashboard.css +99 -0
- trafficmonitor/static/analytics/js/dashboard-production.js +339 -0
- trafficmonitor/static/analytics/js/dashboard-v2.js +697 -0
- trafficmonitor/static/analytics/js/dashboard.js +693 -0
- trafficmonitor/tasks.py +137 -0
- trafficmonitor/templates/analytics/dashboard.html +500 -0
- trafficmonitor/tests.py +246 -0
- trafficmonitor/views.py +3 -0
- trafficmonitor/websocket_consumers.py +128 -0
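The listing above is a conventional Django app: a RequestLog model with migrations, request-logging middleware, a cleanup management command, background tasks, and a DRF analytics API with templates and static dashboards. As a rough orientation only, a host project would typically wire such an app in through its settings; the middleware class name below is a placeholder, since the listing does not show the contents of trafficmonitor/middleware.py.

# Sketch of a host project's settings.py (assumptions marked inline).
INSTALLED_APPS = [
    # ...existing apps...
    "trafficmonitor",  # top-level package from top_level.txt
]

MIDDLEWARE = [
    # ...existing middleware...
    # Hypothetical class name; the real one lives in trafficmonitor/middleware.py
    # but is not visible in this listing.
    "trafficmonitor.middleware.TrafficMonitorMiddleware",
]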
trafficmonitor/analytics/views.py
@@ -0,0 +1,694 @@
from datetime import datetime, timedelta
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.db.models import (
    Count, Avg, Max, Min, Q, F,
    CharField, Value, IntegerField
)
from django.db.models.functions import (
    TruncDate, TruncHour, TruncDay,
    Concat, Cast
)
from django.utils import timezone
from django.views.generic import TemplateView
from django.core.cache import cache
from django.http import JsonResponse
from django.views.decorators.cache import cache_page
from django.views.decorators.vary import vary_on_headers
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
import logging

from trafficmonitor.models import RequestLog
from trafficmonitor.permissions import HasTrafficMonitorAccess, HasTrafficMonitorAdminAccess
from trafficmonitor.conf import TrafficMonitorConfig
from trafficmonitor.monitoring import performance_timer, metrics

logger = logging.getLogger(__name__)


class RoleBasedAccessMixin:
    """
    Mixin for role-based access control using headers.
    If no role header exists, allow access. Only restrict if role header is present.
    """
    def dispatch(self, request, *args, **kwargs):
        role_header = request.META.get('HTTP_X_USER_ROLE')

        # If no role header, allow access
        if not role_header:
            return super().dispatch(request, *args, **kwargs)

        # If role header exists, check if it's authorized
        authorized_roles = ['admin', 'staff', 'manager']  # Configure as needed
        if role_header not in authorized_roles:
            from django.http import HttpResponseForbidden
            return HttpResponseForbidden("Access denied. Insufficient role permissions.")

        return super().dispatch(request, *args, **kwargs)
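As a quick illustration of the header check above (not part of the packaged file), a Django test-client call might look like the following; the /analytics/ path is an assumption, since the routing lives in trafficmonitor/analytics/urls.py.

# Illustrative only -- assumes the dashboard is routed at /analytics/.
from django.test import Client

client = Client()
# No X-User-Role header: the mixin lets the request through.
anonymous_response = client.get("/analytics/")
# Unauthorized role: the mixin returns HTTP 403.
forbidden_response = client.get("/analytics/", HTTP_X_USER_ROLE="guest")
# Authorized role ('admin', 'staff', or 'manager'): dispatched normally.
allowed_response = client.get("/analytics/", HTTP_X_USER_ROLE="admin")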
class AnalyticsDashboardView(RoleBasedAccessMixin, TemplateView):
    """
    Main analytics dashboard view with role-based access control.
    Displays enterprise-grade analytics for request logs.

    Access controlled by X-User-Role header (configurable roles).
    """
    template_name = 'analytics/dashboard.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Get date range from query params or use defaults
        date_range = self.request.GET.get('range', 'last_7_days')
        context['selected_range'] = date_range

        # Get filter parameters
        method_filter = self.request.GET.get('method', '')
        status_filter = self.request.GET.get('status', '')
        path_filter = self.request.GET.get('path', '')
        user_filter = self.request.GET.get('user', '')

        context['filters'] = {
            'method': method_filter,
            'status': status_filter,
            'path': path_filter,
            'user': user_filter,
        }

        return context
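The dashboard view only passes the query-string filters through to the template; a hedged example of a filtered request (same /analytics/ path assumption as above):

# Illustrative only -- not part of the packaged file.
from django.test import Client

response = Client().get(
    "/analytics/",
    {"range": "last_30_days", "method": "GET", "status": "500", "path": "/api/", "user": "42"},
    HTTP_X_USER_ROLE="admin",
)
# The template then receives context["selected_range"] == "last_30_days" and
# context["filters"] == {"method": "GET", "status": "500", "path": "/api/", "user": "42"}.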
class AnalyticsQueryHelper:
    """
    Helper class for analytics queries with optimized database operations.
    All queries use proper indexing and aggregation.
    """

    @staticmethod
    def get_date_range(range_type='last_7_days', start_date=None, end_date=None):
        """
        Get start and end datetime for a given range type.

        Args:
            range_type: 'today', 'yesterday', 'last_7_days', 'last_30_days', 'custom'
            start_date: For custom range (datetime object)
            end_date: For custom range (datetime object)

        Returns:
            Tuple of (start_datetime, end_datetime)
        """
        now = timezone.now()
        today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)

        if range_type == 'today':
            return today_start, now
        elif range_type == 'yesterday':
            yesterday_start = today_start - timedelta(days=1)
            return yesterday_start, today_start
        elif range_type == 'last_7_days':
            return now - timedelta(days=7), now
        elif range_type == 'last_30_days':
            return now - timedelta(days=30), now
        elif range_type == 'custom' and start_date and end_date:
            return start_date, end_date
        else:
            # Default to last 7 days
            return now - timedelta(days=7), now

    @staticmethod
    def get_base_queryset(start_date, end_date, method=None, status_code_range=None,
                          path_contains=None, user_id=None, operation_type=None):
        """
        Get base queryset with filters applied.
        Optimized for enterprise-level querying.

        Args:
            start_date: Start datetime
            end_date: End datetime
            method: HTTP method (GET, POST, etc.)
            status_code_range: Tuple of (min, max) status codes
            path_contains: String to filter paths
            user_id: User ID to filter (requested_user_id from header)
            operation_type: Operation type (READ, WRITE, DELETE)

        Returns:
            Filtered QuerySet
        """
        qs = RequestLog.objects.filter(
            timestamp__gte=start_date,
            timestamp__lte=end_date
        )

        if method:
            qs = qs.filter(method=method)

        if status_code_range:
            min_code, max_code = status_code_range
            qs = qs.filter(status_code__gte=min_code, status_code__lte=max_code)

        if path_contains:
            qs = qs.filter(path__icontains=path_contains)

        if user_id:
            qs = qs.filter(requested_user_id=user_id)

        if operation_type:
            qs = qs.filter(operation_type=operation_type)

        return qs

    @staticmethod
    def get_total_requests(start_date, end_date, **filters):
        """
        Get total request count for a date range.
        Uses COUNT aggregation - very fast with indexed timestamp.
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)
        return qs.count()

    @staticmethod
    def get_requests_by_status_code(start_date, end_date, **filters):
        """
        Get request counts grouped by status code.
        Optimized with values() + annotate() pattern.

        Returns:
            List of dicts: [{'status_code': 200, 'count': 150}, ...]
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.values('status_code').annotate(
            count=Count('id')
        ).order_by('-count')

        return list(results)

    @staticmethod
    def get_requests_by_method(start_date, end_date, **filters):
        """
        Get request counts grouped by HTTP method.

        Returns:
            List of dicts: [{'method': 'GET', 'count': 300}, ...]
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.values('method').annotate(
            count=Count('id')
        ).order_by('-count')

        return list(results)

    @staticmethod
    def get_top_endpoints(start_date, end_date, limit=10, **filters):
        """
        Get top API endpoints by request count.
        Includes average response time for each endpoint.

        Returns:
            List of dicts with path, count, avg_response_time
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.values('path').annotate(
            count=Count('id'),
            avg_response_time=Avg('response_time_ms'),
            min_response_time=Min('response_time_ms'),
            max_response_time=Max('response_time_ms')
        ).order_by('-count')[:limit]

        return list(results)

    @staticmethod
    def get_slowest_endpoints(start_date, end_date, limit=10, **filters):
        """
        Get slowest endpoints by average response time.
        Filters out None response times.

        Returns:
            List of dicts with path, avg_response_time, count
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.filter(
            response_time_ms__isnull=False
        ).values('path').annotate(
            avg_response_time=Avg('response_time_ms'),
            count=Count('id'),
            max_response_time=Max('response_time_ms')
        ).order_by('-avg_response_time')[:limit]

        return list(results)

    @staticmethod
    def get_top_ip_addresses(start_date, end_date, limit=10, **filters):
        """
        Get top IP addresses by request count.

        Returns:
            List of dicts: [{'ip_address': '1.2.3.4', 'count': 50}, ...]
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.filter(
            ip_address__isnull=False
        ).values('ip_address').annotate(
            count=Count('id')
        ).order_by('-count')[:limit]

        return list(results)

    @staticmethod
    def get_requests_over_time(start_date, end_date, granularity='day', **filters):
        """
        Get request counts over time with specified granularity.
        Uses TruncDate or TruncHour for efficient grouping.

        Args:
            granularity: 'hour' or 'day'

        Returns:
            List of dicts: [{'date': datetime, 'count': 100}, ...]
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        if granularity == 'hour':
            results = qs.annotate(
                period=TruncHour('timestamp')
            ).values('period').annotate(
                count=Count('id')
            ).order_by('period')
        else:
            results = qs.annotate(
                period=TruncDay('timestamp')
            ).values('period').annotate(
                count=Count('id')
            ).order_by('period')

        return list(results)

    @staticmethod
    def get_error_trend(start_date, end_date, **filters):
        """
        Get error trends over time (4xx and 5xx errors).
        Groups by day and separates client vs server errors.

        Returns:
            List of dicts with date, client_errors (4xx), server_errors (5xx)
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.annotate(
            period=TruncDay('timestamp')
        ).values('period').annotate(
            client_errors=Count('id', filter=Q(status_code__gte=400, status_code__lt=500)),
            server_errors=Count('id', filter=Q(status_code__gte=500)),
            total_errors=Count('id', filter=Q(status_code__gte=400))
        ).order_by('period')

        return list(results)

    @staticmethod
    def get_hourly_heatmap(start_date, end_date, **filters):
        """
        Get request volume per hour of day for heatmap visualization.
        Groups by hour (0-23) across all days in range.

        Returns:
            List of dicts: [{'hour': 0, 'count': 120}, ...]
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        results = qs.annotate(
            period=TruncHour('timestamp')
        ).values('period').annotate(
            count=Count('id')
        ).order_by('period')

        # Group by hour of day
        hourly_data = {}
        for item in results:
            hour = item['period'].hour
            hourly_data[hour] = hourly_data.get(hour, 0) + item['count']

        # Convert to list format
        return [{'hour': hour, 'count': count} for hour, count in sorted(hourly_data.items())]

    @staticmethod
    def get_status_code_summary(start_date, end_date, **filters):
        """
        Get summary of requests grouped by status code ranges.

        Returns:
            Dict with counts for success (2xx), redirect (3xx), client_error (4xx), server_error (5xx)
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        summary = qs.aggregate(
            success=Count('id', filter=Q(status_code__gte=200, status_code__lt=300)),
            redirect=Count('id', filter=Q(status_code__gte=300, status_code__lt=400)),
            client_error=Count('id', filter=Q(status_code__gte=400, status_code__lt=500)),
            server_error=Count('id', filter=Q(status_code__gte=500)),
            total=Count('id')
        )

        return summary

    @staticmethod
    def get_performance_summary(start_date, end_date, **filters):
        """
        Get performance metrics summary.

        Returns:
            Dict with avg_response_time, max_response_time, avg_query_count
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        summary = qs.aggregate(
            avg_response_time=Avg('response_time_ms', filter=Q(response_time_ms__isnull=False)),
            max_response_time=Max('response_time_ms'),
            min_response_time=Min('response_time_ms'),
            avg_query_count=Avg('query_count', filter=Q(query_count__isnull=False)),
            max_query_count=Max('query_count')
        )

        return summary

    @staticmethod
    def get_user_activity(start_date, end_date, limit=10, **filters):
        """
        Get top users by request count using requested_user_id from headers.

        Returns:
            List of dicts with user info and request count
        """
        qs = AnalyticsQueryHelper.get_base_queryset(start_date, end_date, **filters)

        # Users with IDs
        identified_users = qs.filter(
            requested_user_id__isnull=False
        ).values('requested_user_id').annotate(
            count=Count('id')
        ).order_by('-count')[:limit]

        # Anonymous requests count
        anonymous_count = qs.filter(requested_user_id__isnull=True).count()

        return {
            'identified': list(identified_users),
            'anonymous_count': anonymous_count
        }

    @staticmethod
    def get_comprehensive_analytics(start_date, end_date, **filters):
        """
        Get all analytics data in a single optimized set of queries.

        Returns:
            Dict containing all analytics data
        """
        # Get total counts for different periods
        now = timezone.now()
        today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)

        data = {
            # Total counts by period
            'totals': {
                'today': AnalyticsQueryHelper.get_total_requests(
                    today_start, now, **filters
                ),
                'last_7_days': AnalyticsQueryHelper.get_total_requests(
                    now - timedelta(days=7), now, **filters
                ),
                'last_30_days': AnalyticsQueryHelper.get_total_requests(
                    now - timedelta(days=30), now, **filters
                ),
                'selected_range': AnalyticsQueryHelper.get_total_requests(
                    start_date, end_date, **filters
                ),
            },

            # Status code breakdown
            'status_summary': AnalyticsQueryHelper.get_status_code_summary(
                start_date, end_date, **filters
            ),

            # Status codes detail
            'status_codes': AnalyticsQueryHelper.get_requests_by_status_code(
                start_date, end_date, **filters
            ),

            # HTTP methods
            'methods': AnalyticsQueryHelper.get_requests_by_method(
                start_date, end_date, **filters
            ),

            # Top endpoints
            'top_endpoints': AnalyticsQueryHelper.get_top_endpoints(
                start_date, end_date, limit=10, **filters
            ),

            # Slowest endpoints
            'slowest_endpoints': AnalyticsQueryHelper.get_slowest_endpoints(
                start_date, end_date, limit=10, **filters
            ),

            # Top IPs
            'top_ips': AnalyticsQueryHelper.get_top_ip_addresses(
                start_date, end_date, limit=10, **filters
            ),

            # Performance metrics
            'performance': AnalyticsQueryHelper.get_performance_summary(
                start_date, end_date, **filters
            ),

            # User activity
            'users': AnalyticsQueryHelper.get_user_activity(
                start_date, end_date, limit=10, **filters
            ),

            # Time series data
            'requests_over_time': AnalyticsQueryHelper.get_requests_over_time(
                start_date, end_date, granularity='day', **filters
            ),

            # Error trends
            'error_trend': AnalyticsQueryHelper.get_error_trend(
                start_date, end_date, **filters
            ),

            # Hourly heatmap
            'hourly_heatmap': AnalyticsQueryHelper.get_hourly_heatmap(
                start_date, end_date, **filters
            ),
        }

        return data
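For orientation, the helper can also be driven directly, for example from a Django shell; a small sketch using only the functions defined above:

# Illustrative only -- not part of the packaged file.
start, end = AnalyticsQueryHelper.get_date_range("last_30_days")

# One filtered queryset: server errors on POSTs under /api/.
error_posts = AnalyticsQueryHelper.get_base_queryset(
    start, end, method="POST", status_code_range=(500, 599), path_contains="/api/"
)
print(error_posts.count())  # single COUNT query against the timestamp range

# Or the full dashboard payload in one call.
report = AnalyticsQueryHelper.get_comprehensive_analytics(start, end, method="GET")
print(report["totals"]["last_30_days"], report["performance"]["avg_response_time"])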
@api_view(['GET'])
@permission_classes([HasTrafficMonitorAccess])
def analytics_overview_api(request):
    """
    DRF API endpoint for comprehensive analytics data.
    Returns enterprise-grade analytics with Read/Write insights.

    Requires X-User-Role header with authorized role.

    Query Parameters:
    - range: 'today', 'yesterday', 'last_7_days', 'last_30_days', 'custom'
    - start_date: For custom range (ISO format: 2024-01-01)
    - end_date: For custom range (ISO format: 2024-01-31)
    - method: Filter by HTTP method (GET, POST, etc.)
    - status: Filter by status code (200, 404, etc.)
    - path: Filter by path contains
    - user: Filter by requested_user_id
    - operation_type: Filter by operation (READ, WRITE, DELETE)

    Returns:
        JSON response with all analytics data including Read/Write metrics
    """
    # Parse query parameters
    range_type = request.GET.get('range', 'last_7_days')

    # Handle custom date range
    start_date = None
    end_date = None
    if range_type == 'custom':
        start_str = request.GET.get('start_date')
        end_str = request.GET.get('end_date')

        if start_str and end_str:
            try:
                start_date = timezone.make_aware(
                    datetime.strptime(start_str, '%Y-%m-%d')
                )
                end_date = timezone.make_aware(
                    datetime.strptime(end_str, '%Y-%m-%d').replace(
                        hour=23, minute=59, second=59
                    )
                )
            except ValueError:
                return Response(
                    {'error': 'Invalid date format. Use YYYY-MM-DD'},
                    status=status.HTTP_400_BAD_REQUEST
                )
        else:
            return Response(
                {'error': 'start_date and end_date required for custom range'},
                status=status.HTTP_400_BAD_REQUEST
            )

    # Get date range
    start_date, end_date = AnalyticsQueryHelper.get_date_range(
        range_type, start_date, end_date
    )

    # Build filters dict
    filters = {}
    if request.GET.get('method'):
        filters['method'] = request.GET.get('method')
    if request.GET.get('status'):
        try:
            filters['status_code_range'] = (
                int(request.GET.get('status')),
                int(request.GET.get('status'))
            )
        except ValueError:
            pass
    if request.GET.get('path'):
        filters['path_contains'] = request.GET.get('path')
    if request.GET.get('user'):
        filters['user_id'] = request.GET.get('user')
    if request.GET.get('operation_type'):
        filters['operation_type'] = request.GET.get('operation_type')

    # Get comprehensive analytics
    analytics_data = AnalyticsQueryHelper.get_comprehensive_analytics(
        start_date, end_date, **filters
    )

    # Add enhanced enterprise metrics
    from trafficmonitor.analytics.enhanced_queries import EnhancedAnalyticsQueries

    analytics_data['read_write_summary'] = EnhancedAnalyticsQueries.get_read_write_summary(
        start_date, end_date, **filters
    )
    analytics_data['read_write_over_time'] = EnhancedAnalyticsQueries.get_read_write_over_time(
        start_date, end_date, **filters
    )
    analytics_data['api_health'] = EnhancedAnalyticsQueries.get_api_health_metrics(
        start_date, end_date, **filters
    )
    analytics_data['endpoint_categories'] = EnhancedAnalyticsQueries.get_endpoint_category_breakdown(
        start_date, end_date, **filters
    )
    analytics_data['user_activity_detailed'] = EnhancedAnalyticsQueries.get_user_activity_detailed(
        start_date, end_date, limit=20, **filters
    )
    analytics_data['performance_outliers'] = EnhancedAnalyticsQueries.get_performance_outliers(
        start_date, end_date, **filters
    )
    analytics_data['throughput'] = EnhancedAnalyticsQueries.get_throughput_metrics(
        start_date, end_date, **filters
    )

    # Add metadata
    analytics_data['metadata'] = {
        'range_type': range_type,
        'start_date': start_date.isoformat(),
        'end_date': end_date.isoformat(),
        'filters': filters,
        'generated_at': timezone.now().isoformat(),
    }

    return Response(analytics_data, status=status.HTTP_200_OK)
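A hedged example of calling this endpoint over HTTP; the URL is an assumption (the actual route is declared in trafficmonitor/analytics/urls.py), while the query parameters and response keys come from the code above:

# Illustrative only -- not part of the packaged file.
import requests

resp = requests.get(
    "https://example.com/analytics/api/overview/",  # assumed path
    params={
        "range": "custom",
        "start_date": "2024-01-01",
        "end_date": "2024-01-31",
        "method": "GET",
        "path": "/api/",
    },
    headers={"X-User-Role": "admin"},
    timeout=10,
)
data = resp.json()
# e.g. data["totals"]["selected_range"], data["status_summary"]["server_error"],
# data["metadata"]["start_date"]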
@api_view(['GET'])
@permission_classes([HasTrafficMonitorAccess])
def analytics_chart_data_api(request, chart_type):
    """
    API endpoint for specific chart data.
    Allows fetching individual chart data without full analytics.

    Requires X-User-Role header with authorized role.

    Args:
        chart_type: 'time-series', 'status-codes', 'methods', 'endpoints',
                    'performance', 'heatmap', 'errors', 'read-write', 'api-health'

    Returns:
        JSON response with chart-specific data
    """
    # Get date range
    range_type = request.GET.get('range', 'last_7_days')
    start_date, end_date = AnalyticsQueryHelper.get_date_range(range_type)

    # Build filters
    filters = {}
    if request.GET.get('method'):
        filters['method'] = request.GET.get('method')
    if request.GET.get('path'):
        filters['path_contains'] = request.GET.get('path')

    # Import enhanced queries
    from trafficmonitor.analytics.enhanced_queries import EnhancedAnalyticsQueries

    # Route to appropriate query based on chart type
    chart_data_map = {
        'time-series': lambda: AnalyticsQueryHelper.get_requests_over_time(
            start_date, end_date, **filters
        ),
        'status-codes': lambda: AnalyticsQueryHelper.get_requests_by_status_code(
            start_date, end_date, **filters
        ),
        'methods': lambda: AnalyticsQueryHelper.get_requests_by_method(
            start_date, end_date, **filters
        ),
        'endpoints': lambda: AnalyticsQueryHelper.get_top_endpoints(
            start_date, end_date, **filters
        ),
        'performance': lambda: AnalyticsQueryHelper.get_slowest_endpoints(
            start_date, end_date, **filters
        ),
        'heatmap': lambda: AnalyticsQueryHelper.get_hourly_heatmap(
            start_date, end_date, **filters
        ),
        'errors': lambda: AnalyticsQueryHelper.get_error_trend(
            start_date, end_date, **filters
        ),
        'read-write': lambda: EnhancedAnalyticsQueries.get_read_write_over_time(
            start_date, end_date, **filters
        ),
        'api-health': lambda: EnhancedAnalyticsQueries.get_api_health_metrics(
            start_date, end_date, **filters
        ),
        'endpoint-categories': lambda: EnhancedAnalyticsQueries.get_endpoint_category_breakdown(
            start_date, end_date, **filters
        ),
        'throughput': lambda: EnhancedAnalyticsQueries.get_throughput_metrics(
            start_date, end_date, **filters
        ),
    }

    if chart_type not in chart_data_map:
        return Response(
            {'error': f'Invalid chart type: {chart_type}'},
            status=status.HTTP_400_BAD_REQUEST
        )

    data = chart_data_map[chart_type]()

    return Response({
        'chart_type': chart_type,
        'data': data,
        'metadata': {
            'start_date': start_date.isoformat(),
            'end_date': end_date.isoformat(),
        }
    }, status=status.HTTP_200_OK)
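And a matching sketch for the per-chart endpoint; chart_type must be one of the keys in chart_data_map above, and again the URL pattern is assumed rather than taken from the package's urls.py:

# Illustrative only -- not part of the packaged file.
import requests

resp = requests.get(
    "https://example.com/analytics/api/charts/status-codes/",  # assumed path
    params={"range": "last_7_days", "method": "GET"},
    headers={"X-User-Role": "staff"},
    timeout=10,
)
payload = resp.json()
# payload["chart_type"] == "status-codes"
# payload["data"] -> [{"status_code": 200, "count": 150}, ...]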