nginx-lens 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- commands/analyze.py +9 -3
- commands/diff.py +3 -2
- commands/graph.py +3 -2
- commands/health.py +22 -0
- commands/include.py +3 -2
- commands/logs.py +259 -19
- commands/resolve.py +26 -1
- commands/route.py +1 -0
- commands/syntax.py +1 -0
- commands/tree.py +3 -2
- exporter/csv.py +87 -0
- exporter/json_yaml.py +361 -0
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/METADATA +6 -1
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/RECORD +18 -16
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/WHEEL +0 -0
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/entry_points.txt +0 -0
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {nginx_lens-0.3.4.dist-info → nginx_lens-0.4.0.dist-info}/top_level.txt +0 -0
commands/analyze.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from rich.table import Table
|
|
@@ -9,6 +10,7 @@ from analyzer.unused import find_unused_variables
|
|
|
9
10
|
from parser.nginx_parser import parse_nginx_config
|
|
10
11
|
from analyzer.rewrite import find_rewrite_issues
|
|
11
12
|
from analyzer.dead_locations import find_dead_locations
|
|
13
|
+
from exporter.json_yaml import format_analyze_results, print_export
|
|
12
14
|
|
|
13
15
|
app = typer.Typer()
|
|
14
16
|
console = Console()
|
|
@@ -41,7 +43,11 @@ ISSUE_META = {
|
|
|
41
43
|
}
|
|
42
44
|
SEVERITY_COLOR = {"high": "red", "medium": "orange3", "low": "yellow"}
|
|
43
45
|
|
|
44
|
-
def analyze(
|
|
46
|
+
def analyze(
|
|
47
|
+
config_path: str = typer.Argument(..., help="Путь к nginx.conf"),
|
|
48
|
+
json: bool = typer.Option(False, "--json", help="Экспортировать результаты в JSON"),
|
|
49
|
+
yaml: bool = typer.Option(False, "--yaml", help="Экспортировать результаты в YAML"),
|
|
50
|
+
):
|
|
45
51
|
"""
|
|
46
52
|
Анализирует конфигурацию Nginx на типовые проблемы и best practices.
|
|
47
53
|
|
|
@@ -62,10 +68,10 @@ def analyze(config_path: str = typer.Argument(..., help="Путь к nginx.conf"
|
|
|
62
68
|
tree = parse_nginx_config(config_path)
|
|
63
69
|
except FileNotFoundError:
|
|
64
70
|
console.print(f"[red]Файл {config_path} не найден. Проверьте путь к конфигу.[/red]")
|
|
65
|
-
|
|
71
|
+
sys.exit(1)
|
|
66
72
|
except Exception as e:
|
|
67
73
|
console.print(f"[red]Ошибка при разборе {config_path}: {e}[/red]")
|
|
68
|
-
|
|
74
|
+
sys.exit(1)
|
|
69
75
|
conflicts = find_location_conflicts(tree)
|
|
70
76
|
dups = find_duplicate_directives(tree)
|
|
71
77
|
empties = find_empty_blocks(tree)
|
commands/diff.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from rich.table import Table
|
|
@@ -24,10 +25,10 @@ def diff(
|
|
|
24
25
|
lines2 = f2.readlines()
|
|
25
26
|
except FileNotFoundError as e:
|
|
26
27
|
console.print(f"[red]Файл {e.filename} не найден. Проверьте путь к конфигу.[/red]")
|
|
27
|
-
|
|
28
|
+
sys.exit(1)
|
|
28
29
|
except Exception as e:
|
|
29
30
|
console.print(f"[red]Ошибка при чтении файлов: {e}[/red]")
|
|
30
|
-
|
|
31
|
+
sys.exit(1)
|
|
31
32
|
maxlen = max(len(lines1), len(lines2))
|
|
32
33
|
# Выравниваем длины
|
|
33
34
|
lines1 += [''] * (maxlen - len(lines1))
|
commands/graph.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from parser.nginx_parser import parse_nginx_config
|
|
@@ -21,10 +22,10 @@ def graph(
|
|
|
21
22
|
tree = parse_nginx_config(config_path)
|
|
22
23
|
except FileNotFoundError:
|
|
23
24
|
console.print(f"[red]Файл {config_path} не найден. Проверьте путь к конфигу.[/red]")
|
|
24
|
-
|
|
25
|
+
sys.exit(1)
|
|
25
26
|
except Exception as e:
|
|
26
27
|
console.print(f"[red]Ошибка при разборе {config_path}: {e}[/red]")
|
|
27
|
-
|
|
28
|
+
sys.exit(1)
|
|
28
29
|
routes = []
|
|
29
30
|
# Для каждого server/location строим маршрут
|
|
30
31
|
def walk(d, chain, upstreams):
|
commands/health.py
CHANGED
|
@@ -4,6 +4,7 @@ from rich.console import Console
|
|
|
4
4
|
from rich.table import Table
|
|
5
5
|
from upstream_checker.checker import check_upstreams, resolve_upstreams
|
|
6
6
|
from parser.nginx_parser import parse_nginx_config
|
|
7
|
+
from exporter.json_yaml import format_health_results, print_export
|
|
7
8
|
|
|
8
9
|
app = typer.Typer()
|
|
9
10
|
console = Console()
|
|
@@ -15,6 +16,8 @@ def health(
|
|
|
15
16
|
mode: str = typer.Option("tcp", help="Режим проверки: tcp или http", case_sensitive=False),
|
|
16
17
|
resolve: bool = typer.Option(False, "--resolve", "-r", help="Показать резолвленные IP-адреса"),
|
|
17
18
|
max_workers: int = typer.Option(10, "--max-workers", "-w", help="Максимальное количество потоков для параллельной обработки"),
|
|
19
|
+
json: bool = typer.Option(False, "--json", help="Экспортировать результаты в JSON"),
|
|
20
|
+
yaml: bool = typer.Option(False, "--yaml", help="Экспортировать результаты в YAML"),
|
|
18
21
|
):
|
|
19
22
|
"""
|
|
20
23
|
Проверяет доступность upstream-серверов, определённых в nginx.conf. Выводит таблицу.
|
|
@@ -45,6 +48,25 @@ def health(
|
|
|
45
48
|
if resolve:
|
|
46
49
|
resolved_info = resolve_upstreams(upstreams, max_workers=max_workers)
|
|
47
50
|
|
|
51
|
+
# Экспорт в JSON/YAML
|
|
52
|
+
if json or yaml:
|
|
53
|
+
export_data = format_health_results(results, resolved_info if resolve else None)
|
|
54
|
+
format_type = 'json' if json else 'yaml'
|
|
55
|
+
print_export(export_data, format_type)
|
|
56
|
+
# Exit code остается прежним
|
|
57
|
+
for name, servers in results.items():
|
|
58
|
+
for srv in servers:
|
|
59
|
+
if not srv["healthy"]:
|
|
60
|
+
exit_code = 1
|
|
61
|
+
if resolve and name in resolved_info:
|
|
62
|
+
for resolved_srv in resolved_info[name]:
|
|
63
|
+
if resolved_srv["address"] == srv["address"]:
|
|
64
|
+
if any("invalid resolve" in r for r in resolved_srv["resolved"]):
|
|
65
|
+
exit_code = 1
|
|
66
|
+
break
|
|
67
|
+
sys.exit(exit_code)
|
|
68
|
+
|
|
69
|
+
# Обычный вывод в таблицу
|
|
48
70
|
table = Table(show_header=True, header_style="bold blue")
|
|
49
71
|
table.add_column("Address")
|
|
50
72
|
table.add_column("Status")
|
commands/include.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from rich.tree import Tree
|
|
@@ -22,10 +23,10 @@ def include_tree(
|
|
|
22
23
|
tree = build_include_tree(config_path)
|
|
23
24
|
except FileNotFoundError:
|
|
24
25
|
console.print(f"[red]Файл {config_path} не найден. Проверьте путь к конфигу.[/red]")
|
|
25
|
-
|
|
26
|
+
sys.exit(1)
|
|
26
27
|
except Exception as e:
|
|
27
28
|
console.print(f"[red]Ошибка при разборе {config_path}: {e}[/red]")
|
|
28
|
-
|
|
29
|
+
sys.exit(1)
|
|
29
30
|
rich_tree = Tree(f"[bold blue]{config_path}[/bold blue]")
|
|
30
31
|
def _add(node, t):
|
|
31
32
|
for k, v in t.items():
|
commands/logs.py
CHANGED
|
@@ -1,17 +1,36 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from rich.table import Table
|
|
4
5
|
import re
|
|
6
|
+
import gzip
|
|
7
|
+
from datetime import datetime, timedelta
|
|
5
8
|
from collections import Counter, defaultdict
|
|
9
|
+
from typing import Optional, List, Dict, Any
|
|
10
|
+
from exporter.json_yaml import format_logs_results, print_export
|
|
11
|
+
from exporter.csv import export_logs_to_csv
|
|
6
12
|
|
|
7
13
|
app = typer.Typer(help="Анализ access.log/error.log: топ-статусы, пути, IP, User-Agent, ошибки.")
|
|
8
14
|
console = Console()
|
|
9
15
|
|
|
10
|
-
|
|
16
|
+
# Улучшенный regex для парсинга nginx access log (поддерживает response time)
|
|
17
|
+
# Формат: IP - - [timestamp] "method path protocol" status size "referer" "user-agent" "response_time"
|
|
18
|
+
log_line_re = re.compile(
|
|
19
|
+
r'(?P<ip>\S+) \S+ \S+ \[(?P<time>[^\]]+)\] "(?P<method>\S+) (?P<path>\S+) [^\"]+" '
|
|
20
|
+
r'(?P<status>\d{3}) (?P<size>\S+) "(?P<referer>[^"]*)" "(?P<user_agent>[^"]*)"'
|
|
21
|
+
r'(?: "(?P<response_time>[^"]+)")?'
|
|
22
|
+
)
|
|
11
23
|
|
|
12
24
|
def logs(
|
|
13
25
|
log_path: str = typer.Argument(..., help="Путь к access.log или error.log"),
|
|
14
|
-
top: int = typer.Option(10, help="Сколько топ-значений выводить")
|
|
26
|
+
top: int = typer.Option(10, help="Сколько топ-значений выводить"),
|
|
27
|
+
json: bool = typer.Option(False, "--json", help="Экспортировать результаты в JSON"),
|
|
28
|
+
yaml: bool = typer.Option(False, "--yaml", help="Экспортировать результаты в YAML"),
|
|
29
|
+
csv: bool = typer.Option(False, "--csv", help="Экспортировать результаты в CSV"),
|
|
30
|
+
since: Optional[str] = typer.Option(None, "--since", help="Фильтр: с даты (формат: YYYY-MM-DD или YYYY-MM-DD HH:MM:SS)"),
|
|
31
|
+
until: Optional[str] = typer.Option(None, "--until", help="Фильтр: до даты (формат: YYYY-MM-DD или YYYY-MM-DD HH:MM:SS)"),
|
|
32
|
+
status: Optional[str] = typer.Option(None, "--status", help="Фильтр по статусам (например: 404,500)"),
|
|
33
|
+
detect_anomalies: bool = typer.Option(False, "--detect-anomalies", help="Обнаруживать аномалии в логах"),
|
|
15
34
|
):
|
|
16
35
|
"""
|
|
17
36
|
Анализирует access.log/error.log.
|
|
@@ -22,40 +41,261 @@ def logs(
|
|
|
22
41
|
- Топ IP-адресов
|
|
23
42
|
- Топ User-Agent
|
|
24
43
|
- Топ путей с ошибками 404/500
|
|
44
|
+
- Анализ времени ответа (если доступно)
|
|
45
|
+
- Обнаружение аномалий
|
|
25
46
|
|
|
26
47
|
Пример:
|
|
27
48
|
nginx-lens logs /var/log/nginx/access.log --top 20
|
|
49
|
+
nginx-lens logs /var/log/nginx/access.log --since "2024-01-01" --status 404,500
|
|
50
|
+
nginx-lens logs /var/log/nginx/access.log.gz --detect-anomalies --json
|
|
28
51
|
"""
|
|
52
|
+
# Парсинг фильтров
|
|
53
|
+
status_filter = None
|
|
54
|
+
if status:
|
|
55
|
+
status_filter = set(s.strip() for s in status.split(','))
|
|
56
|
+
|
|
57
|
+
since_dt = None
|
|
58
|
+
if since:
|
|
59
|
+
try:
|
|
60
|
+
if len(since) == 10: # YYYY-MM-DD
|
|
61
|
+
since_dt = datetime.strptime(since, "%Y-%m-%d")
|
|
62
|
+
else: # YYYY-MM-DD HH:MM:SS
|
|
63
|
+
since_dt = datetime.strptime(since, "%Y-%m-%d %H:%M:%S")
|
|
64
|
+
except ValueError:
|
|
65
|
+
console.print(f"[red]Неверный формат даты для --since: {since}. Используйте YYYY-MM-DD или YYYY-MM-DD HH:MM:SS[/red]")
|
|
66
|
+
sys.exit(1)
|
|
67
|
+
|
|
68
|
+
until_dt = None
|
|
69
|
+
if until:
|
|
70
|
+
try:
|
|
71
|
+
if len(until) == 10: # YYYY-MM-DD
|
|
72
|
+
until_dt = datetime.strptime(until, "%Y-%m-%d") + timedelta(days=1)
|
|
73
|
+
else: # YYYY-MM-DD HH:MM:SS
|
|
74
|
+
until_dt = datetime.strptime(until, "%Y-%m-%d %H:%M:%S")
|
|
75
|
+
except ValueError:
|
|
76
|
+
console.print(f"[red]Неверный формат даты для --until: {until}. Используйте YYYY-MM-DD или YYYY-MM-DD HH:MM:SS[/red]")
|
|
77
|
+
sys.exit(1)
|
|
78
|
+
|
|
79
|
+
# Чтение лога (поддержка gzip)
|
|
29
80
|
try:
|
|
30
|
-
|
|
31
|
-
|
|
81
|
+
if log_path.endswith('.gz'):
|
|
82
|
+
with gzip.open(log_path, 'rt', encoding='utf-8', errors='ignore') as f:
|
|
83
|
+
lines = list(f)
|
|
84
|
+
else:
|
|
85
|
+
with open(log_path, 'r', encoding='utf-8', errors='ignore') as f:
|
|
86
|
+
lines = list(f)
|
|
32
87
|
except FileNotFoundError:
|
|
33
88
|
console.print(f"[red]Файл {log_path} не найден. Проверьте путь к логу.[/red]")
|
|
34
|
-
|
|
89
|
+
sys.exit(1)
|
|
35
90
|
except Exception as e:
|
|
36
91
|
console.print(f"[red]Ошибка при чтении {log_path}: {e}[/red]")
|
|
37
|
-
|
|
92
|
+
sys.exit(1)
|
|
38
93
|
status_counter = Counter()
|
|
39
94
|
path_counter = Counter()
|
|
40
95
|
ip_counter = Counter()
|
|
41
96
|
user_agent_counter = Counter()
|
|
42
97
|
errors = defaultdict(list)
|
|
98
|
+
response_times = []
|
|
99
|
+
log_entries = []
|
|
100
|
+
|
|
101
|
+
# Парсинг nginx формата времени: 01/Jan/2024:00:00:00 +0000
|
|
102
|
+
nginx_time_format = "%d/%b/%Y:%H:%M:%S %z"
|
|
103
|
+
|
|
43
104
|
for line in lines:
|
|
44
105
|
m = log_line_re.search(line)
|
|
45
106
|
if m:
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
107
|
+
try:
|
|
108
|
+
# Парсинг времени
|
|
109
|
+
time_str = m.group('time')
|
|
110
|
+
log_time = datetime.strptime(time_str, nginx_time_format)
|
|
111
|
+
|
|
112
|
+
# Убираем timezone для сравнения (приводим к naive datetime)
|
|
113
|
+
if log_time.tzinfo:
|
|
114
|
+
log_time = log_time.replace(tzinfo=None)
|
|
115
|
+
|
|
116
|
+
# Фильтрация по времени
|
|
117
|
+
if since_dt and log_time < since_dt:
|
|
118
|
+
continue
|
|
119
|
+
if until_dt and log_time > until_dt:
|
|
120
|
+
continue
|
|
121
|
+
|
|
122
|
+
ip = m.group('ip')
|
|
123
|
+
path = m.group('path')
|
|
124
|
+
status = m.group('status')
|
|
125
|
+
method = m.group('method')
|
|
126
|
+
user_agent = m.group('user_agent') or ''
|
|
127
|
+
response_time_str = m.group('response_time')
|
|
128
|
+
|
|
129
|
+
# Фильтрация по статусам
|
|
130
|
+
if status_filter and status not in status_filter:
|
|
131
|
+
continue
|
|
132
|
+
|
|
133
|
+
# Сбор данных
|
|
134
|
+
entry = {
|
|
135
|
+
'time': log_time,
|
|
136
|
+
'ip': ip,
|
|
137
|
+
'path': path,
|
|
138
|
+
'status': status,
|
|
139
|
+
'method': method,
|
|
140
|
+
'user_agent': user_agent,
|
|
141
|
+
'response_time': float(response_time_str) if response_time_str else None
|
|
142
|
+
}
|
|
143
|
+
log_entries.append(entry)
|
|
144
|
+
|
|
145
|
+
status_counter[status] += 1
|
|
146
|
+
path_counter[path] += 1
|
|
147
|
+
ip_counter[ip] += 1
|
|
148
|
+
|
|
149
|
+
if user_agent:
|
|
150
|
+
user_agent_counter[user_agent] += 1
|
|
151
|
+
|
|
152
|
+
if status.startswith('4') or status.startswith('5'):
|
|
153
|
+
errors[status].append(path)
|
|
154
|
+
|
|
155
|
+
if response_time_str:
|
|
156
|
+
try:
|
|
157
|
+
response_times.append(float(response_time_str))
|
|
158
|
+
except ValueError:
|
|
159
|
+
pass
|
|
160
|
+
except (ValueError, AttributeError) as e:
|
|
161
|
+
# Пропускаем строки с неверным форматом
|
|
162
|
+
continue
|
|
163
|
+
|
|
164
|
+
# Проверка на пустые результаты
|
|
165
|
+
if not log_entries:
|
|
166
|
+
if json or yaml or csv:
|
|
167
|
+
empty_data = {
|
|
168
|
+
"timestamp": __import__('datetime').datetime.now().isoformat(),
|
|
169
|
+
"summary": {"total_requests": 0},
|
|
170
|
+
"message": "Нет записей, соответствующих фильтрам"
|
|
171
|
+
}
|
|
172
|
+
if csv:
|
|
173
|
+
print("Category,Type,Value,Count\nNo Data,,,,No entries match filters")
|
|
174
|
+
else:
|
|
175
|
+
format_type = 'json' if json else 'yaml'
|
|
176
|
+
print_export(empty_data, format_type)
|
|
177
|
+
else:
|
|
178
|
+
console.print("[yellow]Нет записей, соответствующих указанным фильтрам.[/yellow]")
|
|
179
|
+
return
|
|
180
|
+
|
|
181
|
+
# Анализ времени ответа
|
|
182
|
+
response_time_stats = {}
|
|
183
|
+
if response_times:
|
|
184
|
+
response_time_stats = {
|
|
185
|
+
"min": min(response_times),
|
|
186
|
+
"max": max(response_times),
|
|
187
|
+
"avg": sum(response_times) / len(response_times),
|
|
188
|
+
"median": sorted(response_times)[len(response_times) // 2],
|
|
189
|
+
"p95": sorted(response_times)[int(len(response_times) * 0.95)] if response_times else 0,
|
|
190
|
+
"p99": sorted(response_times)[int(len(response_times) * 0.99)] if response_times else 0,
|
|
191
|
+
"total_requests_with_time": len(response_times)
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
# Обнаружение аномалий
|
|
195
|
+
anomalies = []
|
|
196
|
+
if detect_anomalies:
|
|
197
|
+
# Аномалия 1: Резкий скачок ошибок
|
|
198
|
+
if len(log_entries) > 100:
|
|
199
|
+
# Разбиваем на временные окна
|
|
200
|
+
window_size = max(100, len(log_entries) // 10)
|
|
201
|
+
error_rates = []
|
|
202
|
+
for i in range(0, len(log_entries), window_size):
|
|
203
|
+
window = log_entries[i:i+window_size]
|
|
204
|
+
error_count = sum(1 for e in window if e['status'].startswith('4') or e['status'].startswith('5'))
|
|
205
|
+
error_rates.append(error_count / len(window) if window else 0)
|
|
206
|
+
|
|
207
|
+
if len(error_rates) > 1:
|
|
208
|
+
avg_rate = sum(error_rates) / len(error_rates)
|
|
209
|
+
for i, rate in enumerate(error_rates):
|
|
210
|
+
if rate > avg_rate * 2: # Удвоение ошибок
|
|
211
|
+
anomalies.append({
|
|
212
|
+
"type": "error_spike",
|
|
213
|
+
"description": f"Резкий скачок ошибок в окне {i+1}: {rate*100:.1f}% (среднее: {avg_rate*100:.1f}%)",
|
|
214
|
+
"severity": "high"
|
|
215
|
+
})
|
|
216
|
+
|
|
217
|
+
# Аномалия 2: Медленные запросы
|
|
218
|
+
if response_times:
|
|
219
|
+
slow_threshold = response_time_stats.get("p95", 1.0) * 2
|
|
220
|
+
slow_requests = [e for e in log_entries if e.get('response_time') and e['response_time'] > slow_threshold]
|
|
221
|
+
if slow_requests:
|
|
222
|
+
anomalies.append({
|
|
223
|
+
"type": "slow_requests",
|
|
224
|
+
"description": f"Найдено {len(slow_requests)} медленных запросов (> {slow_threshold:.2f}s)",
|
|
225
|
+
"severity": "medium"
|
|
226
|
+
})
|
|
227
|
+
|
|
228
|
+
# Аномалия 3: Необычные паттерны IP
|
|
229
|
+
if len(log_entries) > 50:
|
|
230
|
+
ip_counts = Counter(e['ip'] for e in log_entries)
|
|
231
|
+
avg_ip_requests = len(log_entries) / len(ip_counts) if ip_counts else 0
|
|
232
|
+
suspicious_ips = [ip for ip, count in ip_counts.items() if count > avg_ip_requests * 5]
|
|
233
|
+
if suspicious_ips:
|
|
234
|
+
anomalies.append({
|
|
235
|
+
"type": "suspicious_ips",
|
|
236
|
+
"description": f"Подозрительная активность с IP: {', '.join(suspicious_ips[:5])}",
|
|
237
|
+
"severity": "medium"
|
|
238
|
+
})
|
|
239
|
+
|
|
240
|
+
# Аномалия 4: Необычные пути
|
|
241
|
+
if len(log_entries) > 50:
|
|
242
|
+
path_counts = Counter(e['path'] for e in log_entries)
|
|
243
|
+
avg_path_requests = len(log_entries) / len(path_counts) if path_counts else 0
|
|
244
|
+
unusual_paths = [path for path, count in path_counts.items() if count > avg_path_requests * 10]
|
|
245
|
+
if unusual_paths:
|
|
246
|
+
anomalies.append({
|
|
247
|
+
"type": "unusual_paths",
|
|
248
|
+
"description": f"Необычно много запросов к путям: {', '.join(unusual_paths[:3])}",
|
|
249
|
+
"severity": "low"
|
|
250
|
+
})
|
|
251
|
+
|
|
252
|
+
# Экспорт в CSV
|
|
253
|
+
if csv:
|
|
254
|
+
csv_output = export_logs_to_csv(
|
|
255
|
+
status_counter, path_counter, ip_counter, user_agent_counter,
|
|
256
|
+
errors, response_time_stats, anomalies
|
|
257
|
+
)
|
|
258
|
+
print(csv_output)
|
|
259
|
+
return
|
|
260
|
+
|
|
261
|
+
# Экспорт в JSON/YAML
|
|
262
|
+
if json or yaml:
|
|
263
|
+
export_data = format_logs_results(
|
|
264
|
+
status_counter, path_counter, ip_counter, user_agent_counter, errors, top,
|
|
265
|
+
response_time_stats if response_time_stats else None,
|
|
266
|
+
anomalies if anomalies else None
|
|
267
|
+
)
|
|
268
|
+
format_type = 'json' if json else 'yaml'
|
|
269
|
+
print_export(export_data, format_type)
|
|
270
|
+
return
|
|
271
|
+
|
|
272
|
+
# Показываем статистику по времени ответа
|
|
273
|
+
if response_time_stats:
|
|
274
|
+
table = Table(title="Response Time Statistics", show_header=True, header_style="bold green")
|
|
275
|
+
table.add_column("Metric")
|
|
276
|
+
table.add_column("Value")
|
|
277
|
+
for metric, value in response_time_stats.items():
|
|
278
|
+
if metric != "total_requests_with_time":
|
|
279
|
+
table.add_row(metric.replace("_", " ").title(), f"{value:.3f}s")
|
|
280
|
+
else:
|
|
281
|
+
table.add_row(metric.replace("_", " ").title(), str(int(value)))
|
|
282
|
+
console.print(table)
|
|
283
|
+
|
|
284
|
+
# Показываем аномалии
|
|
285
|
+
if anomalies:
|
|
286
|
+
table = Table(title="Detected Anomalies", show_header=True, header_style="bold red")
|
|
287
|
+
table.add_column("Type")
|
|
288
|
+
table.add_column("Description")
|
|
289
|
+
table.add_column("Severity")
|
|
290
|
+
for anomaly in anomalies:
|
|
291
|
+
severity_color = {"high": "red", "medium": "orange3", "low": "yellow"}.get(anomaly.get("severity", "low"), "white")
|
|
292
|
+
table.add_row(
|
|
293
|
+
anomaly.get("type", ""),
|
|
294
|
+
anomaly.get("description", ""),
|
|
295
|
+
f"[{severity_color}]{anomaly.get('severity', '')}[/{severity_color}]"
|
|
296
|
+
)
|
|
297
|
+
console.print(table)
|
|
298
|
+
|
|
59
299
|
# Топ статусов
|
|
60
300
|
table = Table(title="Top HTTP Status Codes", show_header=True, header_style="bold blue")
|
|
61
301
|
table.add_column("Status")
|
commands/resolve.py
CHANGED
|
@@ -4,6 +4,7 @@ from rich.console import Console
|
|
|
4
4
|
from rich.table import Table
|
|
5
5
|
from upstream_checker.checker import resolve_upstreams
|
|
6
6
|
from parser.nginx_parser import parse_nginx_config
|
|
7
|
+
from exporter.json_yaml import format_resolve_results, print_export
|
|
7
8
|
|
|
8
9
|
app = typer.Typer()
|
|
9
10
|
console = Console()
|
|
@@ -11,6 +12,8 @@ console = Console()
|
|
|
11
12
|
def resolve(
|
|
12
13
|
config_path: str = typer.Argument(..., help="Путь к nginx.conf"),
|
|
13
14
|
max_workers: int = typer.Option(10, "--max-workers", "-w", help="Максимальное количество потоков для параллельной обработки"),
|
|
15
|
+
json: bool = typer.Option(False, "--json", help="Экспортировать результаты в JSON"),
|
|
16
|
+
yaml: bool = typer.Option(False, "--yaml", help="Экспортировать результаты в YAML"),
|
|
14
17
|
):
|
|
15
18
|
"""
|
|
16
19
|
Резолвит DNS имена upstream-серверов в IP-адреса.
|
|
@@ -33,11 +36,33 @@ def resolve(
|
|
|
33
36
|
|
|
34
37
|
upstreams = tree.get_upstreams()
|
|
35
38
|
if not upstreams:
|
|
36
|
-
|
|
39
|
+
if json or yaml:
|
|
40
|
+
export_data = {
|
|
41
|
+
"timestamp": __import__('datetime').datetime.now().isoformat(),
|
|
42
|
+
"upstreams": [],
|
|
43
|
+
"summary": {"total_upstreams": 0, "total_servers": 0}
|
|
44
|
+
}
|
|
45
|
+
format_type = 'json' if json else 'yaml'
|
|
46
|
+
print_export(export_data, format_type)
|
|
47
|
+
else:
|
|
48
|
+
console.print("[yellow]Не найдено ни одного upstream в конфигурации.[/yellow]")
|
|
37
49
|
sys.exit(0) # Нет upstream - это не ошибка, просто нет чего проверять
|
|
38
50
|
|
|
39
51
|
results = resolve_upstreams(upstreams, max_workers=max_workers)
|
|
40
52
|
|
|
53
|
+
# Экспорт в JSON/YAML
|
|
54
|
+
if json or yaml:
|
|
55
|
+
export_data = format_resolve_results(results)
|
|
56
|
+
format_type = 'json' if json else 'yaml'
|
|
57
|
+
print_export(export_data, format_type)
|
|
58
|
+
# Exit code остается прежним
|
|
59
|
+
for name, servers in results.items():
|
|
60
|
+
for srv in servers:
|
|
61
|
+
if not srv["resolved"] or any("invalid resolve" in r for r in srv["resolved"]):
|
|
62
|
+
exit_code = 1
|
|
63
|
+
sys.exit(exit_code)
|
|
64
|
+
|
|
65
|
+
# Обычный вывод в таблицу
|
|
41
66
|
table = Table(show_header=True, header_style="bold blue")
|
|
42
67
|
table.add_column("Upstream Name")
|
|
43
68
|
table.add_column("Address")
|
commands/route.py
CHANGED
commands/syntax.py
CHANGED
|
@@ -36,6 +36,7 @@ def syntax(
|
|
|
36
36
|
if not os.path.isfile(config_path):
|
|
37
37
|
console.print(f"[red]Файл {config_path} не найден. Проверьте путь к конфигу.[/red]")
|
|
38
38
|
return
|
|
39
|
+
|
|
39
40
|
cmd = [nginx_path, "-t", "-c", os.path.abspath(config_path)]
|
|
40
41
|
if hasattr(os, 'geteuid') and os.geteuid() != 0:
|
|
41
42
|
cmd = ["sudo"] + cmd
|
commands/tree.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import sys
|
|
1
2
|
import typer
|
|
2
3
|
from rich.console import Console
|
|
3
4
|
from rich.tree import Tree as RichTree
|
|
@@ -38,10 +39,10 @@ def tree(
|
|
|
38
39
|
tree_obj = parse_nginx_config(config_path)
|
|
39
40
|
except FileNotFoundError:
|
|
40
41
|
console.print(f"[red]Файл {config_path} не найден. Проверьте путь к конфигу.[/red]")
|
|
41
|
-
|
|
42
|
+
sys.exit(1)
|
|
42
43
|
except Exception as e:
|
|
43
44
|
console.print(f"[red]Ошибка при разборе {config_path}: {e}[/red]")
|
|
44
|
-
|
|
45
|
+
sys.exit(1)
|
|
45
46
|
root = RichTree(f"[bold blue]nginx.conf[/bold blue]")
|
|
46
47
|
_build_tree(tree_obj.directives, root)
|
|
47
48
|
if markdown:
|
exporter/csv.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Модуль для экспорта результатов в CSV формат.
|
|
3
|
+
"""
|
|
4
|
+
import csv
|
|
5
|
+
import sys
|
|
6
|
+
from typing import List, Dict, Any
|
|
7
|
+
from io import StringIO
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def export_logs_to_csv(
|
|
11
|
+
status_counter,
|
|
12
|
+
path_counter,
|
|
13
|
+
ip_counter,
|
|
14
|
+
user_agent_counter,
|
|
15
|
+
errors: Dict[str, List[str]],
|
|
16
|
+
response_times: List[Dict[str, Any]] = None,
|
|
17
|
+
anomalies: List[Dict[str, Any]] = None
|
|
18
|
+
) -> str:
|
|
19
|
+
"""
|
|
20
|
+
Экспортирует результаты анализа логов в CSV формат.
|
|
21
|
+
|
|
22
|
+
Args:
|
|
23
|
+
status_counter: Счетчик статусов
|
|
24
|
+
path_counter: Счетчик путей
|
|
25
|
+
ip_counter: Счетчик IP
|
|
26
|
+
user_agent_counter: Счетчик User-Agent
|
|
27
|
+
errors: Словарь ошибок по статусам
|
|
28
|
+
response_times: Список данных о времени ответа
|
|
29
|
+
anomalies: Список аномалий
|
|
30
|
+
|
|
31
|
+
Returns:
|
|
32
|
+
CSV строка
|
|
33
|
+
"""
|
|
34
|
+
output = StringIO()
|
|
35
|
+
writer = csv.writer(output)
|
|
36
|
+
|
|
37
|
+
# Топ статусов
|
|
38
|
+
writer.writerow(["Category", "Type", "Value", "Count"])
|
|
39
|
+
writer.writerow(["Status Codes", "", "", ""])
|
|
40
|
+
for status, count in status_counter.most_common():
|
|
41
|
+
writer.writerow(["", "Status", status, count])
|
|
42
|
+
|
|
43
|
+
writer.writerow([])
|
|
44
|
+
writer.writerow(["Paths", "", "", ""])
|
|
45
|
+
for path, count in path_counter.most_common():
|
|
46
|
+
writer.writerow(["", "Path", path, count])
|
|
47
|
+
|
|
48
|
+
writer.writerow([])
|
|
49
|
+
writer.writerow(["IPs", "", "", ""])
|
|
50
|
+
for ip, count in ip_counter.most_common():
|
|
51
|
+
writer.writerow(["", "IP", ip, count])
|
|
52
|
+
|
|
53
|
+
if user_agent_counter:
|
|
54
|
+
writer.writerow([])
|
|
55
|
+
writer.writerow(["User-Agents", "", "", ""])
|
|
56
|
+
for ua, count in user_agent_counter.most_common():
|
|
57
|
+
writer.writerow(["", "User-Agent", ua, count])
|
|
58
|
+
|
|
59
|
+
# Ошибки
|
|
60
|
+
if errors:
|
|
61
|
+
writer.writerow([])
|
|
62
|
+
writer.writerow(["Errors", "", "", ""])
|
|
63
|
+
for status, paths in errors.items():
|
|
64
|
+
writer.writerow(["", f"Error {status}", f"{len(paths)} occurrences", ""])
|
|
65
|
+
|
|
66
|
+
# Response times
|
|
67
|
+
if response_times:
|
|
68
|
+
writer.writerow([])
|
|
69
|
+
writer.writerow(["Response Times", "", "", ""])
|
|
70
|
+
writer.writerow(["", "Metric", "Value", ""])
|
|
71
|
+
for metric, value in response_times.items():
|
|
72
|
+
writer.writerow(["", metric, str(value), ""])
|
|
73
|
+
|
|
74
|
+
# Аномалии
|
|
75
|
+
if anomalies:
|
|
76
|
+
writer.writerow([])
|
|
77
|
+
writer.writerow(["Anomalies", "Type", "Description", "Severity"])
|
|
78
|
+
for anomaly in anomalies:
|
|
79
|
+
writer.writerow([
|
|
80
|
+
"",
|
|
81
|
+
anomaly.get("type", ""),
|
|
82
|
+
anomaly.get("description", ""),
|
|
83
|
+
anomaly.get("severity", "")
|
|
84
|
+
])
|
|
85
|
+
|
|
86
|
+
return output.getvalue()
|
|
87
|
+
|
exporter/json_yaml.py
ADDED
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Модуль для экспорта результатов команд nginx-lens в JSON и YAML форматы.
|
|
3
|
+
"""
|
|
4
|
+
import json
|
|
5
|
+
import sys
|
|
6
|
+
from typing import Any, Dict, List, Optional
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
import yaml
|
|
11
|
+
YAML_AVAILABLE = True
|
|
12
|
+
except ImportError:
|
|
13
|
+
YAML_AVAILABLE = False
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def export_json(data: Any, pretty: bool = True) -> str:
|
|
17
|
+
"""
|
|
18
|
+
Экспортирует данные в JSON формат.
|
|
19
|
+
|
|
20
|
+
Args:
|
|
21
|
+
data: Данные для экспорта
|
|
22
|
+
pretty: Форматировать с отступами
|
|
23
|
+
|
|
24
|
+
Returns:
|
|
25
|
+
JSON строка
|
|
26
|
+
"""
|
|
27
|
+
if pretty:
|
|
28
|
+
return json.dumps(data, ensure_ascii=False, indent=2, default=str)
|
|
29
|
+
else:
|
|
30
|
+
return json.dumps(data, ensure_ascii=False, default=str)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def export_yaml(data: Any) -> str:
|
|
34
|
+
"""
|
|
35
|
+
Экспортирует данные в YAML формат.
|
|
36
|
+
|
|
37
|
+
Args:
|
|
38
|
+
data: Данные для экспорта
|
|
39
|
+
|
|
40
|
+
Returns:
|
|
41
|
+
YAML строка
|
|
42
|
+
|
|
43
|
+
Raises:
|
|
44
|
+
ImportError: Если PyYAML не установлен
|
|
45
|
+
"""
|
|
46
|
+
if not YAML_AVAILABLE:
|
|
47
|
+
raise ImportError("PyYAML не установлен. Установите его: pip install pyyaml")
|
|
48
|
+
|
|
49
|
+
return yaml.dump(data, allow_unicode=True, default_flow_style=False, sort_keys=False)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def print_export(data: Any, format_type: str, file=None):
|
|
53
|
+
"""
|
|
54
|
+
Выводит данные в указанном формате.
|
|
55
|
+
|
|
56
|
+
Args:
|
|
57
|
+
data: Данные для экспорта
|
|
58
|
+
format_type: 'json' или 'yaml'
|
|
59
|
+
file: Файл для вывода (по умолчанию stdout)
|
|
60
|
+
"""
|
|
61
|
+
if file is None:
|
|
62
|
+
file = sys.stdout
|
|
63
|
+
|
|
64
|
+
if format_type == 'json':
|
|
65
|
+
output = export_json(data)
|
|
66
|
+
elif format_type == 'yaml':
|
|
67
|
+
output = export_yaml(data)
|
|
68
|
+
else:
|
|
69
|
+
raise ValueError(f"Неподдерживаемый формат: {format_type}")
|
|
70
|
+
|
|
71
|
+
print(output, file=file)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def format_health_results(results: Dict[str, List[Dict]], resolved_info: Optional[Dict] = None) -> Dict[str, Any]:
|
|
75
|
+
"""
|
|
76
|
+
Форматирует результаты команды health для экспорта.
|
|
77
|
+
|
|
78
|
+
Args:
|
|
79
|
+
results: Результаты check_upstreams
|
|
80
|
+
resolved_info: Результаты resolve_upstreams (опционально)
|
|
81
|
+
|
|
82
|
+
Returns:
|
|
83
|
+
Словарь с данными для экспорта
|
|
84
|
+
"""
|
|
85
|
+
data = {
|
|
86
|
+
"timestamp": datetime.now().isoformat(),
|
|
87
|
+
"upstreams": []
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
for name, servers in results.items():
|
|
91
|
+
upstream_data = {
|
|
92
|
+
"name": name,
|
|
93
|
+
"servers": []
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
for srv in servers:
|
|
97
|
+
server_data = {
|
|
98
|
+
"address": srv["address"],
|
|
99
|
+
"healthy": srv["healthy"],
|
|
100
|
+
"status": "healthy" if srv["healthy"] else "unhealthy"
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
if resolved_info and name in resolved_info:
|
|
104
|
+
for resolved_srv in resolved_info[name]:
|
|
105
|
+
if resolved_srv["address"] == srv["address"]:
|
|
106
|
+
server_data["resolved_ips"] = resolved_srv["resolved"]
|
|
107
|
+
# Проверяем наличие invalid resolve
|
|
108
|
+
server_data["has_invalid_resolve"] = any("invalid resolve" in r for r in resolved_srv["resolved"])
|
|
109
|
+
break
|
|
110
|
+
|
|
111
|
+
upstream_data["servers"].append(server_data)
|
|
112
|
+
|
|
113
|
+
data["upstreams"].append(upstream_data)
|
|
114
|
+
|
|
115
|
+
# Подсчитываем статистику
|
|
116
|
+
total_servers = sum(len(servers) for servers in results.values())
|
|
117
|
+
healthy_count = sum(1 for servers in results.values() for srv in servers if srv["healthy"])
|
|
118
|
+
unhealthy_count = total_servers - healthy_count
|
|
119
|
+
|
|
120
|
+
data["summary"] = {
|
|
121
|
+
"total_upstreams": len(results),
|
|
122
|
+
"total_servers": total_servers,
|
|
123
|
+
"healthy": healthy_count,
|
|
124
|
+
"unhealthy": unhealthy_count
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
return data
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def format_resolve_results(results: Dict[str, List[Dict]]) -> Dict[str, Any]:
    """
    Format the results of the ``resolve`` command for export.

    Args:
        results: Output of resolve_upstreams — mapping of upstream name to
            a list of server dicts with "address" and "resolved" keys.

    Returns:
        Dictionary with export-ready data: timestamp, per-upstream resolve
        statuses and a summary with success/failed/invalid counts.
    """
    upstreams: List[Dict[str, Any]] = []
    failed_count = 0
    invalid_count = 0
    total_servers = 0

    for name, servers in results.items():
        exported_servers: List[Dict[str, Any]] = []

        for srv in servers:
            total_servers += 1
            resolved = srv["resolved"]

            # Classify the DNS outcome for this server entry.
            if not resolved:
                status = "failed"
                failed_count += 1
            elif any("invalid resolve" in entry for entry in resolved):
                status = "invalid"
                invalid_count += 1
            else:
                status = "success"

            exported_servers.append({
                "address": srv["address"],
                "resolved_ips": resolved,
                "status": status,
            })

        upstreams.append({"name": name, "servers": exported_servers})

    return {
        "timestamp": datetime.now().isoformat(),
        "upstreams": upstreams,
        "summary": {
            "total_upstreams": len(results),
            "total_servers": total_servers,
            "successful": total_servers - failed_count - invalid_count,
            "failed": failed_count,
            "invalid": invalid_count,
        },
    }
def format_analyze_results(
    conflicts: List[Dict],
    dups: List[Dict],
    empties: List[Dict],
    warnings: List[Dict],
    unused_vars: List[Dict],
    listen_conflicts: List[Dict],
    rewrite_issues: List[Dict],
    dead_locations: List[Dict],
    issue_meta: Dict[str, tuple]
) -> Dict[str, Any]:
    """
    Format the results of the ``analyze`` command for export.

    Args:
        conflicts: Location conflicts
        dups: Duplicated directives
        empties: Empty blocks
        warnings: Warnings (each item carries its own "type")
        unused_vars: Unused variables
        listen_conflicts: listen/server_name conflicts
        rewrite_issues: Rewrite problems (each item carries its own "type")
        dead_locations: Dead locations
        issue_meta: Mapping of issue type -> (advice, severity)

    Returns:
        Dictionary with export-ready data: a flat "issues" list plus a
        per-severity summary.
    """
    issues: List[Dict[str, Any]] = []

    def add_issue(issue_type: str, message: str, default_severity: str) -> None:
        # Single place that applies advice/severity metadata, so the
        # per-category loops below stay one-liners.
        advice, severity = issue_meta.get(issue_type, ("", default_severity))
        issues.append({
            "type": issue_type,
            "severity": severity,
            "message": message,
            "advice": advice
        })

    for c in conflicts:
        add_issue("location_conflict", c.get('value', ''), "medium")

    for d in dups:
        add_issue("duplicate_directive", d.get('value', ''), "medium")

    for e in empties:
        add_issue("empty_block", f"{e.get('block', '')} блок пуст", "low")

    for w in warnings:
        add_issue(w.get('type', ''), w.get('value', ''), "medium")

    for v in unused_vars:
        add_issue("unused_variable", v.get('name', ''), "low")

    for c in listen_conflicts:
        add_issue(
            "listen_servername_conflict",
            f"server1: {c.get('server1', {}).get('arg','')} server2: {c.get('server2', {}).get('arg','')}",
            "high"
        )

    for r in rewrite_issues:
        add_issue(r.get('type', ''), r.get('value', ''), "medium")

    for loc in dead_locations:
        add_issue(
            "dead_location",
            f"server: {loc.get('server', {}).get('arg','')} location: {loc.get('location', {}).get('arg','')}",
            "low"
        )

    # Aggregate issue counts per severity level.
    severity_counts = {"high": 0, "medium": 0, "low": 0}
    for issue in issues:
        severity = issue.get("severity", "medium")
        if severity in severity_counts:
            severity_counts[severity] += 1

    return {
        "timestamp": datetime.now().isoformat(),
        "issues": issues,
        "summary": {
            "total_issues": len(issues),
            "by_severity": severity_counts
        }
    }
def format_logs_results(
|
|
307
|
+
status_counter,
|
|
308
|
+
path_counter,
|
|
309
|
+
ip_counter,
|
|
310
|
+
user_agent_counter,
|
|
311
|
+
errors: Dict[str, List[str]],
|
|
312
|
+
top: int,
|
|
313
|
+
response_times: Optional[Dict[str, float]] = None,
|
|
314
|
+
anomalies: Optional[List[Dict[str, Any]]] = None
|
|
315
|
+
) -> Dict[str, Any]:
|
|
316
|
+
"""
|
|
317
|
+
Форматирует результаты команды logs для экспорта.
|
|
318
|
+
|
|
319
|
+
Args:
|
|
320
|
+
status_counter: Счетчик статусов
|
|
321
|
+
path_counter: Счетчик путей
|
|
322
|
+
ip_counter: Счетчик IP
|
|
323
|
+
user_agent_counter: Счетчик User-Agent
|
|
324
|
+
errors: Словарь ошибок по статусам
|
|
325
|
+
top: Количество топ-значений
|
|
326
|
+
|
|
327
|
+
Returns:
|
|
328
|
+
Словарь с данными для экспорта
|
|
329
|
+
"""
|
|
330
|
+
data = {
|
|
331
|
+
"timestamp": datetime.now().isoformat(),
|
|
332
|
+
"top_statuses": [{"status": status, "count": count} for status, count in status_counter.most_common(top)],
|
|
333
|
+
"top_paths": [{"path": path, "count": count} for path, count in path_counter.most_common(top)],
|
|
334
|
+
"top_ips": [{"ip": ip, "count": count} for ip, count in ip_counter.most_common(top)],
|
|
335
|
+
"errors": {}
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
if user_agent_counter:
|
|
339
|
+
data["top_user_agents"] = [{"user_agent": ua, "count": count} for ua, count in user_agent_counter.most_common(top)]
|
|
340
|
+
|
|
341
|
+
for status, paths in errors.items():
|
|
342
|
+
data["errors"][status] = {
|
|
343
|
+
"count": len(paths),
|
|
344
|
+
"unique_paths": list(set(paths))[:top]
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
data["summary"] = {
|
|
348
|
+
"total_requests": sum(status_counter.values()),
|
|
349
|
+
"unique_paths": len(path_counter),
|
|
350
|
+
"unique_ips": len(ip_counter),
|
|
351
|
+
"error_requests": sum(len(paths) for paths in errors.values())
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
if response_times:
|
|
355
|
+
data["response_times"] = response_times
|
|
356
|
+
|
|
357
|
+
if anomalies:
|
|
358
|
+
data["anomalies"] = anomalies
|
|
359
|
+
|
|
360
|
+
return data
|
|
361
|
+
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: nginx-lens
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.4.0
|
|
4
4
|
Summary: CLI-инструмент для анализа, визуализации и диагностики конфигураций Nginx
|
|
5
5
|
Author: Daniil Astrouski
|
|
6
6
|
Author-email: shelovesuastra@gmail.com
|
|
@@ -10,9 +10,14 @@ Requires-Dist: typer[all]>=0.9.0
|
|
|
10
10
|
Requires-Dist: rich>=13.0.0
|
|
11
11
|
Requires-Dist: requests>=2.25.0
|
|
12
12
|
Requires-Dist: dnspython>=2.0.0
|
|
13
|
+
Requires-Dist: pyyaml>=6.0
|
|
14
|
+
Provides-Extra: dev
|
|
15
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
16
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
13
17
|
Dynamic: author
|
|
14
18
|
Dynamic: author-email
|
|
15
19
|
Dynamic: license-file
|
|
20
|
+
Dynamic: provides-extra
|
|
16
21
|
Dynamic: requires-dist
|
|
17
22
|
Dynamic: requires-python
|
|
18
23
|
Dynamic: summary
|
|
@@ -11,28 +11,30 @@ analyzer/route.py,sha256=71dkmQaTrHqDTf4Up5gAsrgmgktNpXqWmxr7-0RAVtg,2370
|
|
|
11
11
|
analyzer/unused.py,sha256=Ixzv0bPsw9IafblVwLiAOgugdg2dGu1MJDtuoqzPZiY,1066
|
|
12
12
|
analyzer/warnings.py,sha256=zC36QMvegA2eQPvZ-P1eysrX_kXHx5A1MUKHKKNvG5c,5784
|
|
13
13
|
commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
14
|
-
commands/analyze.py,sha256=
|
|
14
|
+
commands/analyze.py,sha256=UUZpPOpEnPglo94AFklX-cMt6HoHPCafmqfW76CIZVg,8667
|
|
15
15
|
commands/cli.py,sha256=brzp6xDDWIrm7ibaoT4x94hgAdBB2DVWniXoK8dRylE,782
|
|
16
|
-
commands/diff.py,sha256=
|
|
17
|
-
commands/graph.py,sha256=
|
|
18
|
-
commands/health.py,sha256=
|
|
19
|
-
commands/include.py,sha256=
|
|
20
|
-
commands/logs.py,sha256=
|
|
21
|
-
commands/resolve.py,sha256=
|
|
22
|
-
commands/route.py,sha256
|
|
23
|
-
commands/syntax.py,sha256=
|
|
24
|
-
commands/tree.py,sha256=
|
|
16
|
+
commands/diff.py,sha256=mf6xkf_8IKa3R-AiTsWmJDUrxqjGT5gaSAX0u5D0jjY,2097
|
|
17
|
+
commands/graph.py,sha256=lBh2wCPrhsywxcEbz5UABtNdEepTMLmiIzWJt_Uu1mE,5968
|
|
18
|
+
commands/health.py,sha256=e6569IAITNf9Mji_E0GbA18S9neA0yfkSsTAZcnwFoM,5343
|
|
19
|
+
commands/include.py,sha256=hsheLfoQ3eUx3irAibhC2ndq3ko0VrLda-WGL9JgIlw,2245
|
|
20
|
+
commands/logs.py,sha256=D6vI9YNPgStZ4weM3bSrUIzK0ncpd9Ku7V7jRBDwtPY,15415
|
|
21
|
+
commands/resolve.py,sha256=eeln1_5QAR4FvsrPz1ff9cd_i_9yGlPcF_W1omkTNms,4032
|
|
22
|
+
commands/route.py,sha256=4bW4sCY2cVORk-hblnE-BJH3oRL6T9cZuigL-2KCRT4,3174
|
|
23
|
+
commands/syntax.py,sha256=9sjJXheQ9PRZoFm0sO73pEomzbkeYvbBV265XLU6Krk,3423
|
|
24
|
+
commands/tree.py,sha256=cxq0vL6V3ah5X4ozPOmWgIH4NJbr2J48TTLNxsjzOL8,2182
|
|
25
25
|
exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
26
|
+
exporter/csv.py,sha256=_hTIs9CihxP9ewB9FcoN-ERmzUKEQs8hf7U8_heHTO0,2815
|
|
26
27
|
exporter/graph.py,sha256=WYUrqUgCaK6KihgxAcRHaQn4oMo6b7ybC8yb_36ZIsA,3995
|
|
27
28
|
exporter/html.py,sha256=uquEM-WvBt2aV9GshgaI3UVhYd8sD0QQ-OmuNtvYUdU,798
|
|
29
|
+
exporter/json_yaml.py,sha256=XqLOBtrh5Xd7RMeofODU8jaTkaeBEpHMNjz4UF39WrQ,11794
|
|
28
30
|
exporter/markdown.py,sha256=_0mXQIhurGEZ0dO-eq9DbsuKNrgEDIblgtL3DAgYNo8,724
|
|
29
|
-
nginx_lens-0.
|
|
31
|
+
nginx_lens-0.4.0.dist-info/licenses/LICENSE,sha256=g8QXKdvZZC56rU8E12vIeYF6R4jeTWOsblOnYAda3K4,1073
|
|
30
32
|
parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
31
33
|
parser/nginx_parser.py,sha256=Sa9FtGAkvTqNzoehBvgLUWPJHLLIZYWH9ugSHW50X8s,3699
|
|
32
34
|
upstream_checker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
33
35
|
upstream_checker/checker.py,sha256=i3L6XqUHUH5hcyLq5PXx6wOyjzEL_Z7xYCA3FffFOrU,11257
|
|
34
|
-
nginx_lens-0.
|
|
35
|
-
nginx_lens-0.
|
|
36
|
-
nginx_lens-0.
|
|
37
|
-
nginx_lens-0.
|
|
38
|
-
nginx_lens-0.
|
|
36
|
+
nginx_lens-0.4.0.dist-info/METADATA,sha256=W4PE6mOQdDbsW0S4RTokPg3h2m9f39Fol0RWV3zxuJo,717
|
|
37
|
+
nginx_lens-0.4.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
38
|
+
nginx_lens-0.4.0.dist-info/entry_points.txt,sha256=qEcecjSyLqcJjbIVlNlTpqAhPqDyaujUV5ZcBTAr3po,48
|
|
39
|
+
nginx_lens-0.4.0.dist-info/top_level.txt,sha256=mxLJO4rZg0rbixVGhplF3fUNFs8vxDIL25ronZNvRy4,51
|
|
40
|
+
nginx_lens-0.4.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|