nginx-lens 0.1.0-py3-none-any.whl → 0.1.2-py3-none-any.whl
This diff shows the contents of publicly available package versions as published to a supported registry; it is provided for informational purposes only.
- analyzer/conflicts.py +33 -1
- analyzer/dead_locations.py +34 -0
- analyzer/include.py +67 -0
- analyzer/rewrite.py +37 -0
- analyzer/warnings.py +91 -1
- commands/analyze.py +41 -50
- commands/cli.py +8 -0
- commands/diff.py +11 -10
- commands/graph.py +22 -0
- commands/health.py +9 -4
- commands/include.py +47 -0
- commands/logs.py +80 -0
- commands/syntax.py +57 -0
- exporter/graph.py +84 -0
- {nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/METADATA +1 -1
- nginx_lens-0.1.2.dist-info/RECORD +36 -0
- nginx_lens-0.1.0.dist-info/RECORD +0 -28
- {nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/WHEEL +0 -0
- {nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/entry_points.txt +0 -0
- {nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/top_level.txt +0 -0
analyzer/conflicts.py
CHANGED
@@ -30,4 +30,36 @@ def find_location_conflicts(tree) -> List[Dict[str, Any]]:
 
 def _locations_conflict(loc1, loc2):
     # Simple heuristic: one path is a prefix of the other
-    return loc1.startswith(loc2) or loc2.startswith(loc1)
+    return loc1.startswith(loc2) or loc2.startswith(loc1)
+
+
+def find_listen_servername_conflicts(tree) -> List[Dict[str, Any]]:
+    """
+    Finds conflicting listen/server_name pairs across server blocks.
+    Returns a list: [{server1, server2, listen, server_name}]
+    """
+    analyzer = Analyzer(tree)
+    servers = []
+    for d, parent in analyzer.walk():
+        if d.get('block') == 'server':
+            listens = set()
+            names = set()
+            for sub, _ in analyzer.walk(d['directives'], d):
+                if sub.get('directive') == 'listen':
+                    listens.add(sub.get('args', '').strip())
+                if sub.get('directive') == 'server_name':
+                    names.update(sub.get('args', '').split())
+            servers.append({'block': d, 'listen': listens, 'server_name': names})
+    conflicts = []
+    for i in range(len(servers)):
+        for j in range(i+1, len(servers)):
+            common_listen = servers[i]['listen'] & servers[j]['listen']
+            common_name = servers[i]['server_name'] & servers[j]['server_name']
+            if common_listen and common_name:
+                conflicts.append({
+                    'server1': servers[i]['block'],
+                    'server2': servers[j]['block'],
+                    'listen': list(common_listen),
+                    'server_name': list(common_name)
+                })
+    return conflicts
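
For reference, a minimal sketch of driving the new check from Python, using the functions shown in this diff (the config path is hypothetical):

    from parser.nginx_parser import parse_nginx_config
    from analyzer.conflicts import find_listen_servername_conflicts

    tree = parse_nginx_config("/etc/nginx/nginx.conf")  # hypothetical path
    for c in find_listen_servername_conflicts(tree):
        # each conflict carries both server blocks plus the shared listen/server_name values
        print(c['listen'], c['server_name'])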
analyzer/dead_locations.py
ADDED
@@ -0,0 +1,34 @@
+from analyzer.base import Analyzer
+from typing import List, Dict, Any, Set
+import re
+
+def find_dead_locations(tree) -> List[Dict[str, Any]]:
+    """
+    Finds locations that are not referenced by any proxy_pass, rewrite, try_files, etc.
+    Returns a list: [{server, location}]
+    """
+    analyzer = Analyzer(tree)
+    locations = []
+    used = set()
+    # Collect all locations
+    for d, parent in analyzer.walk():
+        if d.get('block') == 'server':
+            for sub, _ in analyzer.walk(d.get('directives', []), d):
+                if sub.get('block') == 'location':
+                    locations.append({'server': d, 'location': sub})
+    # Collect all location usages (proxy_pass, rewrite, try_files)
+    for d, parent in analyzer.walk():
+        for key in ('proxy_pass', 'rewrite', 'try_files'):
+            if d.get('directive') == key:
+                args = d.get('args', '')
+                for l in locations:
+                    loc = l['location'].get('arg', '')
+                    if loc and loc in args:
+                        used.add((l['server'].get('arg',''), loc))
+    # The ones that are not used
+    dead = []
+    for l in locations:
+        key = (l['server'].get('arg',''), l['location'].get('arg',''))
+        if key not in used:
+            dead.append(l)
+    return dead
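
Usage presumably mirrors the other analyzers; a sketch (the config path is hypothetical):

    from parser.nginx_parser import parse_nginx_config
    from analyzer.dead_locations import find_dead_locations

    tree = parse_nginx_config("nginx.conf")  # hypothetical path
    for item in find_dead_locations(tree):
        # locations never referenced by proxy_pass/rewrite/try_files
        print(item['server'].get('arg', ''), item['location'].get('arg', ''))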
analyzer/include.py
ADDED
@@ -0,0 +1,67 @@
+import os
+import glob
+from typing import List, Dict, Any, Set
+
+def build_include_tree(path: str, visited: Set[str]=None) -> Dict[str, Any]:
+    """
+    Builds the include tree starting from path. Returns a dict: {file: [subincludes]}
+    """
+    if visited is None:
+        visited = set()
+    path = os.path.abspath(path)
+    if path in visited:
+        return {path: 'cycle'}
+    visited.add(path)
+    includes = []
+    try:
+        with open(path) as f:
+            lines = f.readlines()
+    except Exception:
+        return {path: 'not_found'}
+    for line in lines:
+        line = line.split('#', 1)[0].strip()
+        if line.startswith('include '):
+            pattern = line[len('include '):].rstrip(';').strip()
+            pattern = os.path.join(os.path.dirname(path), pattern) if not os.path.isabs(pattern) else pattern
+            for inc_path in glob.glob(pattern):
+                includes.append(build_include_tree(inc_path, visited.copy()))
+    return {path: includes}
+
+def find_include_cycles(tree: Dict[str, Any], stack=None) -> List[List[str]]:
+    """
+    Finds include cycles in the tree. Returns a list of paths.
+    """
+    if stack is None:
+        stack = []
+    cycles = []
+    for k, v in tree.items():
+        if v == 'cycle':
+            cycles.append(stack + [k])
+        elif isinstance(v, list):
+            for sub in v:
+                if isinstance(sub, dict):
+                    cycles.extend(find_include_cycles(sub, stack + [k]))
+    return cycles
+
+def find_include_shadowing(tree: Dict[str, Any], directive: str) -> List[Dict[str, Any]]:
+    """
+    Finds redefinitions of a directive across different includes.
+    Returns a list: [{file, directive, value}]
+    """
+    found = []
+    def _walk(t):
+        for k, v in t.items():
+            if isinstance(v, list):
+                # Check the file itself
+                try:
+                    with open(k) as f:
+                        for line in f:
+                            if line.strip().startswith(directive + ' '):
+                                found.append({'file': k, 'directive': directive, 'value': line.strip()})
+                except Exception:
+                    pass
+                for sub in v:
+                    if isinstance(sub, dict):
+                        _walk(sub)
+    _walk(tree)
+    return found
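
build_include_tree reads files itself, so no parser is needed to try it; a sketch (path hypothetical):

    from analyzer.include import build_include_tree, find_include_cycles

    tree = build_include_tree("/etc/nginx/nginx.conf")  # hypothetical path
    for cycle in find_include_cycles(tree):
        # each cycle is the chain of files leading back to an already-visited one
        print(" -> ".join(cycle))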
analyzer/rewrite.py
ADDED
@@ -0,0 +1,37 @@
+from analyzer.base import Analyzer
+from typing import List, Dict, Any
+import re
+
+def find_rewrite_issues(tree) -> List[Dict[str, Any]]:
+    """
+    Finds potential rewrite problems: cycles, conflicts, inefficient rules.
+    Returns a list: [{type, context, value}]
+    """
+    analyzer = Analyzer(tree)
+    issues = []
+    rewrites = []
+    for d, parent in analyzer.walk():
+        if d.get('directive') == 'rewrite':
+            args = d.get('args', '')
+            parts = args.split()
+            if len(parts) >= 2:
+                pattern, target = parts[0], parts[1]
+                rewrites.append({'pattern': pattern, 'target': target, 'context': parent, 'raw': args})
+    # Check for cycles (rewrite onto itself)
+    for r in rewrites:
+        if r['pattern'] == r['target']:
+            issues.append({'type': 'rewrite_cycle', 'context': r['context'], 'value': r['raw']})
+    # Check for potential conflicts (the same pattern with different targets)
+    seen = {}
+    for r in rewrites:
+        key = r['pattern']
+        if key in seen and seen[key] != r['target']:
+            issues.append({'type': 'rewrite_conflict', 'context': r['context'], 'value': f"{key} -> {seen[key]} and {key} -> {r['target']}"})
+        seen[key] = r['target']
+    # Inefficient rewrites (e.g., without break/last/redirect/permanent)
+    for d, parent in analyzer.walk():
+        if d.get('directive') == 'rewrite':
+            args = d.get('args', '')
+            if not re.search(r'\b(last|break|redirect|permanent)\b', args):
+                issues.append({'type': 'rewrite_no_flag', 'context': parent, 'value': args})
+    return issues
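
A sketch of invoking the rewrite analyzer directly (config path hypothetical):

    from parser.nginx_parser import parse_nginx_config
    from analyzer.rewrite import find_rewrite_issues

    tree = parse_nginx_config("nginx.conf")  # hypothetical path
    for issue in find_rewrite_issues(tree):
        # issue['type'] is one of rewrite_cycle, rewrite_conflict, rewrite_no_flag
        print(issue['type'], issue['value'])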
analyzer/warnings.py
CHANGED
@@ -2,22 +2,112 @@ from analyzer.base import Analyzer
 from typing import List, Dict, Any
 import re
 
+# List of deprecated directives (example)
+DEPRECATED_DIRECTIVES = {
+    'ssl': 'the ssl directive is deprecated, use listen ... ssl',
+    'spdy': 'spdy is deprecated, use http2',
+    'ssl_session_cache': 'ssl_session_cache is deprecated in recent versions',
+}
+
+# List of security headers
+SECURITY_HEADERS = [
+    'X-Frame-Options',
+    'Strict-Transport-Security',
+    'X-Content-Type-Options',
+    'Referrer-Policy',
+    'Content-Security-Policy',
+]
+
+LIMITS = {
+    'client_max_body_size': {'min': 1024*1024, 'max': 1024*1024*100},  # 1M - 100M
+    'proxy_buffer_size': {'min': 4096, 'max': 1024*1024},  # 4K - 1M
+    'proxy_buffers': {'min': 2, 'max': 32},  # 2-32
+    'proxy_busy_buffers_size': {'min': 4096, 'max': 1024*1024},  # 4K - 1M
+}
+
+def _parse_size(val):
+    # Converts a string like 1m, 512k, 4096 into bytes
+    val = val.strip().lower()
+    if val.endswith('k'):
+        return int(float(val[:-1]) * 1024)
+    if val.endswith('m'):
+        return int(float(val[:-1]) * 1024 * 1024)
+    try:
+        return int(val)
+    except Exception:
+        return None
+
 def find_warnings(tree) -> List[Dict[str, Any]]:
     """
-    Finds potentially dangerous or non-obvious
+    Finds potentially dangerous or non-obvious directives and best-practice violations.
     Returns a list: [{type, directive, context, value}]
     """
     analyzer = Analyzer(tree)
     warnings = []
+    found_headers = set()
     for d, parent in analyzer.walk():
+        # proxy_pass without a scheme
         if d.get('directive') == 'proxy_pass':
             val = d.get('args', '')
             if not re.match(r'^(http|https)://', val):
                 warnings.append({'type': 'proxy_pass_no_scheme', 'directive': 'proxy_pass', 'context': parent, 'value': val})
+        # autoindex on
         if d.get('directive') == 'autoindex' and d.get('args', '').strip() == 'on':
             warnings.append({'type': 'autoindex_on', 'directive': 'autoindex', 'context': parent, 'value': 'on'})
+        # if inside a block
         if d.get('block') == 'if':
             warnings.append({'type': 'if_block', 'directive': 'if', 'context': parent, 'value': ''})
+        # server_tokens on
         if d.get('directive') == 'server_tokens' and d.get('args', '').strip() == 'on':
             warnings.append({'type': 'server_tokens_on', 'directive': 'server_tokens', 'context': parent, 'value': 'on'})
+        # ssl_certificate/ssl_certificate_key
+        if d.get('directive') == 'ssl_certificate' or d.get('directive') == 'ssl_certificate_key':
+            if not d.get('args', '').strip():
+                warnings.append({'type': 'ssl_missing', 'directive': d['directive'], 'context': parent, 'value': ''})
+        # ssl_protocols
+        if d.get('directive') == 'ssl_protocols':
+            val = d.get('args', '')
+            if 'TLSv1' in val or 'TLSv1.1' in val:
+                warnings.append({'type': 'ssl_protocols_weak', 'directive': 'ssl_protocols', 'context': parent, 'value': val})
+        # ssl_ciphers
+        if d.get('directive') == 'ssl_ciphers':
+            val = d.get('args', '')
+            if any(x in val for x in ['RC4', 'MD5', 'DES']):
+                warnings.append({'type': 'ssl_ciphers_weak', 'directive': 'ssl_ciphers', 'context': parent, 'value': val})
+        # listen 443 ssl
+        if d.get('directive') == 'listen' and '443' in d.get('args', '') and 'ssl' not in d.get('args', ''):
+            warnings.append({'type': 'listen_443_no_ssl', 'directive': 'listen', 'context': parent, 'value': d.get('args', '')})
+        # http2
+        if d.get('directive') == 'listen' and '443' in d.get('args', '') and 'http2' not in d.get('args', ''):
+            warnings.append({'type': 'listen_443_no_http2', 'directive': 'listen', 'context': parent, 'value': d.get('args', '')})
+        # limit_req/limit_conn
+        if d.get('block') == 'server':
+            has_limit = False
+            for sub, _ in analyzer.walk(d.get('directives', []), d):
+                if sub.get('directive') in ('limit_req', 'limit_conn'):
+                    has_limit = True
+            if not has_limit:
+                warnings.append({'type': 'no_limit_req_conn', 'directive': 'server', 'context': d, 'value': ''})
+        # Security headers
+        if d.get('directive') == 'add_header':
+            for h in SECURITY_HEADERS:
+                if h in d.get('args', ''):
+                    found_headers.add(h)
+        # Deprecated directives
+        if d.get('directive') in DEPRECATED_DIRECTIVES:
+            warnings.append({'type': 'deprecated', 'directive': d['directive'], 'context': parent, 'value': DEPRECATED_DIRECTIVES[d['directive']]})
+        # Check limits and buffer sizes
+        for lim, rng in LIMITS.items():
+            if d.get('directive') == lim:
+                val = d.get('args', '').split()[0]
+                size = _parse_size(val)
+                if size is not None:
+                    if size < rng['min']:
+                        warnings.append({'type': 'limit_too_small', 'directive': lim, 'context': parent, 'value': val})
+                    if size > rng['max']:
+                        warnings.append({'type': 'limit_too_large', 'directive': lim, 'context': parent, 'value': val})
+    # Check for missing security headers
+    for h in SECURITY_HEADERS:
+        if h not in found_headers:
+            warnings.append({'type': 'missing_security_header', 'directive': 'add_header', 'context': None, 'value': h})
     return warnings
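
The size parser underpins the new limit checks; a quick sanity sketch of what it returns (the values follow directly from the code above):

    from analyzer.warnings import _parse_size, LIMITS

    assert _parse_size('512k') == 512 * 1024   # 524288 bytes
    assert _parse_size('1m') == 1024 * 1024    # 1048576 bytes
    assert _parse_size('4096') == 4096         # bare numbers pass through
    assert _parse_size('oops') is None         # unparsable -> None
    # e.g. a client_max_body_size of 512k is below the 1M minimum in LIMITS,
    # so find_warnings would flag it as limit_too_small
    assert _parse_size('512k') < LIMITS['client_max_body_size']['min']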
commands/analyze.py
CHANGED
@@ -1,20 +1,21 @@
 import typer
 from rich.console import Console
-from rich.
-from
-from analyzer.conflicts import find_location_conflicts
+from rich.table import Table
+from analyzer.conflicts import find_location_conflicts, find_listen_servername_conflicts
 from analyzer.duplicates import find_duplicate_directives
 from analyzer.empty_blocks import find_empty_blocks
-from parser.nginx_parser import parse_nginx_config
 from analyzer.warnings import find_warnings
 from analyzer.unused import find_unused_variables
+from parser.nginx_parser import parse_nginx_config
+from analyzer.rewrite import find_rewrite_issues
+from analyzer.dead_locations import find_dead_locations
 
 app = typer.Typer()
 console = Console()
 
 def analyze(config_path: str = typer.Argument(..., help="Path to nginx.conf")):
     """
-    Analyzes an Nginx configuration for common problems.
+    Analyzes an Nginx configuration for common problems. Prints an issue_type/issue_description table.
     """
     tree = parse_nginx_config(config_path)
     conflicts = find_location_conflicts(tree)
@@ -22,49 +23,39 @@ def analyze(config_path: str = typer.Argument(..., help="Path to nginx.conf"
     empties = find_empty_blocks(tree)
     warnings = find_warnings(tree)
     unused_vars = find_unused_variables(tree)
-
-
-
-
-
-
-
+    listen_conflicts = find_listen_servername_conflicts(tree)
+    rewrite_issues = find_rewrite_issues(tree)
+    dead_locations = find_dead_locations(tree)
+
+    table = Table(show_header=True, header_style="bold blue")
+    table.add_column("issue_type")
+    table.add_column("issue_description")
+
+    for c in conflicts:
+        table.add_row("location_conflict", f"server: {c['server'].get('arg', '')} location: {c['location1']} ↔ {c['location2']}")
+    for d in dups:
+        table.add_row("duplicate_directive", f"{d['directive']} ({d['args']}) — {d['count']} times in block {d['block'].get('block', d['block'])}")
+    for e in empties:
+        table.add_row("empty_block", f"{e['block']} {e['arg'] or ''}")
+    for w in warnings:
+        if w['type'] == 'proxy_pass_no_scheme':
+            table.add_row("proxy_pass_no_scheme", f"proxy_pass without a scheme: {w['value']}")
+        elif w['type'] == 'autoindex_on':
+            table.add_row("autoindex_on", f"autoindex on in block {w['context'].get('block','')}")
+        elif w['type'] == 'if_block':
+            table.add_row("if_block", f"if directive inside block {w['context'].get('block','')}")
+        elif w['type'] == 'server_tokens_on':
+            table.add_row("server_tokens_on", f"server_tokens on in block {w['context'].get('block','')}")
+    for v in unused_vars:
+        table.add_row("unused_variable", v['name'])
+    for c in listen_conflicts:
+        table.add_row("listen_servername_conflict", f"server1: {c['server1'].get('arg','')} server2: {c['server2'].get('arg','')} listen: {','.join(c['listen'])} server_name: {','.join(c['server_name'])}")
+    for r in rewrite_issues:
+        table.add_row(r['type'], r['value'])
+    for l in dead_locations:
+        table.add_row("dead_location", f"server: {l['server'].get('arg','')} location: {l['location'].get('arg','')}")
+
+    if table.row_count == 0:
+        console.print("[green]No problems found[/green]")
     else:
-
-
-        if dups:
-            node = root.add("[red]Duplicate directives[/red]")
-            for d in dups:
-                node.add(f"[yellow]{d['directive']}[/yellow] ([italic]{d['args']}[/italic]) — {d['count']} times in block [cyan]{d['block'].get('block', d['block'])}[/cyan]")
-        else:
-            root.add("[green]No duplicate directives[/green]")
-
-        if empties:
-            node = root.add("[red]Empty blocks[/red]")
-            for e in empties:
-                node.add(f"[yellow]{e['block']}[/yellow] [italic]{e['arg'] or ''}[/italic]")
-        else:
-            root.add("[green]No empty blocks[/green]")
-
-        if warnings:
-            node = root.add("[bold yellow]Potential problems[/bold yellow]")
-            for w in warnings:
-                if w['type'] == 'proxy_pass_no_scheme':
-                    node.add(f"[yellow]proxy_pass[/yellow] without a scheme: [italic]{w['value']}[/italic]")
-                elif w['type'] == 'autoindex_on':
-                    node.add(f"[yellow]autoindex on[/yellow] in block [cyan]{w['context'].get('block','')}[/cyan]")
-                elif w['type'] == 'if_block':
-                    node.add(f"[yellow]if directive[/yellow] inside block [cyan]{w['context'].get('block','')}[/cyan]")
-                elif w['type'] == 'server_tokens_on':
-                    node.add(f"[yellow]server_tokens on[/yellow] in block [cyan]{w['context'].get('block','')}[/cyan]")
-        else:
-            root.add("[green]No potential problems[/green]")
-
-        if unused_vars:
-            node = root.add("[bold magenta]Unused variables[/bold magenta]")
-            for v in unused_vars:
-                node.add(f"[magenta]{v['name']}[/magenta]")
-        else:
-            root.add("[green]No unused variables[/green]")
-
-        console.print(root)
+        console.print(table)
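
The table-based rewrite makes the command easy to exercise in-process; a minimal sketch using typer's test runner (the config path is hypothetical):

    from typer.testing import CliRunner
    from commands.cli import app

    runner = CliRunner()
    result = runner.invoke(app, ["analyze", "nginx.conf"])  # hypothetical path
    print(result.output)  # the issue_type/issue_description table, or "No problems found"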
commands/cli.py
CHANGED
@@ -5,6 +5,10 @@ from commands.analyze import analyze
 from commands.tree import tree
 from commands.diff import diff
 from commands.route import route
+from commands.include import include_tree
+from commands.graph import graph
+from commands.logs import app as logs_app
+from commands.syntax import app as syntax_app
 
 app = typer.Typer(help="nginx-lens — analysis and diagnostics for Nginx configurations")
 console = Console()
@@ -14,6 +18,10 @@ app.command()(analyze)
 app.command()(tree)
 app.command()(diff)
 app.command()(route)
+app.command()(include_tree)
+app.command()(graph)
+app.add_typer(logs_app, name="logs")
+app.add_typer(syntax_app, name="syntax")
 
 if __name__ == "__main__":
     app()
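
Since everything is registered on one typer app, the new commands can be smoke-tested in-process; a sketch using typer's test runner (file paths hypothetical; logs and syntax are mounted as sub-apps, so their single command repeats the group name):

    from typer.testing import CliRunner
    from commands.cli import app

    runner = CliRunner()
    runner.invoke(app, ["graph", "nginx.conf", "--format", "mermaid"])
    runner.invoke(app, ["logs", "logs", "access.log", "--top", "5"])  # group "logs", command "logs"
    runner.invoke(app, ["syntax", "syntax", "nginx.conf"])            # group "syntax", command "syntax"
    # include_tree may surface as include-tree or include_tree depending on the typer/click version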
commands/diff.py
CHANGED
@@ -1,7 +1,6 @@
 import typer
 from rich.console import Console
-from rich.
-from rich.text import Text
+from rich.table import Table
 from analyzer.diff import diff_trees
 from parser.nginx_parser import parse_nginx_config
 
@@ -13,7 +12,7 @@ def diff(
     config2: str = typer.Argument(..., help="Second nginx.conf")
 ):
     """
-    Compares two Nginx configurations and prints
+    Compares two Nginx configurations and prints the differences side by side.
     """
     tree1 = parse_nginx_config(config1)
     tree2 = parse_nginx_config(config2)
@@ -21,15 +20,17 @@ def diff(
     if not diffs:
         console.print("[green]The configurations are identical[/green]")
         return
-
+    table = Table(show_header=True, header_style="bold blue")
+    table.add_column("Config 1", style="red")
+    table.add_column("Config 2", style="green")
     for d in diffs:
         path = "/".join(d['path'])
         if d['type'] == 'added':
-
+            table.add_row("", f"+ {path}")
         elif d['type'] == 'removed':
-
+            table.add_row(f"- {path}", "")
         elif d['type'] == 'changed':
-
-
-
-            console.print(
+            v1 = str(d['value1'])
+            v2 = str(d['value2'])
+            table.add_row(f"! {path}\n{v1}", f"! {path}\n{v2}")
+    console.print(table)
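
The side-by-side view is plain rich; a self-contained sketch of the same two-column shape (the example paths are made up):

    from rich.console import Console
    from rich.table import Table

    table = Table(show_header=True, header_style="bold blue")
    table.add_column("Config 1", style="red")
    table.add_column("Config 2", style="green")
    table.add_row("", "+ http/server/listen 8080")  # added: right column only
    table.add_row("- http/server/autoindex", "")    # removed: left column only
    Console().print(table)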
commands/graph.py
ADDED
@@ -0,0 +1,22 @@
+import typer
+from rich.console import Console
+from parser.nginx_parser import parse_nginx_config
+from exporter.graph import tree_to_dot, tree_to_mermaid
+
+app = typer.Typer()
+console = Console()
+
+def graph(
+    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
+    format: str = typer.Option("dot", help="Format: dot or mermaid")
+):
+    """
+    Generates an nginx routing diagram (dot/mermaid).
+    """
+    tree = parse_nginx_config(config_path)
+    if format == "dot":
+        console.print(tree_to_dot(tree.directives))
+    elif format == "mermaid":
+        console.print(tree_to_mermaid(tree.directives))
+    else:
+        console.print("[red]Unknown format: choose dot or mermaid[/red]")
commands/health.py
CHANGED
@@ -1,5 +1,6 @@
 import typer
 from rich.console import Console
+from rich.table import Table
 from upstream_checker.checker import check_upstreams
 from parser.nginx_parser import parse_nginx_config
 
@@ -12,13 +13,17 @@ def health(
     retries: int = typer.Option(1, help="Number of attempts")
 ):
     """
-    Checks the availability of the upstream servers defined in nginx.conf.
+    Checks the availability of the upstream servers defined in nginx.conf. Prints a table.
     """
     tree = parse_nginx_config(config_path)
     upstreams = tree.get_upstreams()
     results = check_upstreams(upstreams, timeout=timeout, retries=retries)
+    table = Table(show_header=True, header_style="bold blue")
+    table.add_column("upstream_name")
+    table.add_column("upstream_status")
     for name, servers in results.items():
-        console.print(f"[bold]{name}[/bold]")
         for srv in servers:
-            status = "
-
+            status = "Healthy" if srv["healthy"] else "Unhealthy"
+            color = "green" if srv["healthy"] else "red"
+            table.add_row(srv["address"], f"[{color}]{status}[/{color}]")
+    console.print(table)
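
A sketch of consuming check_upstreams directly, mirroring what the command does (config path hypothetical; the result shape, name mapped to a list of {'address', 'healthy'} dicts, is inferred from the loop above):

    from parser.nginx_parser import parse_nginx_config
    from upstream_checker.checker import check_upstreams

    tree = parse_nginx_config("nginx.conf")  # hypothetical path
    results = check_upstreams(tree.get_upstreams(), timeout=2, retries=1)
    for name, servers in results.items():
        for srv in servers:
            print(name, srv["address"], "up" if srv["healthy"] else "down")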
commands/include.py
ADDED
@@ -0,0 +1,47 @@
+import typer
+from rich.console import Console
+from rich.tree import Tree
+from rich.table import Table
+from analyzer.include import build_include_tree, find_include_cycles, find_include_shadowing
+
+app = typer.Typer()
+console = Console()
+
+def include_tree(
+    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
+    directive: str = typer.Option(None, help="Show shadowing for a directive (e.g., server_name)")
+):
+    """
+    Shows the include tree, cycles, and directive shadowing.
+    """
+    tree = build_include_tree(config_path)
+    rich_tree = Tree(f"[bold blue]{config_path}[/bold blue]")
+    def _add(node, t):
+        for k, v in t.items():
+            if v == 'cycle':
+                node.add(f"[red]{k} (cycle)[/red]")
+            elif v == 'not_found':
+                node.add(f"[yellow]{k} (not found)[/yellow]")
+            elif isinstance(v, list):
+                sub = node.add(f"{k}")
+                for sub_t in v:
+                    if isinstance(sub_t, dict):
+                        _add(sub, sub_t)
+    _add(rich_tree, tree)
+    console.print(rich_tree)
+    # Cycles
+    cycles = find_include_cycles(tree)
+    if cycles:
+        console.print("[red]Include cycles detected:[/red]")
+        for c in cycles:
+            console.print(" -> ".join(c))
+    # Shadowing
+    if directive:
+        shadow = find_include_shadowing(tree, directive)
+        if shadow:
+            table = Table(show_header=True, header_style="bold blue")
+            table.add_column("file")
+            table.add_column("value")
+            for s in shadow:
+                table.add_row(s['file'], s['value'])
+            console.print(table)
commands/logs.py
ADDED
@@ -0,0 +1,80 @@
+import typer
+from rich.console import Console
+from rich.table import Table
+import re
+from collections import Counter, defaultdict
+
+app = typer.Typer()
+console = Console()
+
+log_line_re = re.compile(r'(?P<ip>\S+) \S+ \S+ \[(?P<time>[^\]]+)\] "(?P<method>\S+) (?P<path>\S+) [^\"]+" (?P<status>\d{3})')
+
+@app.command()
+def logs(
+    log_path: str = typer.Argument(..., help="Path to access.log or error.log"),
+    top: int = typer.Option(10, help="How many top values to show")
+):
+    """
+    Analyzes access.log/error.log: top 404s, top 500s, frequent requests, suspicious user agents, IPs.
+    """
+    status_counter = Counter()
+    path_counter = Counter()
+    ip_counter = Counter()
+    user_agent_counter = Counter()
+    errors = defaultdict(list)
+    with open(log_path) as f:
+        for line in f:
+            m = log_line_re.search(line)
+            if m:
+                ip = m.group('ip')
+                path = m.group('path')
+                status = m.group('status')
+                status_counter[status] += 1
+                path_counter[path] += 1
+                ip_counter[ip] += 1
+                if status.startswith('4') or status.startswith('5'):
+                    errors[status].append(path)
+            # user-agent (if present)
+            if '" "' in line:
+                ua = line.rsplit('" "', 1)[-1].strip().strip('"')
+                if ua:
+                    user_agent_counter[ua] += 1
+    # Top status codes
+    table = Table(title="Top HTTP Status Codes", show_header=True, header_style="bold blue")
+    table.add_column("Status")
+    table.add_column("Count")
+    for status, count in status_counter.most_common(top):
+        table.add_row(status, str(count))
+    console.print(table)
+    # Top paths
+    table = Table(title="Top Paths", show_header=True, header_style="bold blue")
+    table.add_column("Path")
+    table.add_column("Count")
+    for path, count in path_counter.most_common(top):
+        table.add_row(path, str(count))
+    console.print(table)
+    # Top IPs
+    table = Table(title="Top IPs", show_header=True, header_style="bold blue")
+    table.add_column("IP")
+    table.add_column("Count")
+    for ip, count in ip_counter.most_common(top):
+        table.add_row(ip, str(count))
+    console.print(table)
+    # Top user agents
+    if user_agent_counter:
+        table = Table(title="Top User-Agents", show_header=True, header_style="bold blue")
+        table.add_column("User-Agent")
+        table.add_column("Count")
+        for ua, count in user_agent_counter.most_common(top):
+            table.add_row(ua, str(count))
+        console.print(table)
+    # Top 404/500
+    for err in ('404', '500'):
+        if errors[err]:
+            table = Table(title=f"Top {err} Paths", show_header=True, header_style="bold blue")
+            table.add_column("Path")
+            table.add_column("Count")
+            c = Counter(errors[err])
+            for path, count in c.most_common(top):
+                table.add_row(path, str(count))
+            console.print(table)
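
The parsing hinges on log_line_re, which matches the common/combined log prefix; a quick check against a sample line (the line itself is made up):

    import re

    log_line_re = re.compile(r'(?P<ip>\S+) \S+ \S+ \[(?P<time>[^\]]+)\] "(?P<method>\S+) (?P<path>\S+) [^\"]+" (?P<status>\d{3})')
    line = '203.0.113.7 - - [10/Oct/2024:13:55:36 +0000] "GET /index.html HTTP/1.1" 404 153 "-" "curl/8.4.0"'
    m = log_line_re.search(line)
    print(m.group('ip'), m.group('method'), m.group('path'), m.group('status'))
    # -> 203.0.113.7 GET /index.html 404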
commands/syntax.py
ADDED
@@ -0,0 +1,57 @@
+import typer
+from rich.console import Console
+from rich.table import Table
+import subprocess
+import os
+import re
+
+app = typer.Typer()
+console = Console()
+
+ERROR_RE = re.compile(r'in (.+?):(\d+)')
+
+@app.command()
+def syntax(
+    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
+    nginx_path: str = typer.Option("nginx", help="Path to the nginx binary ('nginx' by default)")
+):
+    """
+    Checks nginx config syntax via nginx -t. On error, shows the location as a table.
+    """
+    cmd = [nginx_path, "-t", "-c", os.path.abspath(config_path)]
+    try:
+        result = subprocess.run(cmd, capture_output=True, text=True, check=False)
+        if result.returncode == 0:
+            console.print("[green]The nginx config syntax is valid[/green]")
+        else:
+            console.print("[red]Syntax error![/red]")
+            console.print(result.stdout)
+            console.print(result.stderr)
+            # Parse the error
+            err = result.stderr or result.stdout
+            m = ERROR_RE.search(err)
+            if m:
+                file, line = m.group(1), int(m.group(2))
+                msg = err.strip().split('\n')[-1]
+                # Read the surrounding context
+                context = []
+                try:
+                    with open(file) as f:
+                        lines = f.readlines()
+                    start = max(0, line-3)
+                    end = min(len(lines), line+2)
+                    for i in range(start, end):
+                        mark = "->" if i+1 == line else "  "
+                        context.append((str(i+1), mark, lines[i].rstrip()))
+                except Exception:
+                    context = []
+                table = Table(title="Syntax error", show_header=True, header_style="bold red")
+                table.add_column("File")
+                table.add_column("Line")
+                table.add_column("Message")
+                table.add_column("Context")
+                for ln, mark, code in context:
+                    table.add_row(file, ln, msg if mark == "->" else "", f"{mark} {code}")
+                console.print(table)
+    except FileNotFoundError:
+        console.print(f"[red]nginx binary not found: {nginx_path}[/red]")
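
ERROR_RE targets the file:line suffix nginx emits; a sketch against a typical -t failure message (the message text is a representative example, not captured output):

    import re

    ERROR_RE = re.compile(r'in (.+?):(\d+)')
    err = 'nginx: [emerg] unexpected "}" in /etc/nginx/nginx.conf:42'
    m = ERROR_RE.search(err)
    print(m.group(1), int(m.group(2)))  # -> /etc/nginx/nginx.conf 42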
exporter/graph.py
ADDED
@@ -0,0 +1,84 @@
+from typing import List
+
+def tree_to_dot(directives) -> str:
+    lines = ["digraph nginx {", " rankdir=LR;"]
+    node_id = 0
+    def node(label):
+        nonlocal node_id
+        node_id += 1
+        return f"n{node_id}", label
+    def walk(dirs, parent_id=None):
+        for d in dirs:
+            if 'block' in d and d['block'] == 'server':
+                nid, label = node(f"server {d.get('arg','')}")
+                lines.append(f' {nid} [label="{label}", shape=box, style=filled, fillcolor=lightblue];')
+                if parent_id:
+                    lines.append(f' {parent_id} -> {nid};')
+                walk(d.get('directives', []), nid)
+            elif 'block' in d and d['block'] == 'location':
+                nid, label = node(f"location {d.get('arg','')}")
+                lines.append(f' {nid} [label="{label}", shape=ellipse, style=filled, fillcolor=lightyellow];')
+                if parent_id:
+                    lines.append(f' {parent_id} -> {nid};')
+                walk(d.get('directives', []), nid)
+            elif 'upstream' in d:
+                nid, label = node(f"upstream {d['upstream']}")
+                lines.append(f' {nid} [label="{label}", shape=diamond, style=filled, fillcolor=lightgreen];')
+                if parent_id:
+                    lines.append(f' {parent_id} -> {nid};')
+                for srv in d.get('servers', []):
+                    sid, slabel = node(f"server {srv}")
+                    lines.append(f' {sid} [label="{slabel}", shape=note];')
+                    lines.append(f' {nid} -> {sid};')
+            elif 'directive' in d and d['directive'] == 'proxy_pass':
+                nid, label = node(f"proxy_pass {d.get('args','')}")
+                lines.append(f' {nid} [label="{label}", shape=parallelogram, style=filled, fillcolor=orange];')
+                if parent_id:
+                    lines.append(f' {parent_id} -> {nid};')
+    walk(directives)
+    lines.append("}")
+    return '\n'.join(lines)
+
+def tree_to_mermaid(directives) -> str:
+    lines = ["graph LR"]
+    node_id = 0
+    def node(label):
+        nonlocal node_id
+        node_id += 1
+        return f"n{node_id}", label
+    def walk(dirs, parent_id=None):
+        for d in dirs:
+            if 'block' in d and d['block'] == 'server':
+                nid, label = node(f"server {d.get('arg','')}")
+                lines.append(f'{nid}["{label}"]:::server')
+                if parent_id:
+                    lines.append(f'{parent_id} --> {nid}')
+                walk(d.get('directives', []), nid)
+            elif 'block' in d and d['block'] == 'location':
+                nid, label = node(f"location {d.get('arg','')}")
+                lines.append(f'{nid}["{label}"]:::location')
+                if parent_id:
+                    lines.append(f'{parent_id} --> {nid}')
+                walk(d.get('directives', []), nid)
+            elif 'upstream' in d:
+                nid, label = node(f"upstream {d['upstream']}")
+                lines.append(f'{nid}["{label}"]:::upstream')
+                if parent_id:
+                    lines.append(f'{parent_id} --> {nid}')
+                for srv in d.get('servers', []):
+                    sid, slabel = node(f"server {srv}")
+                    lines.append(f'{sid}["{slabel}"]:::srv')
+                    lines.append(f'{nid} --> {sid}')
+            elif 'directive' in d and d['directive'] == 'proxy_pass':
+                nid, label = node(f"proxy_pass {d.get('args','')}")
+                lines.append(f'{nid}["{label}"]:::proxy')
+                if parent_id:
+                    lines.append(f'{parent_id} --> {nid}')
+    walk(directives)
+    # Styles
+    lines.append("classDef server fill:#b3e0ff,stroke:#333;")
+    lines.append("classDef location fill:#fff2b3,stroke:#333;")
+    lines.append("classDef upstream fill:#b3ffb3,stroke:#333;")
+    lines.append("classDef proxy fill:#ffd699,stroke:#333;")
+    lines.append("classDef srv fill:#eee,stroke:#333;")
+    return '\n'.join(lines)
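
The walker only inspects a handful of keys ('block'/'arg'/'directives', 'upstream'/'servers', 'directive'/'args'), so a hand-built list is enough to try it; a sketch (the directive values are made up):

    from exporter.graph import tree_to_dot

    directives = [
        {'block': 'server', 'arg': 'example.com', 'directives': [
            {'block': 'location', 'arg': '/api', 'directives': [
                {'directive': 'proxy_pass', 'args': 'http://backend'},
            ]},
        ]},
        {'upstream': 'backend', 'servers': ['127.0.0.1:8080']},
    ]
    print(tree_to_dot(directives))  # feed the output to Graphviz, e.g. dot -Tsvg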
nginx_lens-0.1.2.dist-info/RECORD
ADDED
@@ -0,0 +1,36 @@
+analyzer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+analyzer/base.py,sha256=oGKg78BfMVmuzYafc08oq9p31-jEgYolGjLkUcIdkN8,607
+analyzer/conflicts.py,sha256=NSNZc8e2x51K41dflSUvuwlDq-rzBXU5ITi6WfxFbfU,2796
+analyzer/dead_locations.py,sha256=uvMu5qBGTVi0Nn960x3WpRvTljGbQuVFivU4nfe36oY,1435
+analyzer/diff.py,sha256=idvXnoLzBVUYgKi_s3uDu0v2GNMV3B8aDqTROXcdQdo,1749
+analyzer/duplicates.py,sha256=VUjbM19Y_Wwty8GEOu_7nNzOH6mr071p_z1MAkp1kBA,1012
+analyzer/empty_blocks.py,sha256=7Zu4-5I5PS3bjhH0Ppq1CvM7rMTeRIc4fHx5n5vkMIw,517
+analyzer/include.py,sha256=FhKR4VsogLknykjLD2N8jX9OtwxZcWik5oPpvp-_luE,2465
+analyzer/rewrite.py,sha256=-jSLLG1jqmGU-dXWvU6NHCW6muB8Lfro6fXX1tDCHCQ,1834
+analyzer/route.py,sha256=2xxQooQEsfn10tzGCZUoP32T0OnTMnPB6qRgBR6not8,2345
+analyzer/unused.py,sha256=Ixzv0bPsw9IafblVwLiAOgugdg2dGu1MJDtuoqzPZiY,1066
+analyzer/warnings.py,sha256=zC36QMvegA2eQPvZ-P1eysrX_kXHx5A1MUKHKKNvG5c,5784
+commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+commands/analyze.py,sha256=adkc0K2VSFp4jLZQFuj_WwuBZdJkEGi0uvZyVf00EMg,3104
+commands/cli.py,sha256=0uzz71nMXTGlY1odZkEGwtVOjf7uq7762qRLXQaQURo,780
+commands/diff.py,sha256=BqrBzZJcD7XJWU0HdNos31yANvbwEWR1ZHaVowCoeV0,1282
+commands/graph.py,sha256=0_Iastpx7X8ygVsN4KUwiSIm2UCPLgm1yOq75furYJk,787
+commands/health.py,sha256=gmqeno-tADqIjO-y-LUlWhy2Nsm-K77GK5xaPV98V9w,1226
+commands/include.py,sha256=30bPFwZusvsCNjp7U2fgS9h9wWbSQlZFB6eCwzvWwok,1756
+commands/logs.py,sha256=chfwgO4KaClor_ySs_QkJ7M7q3V-5ahKFhyd5K8GOik,3128
+commands/route.py,sha256=jqGKRcocjNhGAN6LQ1PvqcNaNKG_Nn437l9ZeHaD-FA,1132
+commands/syntax.py,sha256=rABFjTHhJIumdTXSEzNlJNypZxfbXpmPlwSHkiAP7S0,2322
+commands/tree.py,sha256=H8kFYueo6mcN82MSgIzAH6Usq9u7sYo2WlY0p8rlN_A,1669
+exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+exporter/graph.py,sha256=WYUrqUgCaK6KihgxAcRHaQn4oMo6b7ybC8yb_36ZIsA,3995
+exporter/html.py,sha256=uquEM-WvBt2aV9GshgaI3UVhYd8sD0QQ-OmuNtvYUdU,798
+exporter/markdown.py,sha256=_0mXQIhurGEZ0dO-eq9DbsuKNrgEDIblgtL3DAgYNo8,724
+parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+parser/nginx_parser.py,sha256=JqZ3clNy4Nf-bmbsx_rJUL7EgRoB79b87eEu_isMeqg,3577
+upstream_checker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+upstream_checker/checker.py,sha256=9-6CMUTN7gXUACP8EwX722QogfujZyV-WWWUeM3a79k,455
+nginx_lens-0.1.2.dist-info/METADATA,sha256=pRmzXAm457F0Ed7-4KY8TH9JkOWue0Zm_PdOcvyHxOE,476
+nginx_lens-0.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nginx_lens-0.1.2.dist-info/entry_points.txt,sha256=qEcecjSyLqcJjbIVlNlTpqAhPqDyaujUV5ZcBTAr3po,48
+nginx_lens-0.1.2.dist-info/top_level.txt,sha256=mxLJO4rZg0rbixVGhplF3fUNFs8vxDIL25ronZNvRy4,51
+nginx_lens-0.1.2.dist-info/RECORD,,
nginx_lens-0.1.0.dist-info/RECORD
REMOVED
@@ -1,28 +0,0 @@
-analyzer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-analyzer/base.py,sha256=oGKg78BfMVmuzYafc08oq9p31-jEgYolGjLkUcIdkN8,607
-analyzer/conflicts.py,sha256=2h8CyipWo5-81C_RYAiL2XaNZ2HYO8LHDIt5KEcser0,1398
-analyzer/diff.py,sha256=idvXnoLzBVUYgKi_s3uDu0v2GNMV3B8aDqTROXcdQdo,1749
-analyzer/duplicates.py,sha256=VUjbM19Y_Wwty8GEOu_7nNzOH6mr071p_z1MAkp1kBA,1012
-analyzer/empty_blocks.py,sha256=7Zu4-5I5PS3bjhH0Ppq1CvM7rMTeRIc4fHx5n5vkMIw,517
-analyzer/route.py,sha256=2xxQooQEsfn10tzGCZUoP32T0OnTMnPB6qRgBR6not8,2345
-analyzer/unused.py,sha256=Ixzv0bPsw9IafblVwLiAOgugdg2dGu1MJDtuoqzPZiY,1066
-analyzer/warnings.py,sha256=eg50TfbE1pJBR6fGUEByqKm5Km1AQBiIy284DOidacI,1244
-commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-commands/analyze.py,sha256=pRL3-ZkI2nwXMdvXsBrh__MWTd9u0NFNRaiM5x8SSX0,3280
-commands/cli.py,sha256=A47d3vdTDPHRZlRKs02PtxOFdPHjecpXTZyGhQ34r_w,490
-commands/diff.py,sha256=yA_znERefmtcWuusdX7XUqCqRftA6hFWsYRVedc97ig,1237
-commands/health.py,sha256=2QdjIVattoaksW7BrjM2_vqqB18_t7ILeBvKVjaxTdM,1008
-commands/route.py,sha256=jqGKRcocjNhGAN6LQ1PvqcNaNKG_Nn437l9ZeHaD-FA,1132
-commands/tree.py,sha256=H8kFYueo6mcN82MSgIzAH6Usq9u7sYo2WlY0p8rlN_A,1669
-exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-exporter/html.py,sha256=uquEM-WvBt2aV9GshgaI3UVhYd8sD0QQ-OmuNtvYUdU,798
-exporter/markdown.py,sha256=_0mXQIhurGEZ0dO-eq9DbsuKNrgEDIblgtL3DAgYNo8,724
-parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parser/nginx_parser.py,sha256=JqZ3clNy4Nf-bmbsx_rJUL7EgRoB79b87eEu_isMeqg,3577
-upstream_checker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-upstream_checker/checker.py,sha256=9-6CMUTN7gXUACP8EwX722QogfujZyV-WWWUeM3a79k,455
-nginx_lens-0.1.0.dist-info/METADATA,sha256=YaK62hPH-fHD66tS3qvGVHJsc7HkAEbz5xxnvWQVAVA,476
-nginx_lens-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nginx_lens-0.1.0.dist-info/entry_points.txt,sha256=qEcecjSyLqcJjbIVlNlTpqAhPqDyaujUV5ZcBTAr3po,48
-nginx_lens-0.1.0.dist-info/top_level.txt,sha256=mxLJO4rZg0rbixVGhplF3fUNFs8vxDIL25ronZNvRy4,51
-nginx_lens-0.1.0.dist-info/RECORD,,
{nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/WHEEL
File without changes
{nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/entry_points.txt
File without changes
{nginx_lens-0.1.0.dist-info → nginx_lens-0.1.2.dist-info}/top_level.txt
File without changes