nginx_lens-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- analyzer/__init__.py +0 -0
- analyzer/base.py +18 -0
- analyzer/conflicts.py +33 -0
- analyzer/diff.py +40 -0
- analyzer/duplicates.py +26 -0
- analyzer/empty_blocks.py +14 -0
- analyzer/route.py +60 -0
- analyzer/unused.py +29 -0
- analyzer/warnings.py +23 -0
- commands/__init__.py +0 -0
- commands/analyze.py +70 -0
- commands/cli.py +19 -0
- commands/diff.py +35 -0
- commands/health.py +24 -0
- commands/route.py +30 -0
- commands/tree.py +44 -0
- exporter/__init__.py +0 -0
- exporter/html.py +20 -0
- exporter/markdown.py +17 -0
- nginx_lens-0.1.0.dist-info/METADATA +15 -0
- nginx_lens-0.1.0.dist-info/RECORD +28 -0
- nginx_lens-0.1.0.dist-info/WHEEL +5 -0
- nginx_lens-0.1.0.dist-info/entry_points.txt +2 -0
- nginx_lens-0.1.0.dist-info/top_level.txt +5 -0
- parser/__init__.py +0 -0
- parser/nginx_parser.py +95 -0
- upstream_checker/__init__.py +0 -0
- upstream_checker/checker.py +12 -0
analyzer/__init__.py
ADDED
File without changes

analyzer/base.py
ADDED
@@ -0,0 +1,18 @@
from typing import Any, List, Dict

class Analyzer:
    def __init__(self, tree):
        self.tree = tree
        self.directives = tree.directives

    def walk(self, directives=None, parent=None):
        """
        Recursively walks the directive tree.
        Yields (directive, parent) pairs.
        """
        if directives is None:
            directives = self.directives
        for d in directives:
            yield d, parent
            if 'directives' in d:
                yield from self.walk(d['directives'], d)

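A minimal usage sketch of the walker above (the config path is a placeholder; the dict keys match those produced by parser/nginx_parser.py):

    # Hypothetical example: list every location block and its enclosing block.
    from parser.nginx_parser import parse_nginx_config
    from analyzer.base import Analyzer

    tree = parse_nginx_config("/etc/nginx/nginx.conf")  # example path
    for directive, parent in Analyzer(tree).walk():
        if directive.get('block') == 'location':
            enclosing = parent.get('block') if parent else '(top level)'
            print(f"location {directive.get('arg')} inside {enclosing}")
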
analyzer/conflicts.py
ADDED
@@ -0,0 +1,33 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re

def find_location_conflicts(tree) -> List[Dict[str, Any]]:
    """
    Finds overlapping locations within a single server block.
    Returns a list of conflicts: [{server, location1, location2}]
    """
    analyzer = Analyzer(tree)
    conflicts = []
    for d, parent in analyzer.walk():
        if d.get('block') == 'server':
            locations = []
            for sub, _ in analyzer.walk(d['directives'], d):
                if sub.get('block') == 'location':
                    arg = sub.get('arg')
                    if arg:
                        locations.append(arg)
            # Check for overlaps
            for i in range(len(locations)):
                for j in range(i+1, len(locations)):
                    if _locations_conflict(locations[i], locations[j]):
                        conflicts.append({
                            'server': d,
                            'location1': locations[i],
                            'location2': locations[j]
                        })
    return conflicts

def _locations_conflict(loc1, loc2):
    # Simple heuristic: one path is a prefix of the other
    return loc1.startswith(loc2) or loc2.startswith(loc1)

analyzer/diff.py
ADDED
@@ -0,0 +1,40 @@
from typing import List, Dict, Any

def diff_trees(tree1, tree2) -> List[Dict[str, Any]]:
    """
    Compares two directive trees. Returns a list of differences.
    Format: [{type, path, value1, value2}]
    type: 'added', 'removed', 'changed'
    path: path to the directive (e.g. ['http', 'server', 'location /api'])
    """
    diffs = []
    _diff_blocks(tree1.directives, tree2.directives, [], diffs)
    return diffs

def _diff_blocks(d1, d2, path, diffs):
    # Index by (block, arg), (directive, args) or (upstream)
    def key(d):
        if 'block' in d:
            return ('block', d['block'], d.get('arg'))
        if 'directive' in d:
            return ('directive', d['directive'], d.get('args'))
        if 'upstream' in d:
            return ('upstream', d['upstream'])
        return ('other', str(d))
    map1 = {key(x): x for x in d1}
    map2 = {key(x): x for x in d2}
    all_keys = set(map1) | set(map2)
    for k in all_keys:
        v1 = map1.get(k)
        v2 = map2.get(k)
        p = path + [k[1] if k[0] != 'directive' else f"{k[1]} {k[2] or ''}".strip()]
        if v1 and not v2:
            diffs.append({'type': 'removed', 'path': p, 'value1': v1, 'value2': None})
        elif v2 and not v1:
            diffs.append({'type': 'added', 'path': p, 'value1': None, 'value2': v2})
        else:
            # Recursively compare blocks
            if 'directives' in v1 and 'directives' in v2:
                _diff_blocks(v1['directives'], v2['directives'], p, diffs)
            elif v1 != v2:
                diffs.append({'type': 'changed', 'path': p, 'value1': v1, 'value2': v2})

analyzer/duplicates.py
ADDED
@@ -0,0 +1,26 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any

def find_duplicate_directives(tree) -> List[Dict[str, Any]]:
    """
    Finds duplicated directives within a single block.
    Returns a list: [{block, directive, count}]
    """
    analyzer = Analyzer(tree)
    duplicates = []
    for d, parent in analyzer.walk():
        if 'directives' in d:
            seen = {}
            for sub, _ in analyzer.walk(d['directives'], d):
                if 'directive' in sub:
                    key = (sub['directive'], str(sub.get('args')))
                    seen[key] = seen.get(key, 0) + 1
            for (directive, args), count in seen.items():
                if count > 1:
                    duplicates.append({
                        'block': d,
                        'directive': directive,
                        'args': args,
                        'count': count
                    })
    return duplicates

analyzer/empty_blocks.py
ADDED
@@ -0,0 +1,14 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any

def find_empty_blocks(tree) -> List[Dict[str, Any]]:
    """
    Finds empty blocks (with no nested directives).
    Returns a list: [{block, arg}]
    """
    analyzer = Analyzer(tree)
    empties = []
    for d, parent in analyzer.walk():
        if 'block' in d and (not d.get('directives')):
            empties.append({'block': d.get('block'), 'arg': d.get('arg')})
    return empties

analyzer/route.py
ADDED
@@ -0,0 +1,60 @@
from urllib.parse import urlparse
from analyzer.base import Analyzer
from typing import Dict, Any, Optional
import re

def find_route(tree, url: str) -> Optional[Dict[str, Any]]:
    """
    Finds the server and location that serve the given URL.
    Returns: {'server': ..., 'location': ..., 'proxy_pass': ...}
    """
    parsed = urlparse(url)
    host = parsed.hostname
    port = str(parsed.port or (443 if parsed.scheme == 'https' else 80))
    path = parsed.path or '/'
    analyzer = Analyzer(tree)
    best_server = None
    best_server_score = -1
    # 1. Find a matching server
    for d, _ in analyzer.walk():
        if d.get('block') == 'server':
            names = []
            listens = []
            for sub, _ in analyzer.walk(d['directives'], d):
                if sub.get('directive') == 'server_name':
                    names += sub.get('args', '').split()
                if sub.get('directive') == 'listen':
                    listens.append(sub.get('args', ''))
            score = 0
            if host and any(_host_match(host, n) for n in names):
                score += 2
            if port and any(port in l for l in listens):
                score += 1
            if score > best_server_score:
                best_server = d
                best_server_score = score
    if not best_server:
        return None
    # 2. Within the server, find the best location (longest prefix match)
    best_loc = None
    best_len = -1
    proxy_pass = None
    for sub, _ in analyzer.walk(best_server['directives'], best_server):
        if sub.get('block') == 'location':
            loc = sub.get('arg', '')
            if path.startswith(loc) and len(loc) > best_len:
                best_loc = sub
                best_len = len(loc)
    if best_loc:
        for d, _ in analyzer.walk(best_loc.get('directives', []), best_loc):
            if d.get('directive') == 'proxy_pass':
                proxy_pass = d.get('args')
    return {'server': best_server, 'location': best_loc, 'proxy_pass': proxy_pass}

def _host_match(host, pattern):
    # Primitive wildcard support
    if pattern == '_':
        return True
    if pattern.startswith('*.'):
        return host.endswith(pattern[1:])
    return host == pattern

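A hedged example of calling find_route directly (config path and URL are placeholders):

    from parser.nginx_parser import parse_nginx_config
    from analyzer.route import find_route

    tree = parse_nginx_config("/etc/nginx/nginx.conf")        # example path
    match = find_route(tree, "https://example.com/api/users")  # example URL
    if match and match['location']:
        print(match['location'].get('arg'), match['proxy_pass'])
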
analyzer/unused.py
ADDED
@@ -0,0 +1,29 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re

def find_unused_variables(tree) -> List[Dict[str, Any]]:
    """
    Finds variables defined via set/map that are never used.
    Returns a list: [{name, context}]
    """
    analyzer = Analyzer(tree)
    defined = set()
    used = set()
    for d, parent in analyzer.walk():
        if d.get('directive') == 'set':
            parts = d.get('args', '').split()
            if parts:
                defined.add(parts[0])
        if d.get('directive') == 'map':
            parts = d.get('args', '').split()
            if parts:
                defined.add(parts[0])
        # Look for $var usage in any arguments
        for v in re.findall(r'\$[a-zA-Z0-9_]+', str(d.get('args',''))):
            used.add(v)
    unused = []
    for var in defined:
        if var not in used:
            unused.append({'name': var, 'context': None})
    return unused

analyzer/warnings.py
ADDED
@@ -0,0 +1,23 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re

def find_warnings(tree) -> List[Dict[str, Any]]:
    """
    Finds potentially dangerous or non-obvious directives.
    Returns a list: [{type, directive, context, value}]
    """
    analyzer = Analyzer(tree)
    warnings = []
    for d, parent in analyzer.walk():
        if d.get('directive') == 'proxy_pass':
            val = d.get('args', '')
            if not re.match(r'^(http|https)://', val):
                warnings.append({'type': 'proxy_pass_no_scheme', 'directive': 'proxy_pass', 'context': parent, 'value': val})
        if d.get('directive') == 'autoindex' and d.get('args', '').strip() == 'on':
            warnings.append({'type': 'autoindex_on', 'directive': 'autoindex', 'context': parent, 'value': 'on'})
        if d.get('block') == 'if':
            warnings.append({'type': 'if_block', 'directive': 'if', 'context': parent, 'value': ''})
        if d.get('directive') == 'server_tokens' and d.get('args', '').strip() == 'on':
            warnings.append({'type': 'server_tokens_on', 'directive': 'server_tokens', 'context': parent, 'value': 'on'})
    return warnings

commands/__init__.py
ADDED
File without changes

commands/analyze.py
ADDED
@@ -0,0 +1,70 @@
import typer
from rich.console import Console
from rich.panel import Panel
from rich.tree import Tree
from analyzer.conflicts import find_location_conflicts
from analyzer.duplicates import find_duplicate_directives
from analyzer.empty_blocks import find_empty_blocks
from parser.nginx_parser import parse_nginx_config
from analyzer.warnings import find_warnings
from analyzer.unused import find_unused_variables

app = typer.Typer()
console = Console()

def analyze(config_path: str = typer.Argument(..., help="Path to nginx.conf")):
    """
    Analyzes an Nginx configuration for common problems.
    """
    tree = parse_nginx_config(config_path)
    conflicts = find_location_conflicts(tree)
    dups = find_duplicate_directives(tree)
    empties = find_empty_blocks(tree)
    warnings = find_warnings(tree)
    unused_vars = find_unused_variables(tree)

    root = Tree("[bold blue]Nginx configuration analysis[/bold blue]")

    if conflicts:
        node = root.add("[red]Location conflicts[/red]")
        for c in conflicts:
            node.add(f"[yellow]server[/yellow]: {c['server'].get('arg', '')} [magenta]location[/magenta]: [bold]{c['location1']}[/bold] ↔ [bold]{c['location2']}[/bold]")
    else:
        root.add("[green]No location conflicts[/green]")

    if dups:
        node = root.add("[red]Duplicate directives[/red]")
        for d in dups:
            node.add(f"[yellow]{d['directive']}[/yellow] ([italic]{d['args']}[/italic]) appears {d['count']} times in block [cyan]{d['block'].get('block', d['block'])}[/cyan]")
    else:
        root.add("[green]No duplicate directives[/green]")

    if empties:
        node = root.add("[red]Empty blocks[/red]")
        for e in empties:
            node.add(f"[yellow]{e['block']}[/yellow] [italic]{e['arg'] or ''}[/italic]")
    else:
        root.add("[green]No empty blocks[/green]")

    if warnings:
        node = root.add("[bold yellow]Potential issues[/bold yellow]")
        for w in warnings:
            if w['type'] == 'proxy_pass_no_scheme':
                node.add(f"[yellow]proxy_pass[/yellow] without a scheme: [italic]{w['value']}[/italic]")
            elif w['type'] == 'autoindex_on':
                node.add(f"[yellow]autoindex on[/yellow] in block [cyan]{w['context'].get('block','')}[/cyan]")
            elif w['type'] == 'if_block':
                node.add(f"[yellow]if directive[/yellow] inside block [cyan]{w['context'].get('block','')}[/cyan]")
            elif w['type'] == 'server_tokens_on':
                node.add(f"[yellow]server_tokens on[/yellow] in block [cyan]{w['context'].get('block','')}[/cyan]")
    else:
        root.add("[green]No potential issues[/green]")

    if unused_vars:
        node = root.add("[bold magenta]Unused variables[/bold magenta]")
        for v in unused_vars:
            node.add(f"[magenta]{v['name']}[/magenta]")
    else:
        root.add("[green]No unused variables[/green]")

    console.print(root)

commands/cli.py
ADDED
@@ -0,0 +1,19 @@
import typer
from rich.console import Console
from commands.health import health
from commands.analyze import analyze
from commands.tree import tree
from commands.diff import diff
from commands.route import route

app = typer.Typer(help="nginx-lens: analyze and diagnose Nginx configurations")
console = Console()

app.command()(health)
app.command()(analyze)
app.command()(tree)
app.command()(diff)
app.command()(route)

if __name__ == "__main__":
    app()

commands/diff.py
ADDED
@@ -0,0 +1,35 @@
import typer
from rich.console import Console
from rich.tree import Tree
from rich.text import Text
from analyzer.diff import diff_trees
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()

def diff(
    config1: str = typer.Argument(..., help="First nginx.conf"),
    config2: str = typer.Argument(..., help="Second nginx.conf")
):
    """
    Compares two Nginx configurations and prints the differences.
    """
    tree1 = parse_nginx_config(config1)
    tree2 = parse_nginx_config(config2)
    diffs = diff_trees(tree1, tree2)
    if not diffs:
        console.print("[green]The configurations are identical[/green]")
        return
    root = Tree("[bold blue]Diff nginx.conf[/bold blue]")
    for d in diffs:
        path = "/".join(d['path'])
        if d['type'] == 'added':
            root.add(f"[green]+ {path}[/green]")
        elif d['type'] == 'removed':
            root.add(f"[red]- {path}[/red]")
        elif d['type'] == 'changed':
            node = root.add(f"[yellow]! {path}[/yellow]")
            node.add(f"[red]- {d['value1']}[/red]")
            node.add(f"[green]+ {d['value2']}[/green]")
    console.print(root)

commands/health.py
ADDED
@@ -0,0 +1,24 @@
import typer
from rich.console import Console
from upstream_checker.checker import check_upstreams
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()

def health(
    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
    timeout: float = typer.Option(2.0, help="Check timeout (seconds)"),
    retries: int = typer.Option(1, help="Number of attempts")
):
    """
    Checks the availability of the upstream servers defined in nginx.conf.
    """
    tree = parse_nginx_config(config_path)
    upstreams = tree.get_upstreams()
    results = check_upstreams(upstreams, timeout=timeout, retries=retries)
    for name, servers in results.items():
        console.print(f"[bold]{name}[/bold]")
        for srv in servers:
            status = "[green]Healthy ✅[/green]" if srv["healthy"] else "[red]Unhealthy ❌[/red]"
            console.print(f"  {srv['address']} {status}")

commands/route.py
ADDED
@@ -0,0 +1,30 @@
import typer
from rich.console import Console
from rich.panel import Panel
from analyzer.route import find_route
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()

def route(
    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
    url: str = typer.Argument(..., help="URL to route (e.g. http://host/path)")
):
    """
    Shows which server/location serves the given URL.
    """
    tree = parse_nginx_config(config_path)
    res = find_route(tree, url)
    if not res:
        console.print(Panel(f"No matching server found for {url}", style="red"))
        return
    server = res['server']
    location = res['location']
    proxy_pass = res['proxy_pass']
    text = f"[bold]Server:[/bold] {server.get('arg','') or '[no arg]'}\n"
    if location:
        text += f"[bold]Location:[/bold] {location.get('arg','')}\n"
    if proxy_pass:
        text += f"[bold]proxy_pass:[/bold] {proxy_pass}\n"
    console.print(Panel(text, title="Route", style="green"))

commands/tree.py
ADDED
@@ -0,0 +1,44 @@
import typer
from rich.console import Console
from rich.tree import Tree as RichTree
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()

def _build_tree(directives, parent):
    for d in directives:
        if 'block' in d:
            label = f"[bold]{d['block']}[/bold] {d.get('arg') or ''}".strip()
            node = parent.add(label)
            if d.get('directives'):
                _build_tree(d['directives'], node)
        elif 'upstream' in d:
            label = f"[bold magenta]upstream[/bold magenta] {d['upstream']}"
            node = parent.add(label)
            for srv in d.get('servers', []):
                node.add(f"[green]server[/green] {srv}")
        elif 'directive' in d:
            parent.add(f"[cyan]{d['directive']}[/cyan] {d.get('args','')}")

def tree(
    config_path: str = typer.Argument(..., help="Path to nginx.conf"),
    markdown: bool = typer.Option(False, help="Export to Markdown"),
    html: bool = typer.Option(False, help="Export to HTML")
):
    """
    Visualizes the structure of nginx.conf as a tree.
    """
    tree_obj = parse_nginx_config(config_path)
    root = RichTree(f"[bold blue]nginx.conf[/bold blue]")
    _build_tree(tree_obj.directives, root)
    if markdown:
        from exporter.markdown import tree_to_markdown
        md = tree_to_markdown(tree_obj.directives)
        console.print(md)
    elif html:
        from exporter.html import tree_to_html
        html_code = tree_to_html(tree_obj.directives)
        console.print(html_code)
    else:
        console.print(root)

exporter/__init__.py
ADDED
File without changes

exporter/html.py
ADDED
@@ -0,0 +1,20 @@
def tree_to_html(directives, level=0):
    html = []
    html.append('<ul>')
    for d in directives:
        if 'block' in d:
            label = f"{d['block']} {d.get('arg') or ''}".strip()
            html.append(f"<li><b>{label}</b>")
            if d.get('directives'):
                html.append(tree_to_html(d['directives'], level+1))
            html.append("</li>")
        elif 'upstream' in d:
            label = f"upstream {d['upstream']}"
            html.append(f"<li><b>{label}</b><ul>")
            for srv in d.get('servers', []):
                html.append(f"<li>server {srv}</li>")
            html.append("</ul></li>")
        elif 'directive' in d:
            html.append(f"<li>{d['directive']} {d.get('args','')}</li>")
    html.append('</ul>')
    return '\n'.join(html)

exporter/markdown.py
ADDED
@@ -0,0 +1,17 @@
def tree_to_markdown(directives, level=0):
    lines = []
    prefix = ' ' * level + '- '
    for d in directives:
        if 'block' in d:
            label = f"{d['block']} {d.get('arg') or ''}".strip()
            lines.append(f"{prefix}{label}")
            if d.get('directives'):
                lines.append(tree_to_markdown(d['directives'], level+1))
        elif 'upstream' in d:
            label = f"upstream {d['upstream']}"
            lines.append(f"{prefix}{label}")
            for srv in d.get('servers', []):
                lines.append(f"{' '*(level+1)}- server {srv}")
        elif 'directive' in d:
            lines.append(f"{prefix}{d['directive']} {d.get('args','')}")
    return '\n'.join(lines)

nginx_lens-0.1.0.dist-info/METADATA
ADDED
@@ -0,0 +1,15 @@
Metadata-Version: 2.4
Name: nginx-lens
Version: 0.1.0
Summary: CLI tool for analyzing, visualizing, and diagnosing Nginx configurations
Author: Daniil Astrouski
Author-email: shelovesuastra@gmail.com
Requires-Python: >=3.8
Requires-Dist: typer[all]>=0.9.0
Requires-Dist: rich>=13.0.0
Requires-Dist: requests>=2.25.0
Dynamic: author
Dynamic: author-email
Dynamic: requires-dist
Dynamic: requires-python
Dynamic: summary

@@ -0,0 +1,28 @@
|
|
|
1
|
+
analyzer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
analyzer/base.py,sha256=oGKg78BfMVmuzYafc08oq9p31-jEgYolGjLkUcIdkN8,607
|
|
3
|
+
analyzer/conflicts.py,sha256=2h8CyipWo5-81C_RYAiL2XaNZ2HYO8LHDIt5KEcser0,1398
|
|
4
|
+
analyzer/diff.py,sha256=idvXnoLzBVUYgKi_s3uDu0v2GNMV3B8aDqTROXcdQdo,1749
|
|
5
|
+
analyzer/duplicates.py,sha256=VUjbM19Y_Wwty8GEOu_7nNzOH6mr071p_z1MAkp1kBA,1012
|
|
6
|
+
analyzer/empty_blocks.py,sha256=7Zu4-5I5PS3bjhH0Ppq1CvM7rMTeRIc4fHx5n5vkMIw,517
|
|
7
|
+
analyzer/route.py,sha256=2xxQooQEsfn10tzGCZUoP32T0OnTMnPB6qRgBR6not8,2345
|
|
8
|
+
analyzer/unused.py,sha256=Ixzv0bPsw9IafblVwLiAOgugdg2dGu1MJDtuoqzPZiY,1066
|
|
9
|
+
analyzer/warnings.py,sha256=eg50TfbE1pJBR6fGUEByqKm5Km1AQBiIy284DOidacI,1244
|
|
10
|
+
commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
commands/analyze.py,sha256=pRL3-ZkI2nwXMdvXsBrh__MWTd9u0NFNRaiM5x8SSX0,3280
|
|
12
|
+
commands/cli.py,sha256=A47d3vdTDPHRZlRKs02PtxOFdPHjecpXTZyGhQ34r_w,490
|
|
13
|
+
commands/diff.py,sha256=yA_znERefmtcWuusdX7XUqCqRftA6hFWsYRVedc97ig,1237
|
|
14
|
+
commands/health.py,sha256=2QdjIVattoaksW7BrjM2_vqqB18_t7ILeBvKVjaxTdM,1008
|
|
15
|
+
commands/route.py,sha256=jqGKRcocjNhGAN6LQ1PvqcNaNKG_Nn437l9ZeHaD-FA,1132
|
|
16
|
+
commands/tree.py,sha256=H8kFYueo6mcN82MSgIzAH6Usq9u7sYo2WlY0p8rlN_A,1669
|
|
17
|
+
exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
|
+
exporter/html.py,sha256=uquEM-WvBt2aV9GshgaI3UVhYd8sD0QQ-OmuNtvYUdU,798
|
|
19
|
+
exporter/markdown.py,sha256=_0mXQIhurGEZ0dO-eq9DbsuKNrgEDIblgtL3DAgYNo8,724
|
|
20
|
+
parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
21
|
+
parser/nginx_parser.py,sha256=JqZ3clNy4Nf-bmbsx_rJUL7EgRoB79b87eEu_isMeqg,3577
|
|
22
|
+
upstream_checker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
23
|
+
upstream_checker/checker.py,sha256=9-6CMUTN7gXUACP8EwX722QogfujZyV-WWWUeM3a79k,455
|
|
24
|
+
nginx_lens-0.1.0.dist-info/METADATA,sha256=YaK62hPH-fHD66tS3qvGVHJsc7HkAEbz5xxnvWQVAVA,476
|
|
25
|
+
nginx_lens-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
26
|
+
nginx_lens-0.1.0.dist-info/entry_points.txt,sha256=qEcecjSyLqcJjbIVlNlTpqAhPqDyaujUV5ZcBTAr3po,48
|
|
27
|
+
nginx_lens-0.1.0.dist-info/top_level.txt,sha256=mxLJO4rZg0rbixVGhplF3fUNFs8vxDIL25ronZNvRy4,51
|
|
28
|
+
nginx_lens-0.1.0.dist-info/RECORD,,
|
parser/__init__.py
ADDED
File without changes

parser/nginx_parser.py
ADDED
@@ -0,0 +1,95 @@
import os
import glob
from typing import Dict, List, Any
import re

class NginxConfigTree:
    def __init__(self, directives=None, upstreams=None):
        self.directives = directives or []
        self._upstreams = upstreams or {}
    def get_upstreams(self) -> Dict[str, List[str]]:
        return self._upstreams

# --- Helper functions ---
def _strip_comments(line: str) -> str:
    return line.split('#', 1)[0].strip()

def _parse_block(lines, base_dir) -> (List[Any], Dict[str, List[str]]):
    directives = []
    upstreams = {}
    i = 0
    while i < len(lines):
        line = _strip_comments(lines[i])
        if not line:
            i += 1
            continue
        if line.startswith('include '):
            pattern = line[len('include '):].rstrip(';').strip()
            pattern = os.path.join(base_dir, pattern) if not os.path.isabs(pattern) else pattern
            for inc_path in glob.glob(pattern):
                with open(inc_path) as f:
                    inc_lines = f.readlines()
                inc_directives, inc_upstreams = _parse_block(inc_lines, os.path.dirname(inc_path))
                directives.extend(inc_directives)
                for k, v in inc_upstreams.items():
                    upstreams.setdefault(k, []).extend(v)
            i += 1
            continue
        m = re.match(r'upstream\s+(\S+)\s*{', line)
        if m:
            name = m.group(1)
            block_lines = []
            depth = 1
            i += 1
            while i < len(lines) and depth > 0:
                l = _strip_comments(lines[i])
                if '{' in l:
                    depth += l.count('{')
                if '}' in l:
                    depth -= l.count('}')
                if depth > 0:
                    block_lines.append(l)
                i += 1
            servers = []
            for bl in block_lines:
                m_srv = re.match(r'server\s+([^;]+);', bl)
                if m_srv:
                    servers.append(m_srv.group(1).strip())
            upstreams[name] = servers
            directives.append({'upstream': name, 'servers': servers})
            continue
        # Blocks (e.g. server, http, location)
        m = re.match(r'(\S+)\s*(\S+)?\s*{', line)
        if m:
            block_name = m.group(1)
            block_arg = m.group(2)
            block_lines = []
            depth = 1
            i += 1
            while i < len(lines) and depth > 0:
                l = _strip_comments(lines[i])
                if '{' in l:
                    depth += l.count('{')
                if '}' in l:
                    depth -= l.count('}')
                if depth > 0:
                    block_lines.append(l)
                i += 1
            sub_directives, sub_upstreams = _parse_block(block_lines, base_dir)
            directives.append({'block': block_name, 'arg': block_arg, 'directives': sub_directives})
            for k, v in sub_upstreams.items():
                upstreams.setdefault(k, []).extend(v)
            continue
        # Plain directive
        m = re.match(r'(\S+)\s+([^;]+);', line)
        if m:
            directives.append({'directive': m.group(1), 'args': m.group(2)})
        i += 1
    return directives, upstreams

def parse_nginx_config(path: str) -> NginxConfigTree:
    base_dir = os.path.dirname(os.path.abspath(path))
    with open(path) as f:
        lines = f.readlines()
    directives, upstreams = _parse_block(lines, base_dir)
    return NginxConfigTree(directives, upstreams)

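A sketch of the structure this parser produces, using a made-up config written to a temporary file (the config text is illustrative, not from the package):

    import tempfile, os
    from parser.nginx_parser import parse_nginx_config

    conf = """
    http {
        server {
            listen 80;
            location /api {
                proxy_pass http://backend;
            }
        }
        upstream backend {
            server 127.0.0.1:8080;
        }
    }
    """
    with tempfile.NamedTemporaryFile("w", suffix=".conf", delete=False) as f:
        f.write(conf)
        path = f.name
    tree = parse_nginx_config(path)
    os.unlink(path)
    # tree.directives is roughly:
    # [{'block': 'http', 'arg': None, 'directives': [
    #     {'block': 'server', 'arg': None, 'directives': [
    #         {'directive': 'listen', 'args': '80'},
    #         {'block': 'location', 'arg': '/api', 'directives': [
    #             {'directive': 'proxy_pass', 'args': 'http://backend'}]}]},
    #     {'upstream': 'backend', 'servers': ['127.0.0.1:8080']}]}]
    # tree.get_upstreams() -> {'backend': ['127.0.0.1:8080']}
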
upstream_checker/__init__.py
ADDED
File without changes

upstream_checker/checker.py
ADDED
@@ -0,0 +1,12 @@
import socket
from typing import Dict, List

def check_upstreams(upstreams: Dict[str, List[str]], timeout=2.0, retries=1):
    # TODO: Implement a real TCP/HTTP check
    # For now, every server is reported as healthy
    result = {}
    for name, servers in upstreams.items():
        result[name] = []
        for srv in servers:
            result[name].append({"address": srv, "healthy": True})
    return result
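
The TODO above leaves the actual probing unimplemented. A minimal sketch of what a TCP reachability check could look like while keeping the same input and output shape; the address handling (splitting off "weight=..." parameters, assuming port 80 when none is given) is an assumption, not part of the package:

    import socket
    from typing import Dict, List

    def check_upstreams_tcp(upstreams: Dict[str, List[str]], timeout=2.0, retries=1):
        # Sketch only: try to open a TCP connection to each "host:port" entry.
        # Entries that cannot be parsed or reached are counted as unhealthy.
        result = {}
        for name, servers in upstreams.items():
            result[name] = []
            for srv in servers:
                addr = srv.split()[0]                        # drop parameters such as "weight=5"
                host, _, port = addr.partition(':')
                port = int(port) if port.isdigit() else 80   # assume port 80 when omitted
                healthy = False
                for _ in range(max(1, retries)):
                    try:
                        with socket.create_connection((host, port), timeout=timeout):
                            healthy = True
                            break
                    except OSError:
                        continue
                result[name].append({"address": srv, "healthy": healthy})
        return result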