nginx-lens 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. nginx_lens-0.1.0/PKG-INFO +15 -0
  2. nginx_lens-0.1.0/analyzer/__init__.py +0 -0
  3. nginx_lens-0.1.0/analyzer/base.py +18 -0
  4. nginx_lens-0.1.0/analyzer/conflicts.py +33 -0
  5. nginx_lens-0.1.0/analyzer/diff.py +40 -0
  6. nginx_lens-0.1.0/analyzer/duplicates.py +26 -0
  7. nginx_lens-0.1.0/analyzer/empty_blocks.py +14 -0
  8. nginx_lens-0.1.0/analyzer/route.py +60 -0
  9. nginx_lens-0.1.0/analyzer/unused.py +29 -0
  10. nginx_lens-0.1.0/analyzer/warnings.py +23 -0
  11. nginx_lens-0.1.0/commands/__init__.py +0 -0
  12. nginx_lens-0.1.0/commands/analyze.py +70 -0
  13. nginx_lens-0.1.0/commands/cli.py +19 -0
  14. nginx_lens-0.1.0/commands/diff.py +35 -0
  15. nginx_lens-0.1.0/commands/health.py +24 -0
  16. nginx_lens-0.1.0/commands/route.py +30 -0
  17. nginx_lens-0.1.0/commands/tree.py +44 -0
  18. nginx_lens-0.1.0/exporter/__init__.py +0 -0
  19. nginx_lens-0.1.0/exporter/html.py +20 -0
  20. nginx_lens-0.1.0/exporter/markdown.py +17 -0
  21. nginx_lens-0.1.0/nginx_lens.egg-info/PKG-INFO +15 -0
  22. nginx_lens-0.1.0/nginx_lens.egg-info/SOURCES.txt +36 -0
  23. nginx_lens-0.1.0/nginx_lens.egg-info/dependency_links.txt +1 -0
  24. nginx_lens-0.1.0/nginx_lens.egg-info/entry_points.txt +2 -0
  25. nginx_lens-0.1.0/nginx_lens.egg-info/requires.txt +3 -0
  26. nginx_lens-0.1.0/nginx_lens.egg-info/top_level.txt +5 -0
  27. nginx_lens-0.1.0/parser/__init__.py +0 -0
  28. nginx_lens-0.1.0/parser/nginx_parser.py +95 -0
  29. nginx_lens-0.1.0/pyproject.toml +3 -0
  30. nginx_lens-0.1.0/setup.cfg +4 -0
  31. nginx_lens-0.1.0/setup.py +23 -0
  32. nginx_lens-0.1.0/tests/test_conflicts.py +29 -0
  33. nginx_lens-0.1.0/tests/test_duplicates.py +27 -0
  34. nginx_lens-0.1.0/tests/test_empty_blocks.py +27 -0
  35. nginx_lens-0.1.0/tests/test_health.py +19 -0
  36. nginx_lens-0.1.0/tests/test_parser.py +62 -0
  37. nginx_lens-0.1.0/upstream_checker/__init__.py +0 -0
  38. nginx_lens-0.1.0/upstream_checker/checker.py +12 -0
@@ -0,0 +1,15 @@
1
+ Metadata-Version: 2.4
2
+ Name: nginx-lens
3
+ Version: 0.1.0
4
+ Summary: CLI-инструмент для анализа, визуализации и диагностики конфигураций Nginx
5
+ Author: Daniil Astrouski
6
+ Author-email: shelovesuastra@gmail.com
7
+ Requires-Python: >=3.8
8
+ Requires-Dist: typer[all]>=0.9.0
9
+ Requires-Dist: rich>=13.0.0
10
+ Requires-Dist: requests>=2.25.0
11
+ Dynamic: author
12
+ Dynamic: author-email
13
+ Dynamic: requires-dist
14
+ Dynamic: requires-python
15
+ Dynamic: summary
File without changes
@@ -0,0 +1,18 @@
from typing import Any, List, Dict


class Analyzer:
    """Base class for config-tree analyzers.

    Wraps a parsed ``NginxConfigTree`` and provides a depth-first traversal
    over its directive dictionaries.
    """

    def __init__(self, tree):
        self.tree = tree
        self.directives = tree.directives

    def walk(self, directives=None, parent=None):
        """Yield ``(directive, parent)`` pairs in depth-first pre-order.

        When *directives* is omitted, traversal starts at the tree root
        (whose nodes have ``parent is None``).
        """
        nodes = self.directives if directives is None else directives
        for node in nodes:
            yield node, parent
            if 'directives' in node:
                yield from self.walk(node['directives'], node)
@@ -0,0 +1,33 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re


def find_location_conflicts(tree) -> List[Dict[str, Any]]:
    """Detect overlapping location blocks inside each server block.

    Returns a list of conflicts: ``[{server, location1, location2}]``.
    """
    analyzer = Analyzer(tree)
    found: List[Dict[str, Any]] = []
    for node, _parent in analyzer.walk():
        if node.get('block') != 'server':
            continue
        # Gather every location argument reachable from this server block.
        args = [
            sub.get('arg')
            for sub, _ in analyzer.walk(node['directives'], node)
            if sub.get('block') == 'location' and sub.get('arg')
        ]
        # Pairwise overlap check.
        for i, first in enumerate(args):
            for second in args[i + 1:]:
                if _locations_conflict(first, second):
                    found.append({
                        'server': node,
                        'location1': first,
                        'location2': second,
                    })
    return found


def _locations_conflict(loc1, loc2):
    # Simple heuristic: the paths conflict when one is a prefix of the other.
    return loc1.startswith(loc2) or loc2.startswith(loc1)
@@ -0,0 +1,40 @@
from typing import List, Dict, Any


def diff_trees(tree1, tree2) -> List[Dict[str, Any]]:
    """Compare two directive trees and return their differences.

    Format: ``[{type, path, value1, value2}]`` where ``type`` is one of
    'added', 'removed', 'changed' and ``path`` is the directive path
    (e.g. ``['http', 'server', 'location /api']``).
    """
    result: List[Dict[str, Any]] = []
    _diff_blocks(tree1.directives, tree2.directives, [], result)
    return result


def _node_key(d):
    """Structural identity of a node: (block,arg) / (directive,args) / (upstream,name)."""
    if 'block' in d:
        return ('block', d['block'], d.get('arg'))
    if 'directive' in d:
        return ('directive', d['directive'], d.get('args'))
    if 'upstream' in d:
        return ('upstream', d['upstream'])
    return ('other', str(d))


def _diff_blocks(d1, d2, path, diffs):
    """Recursive helper: index both sides by node key and compare entries."""
    index1 = {_node_key(x): x for x in d1}
    index2 = {_node_key(x): x for x in d2}
    for k in set(index1) | set(index2):
        left = index1.get(k)
        right = index2.get(k)
        # Directives are labelled "name args"; blocks/upstreams by their name.
        label = f"{k[1]} {k[2] or ''}".strip() if k[0] == 'directive' else k[1]
        sub_path = path + [label]
        if left and not right:
            diffs.append({'type': 'removed', 'path': sub_path, 'value1': left, 'value2': None})
        elif right and not left:
            diffs.append({'type': 'added', 'path': sub_path, 'value1': None, 'value2': right})
        elif 'directives' in left and 'directives' in right:
            # Same block on both sides: recurse into its children.
            _diff_blocks(left['directives'], right['directives'], sub_path, diffs)
        elif left != right:
            diffs.append({'type': 'changed', 'path': sub_path, 'value1': left, 'value2': right})
@@ -0,0 +1,26 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any


def find_duplicate_directives(tree) -> List[Dict[str, Any]]:
    """Find directives repeated inside a single block.

    Returns: ``[{block, directive, args, count}]``.
    """
    analyzer = Analyzer(tree)
    duplicates: List[Dict[str, Any]] = []
    for d, _parent in analyzer.walk():
        if 'directives' not in d:
            continue
        seen: Dict[Any, int] = {}
        # NOTE(review): this walk recurses into nested blocks, so a directive
        # repeated in a nested location is also counted for every ancestor
        # block — confirm whether per-block counting should use direct
        # children only.
        for sub, _ in analyzer.walk(d['directives'], d):
            if 'directive' in sub:
                key = (sub['directive'], str(sub.get('args')))
                seen[key] = seen.get(key, 0) + 1
        for (directive, args), count in seen.items():
            if count > 1:
                duplicates.append({
                    'block': d,
                    'directive': directive,
                    'args': args,
                    'count': count,
                })
    return duplicates
@@ -0,0 +1,14 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any


def find_empty_blocks(tree) -> List[Dict[str, Any]]:
    """Find empty blocks (blocks with no nested directives).

    Returns: ``[{block, arg}]``.
    """
    analyzer = Analyzer(tree)
    empties: List[Dict[str, Any]] = []
    for d, _parent in analyzer.walk():
        if 'block' in d and not d.get('directives'):
            empties.append({'block': d.get('block'), 'arg': d.get('arg')})
        elif 'upstream' in d and not d.get('servers'):
            # The parser stores upstreams as {'upstream': name, 'servers': [...]}
            # rather than as generic blocks, so without this branch an empty
            # "upstream x {}" block was never reported.
            empties.append({'block': 'upstream', 'arg': d.get('upstream')})
    return empties
@@ -0,0 +1,60 @@
from urllib.parse import urlparse
from analyzer.base import Analyzer
from typing import Dict, Any, Optional
import re


def find_route(tree, url: str) -> Optional[Dict[str, Any]]:
    """Resolve which server/location would serve *url*.

    Returns ``{'server': ..., 'location': ..., 'proxy_pass': ...}`` or
    ``None`` when no server block exists at all.
    """
    parsed = urlparse(url)
    host = parsed.hostname
    # Default port follows the URL scheme when none is given explicitly.
    port = str(parsed.port or (443 if parsed.scheme == 'https' else 80))
    path = parsed.path or '/'
    analyzer = Analyzer(tree)
    best_server = None
    best_server_score = -1
    # 1. Pick the best-matching server block (server_name match scores 2,
    #    listen-port match scores 1).
    for d, _ in analyzer.walk():
        if d.get('block') != 'server':
            continue
        names = []
        listens = []
        for sub, _ in analyzer.walk(d['directives'], d):
            if sub.get('directive') == 'server_name':
                names += sub.get('args', '').split()
            if sub.get('directive') == 'listen':
                listens.append(sub.get('args', ''))
        score = 0
        if host and any(_host_match(host, n) for n in names):
            score += 2
        if port and any(_listen_matches_port(l, port) for l in listens):
            score += 1
        if score > best_server_score:
            best_server = d
            best_server_score = score
    if not best_server:
        return None
    # 2. Longest-prefix match over the chosen server's location blocks.
    #    NOTE(review): regex and "=" location modifiers are not handled —
    #    the parser keeps only a single location argument.
    best_loc = None
    best_len = -1
    proxy_pass = None
    for sub, _ in analyzer.walk(best_server['directives'], best_server):
        if sub.get('block') == 'location':
            loc = sub.get('arg', '')
            if path.startswith(loc) and len(loc) > best_len:
                best_loc = sub
                best_len = len(loc)
    if best_loc:
        for d, _ in analyzer.walk(best_loc.get('directives', []), best_loc):
            if d.get('directive') == 'proxy_pass':
                proxy_pass = d.get('args')
    return {'server': best_server, 'location': best_loc, 'proxy_pass': proxy_pass}


def _listen_matches_port(listen_args: str, port: str) -> bool:
    """True when the listen directive's first token ends at exactly *port*.

    Fixes the previous substring test, where port "80" also matched
    "listen 8080" and "listen 8000".
    """
    tokens = listen_args.split()
    if not tokens:
        return False
    # Handles "80", "0.0.0.0:8080" and "[::]:443 ssl" style arguments.
    return tokens[0].rsplit(':', 1)[-1] == port


def _host_match(host, pattern):
    # Minimal server_name semantics: '_' catch-all and leading '*.' wildcard.
    if pattern == '_':
        return True
    if pattern.startswith('*.'):
        return host.endswith(pattern[1:])
    return host == pattern
@@ -0,0 +1,29 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re

# Matches $variable references in directive arguments.
_VAR_RE = re.compile(r'\$[a-zA-Z0-9_]+')


def find_unused_variables(tree) -> List[Dict[str, Any]]:
    """Find variables declared via set/map that are never referenced.

    Returns: ``[{name, context}]``.

    Fixes two defects of the original implementation:
    the use-scan previously read the same args that contain the ``set $var``
    declaration, so every set variable counted as "used" and could never be
    reported; and ``map $source $target`` declares ``$target`` (while reading
    ``$source``), not the first argument.
    """
    analyzer = Analyzer(tree)
    defined = set()
    used = set()
    for d, _parent in analyzer.walk():
        directive = d.get('directive')
        args = str(d.get('args', '') or '')
        tokens = args.split()
        if directive == 'set' and tokens:
            # "set $var value;" declares tokens[0]; only the value part may
            # contain uses — otherwise the declaration marks itself as used.
            defined.add(tokens[0])
            used.update(_VAR_RE.findall(' '.join(tokens[1:])))
        elif directive == 'map' and tokens:
            # "map $source $target" declares $target and reads $source.
            defined.add(tokens[-1])
            used.update(_VAR_RE.findall(' '.join(tokens[:-1])))
        else:
            used.update(_VAR_RE.findall(args))
    return [{'name': var, 'context': None} for var in defined if var not in used]
@@ -0,0 +1,23 @@
from analyzer.base import Analyzer
from typing import List, Dict, Any
import re


def find_warnings(tree) -> List[Dict[str, Any]]:
    """Flag potentially risky or non-obvious directives.

    Returns: ``[{type, directive, context, value}]``.
    """
    analyzer = Analyzer(tree)
    findings: List[Dict[str, Any]] = []
    for node, parent in analyzer.walk():
        name = node.get('directive')
        if name == 'proxy_pass':
            target = node.get('args', '')
            # A proxy_pass without an http(s):// scheme is usually a mistake.
            if not re.match(r'^(http|https)://', target):
                findings.append({'type': 'proxy_pass_no_scheme', 'directive': 'proxy_pass', 'context': parent, 'value': target})
        if name == 'autoindex' and node.get('args', '').strip() == 'on':
            # Directory listings expose file names to the public.
            findings.append({'type': 'autoindex_on', 'directive': 'autoindex', 'context': parent, 'value': 'on'})
        if node.get('block') == 'if':
            # "if" inside nginx configs is famously error-prone.
            findings.append({'type': 'if_block', 'directive': 'if', 'context': parent, 'value': ''})
        if name == 'server_tokens' and node.get('args', '').strip() == 'on':
            # Leaks the nginx version in responses.
            findings.append({'type': 'server_tokens_on', 'directive': 'server_tokens', 'context': parent, 'value': 'on'})
    return findings
File without changes
@@ -0,0 +1,70 @@
import typer
from rich.console import Console
from rich.panel import Panel
from rich.tree import Tree
from analyzer.conflicts import find_location_conflicts
from analyzer.duplicates import find_duplicate_directives
from analyzer.empty_blocks import find_empty_blocks
from parser.nginx_parser import parse_nginx_config
from analyzer.warnings import find_warnings
from analyzer.unused import find_unused_variables

app = typer.Typer()
console = Console()

# Renderers for each warning type emitted by find_warnings().
_WARNING_RENDERERS = {
    'proxy_pass_no_scheme': lambda w: f"[yellow]proxy_pass[/yellow] без схемы: [italic]{w['value']}[/italic]",
    'autoindex_on': lambda w: f"[yellow]autoindex on[/yellow] в блоке [cyan]{w['context'].get('block','')}[/cyan]",
    'if_block': lambda w: f"[yellow]Директива if[/yellow] внутри блока [cyan]{w['context'].get('block','')}[/cyan]",
    'server_tokens_on': lambda w: f"[yellow]server_tokens on[/yellow] в блоке [cyan]{w['context'].get('block','')}[/cyan]",
}


def analyze(config_path: str = typer.Argument(..., help="Путь к nginx.conf")):
    """
    Анализирует конфигурацию Nginx на типовые проблемы.
    """
    parsed = parse_nginx_config(config_path)
    report = Tree("[bold blue]Анализ конфигурации Nginx[/bold blue]")

    conflicts = find_location_conflicts(parsed)
    if conflicts:
        section = report.add("[red]Конфликты location-ов[/red]")
        for c in conflicts:
            section.add(f"[yellow]server[/yellow]: {c['server'].get('arg', '')} [magenta]location[/magenta]: [bold]{c['location1']}[/bold] ↔ [bold]{c['location2']}[/bold]")
    else:
        report.add("[green]Нет конфликтов location-ов[/green]")

    duplicates = find_duplicate_directives(parsed)
    if duplicates:
        section = report.add("[red]Дублирующиеся директивы[/red]")
        for item in duplicates:
            section.add(f"[yellow]{item['directive']}[/yellow] ([italic]{item['args']}[/italic]) — {item['count']} раз в блоке [cyan]{item['block'].get('block', item['block'])}[/cyan]")
    else:
        report.add("[green]Нет дублирующихся директив[/green]")

    empties = find_empty_blocks(parsed)
    if empties:
        section = report.add("[red]Пустые блоки[/red]")
        for e in empties:
            section.add(f"[yellow]{e['block']}[/yellow] [italic]{e['arg'] or ''}[/italic]")
    else:
        report.add("[green]Нет пустых блоков[/green]")

    problems = find_warnings(parsed)
    if problems:
        section = report.add("[bold yellow]Потенциальные проблемы[/bold yellow]")
        for w in problems:
            renderer = _WARNING_RENDERERS.get(w['type'])
            if renderer:
                section.add(renderer(w))
    else:
        report.add("[green]Нет потенциальных проблем[/green]")

    unused_vars = find_unused_variables(parsed)
    if unused_vars:
        section = report.add("[bold magenta]Неиспользуемые переменные[/bold magenta]")
        for v in unused_vars:
            section.add(f"[magenta]{v['name']}[/magenta]")
    else:
        report.add("[green]Нет неиспользуемых переменных[/green]")

    console.print(report)
@@ -0,0 +1,19 @@
import typer
from rich.console import Console
from commands.health import health
from commands.analyze import analyze
from commands.tree import tree
from commands.diff import diff
from commands.route import route

# Root Typer application; each imported function below becomes a sub-command.
app = typer.Typer(help="nginx-lens — анализ и диагностика конфигураций Nginx")
console = Console()

# Registration order defines the order in --help output.
for _command in (health, analyze, tree, diff, route):
    app.command()(_command)

if __name__ == "__main__":
    app()
@@ -0,0 +1,35 @@
import typer
from rich.console import Console
from rich.tree import Tree
from rich.text import Text
from analyzer.diff import diff_trees
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()


def diff(
    config1: str = typer.Argument(..., help="Первый nginx.conf"),
    config2: str = typer.Argument(..., help="Второй nginx.conf")
):
    """
    Сравнивает две конфигурации Nginx и выводит отличия.
    """
    changes = diff_trees(parse_nginx_config(config1), parse_nginx_config(config2))
    if not changes:
        console.print("[green]Конфигурации идентичны[/green]")
        return
    root = Tree("[bold blue]Diff nginx.conf[/bold blue]")
    for change in changes:
        location = "/".join(change['path'])
        kind = change['type']
        if kind == 'added':
            root.add(f"[green]+ {location}[/green]")
        elif kind == 'removed':
            root.add(f"[red]- {location}[/red]")
        elif kind == 'changed':
            branch = root.add(f"[yellow]! {location}[/yellow]")
            branch.add(f"[red]- {change['value1']}[/red]")
            branch.add(f"[green]+ {change['value2']}[/green]")
    console.print(root)
@@ -0,0 +1,24 @@
import typer
from rich.console import Console
from upstream_checker.checker import check_upstreams
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()


def health(
    config_path: str = typer.Argument(..., help="Путь к nginx.conf"),
    timeout: float = typer.Option(2.0, help="Таймаут проверки (сек)"),
    retries: int = typer.Option(1, help="Количество попыток")
):
    """
    Проверяет доступность upstream-серверов, определённых в nginx.conf.
    """
    upstreams = parse_nginx_config(config_path).get_upstreams()
    report = check_upstreams(upstreams, timeout=timeout, retries=retries)
    for name, servers in report.items():
        console.print(f"[bold]{name}[/bold]")
        for srv in servers:
            verdict = "[green]Healthy ✅[/green]" if srv["healthy"] else "[red]Unhealthy ❌[/red]"
            console.print(f"  {srv['address']} {verdict}")
@@ -0,0 +1,30 @@
import typer
from rich.console import Console
from rich.panel import Panel
from analyzer.route import find_route
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()


def route(
    config_path: str = typer.Argument(..., help="Путь к nginx.conf"),
    url: str = typer.Argument(..., help="URL для маршрутизации (например, http://host/path)")
):
    """
    Показывает, какой server/location обслуживает указанный URL.
    """
    match = find_route(parse_nginx_config(config_path), url)
    if not match:
        console.print(Panel(f"Не найден подходящий server для {url}", style="red"))
        return
    parts = [f"[bold]Server:[/bold] {match['server'].get('arg','') or '[no arg]'}\n"]
    if match['location']:
        parts.append(f"[bold]Location:[/bold] {match['location'].get('arg','')}\n")
    if match['proxy_pass']:
        parts.append(f"[bold]proxy_pass:[/bold] {match['proxy_pass']}\n")
    console.print(Panel("".join(parts), title="Route", style="green"))
@@ -0,0 +1,44 @@
import typer
from rich.console import Console
from rich.tree import Tree as RichTree
from parser.nginx_parser import parse_nginx_config

app = typer.Typer()
console = Console()


def _build_tree(directives, parent):
    """Mirror a directive list onto a rich tree node (recursively)."""
    for entry in directives:
        if 'block' in entry:
            node = parent.add(f"[bold]{entry['block']}[/bold] {entry.get('arg') or ''}".strip())
            if entry.get('directives'):
                _build_tree(entry['directives'], node)
        elif 'upstream' in entry:
            node = parent.add(f"[bold magenta]upstream[/bold magenta] {entry['upstream']}")
            for srv in entry.get('servers', []):
                node.add(f"[green]server[/green] {srv}")
        elif 'directive' in entry:
            parent.add(f"[cyan]{entry['directive']}[/cyan] {entry.get('args','')}")


def tree(
    config_path: str = typer.Argument(..., help="Путь к nginx.conf"),
    markdown: bool = typer.Option(False, help="Экспортировать в Markdown"),
    html: bool = typer.Option(False, help="Экспортировать в HTML")
):
    """
    Визуализирует структуру nginx.conf в виде дерева.
    """
    tree_obj = parse_nginx_config(config_path)
    if markdown:
        # Local imports keep the exporters optional at startup.
        from exporter.markdown import tree_to_markdown
        console.print(tree_to_markdown(tree_obj.directives))
    elif html:
        from exporter.html import tree_to_html
        console.print(tree_to_html(tree_obj.directives))
    else:
        root = RichTree("[bold blue]nginx.conf[/bold blue]")
        _build_tree(tree_obj.directives, root)
        console.print(root)
File without changes
@@ -0,0 +1,20 @@
from html import escape


def tree_to_html(directives, level=0):
    """Render a directive tree as nested HTML ``<ul>`` lists.

    All config-derived text is HTML-escaped so that directive arguments
    cannot inject markup into the generated document (the previous version
    interpolated raw config text into the HTML).
    """
    parts = ['<ul>']
    for d in directives:
        if 'block' in d:
            label = f"{d['block']} {d.get('arg') or ''}".strip()
            parts.append(f"<li><b>{escape(label)}</b>")
            if d.get('directives'):
                parts.append(tree_to_html(d['directives'], level + 1))
            parts.append("</li>")
        elif 'upstream' in d:
            label = f"upstream {d['upstream']}"
            parts.append(f"<li><b>{escape(label)}</b><ul>")
            for srv in d.get('servers', []):
                parts.append(f"<li>server {escape(str(srv))}</li>")
            parts.append("</ul></li>")
        elif 'directive' in d:
            text = f"{d['directive']} {d.get('args', '')}"
            parts.append(f"<li>{escape(text)}</li>")
    parts.append('</ul>')
    return '\n'.join(parts)
@@ -0,0 +1,17 @@
def tree_to_markdown(directives, level=0):
    """Render a directive tree as a nested Markdown bullet list."""
    prefix = ' ' * level + '- '
    rendered = []
    for entry in directives:
        if 'block' in entry:
            label = f"{entry['block']} {entry.get('arg') or ''}".strip()
            rendered.append(f"{prefix}{label}")
            if entry.get('directives'):
                rendered.append(tree_to_markdown(entry['directives'], level + 1))
        elif 'upstream' in entry:
            rendered.append(f"{prefix}upstream {entry['upstream']}")
            for srv in entry.get('servers', []):
                rendered.append(f"{' ' * (level + 1)}- server {srv}")
        elif 'directive' in entry:
            rendered.append(f"{prefix}{entry['directive']} {entry.get('args','')}")
    return '\n'.join(rendered)
@@ -0,0 +1,15 @@
1
+ Metadata-Version: 2.4
2
+ Name: nginx-lens
3
+ Version: 0.1.0
4
+ Summary: CLI-инструмент для анализа, визуализации и диагностики конфигураций Nginx
5
+ Author: Daniil Astrouski
6
+ Author-email: shelovesuastra@gmail.com
7
+ Requires-Python: >=3.8
8
+ Requires-Dist: typer[all]>=0.9.0
9
+ Requires-Dist: rich>=13.0.0
10
+ Requires-Dist: requests>=2.25.0
11
+ Dynamic: author
12
+ Dynamic: author-email
13
+ Dynamic: requires-dist
14
+ Dynamic: requires-python
15
+ Dynamic: summary
@@ -0,0 +1,36 @@
1
+ pyproject.toml
2
+ setup.py
3
+ analyzer/__init__.py
4
+ analyzer/base.py
5
+ analyzer/conflicts.py
6
+ analyzer/diff.py
7
+ analyzer/duplicates.py
8
+ analyzer/empty_blocks.py
9
+ analyzer/route.py
10
+ analyzer/unused.py
11
+ analyzer/warnings.py
12
+ commands/__init__.py
13
+ commands/analyze.py
14
+ commands/cli.py
15
+ commands/diff.py
16
+ commands/health.py
17
+ commands/route.py
18
+ commands/tree.py
19
+ exporter/__init__.py
20
+ exporter/html.py
21
+ exporter/markdown.py
22
+ nginx_lens.egg-info/PKG-INFO
23
+ nginx_lens.egg-info/SOURCES.txt
24
+ nginx_lens.egg-info/dependency_links.txt
25
+ nginx_lens.egg-info/entry_points.txt
26
+ nginx_lens.egg-info/requires.txt
27
+ nginx_lens.egg-info/top_level.txt
28
+ parser/__init__.py
29
+ parser/nginx_parser.py
30
+ tests/test_conflicts.py
31
+ tests/test_duplicates.py
32
+ tests/test_empty_blocks.py
33
+ tests/test_health.py
34
+ tests/test_parser.py
35
+ upstream_checker/__init__.py
36
+ upstream_checker/checker.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ nginx-lens = commands.cli:app
@@ -0,0 +1,3 @@
1
+ typer[all]>=0.9.0
2
+ rich>=13.0.0
3
+ requests>=2.25.0
@@ -0,0 +1,5 @@
1
+ analyzer
2
+ commands
3
+ exporter
4
+ parser
5
+ upstream_checker
File without changes
@@ -0,0 +1,95 @@
1
+ import os
2
+ import glob
3
+ from typing import Dict, List, Any
4
+ import re
5
+
6
+ class NginxConfigTree:
7
+ def __init__(self, directives=None, upstreams=None):
8
+ self.directives = directives or []
9
+ self._upstreams = upstreams or {}
10
+ def get_upstreams(self) -> Dict[str, List[str]]:
11
+ return self._upstreams
12
+
13
+ # --- Вспомогательные функции ---
14
+ def _strip_comments(line: str) -> str:
15
+ return line.split('#', 1)[0].strip()
16
+
17
+ def _parse_block(lines, base_dir) -> (List[Any], Dict[str, List[str]]):
18
+ directives = []
19
+ upstreams = {}
20
+ i = 0
21
+ while i < len(lines):
22
+ line = _strip_comments(lines[i])
23
+ if not line:
24
+ i += 1
25
+ continue
26
+ if line.startswith('include '):
27
+ pattern = line[len('include '):].rstrip(';').strip()
28
+ pattern = os.path.join(base_dir, pattern) if not os.path.isabs(pattern) else pattern
29
+ for inc_path in glob.glob(pattern):
30
+ with open(inc_path) as f:
31
+ inc_lines = f.readlines()
32
+ inc_directives, inc_upstreams = _parse_block(inc_lines, os.path.dirname(inc_path))
33
+ directives.extend(inc_directives)
34
+ for k, v in inc_upstreams.items():
35
+ upstreams.setdefault(k, []).extend(v)
36
+ i += 1
37
+ continue
38
+ m = re.match(r'upstream\s+(\S+)\s*{', line)
39
+ if m:
40
+ name = m.group(1)
41
+ block_lines = []
42
+ depth = 1
43
+ i += 1
44
+ while i < len(lines) and depth > 0:
45
+ l = _strip_comments(lines[i])
46
+ if '{' in l:
47
+ depth += l.count('{')
48
+ if '}' in l:
49
+ depth -= l.count('}')
50
+ if depth > 0:
51
+ block_lines.append(l)
52
+ i += 1
53
+ servers = []
54
+ for bl in block_lines:
55
+ m_srv = re.match(r'server\s+([^;]+);', bl)
56
+ if m_srv:
57
+ servers.append(m_srv.group(1).strip())
58
+ upstreams[name] = servers
59
+ directives.append({'upstream': name, 'servers': servers})
60
+ continue
61
+ # Блоки (например, server, http, location)
62
+ m = re.match(r'(\S+)\s*(\S+)?\s*{', line)
63
+ if m:
64
+ block_name = m.group(1)
65
+ block_arg = m.group(2)
66
+ block_lines = []
67
+ depth = 1
68
+ i += 1
69
+ while i < len(lines) and depth > 0:
70
+ l = _strip_comments(lines[i])
71
+ if '{' in l:
72
+ depth += l.count('{')
73
+ if '}' in l:
74
+ depth -= l.count('}')
75
+ if depth > 0:
76
+ block_lines.append(l)
77
+ i += 1
78
+ sub_directives, sub_upstreams = _parse_block(block_lines, base_dir)
79
+ directives.append({'block': block_name, 'arg': block_arg, 'directives': sub_directives})
80
+ for k, v in sub_upstreams.items():
81
+ upstreams.setdefault(k, []).extend(v)
82
+ continue
83
+ # Обычная директива
84
+ m = re.match(r'(\S+)\s+([^;]+);', line)
85
+ if m:
86
+ directives.append({'directive': m.group(1), 'args': m.group(2)})
87
+ i += 1
88
+ return directives, upstreams
89
+
90
+ def parse_nginx_config(path: str) -> NginxConfigTree:
91
+ base_dir = os.path.dirname(os.path.abspath(path))
92
+ with open(path) as f:
93
+ lines = f.readlines()
94
+ directives, upstreams = _parse_block(lines, base_dir)
95
+ return NginxConfigTree(directives, upstreams)
@@ -0,0 +1,3 @@
1
+ [build-system]
2
+ requires = ["setuptools>=61.0", "wheel"]
3
+ build-backend = "setuptools.build_meta"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,23 @@
from setuptools import setup, find_packages

# Runtime dependencies of the CLI.
_REQUIREMENTS = [
    "typer[all]>=0.9.0",
    "rich>=13.0.0",
    "requests>=2.25.0",
    # "textual>=0.30.0",  # optional, for an interactive mode
]

setup(
    name="nginx-lens",
    version="0.1.0",
    description="CLI-инструмент для анализа, визуализации и диагностики конфигураций Nginx",
    author="Daniil Astrouski",
    author_email="shelovesuastra@gmail.com",
    packages=find_packages(),
    include_package_data=True,
    install_requires=_REQUIREMENTS,
    entry_points={
        "console_scripts": [
            "nginx-lens=commands.cli:app",
        ],
    },
    python_requires=">=3.8",
)
@@ -0,0 +1,29 @@
from parser.nginx_parser import parse_nginx_config
from analyzer.conflicts import find_location_conflicts
import tempfile
import os


def test_location_conflicts():
    conf = """
server {
    location /api {
        proxy_pass http://backend;
    }
    location /api/v1 {
        proxy_pass http://backend_v1;
    }
    location /static {
        root /var/www/static;
    }
}
"""
    with tempfile.NamedTemporaryFile("w+", delete=False) as f:
        f.write(conf)
        f.flush()
        conflicts = find_location_conflicts(parse_nginx_config(f.name))
    pairs = [(c["location1"], c["location2"]) for c in conflicts]
    # /api and /api/v1 must be reported as conflicting.
    assert any("/api" in p and "/api/v1" in p for p in pairs)
    # /static must not conflict with either /api location.
    assert not any("/static" in p and ("/api" in p or "/api/v1" in p) for p in pairs)
    os.unlink(f.name)
@@ -0,0 +1,27 @@
from parser.nginx_parser import parse_nginx_config
from analyzer.duplicates import find_duplicate_directives
import tempfile
import os


def test_duplicate_directives():
    conf = """
server {
    listen 80;
    listen 80;
    server_name example.com;
    server_name example.com;
    location / {
        proxy_pass http://backend;
        proxy_pass http://backend;
    }
}
"""
    with tempfile.NamedTemporaryFile("w+", delete=False) as f:
        f.write(conf)
        f.flush()
        dups = find_duplicate_directives(parse_nginx_config(f.name))
    # Each directive appears twice within its own block.
    for name in ("listen", "server_name", "proxy_pass"):
        assert any(d["directive"] == name and d["count"] == 2 for d in dups)
    os.unlink(f.name)
@@ -0,0 +1,27 @@
from parser.nginx_parser import parse_nginx_config
from analyzer.empty_blocks import find_empty_blocks
import tempfile
import os


def test_empty_blocks():
    conf = """
http {
    server {
    }
    upstream backend {
    }
    location / {
        proxy_pass http://backend;
    }
}
"""
    with tempfile.NamedTemporaryFile("w+", delete=False) as f:
        f.write(conf)
        f.flush()
        empties = find_empty_blocks(parse_nginx_config(f.name))
    # Both the empty server and the empty upstream must be reported.
    assert any(e["block"] == "server" for e in empties)
    assert any(e["block"] == "upstream" for e in empties)
    os.unlink(f.name)
@@ -0,0 +1,19 @@
import typer
from typer.testing import CliRunner
from commands.cli import app
import pytest

runner = CliRunner()


def test_health(monkeypatch):
    # Stub out both the parser and the upstream checker.
    from commands import health as health_mod

    fake_upstreams = {"test_up": ["127.0.0.1:9999", "badhost:80"]}
    fake_tree = type("T", (), {"get_upstreams": lambda self: fake_upstreams})()
    monkeypatch.setattr(health_mod, "parse_nginx_config", lambda path: fake_tree)
    monkeypatch.setattr(
        health_mod,
        "check_upstreams",
        lambda ups, timeout, retries: {
            "test_up": [
                {"address": "127.0.0.1:9999", "healthy": True},
                {"address": "badhost:80", "healthy": False},
            ]
        },
    )
    result = runner.invoke(app, ["health", "nginx.conf"])
    for fragment in ("test_up", "127.0.0.1:9999", "Healthy", "badhost:80", "Unhealthy"):
        assert fragment in result.output
    assert result.exit_code == 0
@@ -0,0 +1,62 @@
import tempfile
import os
from parser.nginx_parser import parse_nginx_config


def test_simple_upstream():
    conf = """
upstream backend {
    server 127.0.0.1:8080;
    server 10.0.0.1:80;
}
"""
    with tempfile.NamedTemporaryFile("w+", delete=False) as f:
        f.write(conf)
        f.flush()
        upstreams = parse_nginx_config(f.name).get_upstreams()
    assert "backend" in upstreams
    assert set(upstreams["backend"]) == {"127.0.0.1:8080", "10.0.0.1:80"}
    os.unlink(f.name)


def test_upstream_with_include():
    main_conf = "include sub.conf;\n"
    sub_conf = """
upstream api {
    server api1:9000;
}
"""
    with tempfile.TemporaryDirectory() as workdir:
        main_path = os.path.join(workdir, "nginx.conf")
        sub_path = os.path.join(workdir, "sub.conf")
        with open(main_path, "w") as f:
            f.write(main_conf)
        with open(sub_path, "w") as f:
            f.write(sub_conf)
        upstreams = parse_nginx_config(main_path).get_upstreams()
    assert "api" in upstreams
    assert upstreams["api"] == ["api1:9000"]


def test_nested_blocks_and_comments():
    conf = """
# http block
http {
    upstream u1 { server 1.1.1.1:80; } # inline
    server { # comment
        listen 80;
        location /api {
            proxy_pass http://u1;
        }
    }
}
"""
    with tempfile.NamedTemporaryFile("w+", delete=False) as f:
        f.write(conf)
        f.flush()
        upstreams = parse_nginx_config(f.name).get_upstreams()
    assert "u1" in upstreams
    assert upstreams["u1"] == ["1.1.1.1:80"]
    os.unlink(f.name)
File without changes
@@ -0,0 +1,12 @@
import socket
from typing import Dict, List


def check_upstreams(upstreams: Dict[str, List[str]], timeout=2.0, retries=1):
    """Probe every upstream server with a TCP connect.

    Implements the previous stub's TODO (it reported every server healthy).

    Args:
        upstreams: ``{upstream_name: ["host:port", ...]}`` as produced by the parser.
        timeout: connect timeout in seconds for each attempt.
        retries: number of connection attempts per server (minimum 1).

    Returns:
        ``{upstream_name: [{"address": str, "healthy": bool}, ...]}``
    """
    result = {}
    for name, servers in upstreams.items():
        result[name] = [
            {"address": srv, "healthy": _tcp_ping(srv, timeout, retries)}
            for srv in servers
        ]
    return result


def _tcp_ping(address: str, timeout: float, retries: int) -> bool:
    """Return True when a TCP connection to "host[:port]" succeeds.

    The port defaults to 80 when omitted. Addresses whose port part is not
    numeric (e.g. "unix:/run/app.sock") are reported unhealthy —
    TODO: add unix-socket support if needed.
    """
    host, _, port_text = address.rpartition(':')
    if not host:
        host, port_text = address, '80'
    try:
        port = int(port_text)
    except ValueError:
        return False
    for _ in range(max(1, int(retries))):
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            continue
    return False