nginx-lens 0.3.3__tar.gz → 0.3.4__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of nginx-lens might be problematic.

Files changed (49)
  1. {nginx_lens-0.3.3/nginx_lens.egg-info → nginx_lens-0.3.4}/PKG-INFO +1 -1
  2. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/health.py +5 -2
  3. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/resolve.py +4 -1
  4. {nginx_lens-0.3.3 → nginx_lens-0.3.4/nginx_lens.egg-info}/PKG-INFO +1 -1
  5. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/setup.py +1 -1
  6. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/upstream_checker/checker.py +95 -20
  7. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/LICENSE +0 -0
  8. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/README.md +0 -0
  9. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/__init__.py +0 -0
  10. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/base.py +0 -0
  11. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/conflicts.py +0 -0
  12. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/dead_locations.py +0 -0
  13. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/diff.py +0 -0
  14. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/duplicates.py +0 -0
  15. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/empty_blocks.py +0 -0
  16. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/include.py +0 -0
  17. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/rewrite.py +0 -0
  18. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/route.py +0 -0
  19. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/unused.py +0 -0
  20. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/analyzer/warnings.py +0 -0
  21. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/__init__.py +0 -0
  22. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/analyze.py +0 -0
  23. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/cli.py +0 -0
  24. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/diff.py +0 -0
  25. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/graph.py +0 -0
  26. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/include.py +0 -0
  27. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/logs.py +0 -0
  28. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/route.py +0 -0
  29. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/syntax.py +0 -0
  30. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/commands/tree.py +0 -0
  31. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/exporter/__init__.py +0 -0
  32. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/exporter/graph.py +0 -0
  33. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/exporter/html.py +0 -0
  34. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/exporter/markdown.py +0 -0
  35. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/nginx_lens.egg-info/SOURCES.txt +0 -0
  36. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/nginx_lens.egg-info/dependency_links.txt +0 -0
  37. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/nginx_lens.egg-info/entry_points.txt +0 -0
  38. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/nginx_lens.egg-info/requires.txt +0 -0
  39. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/nginx_lens.egg-info/top_level.txt +0 -0
  40. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/parser/__init__.py +0 -0
  41. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/parser/nginx_parser.py +0 -0
  42. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/pyproject.toml +0 -0
  43. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/setup.cfg +0 -0
  44. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/tests/test_conflicts.py +0 -0
  45. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/tests/test_duplicates.py +0 -0
  46. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/tests/test_empty_blocks.py +0 -0
  47. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/tests/test_health.py +0 -0
  48. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/tests/test_parser.py +0 -0
  49. {nginx_lens-0.3.3 → nginx_lens-0.3.4}/upstream_checker/__init__.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nginx-lens
-Version: 0.3.3
+Version: 0.3.4
 Summary: CLI tool for analyzing, visualizing, and diagnosing Nginx configurations
 Author: Daniil Astrouski
 Author-email: shelovesuastra@gmail.com
commands/health.py

@@ -14,14 +14,17 @@ def health(
     retries: int = typer.Option(1, help="Number of retries"),
     mode: str = typer.Option("tcp", help="Check mode: tcp or http", case_sensitive=False),
     resolve: bool = typer.Option(False, "--resolve", "-r", help="Show resolved IP addresses"),
+    max_workers: int = typer.Option(10, "--max-workers", "-w", help="Maximum number of threads for parallel processing"),
 ):
     """
     Checks the availability of the upstream servers defined in nginx.conf. Prints a table.
+    Uses parallel processing to speed up checking many upstream servers.

     Example:
         nginx-lens health /etc/nginx/nginx.conf
         nginx-lens health /etc/nginx/nginx.conf --timeout 5 --retries 3 --mode http
         nginx-lens health /etc/nginx/nginx.conf --resolve
+        nginx-lens health /etc/nginx/nginx.conf --max-workers 20
     """
     exit_code = 0

@@ -35,12 +38,12 @@ def health(
         sys.exit(1)

     upstreams = tree.get_upstreams()
-    results = check_upstreams(upstreams, timeout=timeout, retries=retries, mode=mode.lower())
+    results = check_upstreams(upstreams, timeout=timeout, retries=retries, mode=mode.lower(), max_workers=max_workers)

     # If resolved IP addresses should be shown
     resolved_info = {}
     if resolve:
-        resolved_info = resolve_upstreams(upstreams)
+        resolved_info = resolve_upstreams(upstreams, max_workers=max_workers)

     table = Table(show_header=True, header_style="bold blue")
     table.add_column("Address")
commands/resolve.py

@@ -10,12 +10,15 @@ console = Console()

 def resolve(
     config_path: str = typer.Argument(..., help="Path to nginx.conf"),
+    max_workers: int = typer.Option(10, "--max-workers", "-w", help="Maximum number of threads for parallel processing"),
 ):
     """
     Resolves the DNS names of upstream servers to IP addresses.
+    Uses parallel processing to speed up resolving many upstream servers.

     Example:
         nginx-lens resolve /etc/nginx/nginx.conf
+        nginx-lens resolve /etc/nginx/nginx.conf --max-workers 20
     """
     exit_code = 0

@@ -33,7 +36,7 @@ def resolve(
         console.print("[yellow]No upstreams found in the configuration.[/yellow]")
         sys.exit(0)  # No upstreams is not an error, there is simply nothing to check

-    results = resolve_upstreams(upstreams)
+    results = resolve_upstreams(upstreams, max_workers=max_workers)

     table = Table(show_header=True, header_style="bold blue")
     table.add_column("Upstream Name")
nginx_lens.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nginx-lens
-Version: 0.3.3
+Version: 0.3.4
 Summary: CLI tool for analyzing, visualizing, and diagnosing Nginx configurations
 Author: Daniil Astrouski
 Author-email: shelovesuastra@gmail.com
setup.py

@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="nginx-lens",
-    version="0.3.3",
+    version="0.3.4",
     description="CLI tool for analyzing, visualizing, and diagnosing Nginx configurations",
     author="Daniil Astrouski",
     author_email="shelovesuastra@gmail.com",
upstream_checker/checker.py

@@ -3,7 +3,8 @@
 import socket
 import time
 import http.client
-from typing import Dict, List
+from typing import Dict, List, Tuple
+from concurrent.futures import ThreadPoolExecutor, as_completed
 try:
     import dns.resolver
     import dns.exception
@@ -185,11 +186,16 @@ def _resolve_with_socket(host: str, port: str) -> List[str]:


 def resolve_upstreams(
-    upstreams: Dict[str, List[str]]
+    upstreams: Dict[str, List[str]],
+    max_workers: int = 10
 ) -> Dict[str, List[dict]]:
     """
     Resolves the DNS names of upstream servers to IP addresses.

+    Args:
+        upstreams: Dictionary of upstream servers
+        max_workers: Maximum number of threads for parallel processing
+
     Returns:
         {
             "backend": [
@@ -200,27 +206,71 @@ def resolve_upstreams(
             ]
         }
     """
-    results = {}
+    # Collect all tasks for parallel processing
+    tasks = []
+    task_to_key = {}
+
     for name, servers in upstreams.items():
-        results[name] = []
-        for srv in servers:
-            resolved = resolve_address(srv)
-            results[name].append({
-                "address": srv,
-                "resolved": resolved
-            })
+        for idx, srv in enumerate(servers):
+            key = (name, idx, srv)
+            tasks.append((key, srv))
+            task_to_key[key] = (name, idx)
+
+    results = {}
+    for name in upstreams.keys():
+        results[name] = [None] * len(upstreams[name])
+
+    # If there are no tasks, return the empty result
+    if not tasks:
+        return results
+
+    # Resolve in parallel
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        future_to_key = {executor.submit(resolve_address, srv): key for key, srv in tasks}
+
+        for future in as_completed(future_to_key):
+            key = future_to_key[future]
+            name, idx = task_to_key[key]
+            try:
+                resolved = future.result()
+                results[name][idx] = {
+                    "address": key[2],
+                    "resolved": resolved
+                }
+            except Exception:
+                results[name][idx] = {
+                    "address": key[2],
+                    "resolved": []
+                }
+
     return results


+def _check_single_upstream(srv: str, timeout: float, retries: int, mode: str) -> Tuple[str, bool]:
+    """Helper function for checking a single upstream server."""
+    if mode.lower() == "http":
+        healthy = check_http(srv, timeout, retries)
+    else:
+        healthy = check_tcp(srv, timeout, retries)
+    return (srv, healthy)
+
+
 def check_upstreams(
     upstreams: Dict[str, List[str]],
     timeout: float = 2.0,
     retries: int = 1,
-    mode: str = "tcp"
+    mode: str = "tcp",
+    max_workers: int = 10
 ) -> Dict[str, List[dict]]:
     """
     Checks the availability of upstream servers.
-    mode: "tcp" (default) or "http"
+
+    Args:
+        upstreams: Dictionary of upstream servers
+        timeout: Check timeout (seconds)
+        retries: Number of retries
+        mode: "tcp" (default) or "http"
+        max_workers: Maximum number of threads for parallel processing

     Returns:
         {
@@ -230,13 +280,38 @@ def check_upstreams(
             ]
         }
     """
-    results = {}
+    # Collect all tasks for parallel processing
+    tasks = []
+    task_to_key = {}
+
     for name, servers in upstreams.items():
-        results[name] = []
-        for srv in servers:
-            if mode.lower() == "http":
-                healthy = check_http(srv, timeout, retries)
-            else:
-                healthy = check_tcp(srv, timeout, retries)
-            results[name].append({"address": srv, "healthy": healthy})
+        for idx, srv in enumerate(servers):
+            key = (name, idx, srv)
+            tasks.append((key, srv))
+            task_to_key[key] = (name, idx)
+
+    results = {}
+    for name in upstreams.keys():
+        results[name] = [None] * len(upstreams[name])
+
+    # If there are no tasks, return the empty result
+    if not tasks:
+        return results
+
+    # Run the checks in parallel
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        future_to_key = {
+            executor.submit(_check_single_upstream, srv, timeout, retries, mode): key
+            for key, srv in tasks
+        }
+
+        for future in as_completed(future_to_key):
+            key = future_to_key[future]
+            name, idx = task_to_key[key]
+            try:
+                srv, healthy = future.result()
+                results[name][idx] = {"address": srv, "healthy": healthy}
+            except Exception:
+                results[name][idx] = {"address": key[2], "healthy": False}
+
     return results
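
Taken together, the checker.py changes move both public helpers onto a shared fan-out pattern: enumerate (upstream, index, server) tasks, submit them to a ThreadPoolExecutor, and write results back by index, so the original server order is preserved even though as_completed yields futures in completion order. Below is a minimal usage sketch of the new signatures, assuming the module is importable as upstream_checker.checker and using made-up addresses; in nginx-lens itself the mapping comes from the parsed config tree.

from upstream_checker.checker import check_upstreams, resolve_upstreams

# Hypothetical upstream map; nginx-lens builds this from tree.get_upstreams().
upstreams = {"backend": ["127.0.0.1:8080", "10.0.0.2:8081"]}

# TCP health checks fanned out over up to 20 worker threads.
health = check_upstreams(upstreams, timeout=2.0, retries=1, mode="tcp", max_workers=20)
for name, entries in health.items():
    for entry in entries:
        status = "healthy" if entry["healthy"] else "down"
        print(f"{name}: {entry['address']} -> {status}")

# DNS resolution with the same thread-pool size.
resolved = resolve_upstreams(upstreams, max_workers=20)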