nginx-lens 0.3.4-py3-none-any.whl → 0.5.0-py3-none-any.whl

This diff shows the content of publicly available package versions as published to their respective public registries, and is provided for informational purposes only.
exporter/csv.py ADDED
@@ -0,0 +1,87 @@
+ """
+ Module for exporting log analysis results to CSV format.
+ """
+ import csv
+ import sys
+ from typing import List, Dict, Any, Optional
+ from io import StringIO
+
+
+ def export_logs_to_csv(
+     status_counter,
+     path_counter,
+     ip_counter,
+     user_agent_counter,
+     errors: Dict[str, List[str]],
+     response_times: Optional[Dict[str, Any]] = None,
+     anomalies: Optional[List[Dict[str, Any]]] = None
+ ) -> str:
+     """
+     Exports log analysis results to CSV format.
+
+     Args:
+         status_counter: Counter of status codes
+         path_counter: Counter of request paths
+         ip_counter: Counter of client IPs
+         user_agent_counter: Counter of User-Agents
+         errors: Dict of error paths keyed by status
+         response_times: Dict of response-time metrics
+         anomalies: List of anomalies
+
+     Returns:
+         CSV string
+     """
+     output = StringIO()
+     writer = csv.writer(output)
+
+     # Top statuses
+     writer.writerow(["Category", "Type", "Value", "Count"])
+     writer.writerow(["Status Codes", "", "", ""])
+     for status, count in status_counter.most_common():
+         writer.writerow(["", "Status", status, count])
+
+     writer.writerow([])
+     writer.writerow(["Paths", "", "", ""])
+     for path, count in path_counter.most_common():
+         writer.writerow(["", "Path", path, count])
+
+     writer.writerow([])
+     writer.writerow(["IPs", "", "", ""])
+     for ip, count in ip_counter.most_common():
+         writer.writerow(["", "IP", ip, count])
+
+     if user_agent_counter:
+         writer.writerow([])
+         writer.writerow(["User-Agents", "", "", ""])
+         for ua, count in user_agent_counter.most_common():
+             writer.writerow(["", "User-Agent", ua, count])
+
+     # Errors
+     if errors:
+         writer.writerow([])
+         writer.writerow(["Errors", "", "", ""])
+         for status, paths in errors.items():
+             writer.writerow(["", f"Error {status}", f"{len(paths)} occurrences", ""])
+
+     # Response times
+     if response_times:
+         writer.writerow([])
+         writer.writerow(["Response Times", "", "", ""])
+         writer.writerow(["", "Metric", "Value", ""])
+         for metric, value in response_times.items():
+             writer.writerow(["", metric, str(value), ""])
+
+     # Anomalies
+     if anomalies:
+         writer.writerow([])
+         writer.writerow(["Anomalies", "Type", "Description", "Severity"])
+         for anomaly in anomalies:
+             writer.writerow([
+                 "",
+                 anomaly.get("type", ""),
+                 anomaly.get("description", ""),
+                 anomaly.get("severity", "")
+             ])
+
+     return output.getvalue()
+
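For orientation, a minimal usage sketch of the exporter above. The counters and sample values are invented for illustration and are not taken from the package; note that `response_times` is a dict of metrics, per the `.items()` loop in the function body:

```python
# Hypothetical driver for exporter/csv.py; all sample data is invented.
from collections import Counter
from exporter.csv import export_logs_to_csv

status_counter = Counter({"200": 120, "404": 7, "500": 2})
path_counter = Counter({"/": 80, "/api/users": 49})
ip_counter = Counter({"203.0.113.10": 90, "198.51.100.5": 39})
ua_counter = Counter({"curl/8.0": 100, "Mozilla/5.0": 29})
errors = {"500": ["/api/users", "/api/users"]}

csv_text = export_logs_to_csv(
    status_counter, path_counter, ip_counter, ua_counter, errors,
    response_times={"avg": 0.12, "p95": 0.48},  # dict of metrics
    anomalies=[{"type": "spike", "description": "burst of 5xx", "severity": "high"}],
)
print(csv_text)
```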
exporter/json_yaml.py ADDED
@@ -0,0 +1,361 @@
+ """
+ Module for exporting nginx-lens command results to JSON and YAML formats.
+ """
+ import json
+ import sys
+ from typing import Any, Dict, List, Optional
+ from datetime import datetime
+
+ try:
+     import yaml
+     YAML_AVAILABLE = True
+ except ImportError:
+     YAML_AVAILABLE = False
+
+
+ def export_json(data: Any, pretty: bool = True) -> str:
+     """
+     Exports data to JSON format.
+
+     Args:
+         data: Data to export
+         pretty: Format with indentation
+
+     Returns:
+         JSON string
+     """
+     if pretty:
+         return json.dumps(data, ensure_ascii=False, indent=2, default=str)
+     else:
+         return json.dumps(data, ensure_ascii=False, default=str)
+
+
+ def export_yaml(data: Any) -> str:
+     """
+     Exports data to YAML format.
+
+     Args:
+         data: Data to export
+
+     Returns:
+         YAML string
+
+     Raises:
+         ImportError: If PyYAML is not installed
+     """
+     if not YAML_AVAILABLE:
+         raise ImportError("PyYAML is not installed. Install it with: pip install pyyaml")
+
+     return yaml.dump(data, allow_unicode=True, default_flow_style=False, sort_keys=False)
+
+
+ def print_export(data: Any, format_type: str, file=None):
+     """
+     Prints data in the given format.
+
+     Args:
+         data: Data to export
+         format_type: 'json' or 'yaml'
+         file: Output file (defaults to stdout)
+     """
+     if file is None:
+         file = sys.stdout
+
+     if format_type == 'json':
+         output = export_json(data)
+     elif format_type == 'yaml':
+         output = export_yaml(data)
+     else:
+         raise ValueError(f"Unsupported format: {format_type}")
+
+     print(output, file=file)
+
+
+ def format_health_results(results: Dict[str, List[Dict]], resolved_info: Optional[Dict] = None) -> Dict[str, Any]:
+     """
+     Formats health command results for export.
+
+     Args:
+         results: Results from check_upstreams
+         resolved_info: Results from resolve_upstreams (optional)
+
+     Returns:
+         Dict with data for export
+     """
+     data = {
+         "timestamp": datetime.now().isoformat(),
+         "upstreams": []
+     }
+
+     for name, servers in results.items():
+         upstream_data = {
+             "name": name,
+             "servers": []
+         }
+
+         for srv in servers:
+             server_data = {
+                 "address": srv["address"],
+                 "healthy": srv["healthy"],
+                 "status": "healthy" if srv["healthy"] else "unhealthy"
+             }
+
+             if resolved_info and name in resolved_info:
+                 for resolved_srv in resolved_info[name]:
+                     if resolved_srv["address"] == srv["address"]:
+                         server_data["resolved_ips"] = resolved_srv["resolved"]
+                         # Check for invalid resolve entries
+                         server_data["has_invalid_resolve"] = any("invalid resolve" in r for r in resolved_srv["resolved"])
+                         break
+
+             upstream_data["servers"].append(server_data)
+
+         data["upstreams"].append(upstream_data)
+
+     # Compute summary statistics
+     total_servers = sum(len(servers) for servers in results.values())
+     healthy_count = sum(1 for servers in results.values() for srv in servers if srv["healthy"])
+     unhealthy_count = total_servers - healthy_count
+
+     data["summary"] = {
+         "total_upstreams": len(results),
+         "total_servers": total_servers,
+         "healthy": healthy_count,
+         "unhealthy": unhealthy_count
+     }
+
+     return data
+
+
+ def format_resolve_results(results: Dict[str, List[Dict]]) -> Dict[str, Any]:
+     """
+     Formats resolve command results for export.
+
+     Args:
+         results: Results from resolve_upstreams
+
+     Returns:
+         Dict with data for export
+     """
+     data = {
+         "timestamp": datetime.now().isoformat(),
+         "upstreams": []
+     }
+
+     failed_count = 0
+     invalid_count = 0
+
+     for name, servers in results.items():
+         upstream_data = {
+             "name": name,
+             "servers": []
+         }
+
+         for srv in servers:
+             server_data = {
+                 "address": srv["address"],
+                 "resolved_ips": srv["resolved"]
+             }
+
+             if not srv["resolved"]:
+                 server_data["status"] = "failed"
+                 failed_count += 1
+             elif any("invalid resolve" in r for r in srv["resolved"]):
+                 server_data["status"] = "invalid"
+                 invalid_count += 1
+             else:
+                 server_data["status"] = "success"
+
+             upstream_data["servers"].append(server_data)
+
+         data["upstreams"].append(upstream_data)
+
+     data["summary"] = {
+         "total_upstreams": len(results),
+         "total_servers": sum(len(servers) for servers in results.values()),
+         "successful": sum(len(servers) for servers in results.values()) - failed_count - invalid_count,
+         "failed": failed_count,
+         "invalid": invalid_count
+     }
+
+     return data
+
+
+ def format_analyze_results(
+     conflicts: List[Dict],
+     dups: List[Dict],
+     empties: List[Dict],
+     warnings: List[Dict],
+     unused_vars: List[Dict],
+     listen_conflicts: List[Dict],
+     rewrite_issues: List[Dict],
+     dead_locations: List[Dict],
+     issue_meta: Dict[str, tuple]
+ ) -> Dict[str, Any]:
+     """
+     Formats analyze command results for export.
+
+     Args:
+         conflicts: Location conflicts
+         dups: Duplicate directives
+         empties: Empty blocks
+         warnings: Warnings
+         unused_vars: Unused variables
+         listen_conflicts: listen/server_name conflicts
+         rewrite_issues: Rewrite issues
+         dead_locations: Dead locations
+         issue_meta: Metadata about issue types
+
+     Returns:
+         Dict with data for export
+     """
+     data = {
+         "timestamp": datetime.now().isoformat(),
+         "issues": []
+     }
+
+     # Collect all issues into a single list
+     for c in conflicts:
+         advice, severity = issue_meta.get('location_conflict', ("", "medium"))
+         data["issues"].append({
+             "type": "location_conflict",
+             "severity": severity,
+             "message": c.get('value', ''),
+             "advice": advice
+         })
+
+     for d in dups:
+         advice, severity = issue_meta.get('duplicate_directive', ("", "medium"))
+         data["issues"].append({
+             "type": "duplicate_directive",
+             "severity": severity,
+             "message": d.get('value', ''),
+             "advice": advice
+         })
+
+     for e in empties:
+         advice, severity = issue_meta.get('empty_block', ("", "low"))
+         data["issues"].append({
+             "type": "empty_block",
+             "severity": severity,
+             "message": f"{e.get('block', '')} block is empty",
+             "advice": advice
+         })
+
+     for w in warnings:
+         issue_type = w.get('type', '')
+         advice, severity = issue_meta.get(issue_type, ("", "medium"))
+         data["issues"].append({
+             "type": issue_type,
+             "severity": severity,
+             "message": w.get('value', ''),
+             "advice": advice
+         })
+
+     for v in unused_vars:
+         advice, severity = issue_meta.get('unused_variable', ("", "low"))
+         data["issues"].append({
+             "type": "unused_variable",
+             "severity": severity,
+             "message": v.get('name', ''),
+             "advice": advice
+         })
+
+     for c in listen_conflicts:
+         advice, severity = issue_meta.get('listen_servername_conflict', ("", "high"))
+         data["issues"].append({
+             "type": "listen_servername_conflict",
+             "severity": severity,
+             "message": f"server1: {c.get('server1', {}).get('arg','')} server2: {c.get('server2', {}).get('arg','')}",
+             "advice": advice
+         })
+
+     for r in rewrite_issues:
+         advice, severity = issue_meta.get(r.get('type', ''), ("", "medium"))
+         data["issues"].append({
+             "type": r.get('type', ''),
+             "severity": severity,
+             "message": r.get('value', ''),
+             "advice": advice
+         })
+
+     for l in dead_locations:
+         advice, severity = issue_meta.get('dead_location', ("", "low"))
+         data["issues"].append({
+             "type": "dead_location",
+             "severity": severity,
+             "message": f"server: {l.get('server', {}).get('arg','')} location: {l.get('location', {}).get('arg','')}",
+             "advice": advice
+         })
+
+     # Compute summary statistics
+     severity_counts = {"high": 0, "medium": 0, "low": 0}
+     for issue in data["issues"]:
+         severity = issue.get("severity", "medium")
+         if severity in severity_counts:
+             severity_counts[severity] += 1
+
+     data["summary"] = {
+         "total_issues": len(data["issues"]),
+         "by_severity": severity_counts
+     }
+
+     return data
+
+
+ def format_logs_results(
+     status_counter,
+     path_counter,
+     ip_counter,
+     user_agent_counter,
+     errors: Dict[str, List[str]],
+     top: int,
+     response_times: Optional[Dict[str, float]] = None,
+     anomalies: Optional[List[Dict[str, Any]]] = None
+ ) -> Dict[str, Any]:
+     """
+     Formats logs command results for export.
+
+     Args:
+         status_counter: Counter of status codes
+         path_counter: Counter of request paths
+         ip_counter: Counter of client IPs
+         user_agent_counter: Counter of User-Agents
+         errors: Dict of error paths keyed by status
+         top: Number of top values
+
+     Returns:
+         Dict with data for export
+     """
+     data = {
+         "timestamp": datetime.now().isoformat(),
+         "top_statuses": [{"status": status, "count": count} for status, count in status_counter.most_common(top)],
+         "top_paths": [{"path": path, "count": count} for path, count in path_counter.most_common(top)],
+         "top_ips": [{"ip": ip, "count": count} for ip, count in ip_counter.most_common(top)],
+         "errors": {}
+     }
+
+     if user_agent_counter:
+         data["top_user_agents"] = [{"user_agent": ua, "count": count} for ua, count in user_agent_counter.most_common(top)]
+
+     for status, paths in errors.items():
+         data["errors"][status] = {
+             "count": len(paths),
+             "unique_paths": list(set(paths))[:top]
+         }
+
+     data["summary"] = {
+         "total_requests": sum(status_counter.values()),
+         "unique_paths": len(path_counter),
+         "unique_ips": len(ip_counter),
+         "error_requests": sum(len(paths) for paths in errors.values())
+     }
+
+     if response_times:
+         data["response_times"] = response_times
+
+     if anomalies:
+         data["anomalies"] = anomalies
+
+     return data
+
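All of the `format_*` helpers above return plain dicts carrying a `timestamp` and a `summary`, so a single `print_export` call covers both serializers. A sketch of that flow; the health-check results are invented, but follow the `address`/`healthy` shape the formatter reads:

```python
from exporter.json_yaml import format_health_results, print_export

# Invented sample shaped like check_upstreams() output.
results = {
    "backend": [
        {"address": "10.0.0.1:8080", "healthy": True},
        {"address": "10.0.0.2:8080", "healthy": False},
    ]
}

data = format_health_results(results)
print_export(data, "json")  # pretty-printed JSON to stdout
print_export(data, "yaml")  # raises ImportError if PyYAML is missing
```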
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nginx-lens
- Version: 0.3.4
+ Version: 0.5.0
  Summary: CLI tool for analyzing, visualizing, and diagnosing Nginx configurations
  Author: Daniil Astrouski
  Author-email: shelovesuastra@gmail.com
@@ -10,9 +10,14 @@ Requires-Dist: typer[all]>=0.9.0
  Requires-Dist: rich>=13.0.0
  Requires-Dist: requests>=2.25.0
  Requires-Dist: dnspython>=2.0.0
+ Requires-Dist: pyyaml>=6.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
  Dynamic: author
  Dynamic: author-email
  Dynamic: license-file
+ Dynamic: provides-extra
  Dynamic: requires-dist
  Dynamic: requires-python
  Dynamic: summary
@@ -0,0 +1,48 @@
+ analyzer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ analyzer/base.py,sha256=oGKg78BfMVmuzYafc08oq9p31-jEgYolGjLkUcIdkN8,607
+ analyzer/conflicts.py,sha256=NSNZc8e2x51K41dflSUvuwlDq-rzBXU5ITi6WfxFbfU,2796
+ analyzer/dead_locations.py,sha256=uvMu5qBGTVi0Nn960x3WpRvTljGbQuVFivU4nfe36oY,1435
+ analyzer/diff.py,sha256=idvXnoLzBVUYgKi_s3uDu0v2GNMV3B8aDqTROXcdQdo,1749
+ analyzer/duplicates.py,sha256=jpy_6k-BzWxaXFt2Wb3rlulIXUEzbFe9xYRm7rWR50U,1215
+ analyzer/empty_blocks.py,sha256=7Zu4-5I5PS3bjhH0Ppq1CvM7rMTeRIc4fHx5n5vkMIw,517
+ analyzer/include.py,sha256=FhKR4VsogLknykjLD2N8jX9OtwxZcWik5oPpvp-_luE,2465
+ analyzer/rewrite.py,sha256=-jSLLG1jqmGU-dXWvU6NHCW6muB8Lfro6fXX1tDCHCQ,1834
+ analyzer/route.py,sha256=71dkmQaTrHqDTf4Up5gAsrgmgktNpXqWmxr7-0RAVtg,2370
+ analyzer/unused.py,sha256=Ixzv0bPsw9IafblVwLiAOgugdg2dGu1MJDtuoqzPZiY,1066
+ analyzer/warnings.py,sha256=zC36QMvegA2eQPvZ-P1eysrX_kXHx5A1MUKHKKNvG5c,5784
+ commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ commands/analyze.py,sha256=UUZpPOpEnPglo94AFklX-cMt6HoHPCafmqfW76CIZVg,8667
+ commands/cli.py,sha256=_vpXlQr0Rf5bUN2t_fV2x40eAHU6nyOjAVRr8tC0eNo,1081
+ commands/completion.py,sha256=I7g70tM9JDc8PddIOS-tAGiqc3HkmQhFuAzMCMe_Jck,6223
+ commands/diff.py,sha256=mf6xkf_8IKa3R-AiTsWmJDUrxqjGT5gaSAX0u5D0jjY,2097
+ commands/graph.py,sha256=lBh2wCPrhsywxcEbz5UABtNdEepTMLmiIzWJt_Uu1mE,5968
+ commands/health.py,sha256=Q2qGB02dcMFbp247qg5uxkJFWw84rmCEzZozXcy2wtQ,7606
+ commands/include.py,sha256=hsheLfoQ3eUx3irAibhC2ndq3ko0VrLda-WGL9JgIlw,2245
+ commands/logs.py,sha256=scs5_AD4w4z1LXyYZvB4IBLd3T-LSV_45hD8KtHrdhc,15763
+ commands/metrics.py,sha256=MawshQaCOkdeLxlopuOzDAE-Gyjlbs_u1bIP4EsB10Q,18955
+ commands/resolve.py,sha256=hxGiavoESKj_RPfezelh7vsxGSccUFMQI0K4-izdrRk,5868
+ commands/route.py,sha256=4bW4sCY2cVORk-hblnE-BJH3oRL6T9cZuigL-2KCRT4,3174
+ commands/syntax.py,sha256=9sjJXheQ9PRZoFm0sO73pEomzbkeYvbBV265XLU6Krk,3423
+ commands/tree.py,sha256=cxq0vL6V3ah5X4ozPOmWgIH4NJbr2J48TTLNxsjzOL8,2182
+ commands/validate.py,sha256=LB_a4RHsaIZxXrm9jUZ1McgLefcP1aZ_bJ_i6z5huNM,22682
+ config/__init__.py,sha256=vsP76D53exH8CkpWttJLTgXlW52LFgP_zBnxPKkWAtQ,107
+ config/config_loader.py,sha256=fqJEnHojkEMEpAez9ymAHUR1TbD0D_RSFGZsdI7zObQ,6927
+ exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ exporter/csv.py,sha256=_hTIs9CihxP9ewB9FcoN-ERmzUKEQs8hf7U8_heHTO0,2815
+ exporter/graph.py,sha256=WYUrqUgCaK6KihgxAcRHaQn4oMo6b7ybC8yb_36ZIsA,3995
+ exporter/html.py,sha256=uquEM-WvBt2aV9GshgaI3UVhYd8sD0QQ-OmuNtvYUdU,798
+ exporter/json_yaml.py,sha256=XqLOBtrh5Xd7RMeofODU8jaTkaeBEpHMNjz4UF39WrQ,11794
+ exporter/markdown.py,sha256=_0mXQIhurGEZ0dO-eq9DbsuKNrgEDIblgtL3DAgYNo8,724
+ nginx_lens-0.5.0.dist-info/licenses/LICENSE,sha256=g8QXKdvZZC56rU8E12vIeYF6R4jeTWOsblOnYAda3K4,1073
+ parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ parser/nginx_parser.py,sha256=Sa9FtGAkvTqNzoehBvgLUWPJHLLIZYWH9ugSHW50X8s,3699
+ upstream_checker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ upstream_checker/checker.py,sha256=b3E7P9f_7JRWqXa_mSei6LKchD9yQoLNuwindnvfWYI,13258
+ upstream_checker/dns_cache.py,sha256=RiGgDFKaIvVQz8Rrm8lBorct8WzbyXHLKZy6W7WhYg4,6903
+ utils/__init__.py,sha256=tl98tkuTjz9Q5TKD8cxAxBh6n1Yk65TgKCdIbPFsnz4,43
+ utils/progress.py,sha256=Aqb1EW7yGJUSSzw5hTJYiKQ3XjU7ABEqAByfQo9t9P0,4797
+ nginx_lens-0.5.0.dist-info/METADATA,sha256=oKRRMSyqUAZEhEVSiU5Y_uPqzC9a7kQlOtxxBljzRM4,717
+ nginx_lens-0.5.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ nginx_lens-0.5.0.dist-info/entry_points.txt,sha256=qEcecjSyLqcJjbIVlNlTpqAhPqDyaujUV5ZcBTAr3po,48
+ nginx_lens-0.5.0.dist-info/top_level.txt,sha256=W4rp9juDAaGS642PMW3zfoHyFMj0yTtXKAwlVz87bao,64
+ nginx_lens-0.5.0.dist-info/RECORD,,
@@ -1,5 +1,7 @@
  analyzer
  commands
+ config
  exporter
  parser
  upstream_checker
+ utils
@@ -3,8 +3,9 @@
  import socket
  import time
  import http.client
- from typing import Dict, List, Tuple
+ from typing import Dict, List, Tuple, Optional
  from concurrent.futures import ThreadPoolExecutor, as_completed
+ from utils.progress import ProgressManager
  try:
      import dns.resolver
      import dns.exception
@@ -12,6 +13,8 @@ try:
  except ImportError:
      DNS_AVAILABLE = False

+ from upstream_checker.dns_cache import get_cache, is_cache_enabled, disable_cache, enable_cache
+

  def check_tcp(address: str, timeout: float, retries: int) -> bool:
      """
@@ -56,12 +59,15 @@ def check_http(address: str, timeout: float, retries: int) -> bool:
      return False


- def resolve_address(address: str) -> List[str]:
+ def resolve_address(address: str, use_cache: bool = True, cache_ttl: int = 300, cache_dir: Optional[str] = None) -> List[str]:
      """
      Resolves an upstream server address to IP addresses, with CNAME information.

      Args:
          address: Address in "host:port" or "host:port parameters" format
+         use_cache: Whether to use the cache (default True)
+         cache_ttl: Cache time-to-live in seconds (default 300)
+         cache_dir: Cache directory (optional)

      Returns:
          List of strings in the format:
@@ -81,6 +87,7 @@ def resolve_address(address: str) -> List[str]:
              return []
          host, port = parts

+         # Check for an IP address (IP addresses are not cached)
          try:
              socket.inet_aton(host)
              return [host_port]
@@ -95,10 +102,25 @@ def resolve_address(address: str) -> List[str]:
          except (socket.error, OSError):
              pass

+         # Check the cache before resolving
+         if use_cache and is_cache_enabled():
+             cache = get_cache(ttl=cache_ttl, cache_dir=cache_dir)
+             cached_result = cache.get(host, port)
+             if cached_result is not None:
+                 return cached_result
+
+         # Perform the actual resolution
          if DNS_AVAILABLE:
-             return _resolve_with_dns(host, port)
+             result = _resolve_with_dns(host, port)
          else:
-             return _resolve_with_socket(host, port)
+             result = _resolve_with_socket(host, port)
+
+         # Store the result in the cache
+         if use_cache and is_cache_enabled():
+             cache = get_cache(ttl=cache_ttl, cache_dir=cache_dir)
+             cache.set(host, port, result)
+
+         return result
      except (ValueError, IndexError, AttributeError):
          return []

@@ -187,7 +209,11 @@ def _resolve_with_socket(host: str, port: str) -> List[str]:

  def resolve_upstreams(
      upstreams: Dict[str, List[str]],
-     max_workers: int = 10
+     max_workers: int = 10,
+     use_cache: bool = True,
+     cache_ttl: int = 300,
+     cache_dir: Optional[str] = None,
+     progress_manager: Optional[ProgressManager] = None
  ) -> Dict[str, List[dict]]:
      """
      Resolves upstream server DNS names to IP addresses.
@@ -195,6 +221,9 @@ def resolve_upstreams(
      Args:
          upstreams: Dict of upstream servers
          max_workers: Maximum number of threads for parallel processing
+         use_cache: Whether to use the cache (default True)
+         cache_ttl: Cache time-to-live in seconds (default 300)
+         cache_dir: Cache directory (optional)

      Returns:
          {
@@ -226,7 +255,10 @@ def resolve_upstreams(

      # Resolve in parallel
      with ThreadPoolExecutor(max_workers=max_workers) as executor:
-         future_to_key = {executor.submit(resolve_address, srv): key for key, srv in tasks}
+         future_to_key = {
+             executor.submit(resolve_address, srv, use_cache, cache_ttl, cache_dir): key
+             for key, srv in tasks
+         }

          for future in as_completed(future_to_key):
              key = future_to_key[future]
@@ -260,7 +292,8 @@ def check_upstreams(
      timeout: float = 2.0,
      retries: int = 1,
      mode: str = "tcp",
-     max_workers: int = 10
+     max_workers: int = 10,
+     progress_manager: Optional[ProgressManager] = None
  ) -> Dict[str, List[dict]]:
      """
      Checks the availability of upstream servers.
@@ -305,6 +338,9 @@ def check_upstreams(
              for key, srv in tasks
          }

+         completed = 0
+         total = len(tasks)
+
          for future in as_completed(future_to_key):
              key = future_to_key[future]
              name, idx = task_to_key[key]
@@ -313,5 +349,9 @@ def check_upstreams(
                  results[name][idx] = {"address": srv, "healthy": healthy}
              except Exception:
                  results[name][idx] = {"address": key[2], "healthy": False}
+
+             completed += 1
+             if progress_manager:
+                 progress_manager.update(completed, total=total, description=f"Checking upstream servers ({completed}/{total})")

      return results
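
Net effect of the checker changes: `resolve_address` now consults the shared DNS cache before resolving and writes fresh results back, and both entry points grow an optional `ProgressManager` hook. A call sketch — the upstream dict is invented, and the keyword values simply restate the new defaults:

```python
from upstream_checker.checker import check_upstreams, resolve_upstreams

upstreams = {"backend": ["example.com:8080", "10.0.0.1:80"]}  # invented sample

resolved = resolve_upstreams(
    upstreams,
    max_workers=10,
    use_cache=True,   # consult the DNS cache before resolving
    cache_ttl=300,    # cached answers expire after 300 seconds
    cache_dir=None,   # default cache directory
)

health = check_upstreams(upstreams, timeout=2.0, retries=1, mode="tcp")
```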