project-diagnose 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- project_diagnose/__init__.py +1 -0
- project_diagnose/analyzer.py +438 -0
- project_diagnose/cli.py +55 -0
- project_diagnose/config_schema.py +34 -0
- project_diagnose/rendering.py +18 -0
- project_diagnose/static/script.js +76 -0
- project_diagnose/static/style.css +144 -0
- project_diagnose/templates/index.html +65 -0
- project_diagnose/templates/settings.html +293 -0
- project_diagnose/web.py +125 -0
- project_diagnose-0.1.9.dist-info/METADATA +157 -0
- project_diagnose-0.1.9.dist-info/RECORD +16 -0
- project_diagnose-0.1.9.dist-info/WHEEL +5 -0
- project_diagnose-0.1.9.dist-info/entry_points.txt +2 -0
- project_diagnose-0.1.9.dist-info/licenses/LICENSE +5 -0
- project_diagnose-0.1.9.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.9"
|
|
@@ -0,0 +1,438 @@
|
|
|
1
|
+
# DIAGNOSE/project_diagnose/analyzer.py
|
|
2
|
+
|
|
3
|
+
import datetime
import json
import math
import os

from .config_schema import DiagnoseConfig
|
|
7
|
+
|
|
8
|
+
# All paths are resolved against the directory the tool is launched from;
# the combined dump is written next to it as all_code.txt.
ROOT = os.getcwd()
OUTPUT_FILE = os.path.join(ROOT, "all_code.txt")

# Built-in defaults; replaced after config.diagnose is loaded further down.
EXCLUDE_DIRS = {"venv", "__pycache__", ".git", ".idea", ".vscode"}
INCLUDE_EXT = {".py", ".json"}
SPECIAL_JSON = {"config_ui.json"}

# User-editable configuration file, created on first run if missing.
USER_CFG_PATH = os.path.join(ROOT, "config.diagnose")
|
|
16
|
+
|
|
17
|
+
def generate_default_config(path):
    """Write a default ``config.diagnose`` template to *path*.

    Failures are only reported on stdout, never raised: the caller falls
    back to the built-in defaults regardless.
    """
    default_cfg = {
        "include_ext": [".py", ".json"],
        "exclude_dirs": ["venv", "__pycache__", ".git"],
        "exclude_files": [],
        "include_files": [],
        "special_json": ["config_ui.json"]
    }
    try:
        # Fix: `import json` used to sit inside the `with` body; the module
        # is now imported once at file level with the other imports.
        with open(path, "w", encoding="utf-8") as f:
            json.dump(default_cfg, f, indent=4, ensure_ascii=False)
        print("[diagnose] Создан новый config.diagnose (пустой шаблон).")
    except Exception as e:
        print(f"[diagnose] Не удалось создать config.diagnose: {e}")
|
32
|
+
|
|
33
|
+
def load_user_config():
    """Load ``config.diagnose`` and validate it via :class:`DiagnoseConfig`.

    Creates a template on first run; any read/parse/validation error is
    reported and the built-in defaults are used instead.
    """
    if not os.path.exists(USER_CFG_PATH):
        generate_default_config(USER_CFG_PATH)
        print("[diagnose] Создан шаблон config.diagnose.")
        return DiagnoseConfig()  # defaults

    import json
    try:
        with open(USER_CFG_PATH, "r", encoding="utf-8") as f:
            parsed = json.loads(f.read())
        cfg = DiagnoseConfig(**parsed)
    except Exception as e:
        print(f"[diagnose] Ошибка в config.diagnose → {e}")
        print("[diagnose] Использую конфигурацию по умолчанию.")
        return DiagnoseConfig()

    print("[diagnose] Загружен и провалидирован config.diagnose")
    return cfg
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# Load the user configuration once, at import time.
USER_CFG = load_user_config()

# Materialise the validated lists as sets for O(1) membership checks in the
# walkers below; these intentionally replace the built-in defaults above.
EXCLUDE_DIRS = set(USER_CFG.exclude_dirs)
INCLUDE_EXT = set(USER_CFG.include_ext)
SPECIAL_JSON = set(USER_CFG.special_json)
EXCLUDE_FILES = set(USER_CFG.exclude_files)
INCLUDE_FILES_EXTRA = set(USER_CFG.include_files)
|
|
64
|
+
|
|
65
|
+
def should_skip_dir(dirname: str) -> bool:
    """Return True when the directory's base name is in EXCLUDE_DIRS."""
    return os.path.basename(dirname) in EXCLUDE_DIRS
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def is_valid_file(filename: str) -> bool:
    """Decide whether a bare *filename* should be collected.

    Precedence: user excludes win over everything, then explicit user
    includes, then the special-JSON whitelist, then the extension filter.
    """
    if filename in EXCLUDE_FILES:
        return False
    if filename in INCLUDE_FILES_EXTRA:
        return True

    extension = os.path.splitext(filename)[1]
    if extension == ".json" and filename in SPECIAL_JSON:
        return True
    return extension in INCLUDE_EXT
|
|
87
|
+
|
|
88
|
+
def build_tree_json():
    """Build a nested dict mirroring the directory layout under ROOT.

    Each directory node maps child directory names to sub-dicts; its file
    names are stored under the special "_files" key.
    """
    tree = {}

    for root, dirs, files in os.walk(ROOT):
        # Bug fix: unlike collect_files()/build_tree(), this walker never
        # pruned excluded directories, so venv/.git/__pycache__ leaked into
        # the JSON tree. Prune in place, same as the other walkers.
        dirs[:] = [d for d in dirs if not should_skip_dir(os.path.join(root, d))]

        rel = os.path.relpath(root, ROOT)
        parts = [] if rel == "." else rel.split(os.sep)

        node = tree
        for p in parts:
            node = node.setdefault(p, {})

        node["_files"] = files

    return tree
|
|
103
|
+
|
|
104
|
+
def collect_files():
    """Walk ROOT (pruning excluded dirs) and return sorted matching paths."""
    collected = []

    for base, subdirs, names in os.walk(ROOT):
        # Prune in place so os.walk never descends into excluded directories.
        subdirs[:] = [d for d in subdirs if not should_skip_dir(os.path.join(base, d))]
        collected.extend(
            os.path.join(base, name) for name in names if is_valid_file(name)
        )

    return sorted(collected)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
# ========================== НОВОЕ: структура с подписями ==========================
|
|
119
|
+
|
|
120
|
+
def format_size(bytes_count: int) -> str:
    """Render a byte count as a human-readable B / KB / MB string."""
    kb = 1024
    if bytes_count < kb:
        return f"{bytes_count} B"
    if bytes_count < kb * kb:
        return f"{bytes_count / kb:.1f} KB"
    return f"{bytes_count / kb / kb:.1f} MB"
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def count_lines(path: str) -> int:
    """Count text lines in *path*; returns 0 for unreadable files.

    Fix: the original bare ``except:`` swallowed everything (including
    KeyboardInterrupt); only the errors open()/iteration can realistically
    raise are caught now.
    """
    try:
        with open(path, "r", encoding="utf-8") as f:
            return sum(1 for _ in f)
    except (OSError, UnicodeDecodeError):
        return 0
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def build_tree():
    """Render an indented ASCII tree of ROOT with per-file annotations.

    Each file line carries its size, mtime and line count.
    """
    rendered = []

    for base, subdirs, names in os.walk(ROOT):
        subdirs[:] = [d for d in subdirs if not should_skip_dir(os.path.join(base, d))]

        depth = os.path.relpath(base, ROOT).count(os.sep)
        indent = "    " * depth

        rendered.append(f"{indent}{os.path.basename(base)}/")

        for f in names:
            full_path = os.path.join(base, f)

            size = format_size(os.path.getsize(full_path))
            stamp = datetime.datetime.fromtimestamp(os.path.getmtime(full_path))
            mtime_str = stamp.strftime("%Y-%m-%d %H:%M")
            lines = count_lines(full_path)

            rendered.append(f"{indent}    {f} [{size}, {mtime_str}, {lines} строк]")

    return "\n".join(rendered)
|
|
163
|
+
|
|
164
|
+
# ========================== АНАЛИТИКА ПРОЕКТА ==========================
|
|
165
|
+
|
|
166
|
+
def gather_stats(files):
    """Aggregate size/line statistics over *files*.

    Returns a dict with the total byte size, per-extension line counts, and
    the heavy/long/tiny file lists consumed by the report formatters.
    Heavy and long lists end up sorted descending, tiny ascending.
    """
    total = 0
    ext_lines = {}
    heavy, long_list, tiny = [], [], []

    for path in files:
        byte_size = os.path.getsize(path)
        line_count = count_lines(path)
        extension = os.path.splitext(path)[1]

        total += byte_size
        ext_lines[extension] = ext_lines.get(extension, 0) + line_count

        heavy.append((byte_size, path))
        long_list.append((line_count, path))

        # "tiny" heuristic: almost no lines, or under 200 bytes.
        if line_count <= 5 or byte_size < 200:
            tiny.append((byte_size, line_count, path))

    heavy.sort(reverse=True)
    long_list.sort(reverse=True)
    tiny.sort()

    return {
        "total_size": total,
        "ext_lines": ext_lines,
        "heavy_files": heavy,
        "long_files": long_list,
        "tiny_files": tiny,
    }
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def format_stats(stats):
    """Render the basic analytics section of the report as plain text."""
    report = ["# ===== Аналитика проекта ====="]

    # Total size of everything that was collected.
    report.append(f"Суммарный вес файлов: {format_size(stats['total_size'])}\n")

    # Line counts grouped by extension.
    report.append("Количество строк по расширениям:")
    report.extend(
        f"    {ext}: {count} строк" for ext, count in stats["ext_lines"].items()
    )
    report.append("")

    # Five largest files by byte size.
    report.append("Топ 5 самых тяжёлых файлов:")
    report.extend(
        f"    {format_size(size):>8} {os.path.relpath(path, ROOT)}"
        for size, path in stats["heavy_files"][:5]
    )
    report.append("")

    # Five longest files by line count.
    report.append("Топ 5 самых длинных файлов:")
    report.extend(
        f"    {lines:>6} строк {os.path.relpath(path, ROOT)}"
        for lines, path in stats["long_files"][:5]
    )
    report.append("")

    # Suspiciously small files, likely leftovers.
    report.append("Подозрительно маленькие файлы (возможно мусор):")
    report.extend(
        f"    {format_size(size):>8}, {lines:3} строк {os.path.relpath(path, ROOT)}"
        for size, lines, path in stats["tiny_files"][:7]
    )

    report.append("\n")
    return "\n".join(report)
|
|
232
|
+
|
|
233
|
+
# ====================== AI-оценка безнадёжности проекта ======================
|
|
234
|
+
|
|
235
|
+
def build_pie_chart(stats):
    """Render per-extension line shares as an ASCII bar "pie chart"."""
    total = sum(stats["ext_lines"].values())
    if total == 0:
        return "Нет данных для построения диаграммы."

    rows = []
    for ext, lines in stats["ext_lines"].items():
        share = lines / total
        filled = int(share * 20)  # 20-character wide bar
        rows.append(
            f"{ext:6} | {'#' * filled}{'.' * (20 - filled)} | {share * 100:5.1f}%"
        )
    return "\n".join(rows)
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def calc_chaos_score(stats):
    """Compute the joke "chaos index".

    chaos = (#extension kinds) * log(size spread) + tiny-file penalty.

    Fix: the original indexed ``heavy_files[0]`` and ``tiny_files[0]``
    unconditionally and raised IndexError on an empty project or one with
    no tiny files; empty lists now contribute a neutral size of 0 bytes,
    yielding 0.0 for an empty project.
    """
    heavy = stats["heavy_files"]
    tiny = stats["tiny_files"]

    unique_ext = len(stats["ext_lines"])
    largest = heavy[0][0] if heavy else 0
    smallest_tiny = tiny[0][0] if tiny else 0
    # +1 on both sides avoids division by zero for zero-byte files.
    size_spread = (largest + 1) / (smallest_tiny + 1)
    tiny_penalty = len(tiny) * 0.7

    chaos = unique_ext * math.log(size_spread + 3) + tiny_penalty
    return round(chaos, 2)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def calc_tech_karma(stats):
    """Compute "technical karma": mean file length / number of tiny files.

    Fix: the original divided by ``len(stats["long_files"])`` without a
    guard and raised ZeroDivisionError on an empty project; an empty list
    now yields 0.0.
    """
    long_files = stats["long_files"]
    if not long_files:
        return 0.0

    avg_lines = sum(l for l, _ in long_files) / len(long_files)
    tiny_count = len(stats["tiny_files"]) or 1  # avoid dividing by zero
    karma = avg_lines / tiny_count
    return round(karma, 2)
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def ai_verdict(chaos, karma):
    """Map the chaos/karma scores to a snarky one-line verdict."""
    # Best case needs both a low chaos score and high karma.
    if chaos < 8 and karma > 150:
        return "Структура удивительно приличная. Почти не стыдно."

    # Remaining tiers depend on chaos alone, lowest threshold first.
    tiers = (
        (15, "Нормально, жить можно. Иногда."),
        (22, "Ты как будто пытался. Видно старание. И боль."),
        (35, "Проект напоминает чердак, куда сносят всё подряд «на потом»."),
    )
    for limit, verdict in tiers:
        if chaos < limit:
            return verdict
    return "Сожги и беги. Я сделал вид, что не видел это."
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def format_ai_analysis(stats):
    """Render the tongue-in-cheek "AI" assessment section of the report."""
    chaos = calc_chaos_score(stats)
    karma = calc_tech_karma(stats)

    report = [
        "# ===== AI-оценка состояния проекта =====",
        "\nСтруктура по строкам (ASCII pie chart):",
        build_pie_chart(stats),
        f"\nИндекс хаоса: {chaos}",
        f"Техническая карма: {karma}",
        f"\nAI-вердикт: {ai_verdict(chaos, karma)}\n",
        "Файлы, вызывающие сомнения:",
    ]

    # Rank tiny files by (size, line count) and score their "uselessness":
    # fewer lines and fewer bytes both push the score up, max 100.
    suspicious = sorted(stats["tiny_files"], key=lambda t: (t[0], t[1]))
    for size, lines, path in suspicious[:10]:
        score = round((5 - min(lines, 5)) * 10 + (200 - min(size, 200)) / 10, 1)
        report.append(
            f"    {os.path.relpath(path, ROOT)} | уровень бесполезности {score}/100"
        )

    report.append("\n")
    return "\n".join(report)
|
|
305
|
+
|
|
306
|
+
# ====================== Индекс будущего сожаления ======================
|
|
307
|
+
|
|
308
|
+
def calc_future_regret_index(stats):
    """Compute the "future regret index" (FRI).

    Combines the number of tiny files, the longest file's length, and the
    spread between the longest and shortest files. Returns 0 when there
    are no files at all.
    """
    long_files = stats["long_files"]
    if not long_files:
        return 0

    # long_files is sorted descending: first entry is the longest file.
    max_lines = long_files[0][0]
    spread = max_lines - long_files[-1][0]

    fri = (
        len(stats["tiny_files"]) * 4
        + math.sqrt(max_lines) * 1.2
        + math.log(spread + 5) * 6
    )
    return round(fri, 2)
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
# ====================== Граф зависимостей мусорных файлов ======================
|
|
328
|
+
|
|
329
|
+
def extract_imports(path):
    """Return top-level module names imported by the Python file at *path*.

    Scans for lines starting with ``import``/``from`` and keeps only the
    first dotted component. Unreadable files yield an empty list.

    Fixes vs. the original:
    * relative imports (``from .mod import x``) produced "" instead of
      "mod" — leading dots are now stripped and empty names skipped;
    * the bare ``except:`` is narrowed to the errors file I/O can raise.
    """
    imports = []
    try:
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if line.startswith(("import ", "from ")):
                    parts = line.split()
                    if len(parts) > 1:
                        # "pkg.mod" -> "pkg"; ".mod" (relative) -> "mod";
                        # a bare "from . import x" yields "" and is skipped.
                        name = parts[1].lstrip(".").split(".")[0]
                        if name:
                            imports.append(name)
    except (OSError, UnicodeDecodeError):
        pass
    return imports
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
def build_suspicious_graph(stats):
    """Render tiny-file import relationships as "a --> b" edge lines.

    Edges pointing outside the tiny-file set get an extra annotation.
    """
    tiny = stats["tiny_files"]

    # Map bare module name -> path for every tiny file.
    known = {os.path.splitext(os.path.basename(p))[0]: p for _, _, p in tiny}

    edges = []
    for _, _, path in tiny:
        source = os.path.splitext(os.path.basename(path))[0]
        for imported in extract_imports(path):
            if imported in known:
                edges.append(f"{source} --> {imported}")
            else:
                edges.append(f"{source} --> {imported} (влияет на основной код)")

    if not edges:
        return "Нет зависимостей. Эти файлы бесполезны в гордом одиночестве."
    return "\n".join(edges)
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
# ====================== Форматирование блока ======================
|
|
375
|
+
|
|
376
|
+
def format_future_analysis(stats):
    """Render the FRI forecast plus the suspicious-file dependency graph."""
    fri = calc_future_regret_index(stats)

    # Pick the commentary line for the FRI tier.
    if fri < 25:
        mood = "Переживать не о чем. Даже приятно посмотреть."
    elif fri < 60:
        mood = "Через полгода ты слегка вздохнёшь и продолжишь."
    elif fri < 120:
        mood = "Будущая боль ощутима. Закладывай время на рефакторинг."
    else:
        mood = "Просто оставлю это здесь. Ты знаешь, что делается."

    return "\n".join([
        "# ===== Прогноз страданий разработчика =====",
        f"Индекс будущего сожаления (FRI): {fri}",
        mood,
        "\nГраф зависимостей подозрительных файлов:",
        build_suspicious_graph(stats),
        "\n",
    ])
|
|
398
|
+
|
|
399
|
+
# ========================== запись итогового файла ==========================
|
|
400
|
+
|
|
401
|
+
def write_all_code(files):
    """Write the full report plus all concatenated sources to OUTPUT_FILE."""
    with open(OUTPUT_FILE, "w", encoding="utf-8") as out:

        # Analytics, the AI verdict, and the regret forecast come first.
        stats = gather_stats(files)
        out.write(format_stats(stats))
        out.write(format_ai_analysis(stats))
        out.write(format_future_analysis(stats))

        # Then the annotated directory tree.
        out.write("# ===== Структура проекта =====\n")
        out.write(build_tree())
        out.write("\n\n")

        # Finally every collected file, each under a path header.
        for path in files:
            rel = os.path.relpath(path, ROOT)
            out.write(f"# ------- {rel}\n")
            try:
                with open(path, "r", encoding="utf-8") as src:
                    out.write(src.read())
            except Exception as e:
                out.write(f"<<Error reading file: {e}>>")
            out.write("\n\n")

    print(f"Готово! Файл собран: {OUTPUT_FILE}")
|
|
428
|
+
|
|
429
|
+
def dump_project():
    """Collect matching files and write the combined all_code.txt dump."""
    write_all_code(collect_files())


def analyze_project():
    """Return the stats dict enriched with the file list and ASCII tree."""
    collected = collect_files()
    stats = gather_stats(collected)
    stats["files"] = collected
    stats["tree"] = build_tree()
    return stats
|
project_diagnose/cli.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# DIAGNOSE/project_diagnose/cli.py
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
from .analyzer import analyze_project, dump_project
|
|
5
|
+
from .rendering import render_text_report, render_tree
|
|
6
|
+
from .web import run_web
|
|
7
|
+
|
|
8
|
+
def main():
    """Entry point for the project-diagnose CLI."""
    parser = argparse.ArgumentParser(description="Диагностика проекта")
    parser.add_argument(
        "command",
        choices=["analyze", "report", "tree", "full", "web", "dump"],
        help="Команда анализа",
    )
    parser.add_argument(
        "--html",
        action="store_true",
        help="Вывести отчёт в HTML",
    )
    args = parser.parse_args()

    # "dump" writes all_code.txt and needs no in-memory stats.
    if args.command == "dump":
        dump_project()
        return

    stats = analyze_project()

    if args.command == "analyze":
        print("Анализ выполнен.")
    elif args.command == "tree":
        print(render_tree(stats))
    elif args.command == "web":
        run_web()
    elif args.command == "report":
        if args.html:
            print("<pre>")
            print(render_text_report(stats))
            print("</pre>")
        else:
            print(render_text_report(stats))
    elif args.command == "full":
        print(render_tree(stats))
        print()
        print(render_text_report(stats))
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field, validator
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
# Defaults applied when config.diagnose omits a field entirely.
DEFAULT_EXT = [".py", ".json"]
DEFAULT_EXCLUDE = ["venv", "__pycache__", ".git"]
DEFAULT_SPECIAL_JSON = ["config_ui.json"]
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class DiagnoseConfig(BaseModel):
    """Validated schema of the user-editable ``config.diagnose`` file.

    Every field falls back to the module-level defaults above when the
    file omits it.
    """

    # NOTE(review): `validator(..., each_item=True)` is the pydantic v1 API;
    # on pydantic v2 it is deprecated in favour of `field_validator` —
    # confirm which pydantic version this package pins before migrating.

    include_ext: List[str] = Field(default_factory=lambda: DEFAULT_EXT.copy())
    exclude_dirs: List[str] = Field(default_factory=lambda: DEFAULT_EXCLUDE.copy())
    exclude_files: List[str] = Field(default_factory=list)
    include_files: List[str] = Field(default_factory=list)
    special_json: List[str] = Field(default_factory=lambda: DEFAULT_SPECIAL_JSON.copy())

    # Extensions must carry a leading dot (".py", not "py").
    @validator("include_ext", each_item=True)
    def validate_ext(cls, v):
        if not v.startswith("."):
            raise ValueError(f"Расширение '{v}' должно начинаться с точки")
        return v

    # Excluded directories are matched by bare name, so paths are rejected.
    @validator("exclude_dirs", each_item=True)
    def validate_dir(cls, v):
        if "/" in v or "\\" in v:
            raise ValueError(f"Имя директории '{v}' должно быть только именем, без пути")
        return v

    # Include/exclude file entries are likewise bare file names, not paths.
    @validator("exclude_files", "include_files", each_item=True)
    def validate_filename(cls, v):
        if "/" in v or "\\" in v:
            raise ValueError(f"Имя файла '{v}' должно быть только именем файла")
        return v
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# DIAGNOSE/project_diagnose/rendering.py
|
|
2
|
+
|
|
3
|
+
from .analyzer import (
|
|
4
|
+
format_stats,
|
|
5
|
+
format_ai_analysis,
|
|
6
|
+
format_future_analysis,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
def render_tree(stats):
    """Prefix the cached ASCII tree with its section header."""
    header = "# ===== Структура проекта =====\n"
    return header + stats.get("tree", "")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def render_text_report(stats):
    """Join the three analyzer sections into one plain-text report."""
    sections = (format_stats, format_ai_analysis, format_future_analysis)
    return "\n".join(render(stats) for render in sections)
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
// ============ DARK / LIGHT THEME ============
// Restore the persisted theme, then let the toggle button flip and persist it.
const body = document.body;
const toggle = document.getElementById("themeToggle");

if (localStorage.getItem("theme") === "dark") {
    body.classList.add("dark");
}

// Guard: a page without #themeToggle used to throw here and abort the script.
if (toggle) {
    toggle.addEventListener("click", () => {
        body.classList.toggle("dark");
        localStorage.setItem("theme", body.classList.contains("dark") ? "dark" : "light");
    });
}

// ============ COLLAPSIBLE PANELS ============
// Every .collapsible header toggles its immediate sibling open/closed.
document.querySelectorAll(".collapsible").forEach(h => {
    h.addEventListener("click", () => {
        const content = h.nextElementSibling;
        h.classList.toggle("active");
        content.classList.toggle("open");
    });
});

// ============ "REFACTOR" BUTTON ============
const refactorBtn = document.getElementById("refactorBtn");
if (refactorBtn) {
    refactorBtn.addEventListener("click", () => {
        alert("Рефакторинг не реализован.\n\nНо проект явно намекает.");
    });
}

// ============ REPORT SEARCH ============
const searchInput = document.getElementById("searchInput");
const reportBlock = document.getElementById("reportBlock");

if (searchInput && reportBlock) {
    const originalText = reportBlock.textContent;

    // Bug fixes vs. the original:
    //  * the query was passed to `new RegExp` raw, so typing "(" or "["
    //    threw a SyntaxError — metacharacters are now escaped;
    //  * the raw report text was re-inserted via innerHTML, so literal "<"
    //    in the report (e.g. "<<Error reading file>>") could inject markup —
    //    all non-highlight chunks are HTML-escaped now.
    const escapeRegExp = s => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const escapeHtml = s =>
        s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");

    searchInput.addEventListener("input", () => {
        const q = searchInput.value.trim();
        if (!q) {
            reportBlock.textContent = originalText;
            return;
        }
        const regex = new RegExp(escapeRegExp(q), "gi");
        let html = "";
        let last = 0;
        for (const m of originalText.matchAll(regex)) {
            html += escapeHtml(originalText.slice(last, m.index));
            html += `<span class="highlight">${escapeHtml(m[0])}</span>`;
            last = m.index + m[0].length;
        }
        html += escapeHtml(originalText.slice(last));
        reportBlock.innerHTML = html;
    });
}

// ============ Chart.js: lines per extension ============
// PROJECT_METRICS is injected by the page template; Chart.js may be absent,
// so both the canvas and the library are checked before drawing.
if (window.PROJECT_METRICS && window.PROJECT_METRICS.ext_lines) {
    const ctx = document.getElementById("extChart");
    if (ctx && window.Chart) {
        const labels = Object.keys(window.PROJECT_METRICS.ext_lines);
        const data = Object.values(window.PROJECT_METRICS.ext_lines);

        new Chart(ctx, {
            type: "bar",
            data: {
                labels,
                datasets: [{
                    label: "Строк кода по расширениям",
                    data
                }]
            },
            options: {
                responsive: true,
                plugins: {
                    legend: { display: false },
                    title: { display: false }
                },
                scales: {
                    x: { ticks: { color: getComputedStyle(document.body).color } },
                    y: { ticks: { color: getComputedStyle(document.body).color } }
                }
            }
        });
    }
}
|