hanzo_mcp-0.1.21-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -0,0 +1,879 @@
+"""Project analysis tools for Hanzo Dev MCP.
+
+This module provides tools for analyzing project structure and dependencies.
+"""
+
+import json
+from pathlib import Path
+from typing import Any, Callable, final
+
+from mcp.server.fastmcp import Context as MCPContext
+from mcp.server.fastmcp import FastMCP
+
+from hanzo_mcp.tools.common.context import DocumentContext, create_tool_context
+from hanzo_mcp.tools.common.permissions import PermissionManager
+from hanzo_mcp.tools.common.validation import validate_path_parameter
+from hanzo_mcp.tools.shell.command_executor import CommandExecutor
+
+
+@final
+class ProjectAnalyzer:
+    """Analyzes project structure and dependencies."""
+
+    def __init__(self, command_executor: CommandExecutor) -> None:
+        """Initialize the project analyzer.
+
+        Args:
+            command_executor: The command executor for running analysis scripts
+        """
+        self.command_executor: CommandExecutor = command_executor
+
+    async def analyze_python_dependencies(self, project_dir: str) -> dict[str, Any]:
+        """Analyze Python project dependencies.
+
+        Args:
+            project_dir: The project directory
+
+        Returns:
+            Dictionary of dependency information
+        """
+        script: str = """
+import os
+import sys
+import json
+import pkg_resources
+from pathlib import Path
+
+# Scan for requirements files
+requirements_files = []
+for root, _, files in os.walk('.'):
+    for file in files:
+        if file in ('requirements.txt', 'pyproject.toml', 'setup.py'):
+            requirements_files.append(os.path.join(root, file))
+
+# Get installed packages
+installed_packages = {pkg.key: pkg.version for pkg in pkg_resources.working_set}
+
+# Scan for import statements
+imports = set()
+for root, _, files in os.walk('.'):
+    for file in files:
+        if file.endswith('.py'):
+            try:
+                with open(os.path.join(root, file), 'r', encoding='utf-8') as f:
+                    for line in f:
+                        line = line.strip()
+                        if line.startswith('import ') or line.startswith('from '):
+                            parts = line.split()
+                            if parts[0] == 'import':
+                                imports.add(parts[1].split('.')[0])
+                            elif parts[0] == 'from' and parts[1] != '.':
+                                imports.add(parts[1].split('.')[0])
+            except:
+                pass  # Skip files that can't be read
+
+# Create result
+result = {
+    'requirements_files': requirements_files,
+    'installed_packages': installed_packages,
+    'imports': list(imports)
+}
+
+print(json.dumps(result))
+"""
+
+        # Execute script
+        result = await self.command_executor.execute_script_from_file(
+            script=script, language="python", cwd=project_dir, timeout=30.0
+        )
+        code, stdout, stderr = result.return_code, result.stdout, result.stderr
+
+        if code != 0:
+            return {"error": f"Failed to analyze Python dependencies: {stderr}"}
+
+        try:
+            return json.loads(stdout)
+        except json.JSONDecodeError:
+            return {"error": "Failed to parse analysis result"}
+
+    async def analyze_javascript_dependencies(self, project_dir: str) -> dict[str, Any]:
+        """Analyze JavaScript/Node.js project dependencies.
+
+        Args:
+            project_dir: The project directory
+
+        Returns:
+            Dictionary of dependency information
+        """
+        script: str = """
+const fs = require('fs');
+const path = require('path');
+
+// Scan for package.json files
+const packageFiles = [];
+function findPackageFiles(dir) {
+    const files = fs.readdirSync(dir, { withFileTypes: true });
+
+    for (const file of files) {
+        const filePath = path.join(dir, file.name);
+
+        if (file.isDirectory() && file.name !== 'node_modules') {
+            findPackageFiles(filePath);
+        } else if (file.name === 'package.json') {
+            packageFiles.push(filePath);
+        }
+    }
+}
+
+// Find imports
+const imports = new Set();
+function scanImports(dir) {
+    const files = fs.readdirSync(dir, { withFileTypes: true });
+
+    for (const file of files) {
+        const filePath = path.join(dir, file.name);
+
+        if (file.isDirectory() && file.name !== 'node_modules') {
+            scanImports(filePath);
+        } else if (file.name.endsWith('.js') || file.name.endsWith('.jsx') ||
+                   file.name.endsWith('.ts') || file.name.endsWith('.tsx')) {
+            try {
+                const content = fs.readFileSync(filePath, 'utf-8');
+
+                // Match import statements
+                const importRegex = /import.*?from\\s+['"](.*?)['"];/g;
+                let match;
+                while (match = importRegex.exec(content)) {
+                    const importPath = match[1];
+                    if (!importPath.startsWith('.')) {
+                        imports.add(importPath.split('/')[0]);
+                    }
+                }
+
+                // Match require statements
+                const requireRegex = /require\\(['"](.*?)['"]\\)/g;
+                while (match = requireRegex.exec(content)) {
+                    const importPath = match[1];
+                    if (!importPath.startsWith('.')) {
+                        imports.add(importPath.split('/')[0]);
+                    }
+                }
+            } catch (err) {
+                // Skip files that can't be read
+            }
+        }
+    }
+}
+
+try {
+    findPackageFiles('.');
+    scanImports('.');
+
+    // Parse package.json files
+    const packageDetails = [];
+    for (const pkgFile of packageFiles) {
+        try {
+            const pkgJson = JSON.parse(fs.readFileSync(pkgFile, 'utf-8'));
+            packageDetails.push({
+                path: pkgFile,
+                name: pkgJson.name,
+                version: pkgJson.version,
+                dependencies: pkgJson.dependencies || {},
+                devDependencies: pkgJson.devDependencies || {}
+            });
+        } catch (err) {
+            packageDetails.push({
+                path: pkgFile,
+                error: 'Failed to parse package.json'
+            });
+        }
+    }
+
+    const result = {
+        packageFiles: packageFiles,
+        packageDetails: packageDetails,
+        imports: Array.from(imports)
+    };
+
+    console.log(JSON.stringify(result));
+} catch (err) {
+    console.error(err.message);
+    process.exit(1);
+}
+"""
+
+        # Execute script
+        result = await self.command_executor.execute_script_from_file(
+            script=script, language="javascript", cwd=project_dir, timeout=30.0
+        )
+        code, stdout, stderr = result.return_code, result.stdout, result.stderr
+
+        if code != 0:
+            return {"error": f"Failed to analyze JavaScript dependencies: {stderr}"}
+
+        try:
+            return json.loads(stdout)
+        except json.JSONDecodeError:
+            return {"error": "Failed to parse analysis result"}
+
+    async def analyze_project_structure(self, project_dir: str) -> dict[str, Any]:
+        """Analyze project structure.
+
+        Args:
+            project_dir: The project directory
+
+        Returns:
+            Dictionary of project structure information
+        """
+        script: str = """
+import os
+import json
+from pathlib import Path
+
+def count_lines(file_path):
+    try:
+        with open(file_path, 'r', encoding='utf-8') as f:
+            return len(f.readlines())
+    except:
+        return 0
+
+# Get file extensions
+extensions = {}
+file_count = 0
+dir_count = 0
+total_size = 0
+total_lines = 0
+
+# Scan files
+for root, dirs, files in os.walk('.'):
+    dir_count += len(dirs)
+    file_count += len(files)
+
+    for file in files:
+        file_path = Path(root) / file
+        ext = file_path.suffix.lower()
+        size = file_path.stat().st_size
+        total_size += size
+
+        if ext in ('.py', '.js', '.jsx', '.ts', '.tsx', '.java', '.c', '.cpp', '.h', '.go', '.rb', '.php'):
+            lines = count_lines(file_path)
+            total_lines += lines
+
+        if ext in extensions:
+            extensions[ext]['count'] += 1
+            extensions[ext]['size'] += size
+        else:
+            extensions[ext] = {'count': 1, 'size': size}
+
+# Sort extensions by count
+sorted_extensions = {k: v for k, v in sorted(
+    extensions.items(),
+    key=lambda item: item[1]['count'],
+    reverse=True
+)}
+
+# Create result
+result = {
+    'file_count': file_count,
+    'directory_count': dir_count,
+    'total_size': total_size,
+    'total_lines': total_lines,
+    'extensions': sorted_extensions
+}
+
+print(json.dumps(result))
+"""
+
+        # Execute script
+        result = await self.command_executor.execute_script_from_file(
+            script=script, language="python", cwd=project_dir, timeout=30.0
+        )
+        code, stdout, stderr = result.return_code, result.stdout, result.stderr
+
+        if code != 0:
+            return {"error": f"Failed to analyze project structure: {stderr}"}
+
+        try:
+            return json.loads(stdout)
+        except json.JSONDecodeError:
+            return {"error": "Failed to parse analysis result"}
+
+
+@final
+class ProjectManager:
+    """Manages project context and understanding."""
+
+    def __init__(
+        self,
+        document_context: DocumentContext,
+        permission_manager: PermissionManager,
+        project_analyzer: ProjectAnalyzer,
+    ) -> None:
+        """Initialize the project manager.
+
+        Args:
+            document_context: The document context for storing files
+            permission_manager: The permission manager for checking permissions
+            project_analyzer: The project analyzer for analyzing project structure
+        """
+        self.document_context: DocumentContext = document_context
+        self.permission_manager: PermissionManager = permission_manager
+        self.project_analyzer: ProjectAnalyzer = project_analyzer
+
+        # Project metadata
+        self.project_root: str | None = None
+        self.project_metadata: dict[str, Any] = {}
+        self.project_analysis: dict[str, Any] = {}
+        self.project_files: dict[str, dict[str, Any]] = {}
+
+        # Source code stats
+        self.stats: dict[str, int] = {
+            "files": 0,
+            "directories": 0,
+            "lines_of_code": 0,
+            "functions": 0,
+            "classes": 0,
+        }
+
+        # Programming languages detected
+        self.languages: dict[str, int] = {}
+
+        # Detected framework/library usage
+        self.frameworks: dict[str, dict[str, Any]] = {}
+
+    def set_project_root(self, root_path: str) -> bool:
+        """Set the project root directory.
+
+        Args:
+            root_path: The root directory of the project
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if not self.permission_manager.is_path_allowed(root_path):
+            return False
+
+        path: Path = Path(root_path)
+        if not path.exists() or not path.is_dir():
+            return False
+
+        self.project_root = str(path.resolve())
+        return True
+
+    def detect_programming_languages(self) -> dict[str, int]:
+        """Detect programming languages used in the project.
+
+        Returns:
+            Dictionary mapping language names to file counts
+        """
+        if not self.project_root:
+            return {}
+
+        extension_to_language: dict[str, str] = {
+            ".py": "Python",
+            ".js": "JavaScript",
+            ".jsx": "JavaScript (React)",
+            ".ts": "TypeScript",
+            ".tsx": "TypeScript (React)",
+            ".html": "HTML",
+            ".css": "CSS",
+            ".scss": "SCSS",
+            ".less": "LESS",
+            ".java": "Java",
+            ".kt": "Kotlin",
+            ".rb": "Ruby",
+            ".php": "PHP",
+            ".go": "Go",
+            ".rs": "Rust",
+            ".swift": "Swift",
+            ".c": "C",
+            ".cpp": "C++",
+            ".h": "C/C++ Header",
+            ".cs": "C#",
+            ".sh": "Shell",
+            ".bat": "Batch",
+            ".ps1": "PowerShell",
+            ".md": "Markdown",
+            ".json": "JSON",
+            ".yaml": "YAML",
+            ".yml": "YAML",
+            ".toml": "TOML",
+            ".xml": "XML",
+            ".sql": "SQL",
+            ".r": "R",
+            ".scala": "Scala",
+        }
+
+        languages: dict[str, int] = {}
+        root_path: Path = Path(self.project_root)
+
+        for ext, lang in extension_to_language.items():
+            files: list[Path] = list(root_path.glob(f"**/*{ext}"))
+
+            # Filter out files in excluded directories
+            filtered_files: list[Path] = []
+            for file in files:
+                if self.permission_manager.is_path_allowed(str(file)):
+                    filtered_files.append(file)
+
+            if filtered_files:
+                languages[lang] = len(filtered_files)
+
+        self.languages = languages
+        return languages
+
+    def detect_project_type(self) -> dict[str, Any]:
+        """Detect the type of project.
+
+        Returns:
+            Dictionary describing the project type and frameworks
+        """
+        if not self.project_root:
+            return {"type": "unknown"}
+
+        root_path: Path = Path(self.project_root)
+        result: dict[str, Any] = {"type": "unknown", "frameworks": []}
+
+        # Define type checker functions with proper type annotations
+        def check_package_dependency(p: Path, dependency: str) -> bool:
+            return dependency in self._read_json(p).get("dependencies", {})
+
+        def check_requirement(p: Path, prefix: str) -> bool:
+            return any(x.startswith(prefix) for x in self._read_lines(p))
+
+        def always_true(p: Path) -> bool:
+            return True
+
+        def is_directory(p: Path) -> bool:
+            return p.is_dir()
+
+        # Check for common project markers using list of tuples with properly typed functions
+        markers: dict[str, list[tuple[str, Callable[[Path], bool]]]] = {
+            "web-frontend": [
+                ("package.json", lambda p: check_package_dependency(p, "react")),
+                ("package.json", lambda p: check_package_dependency(p, "vue")),
+                ("package.json", lambda p: check_package_dependency(p, "angular")),
+                ("angular.json", always_true),
+                ("next.config.js", always_true),
+                ("nuxt.config.js", always_true),
+            ],
+            "web-backend": [
+                ("requirements.txt", lambda p: check_requirement(p, "django")),
+                ("requirements.txt", lambda p: check_requirement(p, "flask")),
+                ("requirements.txt", lambda p: check_requirement(p, "fastapi")),
+                ("package.json", lambda p: check_package_dependency(p, "express")),
+                ("package.json", lambda p: check_package_dependency(p, "koa")),
+                ("package.json", lambda p: check_package_dependency(p, "nest")),
+                ("pom.xml", always_true),
+                ("build.gradle", always_true),
+            ],
+            "mobile": [
+                ("pubspec.yaml", always_true),  # Flutter
+                ("AndroidManifest.xml", always_true),
+                ("Info.plist", always_true),
+                ("package.json", lambda p: check_package_dependency(p, "react-native")),
+            ],
+            "desktop": [
+                ("package.json", lambda p: check_package_dependency(p, "electron")),
+                ("CMakeLists.txt", always_true),
+                ("Makefile", always_true),
+            ],
+            "data-science": [
+                ("requirements.txt", lambda p: check_requirement(p, "pandas")),
+                ("requirements.txt", lambda p: check_requirement(p, "numpy")),
+                ("requirements.txt", lambda p: check_requirement(p, "jupyter")),
+                ("environment.yml", always_true),
+            ],
+            "devops": [
+                (".github/workflows", is_directory),
+                (".gitlab-ci.yml", always_true),
+                ("Dockerfile", always_true),
+                ("docker-compose.yml", always_true),
+                ("Jenkinsfile", always_true),
+                ("terraform.tf", always_true),
+            ],
+            "game": [
+                ("UnityProject.sln", always_true),
+                ("Assembly-CSharp.csproj", always_true),
+                ("ProjectSettings/ProjectSettings.asset", always_true),
+                ("Godot", always_true),
+                ("project.godot", always_true),
+            ],
+        }
+
+        # Check markers
+        for project_type, type_markers in markers.items():
+            for marker, condition in type_markers:
+                marker_path: Path = root_path / marker
+                if marker_path.exists() and condition(marker_path):
+                    result["type"] = project_type
+                    break
+
+        # Detect frameworks
+        self._detect_frameworks(result)
+
+        return result
+
+    def _detect_frameworks(self, result: dict[str, Any]) -> None:
+        """Detect frameworks used in the project.
+
+        Args:
+            result: Dictionary to update with framework information
+        """
+        if not self.project_root:
+            return
+
+        root_path: Path = Path(self.project_root)
+        frameworks: list[str] = []
+
+        # Package.json based detection
+        package_json: Path = root_path / "package.json"
+        if package_json.exists() and package_json.is_file():
+            try:
+                data: dict[str, Any] = self._read_json(package_json)
+                dependencies: dict[str, Any] = {
+                    **data.get("dependencies", {}),
+                    **data.get("devDependencies", {}),
+                }
+
+                framework_markers: dict[str, list[str]] = {
+                    "React": ["react", "react-dom"],
+                    "Vue.js": ["vue"],
+                    "Angular": ["@angular/core"],
+                    "Next.js": ["next"],
+                    "Nuxt.js": ["nuxt"],
+                    "Express": ["express"],
+                    "NestJS": ["@nestjs/core"],
+                    "React Native": ["react-native"],
+                    "Electron": ["electron"],
+                    "jQuery": ["jquery"],
+                    "Bootstrap": ["bootstrap"],
+                    "Tailwind CSS": ["tailwindcss"],
+                    "Material UI": ["@mui/material", "@material-ui/core"],
+                    "Redux": ["redux"],
+                    "Gatsby": ["gatsby"],
+                    "Svelte": ["svelte"],
+                    "Jest": ["jest"],
+                    "Mocha": ["mocha"],
+                    "Cypress": ["cypress"],
+                }
+
+                for framework, markers in framework_markers.items():
+                    if any(marker in dependencies for marker in markers):
+                        frameworks.append(framework)
+
+            except Exception:
+                pass
+
+        # Python requirements.txt based detection
+        requirements_txt: Path = root_path / "requirements.txt"
+        if requirements_txt.exists() and requirements_txt.is_file():
+            try:
+                requirements: list[str] = self._read_lines(requirements_txt)
+
+                framework_markers: dict[str, list[str]] = {
+                    "Django": ["django"],
+                    "Flask": ["flask"],
+                    "FastAPI": ["fastapi"],
+                    "Pandas": ["pandas"],
+                    "NumPy": ["numpy"],
+                    "TensorFlow": ["tensorflow"],
+                    "PyTorch": ["torch"],
+                    "Scikit-learn": ["scikit-learn", "sklearn"],
+                    "Jupyter": ["jupyter", "ipython"],
+                    "Pytest": ["pytest"],
+                    "SQLAlchemy": ["sqlalchemy"],
+                }
+
+                for framework, markers in framework_markers.items():
+                    if any(
+                        any(req.lower().startswith(marker) for marker in markers)
+                        for req in requirements
+                    ):
+                        frameworks.append(framework)
+
+            except Exception:
+                pass
+
+        result["frameworks"] = frameworks
+        self.frameworks = {f: {"detected": True} for f in frameworks}
+
+    def _read_json(self, path: Path) -> dict[str, Any]:
+        """Read a JSON file.
+
+        Args:
+            path: Path to the JSON file
+
+        Returns:
+            Dictionary containing the JSON data, or empty dict on error
+        """
+        try:
+            with open(path, "r", encoding="utf-8") as f:
+                return json.load(f)
+        except Exception:
+            return {}
+
+    def _read_lines(self, path: Path) -> list[str]:
+        """Read lines from a text file.
+
+        Args:
+            path: Path to the text file
+
+        Returns:
+            List of lines, or empty list on error
+        """
+        try:
+            with open(path, "r", encoding="utf-8") as f:
+                return f.readlines()
+        except Exception:
+            return []
+
+    async def analyze_project(self) -> dict[str, Any]:
+        """Analyze the project structure and dependencies.
+
+        Returns:
+            Dictionary containing analysis results
+        """
+        if not self.project_root:
+            return {"error": "Project root not set"}
+
+        result: dict[str, Any] = {}
+
+        # Detect languages
+        result["languages"] = self.detect_programming_languages()
+
+        # Detect project type
+        result["project_type"] = self.detect_project_type()
+
+        # Analyze structure
+        structure: dict[
+            str, Any
+        ] = await self.project_analyzer.analyze_project_structure(self.project_root)
+        result["structure"] = structure
+
+        # Analyze dependencies based on project type
+        if "Python" in result["languages"]:
+            python_deps: dict[
+                str, Any
+            ] = await self.project_analyzer.analyze_python_dependencies(
+                self.project_root
+            )
+            result["python_dependencies"] = python_deps
+
+        if "JavaScript" in result["languages"] or "TypeScript" in result["languages"]:
+            js_deps: dict[
+                str, Any
+            ] = await self.project_analyzer.analyze_javascript_dependencies(
+                self.project_root
+            )
+            result["javascript_dependencies"] = js_deps
+
+        self.project_analysis = result
+        return result
+
+    def generate_project_summary(self) -> str:
+        """Generate a human-readable summary of the project.
+
+        Returns:
+            Formatted string with project summary
+        """
+        if not self.project_root or not self.project_analysis:
+            return "No project analysis available. Please set project root and run analysis first."
+
+        # Build summary
+        summary: list[str] = [f"# Project Summary: {Path(self.project_root).name}\n"]
+
+        # Project type
+        project_type: dict[str, Any] = self.project_analysis.get("project_type", {})
+        if project_type:
+            summary.append(
+                f"## Project Type: {project_type.get('type', 'Unknown').title()}"
+            )
+
+            frameworks: list[str] = project_type.get("frameworks", [])
+            if frameworks:
+                summary.append("### Frameworks/Libraries")
+                summary.append(", ".join(frameworks))
+
+            summary.append("")
+
+        # Languages
+        languages: dict[str, int] = self.project_analysis.get("languages", {})
+        if languages:
+            summary.append("## Programming Languages")
+            for lang, count in languages.items():
+                summary.append(f"- {lang}: {count} files")
+            summary.append("")
+
+        # Structure
+        structure: dict[str, Any] = self.project_analysis.get("structure", {})
+        if structure and not isinstance(structure, str):
+            summary.append("## Project Structure")
+            summary.append(f"- Files: {structure.get('file_count', 0)}")
+            summary.append(f"- Directories: {structure.get('directory_count', 0)}")
+            summary.append(
+                f"- Total size: {self._format_size(structure.get('total_size', 0))}"
+            )
+
+            if structure.get("total_lines"):
+                summary.append(
+                    f"- Total lines of code: {structure.get('total_lines', 0)}"
+                )
+
+            # File extensions
+            extensions: dict[str, dict[str, int]] = structure.get("extensions", {})
+            if extensions:
+                summary.append("\n### File Types")
+                for ext, info in list(extensions.items())[:10]:  # Show top 10
+                    if ext:
+                        summary.append(
+                            f"- {ext}: {info.get('count', 0)} files ({self._format_size(info.get('size', 0))})"
+                        )
+
+            summary.append("")
+
+        # Dependencies
+        py_deps: dict[str, Any] = self.project_analysis.get("python_dependencies", {})
+        if py_deps and not isinstance(py_deps, str) and not py_deps.get("error"):
+            summary.append("## Python Dependencies")
+
+            # Requirements files
+            req_files: list[str] = py_deps.get("requirements_files", [])
+            if req_files:
+                summary.append("### Dependency Files")
+                for req in req_files:
+                    summary.append(f"- {req}")
+
+            # Imports
+            imports: list[str] = py_deps.get("imports", [])
+            if imports:
+                summary.append("\n### Top Imports")
+                for imp in sorted(imports)[:15]:  # Show top 15
+                    summary.append(f"- {imp}")
+
+            summary.append("")
+
+        js_deps: dict[str, Any] = self.project_analysis.get(
+            "javascript_dependencies", {}
+        )
+        if js_deps and not isinstance(js_deps, str) and not js_deps.get("error"):
+            summary.append("## JavaScript/TypeScript Dependencies")
+
+            # Package files
+            pkg_files: list[str] = js_deps.get("packageFiles", [])
+            if pkg_files:
+                summary.append("### Package Files")
+                for pkg in pkg_files:
+                    summary.append(f"- {pkg}")
+
+            # Imports
+            imports: list[str] = js_deps.get("imports", [])
+            if imports:
+                summary.append("\n### Top Imports")
+                for imp in sorted(imports)[:15]:  # Show top 15
+                    summary.append(f"- {imp}")
+
+            summary.append("")
+
+        return "\n".join(summary)
+
+    def _format_size(self, size_bytes: float) -> str:
+        """Format file size in human-readable form.
+
+        Args:
+            size_bytes: Size in bytes
+
+        Returns:
+            Formatted size string
+        """
+        for unit in ["B", "KB", "MB", "GB"]:
+            if size_bytes < 1024.0:
+                return f"{size_bytes:.1f} {unit}"
+            size_bytes = size_bytes / 1024.0
+        return f"{size_bytes:.1f} TB"
+
+
+@final
+class ProjectAnalysis:
+    """Project analysis tools for Hanzo Dev MCP."""
+
+    def __init__(
+        self,
+        project_manager: ProjectManager,
+        project_analyzer: ProjectAnalyzer,
+        permission_manager: PermissionManager,
+    ) -> None:
+        """Initialize project analysis.
+
+        Args:
+            project_manager: Project manager for tracking projects
+            project_analyzer: Project analyzer for analyzing project structure and dependencies
+            permission_manager: Permission manager for access control
+        """
+        self.project_manager: ProjectManager = project_manager
+        self.project_analyzer: ProjectAnalyzer = project_analyzer
+        self.permission_manager: PermissionManager = permission_manager
+
+    def register_tools(self, mcp_server: FastMCP) -> None:
+        """Register project analysis tools with the MCP server.
+
+        Args:
+            mcp_server: The FastMCP server instance
+        """
+
+        # Project analysis tool
+        @mcp_server.tool()
+        async def project_analyze_tool(project_dir: str, ctx: MCPContext) -> str:
+            """Analyze a project directory structure and dependencies.
+
+            Args:
+                project_dir: Path to the project directory
+
+            Returns:
+                Analysis of the project
+            """
+            tool_ctx = create_tool_context(ctx)
+            tool_ctx.set_tool_info("project_analyze")
+
+            # Validate project_dir parameter
+            path_validation = validate_path_parameter(project_dir, "project_dir")
+            if path_validation.is_error:
+                await tool_ctx.error(path_validation.error_message)
+                return f"Error: {path_validation.error_message}"
+
+            await tool_ctx.info(f"Analyzing project: {project_dir}")
+
+            # Check if directory is allowed
+            if not self.permission_manager.is_path_allowed(project_dir):
+                await tool_ctx.error(f"Directory not allowed: {project_dir}")
+                return f"Error: Directory not allowed: {project_dir}"
+
+            # Set project root
+            if not self.project_manager.set_project_root(project_dir):
+                await tool_ctx.error(f"Failed to set project root: {project_dir}")
+                return f"Error: Failed to set project root: {project_dir}"
+
+            await tool_ctx.info("Analyzing project structure...")
+
+            # Report intermediate progress
+            await tool_ctx.report_progress(10, 100)
+
+            # Analyze project
+            analysis: dict[str, Any] = await self.project_manager.analyze_project()
+            if "error" in analysis:
+                await tool_ctx.error(f"Error analyzing project: {analysis['error']}")
+                return f"Error analyzing project: {analysis['error']}"
+
+            # Report more progress
+            await tool_ctx.report_progress(50, 100)
+
+            await tool_ctx.info("Generating project summary...")
+
+            # Generate summary
+            summary = self.project_manager.generate_project_summary()
+
+            # Complete progress
+            await tool_ctx.report_progress(100, 100)
+
+            await tool_ctx.info("Project analysis complete")
+            return summary
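
For orientation, the classes added in this file compose as follows: ProjectAnalyzer runs the embedded analysis scripts through a CommandExecutor, ProjectManager combines those results with language, project-type, and framework detection, and ProjectAnalysis exposes the pipeline as the project_analyze MCP tool. The sketch below is not part of the package; it shows one plausible way to wire these pieces into a FastMCP server. The module path hanzo_mcp.tools.project.analysis, the no-argument constructors for DocumentContext and PermissionManager, and the CommandExecutor(permissions) signature are illustrative assumptions, not APIs confirmed by this diff.

# Minimal wiring sketch, not taken from the package. Constructor signatures for
# DocumentContext, PermissionManager, and CommandExecutor, and the module path
# of the file shown above, are assumptions for illustration only.
from mcp.server.fastmcp import FastMCP

from hanzo_mcp.tools.common.context import DocumentContext
from hanzo_mcp.tools.common.permissions import PermissionManager
from hanzo_mcp.tools.shell.command_executor import CommandExecutor
from hanzo_mcp.tools.project.analysis import (  # hypothetical module path
    ProjectAnalysis,
    ProjectAnalyzer,
    ProjectManager,
)

mcp = FastMCP("hanzo-project-analysis")

permissions = PermissionManager()        # assumed no-arg constructor
documents = DocumentContext()            # assumed no-arg constructor
executor = CommandExecutor(permissions)  # assumed signature

# Constructor argument order below matches the class definitions in this diff.
analyzer = ProjectAnalyzer(executor)
manager = ProjectManager(documents, permissions, analyzer)
ProjectAnalysis(manager, analyzer, permissions).register_tools(mcp)

if __name__ == "__main__":
    # An MCP client can then call project_analyze_tool with a project_dir
    # that the PermissionManager allows.
    mcp.run()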