mesh-arch 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mesh_arch-1.0.0/PKG-INFO +34 -0
- mesh_arch-1.0.0/README.md +93 -0
- mesh_arch-1.0.0/mesh/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/analysis/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/analysis/builder.py +435 -0
- mesh_arch-1.0.0/mesh/cli.py +321 -0
- mesh_arch-1.0.0/mesh/core/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/core/graph.py +300 -0
- mesh_arch-1.0.0/mesh/core/parser.py +776 -0
- mesh_arch-1.0.0/mesh/core/storage.py +455 -0
- mesh_arch-1.0.0/mesh/enforcement/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/enforcement/checker.py +409 -0
- mesh_arch-1.0.0/mesh/enforcement/history.py +291 -0
- mesh_arch-1.0.0/mesh/enforcement/hook.py +204 -0
- mesh_arch-1.0.0/mesh/enforcement/ignorer.py +340 -0
- mesh_arch-1.0.0/mesh/enforcement/reporter.py +211 -0
- mesh_arch-1.0.0/mesh/mcp/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/mcp/server.py +267 -0
- mesh_arch-1.0.0/mesh/mcp/summary.py +225 -0
- mesh_arch-1.0.0/mesh/mcp/tools.py +18 -0
- mesh_arch-1.0.0/mesh/ollama/__init__.py +0 -0
- mesh_arch-1.0.0/mesh/ollama/compatibility.py +250 -0
- mesh_arch-1.0.0/mesh/ollama/detector.py +251 -0
- mesh_arch-1.0.0/mesh/ollama/server.py +196 -0
- mesh_arch-1.0.0/mesh/ollama/wizard.py +333 -0
- mesh_arch-1.0.0/mesh/tests/__init__.py +0 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/PKG-INFO +34 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/SOURCES.txt +33 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/dependency_links.txt +1 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/entry_points.txt +2 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/requires.txt +31 -0
- mesh_arch-1.0.0/mesh_arch.egg-info/top_level.txt +1 -0
- mesh_arch-1.0.0/pyproject.toml +73 -0
- mesh_arch-1.0.0/setup.cfg +4 -0
- mesh_arch-1.0.0/tests/test_mesh_v2.py +262 -0
mesh_arch-1.0.0/PKG-INFO
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mesh-arch
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Architectural coherence layer for AI-generated codebases
|
|
5
|
+
Requires-Python: >=3.10
|
|
6
|
+
Requires-Dist: tree-sitter>=0.21.0
|
|
7
|
+
Requires-Dist: tree-sitter-python>=0.21.0
|
|
8
|
+
Requires-Dist: tree-sitter-typescript>=0.21.0
|
|
9
|
+
Requires-Dist: networkx>=3.3
|
|
10
|
+
Requires-Dist: click>=8.1.0
|
|
11
|
+
Requires-Dist: watchdog>=4.0.0
|
|
12
|
+
Requires-Dist: rich>=13.0.0
|
|
13
|
+
Requires-Dist: msgpack>=1.0.0
|
|
14
|
+
Requires-Dist: anyio>=4.0.0
|
|
15
|
+
Requires-Dist: psutil>=5.9.0
|
|
16
|
+
Requires-Dist: ollama>=0.3.0
|
|
17
|
+
Requires-Dist: transformers>=4.40.0
|
|
18
|
+
Requires-Dist: accelerate>=0.30.0
|
|
19
|
+
Requires-Dist: datasets>=2.20.0
|
|
20
|
+
Provides-Extra: train
|
|
21
|
+
Requires-Dist: torch>=2.3.0; extra == "train"
|
|
22
|
+
Requires-Dist: torch-geometric>=2.5.0; extra == "train"
|
|
23
|
+
Requires-Dist: datasets>=2.20.0; extra == "train"
|
|
24
|
+
Requires-Dist: tqdm>=4.66.0; extra == "train"
|
|
25
|
+
Requires-Dist: numpy>=1.26.0; extra == "train"
|
|
26
|
+
Requires-Dist: scikit-learn>=1.5.0; extra == "train"
|
|
27
|
+
Provides-Extra: dev
|
|
28
|
+
Requires-Dist: pytest>=8.0.0; extra == "dev"
|
|
29
|
+
Requires-Dist: pytest-cov>=5.0.0; extra == "dev"
|
|
30
|
+
Requires-Dist: pytest-mock>=3.14.0; extra == "dev"
|
|
31
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == "dev"
|
|
32
|
+
Requires-Dist: black>=24.0.0; extra == "dev"
|
|
33
|
+
Requires-Dist: mypy>=1.10.0; extra == "dev"
|
|
34
|
+
Requires-Dist: ruff>=0.4.0; extra == "dev"
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# Mesh
|
|
2
|
+
|
|
3
|
+
**Architectural coherence layer for AI-generated codebases**
|
|
4
|
+
|
|
5
|
+
Mesh ensures AI-generated code follows your codebase's architecture. It detects duplicate functions, circular dependencies, naming violations, and data flow issues before they enter your codebase.
|
|
6
|
+
|
|
7
|
+
## Quickstart
|
|
8
|
+
|
|
9
|
+
**Two steps. Everything else is automatic.**
|
|
10
|
+
|
|
11
|
+
### Step 1 — Install Mesh
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pip install mesh-arch
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
### Step 2 — Set up in your project
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
cd your-project
|
|
21
|
+
mesh setup
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
The setup wizard will:
|
|
25
|
+
- Analyse your codebase
|
|
26
|
+
- Check for Ollama (install guide if needed)
|
|
27
|
+
- Show all your installed AI models with compatibility ratings
|
|
28
|
+
- Register Mesh with Cursor and Claude Code automatically
|
|
29
|
+
|
|
30
|
+
### That's it
|
|
31
|
+
|
|
32
|
+
Open Cursor or Claude Code. Every AI coding session now automatically receives your codebase's architectural context.
|
|
33
|
+
|
|
34
|
+
The AI will never create duplicate functions, never introduce circular dependencies, and always follow your naming conventions.
|
|
35
|
+
|
|
36
|
+
---
|
|
37
|
+
|
|
38
|
+
**Don't have a model yet?**
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
ollama pull qwen3.5:9b # recommended (6.6GB)
|
|
42
|
+
ollama pull qwen3.5:4b # smaller option (3.4GB)
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
Then run `mesh setup` again.
|
|
46
|
+
|
|
47
|
+
## Commands
|
|
48
|
+
|
|
49
|
+
| Command | Description |
|
|
50
|
+
|---------|-------------|
|
|
51
|
+
| `mesh init` | Analyse codebase and build graphs |
|
|
52
|
+
| `mesh setup` | Run interactive setup wizard |
|
|
53
|
+
| `mesh serve` | Start MCP server for AI coding tools |
|
|
54
|
+
| `mesh check` | Check for code violations |
|
|
55
|
+
| `mesh doctor` | Comprehensive health check |
|
|
56
|
+
| `mesh model list` | List installed Ollama models |
|
|
57
|
+
| `mesh model select <name>` | Select a specific model |
|
|
58
|
+
| `mesh model status` | Show configured model status |
|
|
59
|
+
| `mesh model test` | Test model with sample prompt |
|
|
60
|
+
| `mesh install-hook` | Install git pre-commit hook |
|
|
61
|
+
|
|
62
|
+
## Architecture
|
|
63
|
+
|
|
64
|
+
Mesh works in three layers:
|
|
65
|
+
|
|
66
|
+
1. **Analysis (Phase 1)** — Builds call graphs, data flow graphs, and type dependency graphs
|
|
67
|
+
2. **Enforcement (Phase 2)** — Git hooks and CI checks that block incoherent code
|
|
68
|
+
3. **Context Injection (Phase 3/4.5)** — MCP server that injects architectural context into AI sessions
|
|
69
|
+
|
|
70
|
+
## Requirements
|
|
71
|
+
|
|
72
|
+
- Python 3.10+
|
|
73
|
+
- Ollama (for AI model inference)
|
|
74
|
+
- Git (for pre-commit hooks)
|
|
75
|
+
|
|
76
|
+
## Development
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
# Install with dev dependencies
|
|
80
|
+
pip install -e ".[dev]"
|
|
81
|
+
|
|
82
|
+
# Run tests
|
|
83
|
+
pytest tests/ -v
|
|
84
|
+
|
|
85
|
+
# Run linting
|
|
86
|
+
black mesh/ tests/
|
|
87
|
+
mypy mesh/
|
|
88
|
+
ruff check mesh/
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
## License
|
|
92
|
+
|
|
93
|
+
MIT
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,435 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Analysis builder — wires parser, graph, and storage together.
|
|
3
|
+
|
|
4
|
+
Builds call graphs, data flow graphs, and type dependency graphs
|
|
5
|
+
using the universal parser and rustworkx-backed MeshGraph.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import logging
|
|
11
|
+
from dataclasses import dataclass
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any
|
|
14
|
+
|
|
15
|
+
from mesh.core.graph import MeshGraph
|
|
16
|
+
from mesh.core.parser import UniversalParser, ParsedFunction, ParsedClass
|
|
17
|
+
from mesh.core.storage import MeshStorage
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class AnalysisResult:
    """Summary statistics produced by a full analysis run.

    Returned by AnalysisBuilder.run_full_analysis after all graphs have
    been built and persisted.
    """

    # Number of distinct source files that contained parsed functions.
    files_analyzed: int
    # Total number of functions parsed across all files.
    functions_found: int
    # Combined edge count of the call, type, and data-flow graphs.
    edges_created: int
    # Wall-clock duration of the run (time.perf_counter based).
    duration_seconds: float
    # Error messages collected during analysis (currently always empty —
    # run_full_analysis passes errors=[]).
    errors: list[str]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class AnalysisBuilder:
    """
    Builds all Mesh graphs from source code.

    Wires together:
    - UniversalParser (multi-language parsing)
    - MeshGraph (graph construction and queries)
    - MeshStorage (SQLite persistence)
    """

    def __init__(self, codebase_root: Path):
        """
        Initialize builder.

        Args:
            codebase_root: Root directory of the codebase to analyse.
        """
        self._root = codebase_root
        self._parser = UniversalParser(codebase_root)
        self._storage = MeshStorage(codebase_root)

    def run_full_analysis(self) -> AnalysisResult:
        """
        Run complete analysis: parse all files, build all graphs.

        Clears previously stored data, re-parses the whole codebase, builds
        the call / type / data-flow graphs, and persists all three.

        Returns:
            AnalysisResult with statistics about the run.
        """
        import time

        start = time.perf_counter()

        # Clear existing data so stale nodes/edges never survive a re-run.
        self._storage.clear()

        # Parse functions, then classes (for type dependencies).
        functions = self._parser.parse_directory(self._root)
        classes = self._parser.parse_classes_directory(self._root)

        # Build the three graphs.
        call_graph = self._build_call_graph(functions)
        type_graph = self._build_type_graph(classes)
        data_flow_graph = self._build_data_flow_graph(functions)

        # Persist all graphs in the database.
        self._store_graph(call_graph, "call")
        self._store_graph(type_graph, "type")
        self._store_graph(data_flow_graph, "dataflow")

        duration = time.perf_counter() - start

        return AnalysisResult(
            files_analyzed=len({f.file_path for f in functions}),
            functions_found=len(functions),
            edges_created=call_graph.edge_count
            + type_graph.edge_count
            + data_flow_graph.edge_count,
            duration_seconds=duration,
            errors=[],
        )

    def _build_call_graph(self, functions: list[ParsedFunction]) -> MeshGraph:
        """
        Build call graph from parsed functions.

        Args:
            functions: List of parsed functions.

        Returns:
            MeshGraph with one node per function and a "calls" edge per
            resolved call.
        """
        graph = MeshGraph("call")

        # Add all function nodes.
        for func in functions:
            graph.add_node(
                func.id,
                {
                    "name": func.name,
                    "file_path": func.file_path,
                    "line_start": func.line_start,
                    "line_end": func.line_end,
                    "signature": func.signature,
                    "docstring": func.docstring,
                },
            )

        # (file, name) index for same-file resolution.
        name_to_id: dict[tuple[str, str], str] = {}
        for func in functions:
            name_to_id[(func.file_path, func.name)] = func.id

        # Bare-name fallback index, built once.  setdefault keeps the first
        # match in name_to_id's insertion order, mirroring the previous
        # linear scan, but makes resolution O(1) per call instead of O(n)
        # (the old per-call scan was O(n^2) overall).
        by_name: dict[str, str] = {}
        for (_, fname), fid in name_to_id.items():
            by_name.setdefault(fname, fid)

        # Add call edges.
        for func in functions:
            for call_name in func.calls:
                # Prefer a definition in the same file; otherwise fall back
                # to the first function anywhere with that name (heuristic —
                # imports are not resolved).
                target_id = name_to_id.get((func.file_path, call_name))
                if target_id is None:
                    target_id = by_name.get(call_name)

                # Skip unresolved calls and self-recursion.
                if target_id and target_id != func.id:
                    graph.add_edge(
                        func.id,
                        target_id,
                        # NOTE(review): the parser does not expose the actual
                        # call-site line; line_end is an approximation.
                        {"type": "calls", "call_site_line": func.line_end},
                    )

        return graph

    def _build_type_graph(self, classes: list[ParsedClass]) -> MeshGraph:
        """
        Build type dependency graph from parsed classes.

        Args:
            classes: List of parsed classes.

        Returns:
            MeshGraph with one node per class and "extends" edges for
            resolved base classes.
        """
        graph = MeshGraph("type")

        # Add all class nodes.
        for cls in classes:
            graph.add_node(
                cls.id,
                {
                    "name": cls.name,
                    "file_path": cls.file_path,
                    "line_start": cls.line_start,
                    "line_end": cls.line_end,
                    "bases": cls.bases,
                    "methods": cls.methods,
                    "attributes": cls.attributes,
                    "kind": cls.kind,
                },
            )

        # Bases are plain strings, so resolution is by bare class name only;
        # when two files define the same class name, the last one parsed
        # wins (unchanged heuristic).  The old code also inserted
        # (file_path, name) tuple keys that a string lookup could never hit;
        # those dead entries are removed.
        name_to_id: dict[str, str] = {}
        for cls in classes:
            name_to_id[cls.name] = cls.id

        # Add inheritance edges; unknown bases (e.g. stdlib classes) are
        # silently skipped.
        for cls in classes:
            for base in cls.bases:
                target_id = name_to_id.get(base)
                if target_id and target_id != cls.id:
                    graph.add_edge(
                        cls.id,
                        target_id,
                        {"type": "extends"},
                    )

        return graph

    def _build_data_flow_graph(self, functions: list[ParsedFunction]) -> MeshGraph:
        """
        Build data flow graph from parsed functions.

        Currently only nodes (with parameter/return metadata) are recorded;
        no flow edges are derived yet.

        Args:
            functions: List of parsed functions.

        Returns:
            MeshGraph with data-flow node metadata (no edges yet).
        """
        graph = MeshGraph("dataflow")

        # Add all function nodes with data-flow-relevant metadata.
        for func in functions:
            graph.add_node(
                func.id,
                {
                    "name": func.name,
                    "file_path": func.file_path,
                    "line_start": func.line_start,
                    "line_end": func.line_end,
                    "params": func.params,
                    "returns": func.returns,
                    "kind": "function",
                },
            )

        # TODO: derive real data-flow edges (e.g. matching parameters to
        # return values).  The previous implementation built an unused
        # name->id index here; it was dead code and has been removed.
        return graph

    def _store_graph(self, graph: MeshGraph, graph_type: str) -> None:
        """
        Store graph in database.

        Node and edge IDs are prefixed with the graph type (e.g. "call:")
        so the same function ID can appear in several graphs without
        colliding.

        Args:
            graph: MeshGraph to store.
            graph_type: Type of graph ("call", "type", or "dataflow").
        """
        type_prefix = f"{graph_type}:"

        for node_data in graph.nodes():
            node_id = node_data.get("id", "")
            if not node_id:
                continue  # defensive: skip malformed nodes without an ID
            self._storage.upsert_node(
                node_id=f"{type_prefix}{node_id}",
                node_type=graph_type,
                file_path=node_data.get("file_path", ""),
                data=node_data,
            )

        for edge_data in graph.get_all_edges():
            from_id = edge_data.get("from_id", "")
            to_id = edge_data.get("to_id", "")
            if from_id and to_id:
                self._storage.upsert_edge(
                    from_id=f"{type_prefix}{from_id}",
                    to_id=f"{type_prefix}{to_id}",
                    edge_type=edge_data.get("type", "calls"),
                    data=edge_data,
                )

    def load_call_graph(self) -> MeshGraph:
        """
        Load call graph from storage.

        NOTE(review): _store_graph persists IDs with a "call:" prefix, so
        the reloaded graph's node IDs carry that prefix — confirm that
        downstream consumers expect prefixed IDs.

        Returns:
            MeshGraph with call relationships.
        """
        graph = MeshGraph("call")

        # Load nodes.
        for node in self._storage.get_nodes("call"):
            graph.add_node(node["id"], node["data"])

        # Load edges.
        for edge in self._storage.get_edges("calls"):
            graph.add_edge(edge["from_id"], edge["to_id"], edge["data"])

        return graph

    @property
    def storage(self) -> MeshStorage:
        """Get the underlying storage instance."""
        return self._storage

    def close(self) -> None:
        """Close resources (the storage connection)."""
        self._storage.close()
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
def detect_duplicates(graph: MeshGraph) -> list[dict]:
    """Detect duplicate function names across different files.

    Args:
        graph: Graph whose nodes carry ``name`` and ``file_path`` data.

    Returns:
        List of violation dicts (kind ``"duplicate"``), one per function
        name defined in more than one file.
    """
    from collections import defaultdict

    # Group every occurrence of each function name by file.
    name_to_locations: dict[str, list[str]] = defaultdict(list)
    for node_data in graph.nodes():
        name = node_data.get("name", "")
        file_path = node_data.get("file_path", "")
        if name and file_path:
            name_to_locations[name].append(file_path)

    violations = []
    for name, locations in name_to_locations.items():
        # dict.fromkeys dedupes while preserving first-seen order, so the
        # reported primary file and related_files are deterministic.
        # (list(set(...)) varied across processes with string hash
        # randomization.)
        unique_files = list(dict.fromkeys(locations))
        if len(unique_files) > 1:
            violations.append(
                {
                    "kind": "duplicate",
                    "severity": "error",
                    "message": f"{name}() exists in {len(unique_files)} files",
                    "file_path": unique_files[0],
                    "line": 0,
                    "related_files": unique_files[1:],
                    "fix_hint": f"Consolidate {name}() into one location",
                }
            )
    return violations
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
def detect_circular_calls(graph: MeshGraph) -> list[dict]:
    """Detect circular dependencies using rustworkx.

    Each cycle is collapsed to an ordered, de-duplicated list of top-level
    modules (the first "/" segment of each node's file_path); only cycles
    spanning more than one module are reported, since a cycle inside a
    single module is ordinary recursion.

    Args:
        graph: rustworkx-backed MeshGraph (reads the private ``_graph``).

    Returns:
        List of violation dicts (kind ``"circular"``), one per
        cross-module cycle.
    """
    import rustworkx as rx

    violations = []
    try:
        for cycle in rx.simple_cycles(graph._graph):
            # A self-loop is not a cross-module problem.
            if len(cycle) < 2:
                continue

            # NOTE(review): assumes "/"-separated paths — confirm Windows
            # paths are normalized before reaching here.
            modules: list[str] = []
            for idx in cycle:
                node_data = graph._graph[idx]
                file_path = node_data.get("file_path", "")
                module = file_path.split("/")[0] if file_path else "unknown"
                if module not in modules:
                    modules.append(module)

            # modules is already de-duplicated, so a plain length check
            # suffices (the old len(set(...)) was redundant).
            if len(modules) > 1:
                violations.append(
                    {
                        "kind": "circular",
                        "severity": "error",
                        "message": f"Circular dependency: {' -> '.join(modules)} -> {modules[0]}",
                        "file_path": modules[0],
                        "line": 0,
                        "related_files": modules[1:],
                        "fix_hint": "Extract shared code to a common module",
                    }
                )
    except Exception:
        # Best-effort: never let cycle detection break a check run, but log
        # the failure for debugging instead of silently swallowing it
        # (previously a bare `pass`).
        logger.debug("Circular-dependency detection failed", exc_info=True)

    return violations
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
def detect_circular_dependencies(graph: MeshGraph) -> list[dict]:
    """Backward-compatible alias that delegates to detect_circular_calls.

    Kept so older call sites using the longer name keep working.
    """
    return detect_circular_calls(graph)
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
def detect_naming_violations(graph: MeshGraph) -> list[dict]:
    """Auto-detect naming convention and flag violations.

    Classifies every public function name as snake_case / camelCase /
    PascalCase, determines the majority convention, and — when the majority
    is clear enough (>= 70%) — flags names using a different convention.
    """
    from collections import Counter

    def _convention_of(identifier: str) -> str:
        # Classify an identifier into one of the known naming conventions.
        if "_" in identifier and identifier == identifier.lower():
            return "snake_case"
        if identifier and identifier[0].islower() and any(ch.isupper() for ch in identifier[1:]):
            return "camelCase"
        if identifier and identifier[0].isupper():
            return "PascalCase"
        return "other"

    tally: Counter = Counter()
    classified: list[tuple[dict, str]] = []

    for node in graph.nodes():
        ident = node.get("name", "")
        # Skip unnamed, private, and dunder entries.
        if not ident or ident.startswith("_") or ident in ("__init__", "__main__"):
            continue
        style = _convention_of(ident)
        tally[style] += 1
        classified.append((node, style))

    if not tally:
        return []

    majority, majority_count = tally.most_common(1)[0]
    total = sum(tally.values())
    majority_pct = majority_count / total if total > 0 else 0

    # Only enforce when the codebase has a clearly dominant convention.
    if majority_pct < 0.70:
        return []

    return [
        {
            "kind": "naming",
            "severity": "warning",
            "message": f"{node.get('name', '')}() uses {style} but codebase is {int(majority_pct*100)}% {majority}",
            "file_path": node.get("file_path", ""),
            "line": node.get("line_start", 0),
            "related_files": [],
            "fix_hint": f"Rename to {majority} convention",
        }
        for node, style in classified
        if style != majority and style != "other"
    ]
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
def find_naming_violations(graph: MeshGraph, type_graph: Any = None) -> list[dict]:
    """Compatibility alias for detect_naming_violations.

    The ``type_graph`` argument is accepted for older call sites but is
    ignored.
    """
    return detect_naming_violations(graph)
|