soft-archmap 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- soft_archmap/__init__.py +0 -0
- soft_archmap/adapters/__init__.py +0 -0
- soft_archmap/adapters/python_adapter.py +198 -0
- soft_archmap/analysis/__init__.py +0 -0
- soft_archmap/analysis/cycles.py +13 -0
- soft_archmap/analysis/health.py +15 -0
- soft_archmap/analysis/impact.py +22 -0
- soft_archmap/analysis/metrics.py +24 -0
- soft_archmap/analysis/risk.py +33 -0
- soft_archmap/analysis/top_risk.py +45 -0
- soft_archmap/analysis/visualize.py +23 -0
- soft_archmap/cli.py +225 -0
- soft_archmap/core/__init__.py +0 -0
- soft_archmap/core/graph.py +125 -0
- soft_archmap/core/model.py +105 -0
- soft_archmap/export/__init__.py +0 -0
- soft_archmap/export/graphviz.py +92 -0
- soft_archmap/export/json_export.py +31 -0
- soft_archmap-0.1.0.dist-info/METADATA +91 -0
- soft_archmap-0.1.0.dist-info/RECORD +24 -0
- soft_archmap-0.1.0.dist-info/WHEEL +5 -0
- soft_archmap-0.1.0.dist-info/entry_points.txt +2 -0
- soft_archmap-0.1.0.dist-info/licenses/LICENSE +8 -0
- soft_archmap-0.1.0.dist-info/top_level.txt +1 -0
soft_archmap/__init__.py
ADDED
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
import os
|
|
3
|
+
from soft_archmap.core.model import Entity, Relation
|
|
4
|
+
|
|
5
|
+
class PythonParser:
    """Parse Python source files into architecture entities and relations.

    Walks each file's module-level AST and records modules, classes,
    functions and methods, plus "imports", "contains", "inherits" and
    "calls" relations on the supplied model (expected to expose
    ``entities``, ``add_entity`` and ``add_relation`` like
    ``ArchitectureModel``).
    """

    def __init__(self, repo_path: str, model):
        self.repo_path = repo_path
        self.model = model

        # Parse-state cursors; calls are attributed via explicit IDs,
        # these are informational.
        self.current_module = None
        self.current_class = None
        self.current_function = None

        # Maps local alias -> fully qualified imported name.
        # Reset at the start of each parse_file() call (see FIX below).
        self.import_aliases = {}

    # ----------------------------
    # PUBLIC
    # ----------------------------
    def parse_file(self, file_path: str):
        """Parse one .py file and add its entities/relations to the model.

        Files inside virtualenv directories and files with syntax errors
        are skipped silently (with a console message).
        """
        # Skip anything inside a virtualenv directory.
        if any(part in {"venv", ".venv", "env", ".env"} for part in file_path.split(os.sep)):
            return

        print(f"Parsing {file_path}...")
        with open(file_path, "r", encoding="utf-8") as f:
            source = f.read()

        try:
            tree = ast.parse(source, filename=file_path)
        except SyntaxError as e:
            print(f"Skipping {file_path} due to syntax error: {e}")
            return

        # FIX: strip only the ".py" *suffix*. The previous
        # str.replace(".py", "") corrupted any path containing ".py"
        # elsewhere (e.g. "my.pyutils/mod.py" -> "my.utils.mod").
        module_name = os.path.relpath(file_path, self.repo_path).replace(os.sep, ".")
        if module_name.endswith(".py"):
            module_name = module_name[: -len(".py")]

        self.current_module = module_name
        module_id = f"module:{module_name}"

        # FIX: aliases are file-scoped. Previously they accumulated across
        # every parsed file, so an alias from one module could silently
        # redirect call resolution in a later, unrelated module.
        self.import_aliases = {}

        # Ensure module entity exists
        self._ensure_entity(module_id, "module", module_name, file_path)

        for node in tree.body:

            # -------- Imports --------
            if isinstance(node, ast.Import):
                for alias in node.names:
                    real_name = alias.name
                    alias_name = alias.asname or alias.name

                    self.import_aliases[alias_name] = real_name
                    self._add_import(module_id, real_name)

            elif isinstance(node, ast.ImportFrom):
                # node.module is None for relative "from . import x" forms;
                # those are skipped.
                if node.module:
                    for alias in node.names:
                        alias_name = alias.asname or alias.name
                        full_name = f"{node.module}.{alias.name}"
                        self.import_aliases[alias_name] = full_name

                    self._add_import(module_id, node.module)

            # -------- Classes --------
            elif isinstance(node, ast.ClassDef):
                self._handle_class(node, module_id, module_name, file_path)

            # -------- Top-level Functions --------
            elif isinstance(node, ast.FunctionDef):
                self._handle_function(
                    node, module_id, module_name, file_path, parent_class=None
                )

    # ----------------------------
    # ENTITY HELPERS
    # ----------------------------
    def _ensure_entity(self, entity_id, entity_type, name, file):
        """Create the entity if the model does not already know it."""
        if entity_id not in self.model.entities:
            self.model.add_entity(Entity(
                id=entity_id,
                type=entity_type,
                name=name,
                file=file
            ))

    def _ensure_external_entity(self, entity_id):
        """Register an unresolved relation target as an "external" entity."""
        if entity_id not in self.model.entities:
            self.model.add_entity(Entity(
                id=entity_id,
                type="external",
                name=entity_id.split(":", 1)[-1],
                file="external"
            ))

    def _add_relation(self, src, dst, rel_type):
        """Add a typed relation, creating a placeholder target if needed."""
        self._ensure_external_entity(dst)

        self.model.add_relation(Relation(
            src=src,
            dst=dst,
            type=rel_type
        ))

    # ----------------------------
    # IMPORTS
    # ----------------------------
    def _add_import(self, module_id: str, imported_module: str):
        """Record module_id -> imported_module as an "imports" relation."""
        target_id = f"module:{imported_module}"
        self._add_relation(module_id, target_id, "imports")

    # ----------------------------
    # CLASS HANDLER
    # ----------------------------
    def _handle_class(self, node, module_id, module_name, file_path):
        """Register a class, its base classes, and its methods."""
        class_name = node.name
        qualname = f"{module_name}.{class_name}"
        class_id = f"class:{qualname}"

        self._ensure_entity(class_id, "class", qualname, file_path)

        self._add_relation(module_id, class_id, "contains")

        # -------- Inheritance --------
        # Only simple-name bases (class C(Base)) are resolved; attribute
        # bases (class C(pkg.Base)) are ignored here.
        for base in node.bases:
            if isinstance(base, ast.Name):
                base_id = f"class:{base.id}"
                self._add_relation(class_id, base_id, "inherits")

        prev_class = self.current_class
        self.current_class = class_name

        for item in node.body:
            if isinstance(item, ast.FunctionDef):
                self._handle_function(
                    item, module_id, module_name, file_path, parent_class=class_name
                )

        self.current_class = prev_class

    # ----------------------------
    # FUNCTION / METHOD HANDLER
    # ----------------------------
    def _handle_function(self, node, module_id, module_name, file_path, parent_class):
        """Register a function or method and every call made inside it."""
        func_name = node.name

        if parent_class:
            qualname = f"{module_name}.{parent_class}.{func_name}"
            func_id = f"function:{qualname}"
            parent_id = f"class:{module_name}.{parent_class}"
            entity_type = "method"
        else:
            qualname = f"{module_name}.{func_name}"
            func_id = f"function:{qualname}"
            parent_id = module_id
            entity_type = "function"

        self._ensure_entity(func_id, entity_type, qualname, file_path)

        self._add_relation(parent_id, func_id, "contains")

        # -------- Calls --------
        prev_function = self.current_function
        self.current_function = func_id

        for child in ast.walk(node):
            if isinstance(child, ast.Call):
                called_name = self._resolve_call(child.func)
                if called_name:
                    target_id = f"function:{called_name}"
                    self._add_relation(func_id, target_id, "calls")

        self.current_function = prev_function

    # ----------------------------
    # CALL RESOLUTION (IMPROVED)
    # ----------------------------
    def _resolve_call(self, node):
        """Best-effort dotted name for a call target, or None.

        Handles plain names (``foo()``) and attribute chains
        (``obj.attr.method()``); anything else (subscripts, calls on call
        results, lambdas) is not resolved.
        """
        # foo()
        if isinstance(node, ast.Name):
            return self._resolve_alias(node.id)

        # obj.method()
        elif isinstance(node, ast.Attribute):
            parts = []
            while isinstance(node, ast.Attribute):
                parts.append(node.attr)
                node = node.value

            if isinstance(node, ast.Name):
                base = self._resolve_alias(node.id)
                parts.append(base)

            return ".".join(reversed(parts))

        return None

    def _resolve_alias(self, name):
        """Expand an import alias to its real name, or return name unchanged."""
        return self.import_aliases.get(name, name)
|
|
File without changes
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# analysis/cycles.py
|
|
2
|
+
from soft_archmap.core.graph import DependencyGraph
|
|
3
|
+
|
|
4
|
+
def detect_cycles(model):
    """Detect dependency cycles in the architecture.

    Builds a DependencyGraph from every relation in *model* and returns
    a list of cycles, each a list of entity IDs.
    """
    graph = DependencyGraph()
    for relation in model.relations:
        graph.add_edge(relation.src, relation.dst)
    return graph.find_cycles()
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# analysis/health.py
|
|
2
|
+
from soft_archmap.core.graph import DependencyGraph
|
|
3
|
+
|
|
4
|
+
def compute_health(model):
    """Return a 0..1 health score: the fraction of entities that are used.

    An entity counts as "used" when at least one other entity depends on
    it. An empty model scores 1.0.
    """
    graph = DependencyGraph()
    for relation in model.relations:
        graph.add_edge(relation.src, relation.dst)

    total = len(model.entities)
    if not total:
        return 1.0

    used = sum(1 for entity in model.entities.values() if graph.dependents(entity.id))
    return round(used / total, 2)
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from collections import defaultdict, deque
|
|
2
|
+
from soft_archmap.core.graph import DependencyGraph
|
|
3
|
+
|
|
4
|
+
class ImpactAnalyzer:
    """Transitive impact analysis: which entities break if one changes."""

    def __init__(self, model):
        # Build a dependency graph from the model's relations.
        self.graph = DependencyGraph()
        self.graph.build_from_model(model)

    def analyze(self, target):
        """Return every entity that transitively depends on *target*.

        Results are in DFS discovery order. NOTE(review): *target* itself
        can appear in the output when it participates in a cycle.
        """
        seen = set()
        impacted = []

        def visit(node):
            for dependent in self.graph.dependents(node):
                if dependent in seen:
                    continue
                seen.add(dependent)
                impacted.append(dependent)
                visit(dependent)

        visit(target)
        return impacted
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# analysis/metrics.py
|
|
2
|
+
from soft_archmap.core.graph import DependencyGraph
|
|
3
|
+
|
|
4
|
+
def compute_metrics(model):
    """Compute simple architecture metrics.

    Returns a dict with counts of modules, classes, functions, relations
    and dependency cycles.
    """
    graph = DependencyGraph()
    for relation in model.relations:
        graph.add_edge(relation.src, relation.dst)

    # Tally entity kinds in a single pass.
    counts = {"module": 0, "class": 0, "function": 0}
    for entity in model.entities.values():
        if entity.type in counts:
            counts[entity.type] += 1

    return {
        "modules": counts["module"],
        "classes": counts["class"],
        "functions": counts["function"],
        "relations": len(model.relations),
        "cycles": len(graph.find_cycles()),
    }
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
class RiskEngine:
    """Heuristic change-risk scoring over a dependency graph.

    The graph is duck-typed: it only needs ``dependents(node_id)``
    returning an iterable of direct dependents, and ``find_cycles()``
    returning a list of cycles (each a collection of node ids).
    The graph is assumed immutable for the lifetime of this engine.
    """

    def __init__(self, graph):
        self.graph = graph
        # Lazily-computed cycle cache (see _in_cycle).
        self._cycles = None

    # -------------------------
    # CORE RISK SCORE
    # -------------------------
    def compute_risk(self, node_id):
        """Return a weighted risk score for *node_id*, rounded to 3 places.

        Direct dependents weigh 0.6 each, 2-hop dependents 0.2 each, and
        membership in any dependency cycle adds a flat 2 * 0.2 penalty.
        """
        direct = len(self.graph.dependents(node_id))

        # indirect risk (2-hop approximation)
        indirect = 0
        for child in self.graph.dependents(node_id):
            indirect += len(self.graph.dependents(child))

        # cycle penalty
        cycle_penalty = 2 if self._in_cycle(node_id) else 0

        risk_score = (
            direct * 0.6 +
            indirect * 0.2 +
            cycle_penalty * 0.2
        )

        return round(risk_score, 3)

    # -------------------------
    # CHECK IF NODE IN CYCLE
    # -------------------------
    def _in_cycle(self, node_id):
        """True if *node_id* appears in any dependency cycle.

        FIX: previously this re-ran full cycle detection on every call,
        making a scan over all nodes O(n * cycle-detection). The cycle
        list is now computed once and cached.
        """
        if self._cycles is None:
            self._cycles = self.graph.find_cycles()
        return any(node_id in cycle for cycle in self._cycles)
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from soft_archmap.analysis.risk import RiskEngine
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class TopRiskAnalyzer:
    """Rank internal (non-external) entities by risk score, highest first.

    Expects *model* to expose ``entities`` (id -> Entity) and ``graph``
    (a DependencyGraph built from the model).
    """

    def __init__(self, model):
        self.model = model
        self.graph = model.graph
        self.engine = RiskEngine(self.graph)

    def get_top_risk_modules(self, top_k=10):
        """Return {"top_risk": [(entity_id, score), ...]} limited to *top_k*.

        *top_k* was previously hard-coded to 10; it is now a parameter
        with the same default, so existing callers are unaffected.
        External entities are skipped: third-party / unresolved code is
        not actionable risk for this repository.
        """
        results = []

        for node_id, entity in self.model.entities.items():
            # Skip external code.
            if entity.type == "external":
                continue

            results.append((node_id, self.engine.compute_risk(node_id)))

        results.sort(key=lambda item: item[1], reverse=True)

        return {
            "top_risk": results[:top_k]
        }
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
# analysis/visualize.py
|
|
2
|
+
import os
|
|
3
|
+
import subprocess
|
|
4
|
+
import shutil
|
|
5
|
+
|
|
6
|
+
def visualize_architecture(dot_file, output_file="architecture.png"):
    """
    Render a Graphviz DOT file to a PNG image.

    Requires the Graphviz 'dot' executable on PATH. Prints a message and
    returns (without raising) if 'dot' is missing or rendering fails.
    """
    if shutil.which("dot") is None:
        print("Graphviz 'dot' not found. Install Graphviz to generate visualization.")
        return

    command = ["dot", "-Tpng", dot_file, "-o", output_file]
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error generating visualization: {e}")
    else:
        print(f"✅ Architecture visualization saved to {output_file}")
|
soft_archmap/cli.py
ADDED
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
from soft_archmap.core.model import ArchitectureModel
|
|
5
|
+
from soft_archmap.adapters.python_adapter import PythonParser
|
|
6
|
+
from soft_archmap.export.graphviz import export_graphviz
|
|
7
|
+
from soft_archmap.export.json_export import export_json
|
|
8
|
+
from soft_archmap.analysis.cycles import detect_cycles
|
|
9
|
+
from soft_archmap.analysis.health import compute_health
|
|
10
|
+
from soft_archmap.analysis.metrics import compute_metrics
|
|
11
|
+
from soft_archmap.analysis.impact import ImpactAnalyzer
|
|
12
|
+
from soft_archmap.analysis.risk import RiskEngine
|
|
13
|
+
from soft_archmap.analysis.top_risk import TopRiskAnalyzer
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class CLI:
    """Positional-argv command dispatcher for soft-archmap.

    Expected argv layout: ``prog <command> [<repo_path>] [<target>]``.
    The repository is parsed at most once per process (see parse_repo).
    """

    def __init__(self):
        self.args = sys.argv
        # command / repo / target are read positionally; any may be None.
        self.command = self.args[1] if len(self.args) > 1 else None
        self.input_repo = self.args[2] if len(self.args) > 2 else None
        self.target = self.args[3] if len(self.args) > 3 else None

        self.model = None
        self.graph = None
        self.parsed = False  # avoid multiple parses

    # -------------------------
    # ENTRY POINT
    # -------------------------
    def run(self):
        """Dispatch self.command to its handler, parsing the repo first
        for the commands that need a model."""
        if not self.command:
            self.help()
            return

        command_map = {
            "analyze": self.analyze,
            "impact": self.impact,
            "report": self.report,
            "top-risk": self.top_risk
        }

        handler = command_map.get(self.command)

        if not handler:
            print(f"Unknown command: {self.command}")
            self.help()
            return

        # Parse repo first if command needs it ("report" does not).
        if self.command in ["analyze", "impact", "top-risk"]:
            if not self.input_repo:
                print("Error: repo path is required for this command")
                return
            self.parse_repo()

        # Execute command
        handler()

    # -------------------------
    # PARSING REPO ONCE
    # -------------------------
    def parse_repo(self):
        """Walk the repo, parse every .py file, and populate self.model
        and self.graph. Exits the process on an invalid repo path."""
        if self.parsed:
            return

        if not self.input_repo or not os.path.isdir(self.input_repo):
            print("Invalid repository path")
            sys.exit(1)

        print(f"Parsing repository: {self.input_repo} ...")
        self.model = ArchitectureModel()
        parser = PythonParser(self.input_repo, self.model)

        for root, dirs, files in os.walk(self.input_repo):
            # Prune virtualenvs, caches and build output in place so
            # os.walk never descends into them.
            dirs[:] = [d for d in dirs if d not in {
                "venv", ".venv", "env", ".env",
                "__pycache__", ".git",
                "build", "dist",
                ".mypy_cache", ".pytest_cache"}]
            for file in files:
                if file.endswith(".py"):
                    file_path = os.path.join(root, file)
                    print(f"Parsing {file_path}")
                    parser.parse_file(file_path)

        # Ensure we have a dependency graph attached to the model.
        from soft_archmap.core.graph import DependencyGraph
        if not hasattr(self.model, "graph") or self.model.graph is None:
            self.model.graph = DependencyGraph()
            self.model.graph.build_from_model(self.model)

        self.graph = self.model.graph
        self.parsed = True
        print("Repository parsed successfully.\n")

    # -------------------------
    # COMMANDS
    # -------------------------
    def analyze(self):
        """Export JSON + DOT artifacts and print summary analytics."""
        print(f"Analyzing repo: {self.input_repo}")

        output_dir = self.get_output_dir()

        json_path = os.path.join(output_dir, "architecture.json")
        dot_path = os.path.join(output_dir, "architecture.dot")

        export_json(self.model, json_path)
        export_graphviz(self.model, dot_path)

        print("\n--- ANALYSIS COMPLETE ---")
        print("Cycles:", detect_cycles(self.model))
        print("Health:", compute_health(self.model))
        print("Metrics:", compute_metrics(self.model))
        print("Analysis complete.")

    def impact(self):
        """Run transitive impact analysis for self.target and print it."""
        if not self.target:
            print("Error: target is required for impact analysis")
            return

        targets = self.resolve_target(self.target)
        if not targets:
            print(f"No entities matched target '{self.target}'")
            return

        analyzer = ImpactAnalyzer(self.model)
        result = []
        for t in targets:
            result.extend(analyzer.analyze(t))

        # Remove duplicates
        result = list(set(result))
        # Sort key is (is_module, is_class, is_function); False sorts
        # first, so function IDs come before classes and modules.
        result.sort(key=lambda x: ("module" in x, "class" in x, "function" in x))

        print("\n--- IMPACT RESULT ---")
        print({
            "target": self.target,
            "impact_count": len(result),
            "impacted_nodes": result
        })

    def top_risk(self):
        """Print the highest-risk internal entities."""
        analyzer = TopRiskAnalyzer(self.model)
        # analyzer = TopRiskAnalyzer(self.graph)
        result = analyzer.get_top_risk_modules()

        print("\n--- TOP RISK MODULES ---")
        for node, score in result["top_risk"]:
            print(f"{node} -> {score}")

    # -------------------------
    # HELPERS
    # -------------------------
    def resolve_target(self, target: str):
        """Match *target* against entity IDs, names and file substrings.

        Returns a de-duplicated list of matching entity IDs (order is
        not guaranteed).
        """
        if not target:
            return []

        matches = []
        for eid, entity in self.model.entities.items():
            # File matching is a substring test; name/id are exact.
            file_match = entity.file and target in entity.file
            name_match = target == entity.name
            id_match = target == eid
            if file_match or name_match or id_match:
                matches.append(eid)

        return list(set(matches))

    def report(self):
        """Placeholder: report generation is not implemented yet."""
        print("[REPORT GENERATION] pending....")

    def get_output_dir(self):
        """
        Ask user for output directory with validation.
        Falls back to current working directory.
        """
        # NOTE(review): interactive input() — blocks in non-interactive
        # environments; confirm this is acceptable for CI usage.
        while True:
            path = input("Enter output directory path (press Enter for default): ").strip()

            # fallback case
            if not path:
                print(f"Using default path: {os.getcwd()}")
                return os.getcwd()

            # valid path
            if os.path.isdir(path):
                return os.path.abspath(path)

            # try creating it
            try:
                os.makedirs(path, exist_ok=True)
                print(f"Created and using: {path}")
                return os.path.abspath(path)
            except Exception:
                print("Invalid path. Try again.")

    def help(self):
        """Print usage for all supported commands."""
        print("""
    Usage:
        soft-archmap analyze <repo_path>
        soft-archmap impact <repo_path> <node>
        soft-archmap report <repo_path> -----> not yet available
        soft-archmap top-risk <repo_path>
    """)
|
|
205
|
+
|
|
206
|
+
def main():
    """Entry point: print the welcome banner, then run the CLI."""
    border = '\n' + '=' * 100 + '\n'
    banner = (
        border,
        "You are using ArchMap-Python by Excited Nuclei Tech Labs",
        "For feedback, please email: excitednuclei.techlabs@gmail.com",
        border,
    )

    # Center every banner line in a 100-column terminal.
    for text in banner:
        print(text.center(100))

    # Run the command-line interface.
    CLI().run()

if __name__ == "__main__":
    main()
|
|
File without changes
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class DependencyGraph:
    """Directed graph over entity IDs with typed edges.

    Both forward and reverse adjacency are maintained so that
    dependencies and dependents are equally cheap to query.
    """

    def __init__(self):
        # node -> {neighbor: set(relation_types)}
        self.adj = defaultdict(lambda: defaultdict(set))

        # reverse graph
        self.rev_adj = defaultdict(lambda: defaultdict(set))

    # ----------------------------
    # BUILD GRAPH
    # ----------------------------
    def add_edge(self, src: str, dst: str, rel_type: str = "depends"):
        """Add src -> dst with *rel_type*; multiple types may coexist."""
        self.adj[src][dst].add(rel_type)
        self.rev_adj[dst][src].add(rel_type)

    def build_from_model(self, model, allowed_types=None):
        """
        Build graph from ArchitectureModel.

        If *allowed_types* is given, relations of other types are skipped.
        """
        for r in model.relations:
            if allowed_types and r.type not in allowed_types:
                continue

            self.add_edge(r.src, r.dst, r.type)

    # ----------------------------
    # BASIC QUERIES
    # ----------------------------
    def nodes(self):
        """All node IDs — a node may appear only as source or only as target."""
        return set(self.adj.keys()) | set(self.rev_adj.keys())

    def neighbors(self, node, rel_type=None):
        """Outgoing neighbors of *node*, optionally filtered by relation type."""
        if node not in self.adj:
            return set()

        if rel_type:
            return {
                n for n, types in self.adj[node].items()
                if rel_type in types
            }

        return set(self.adj[node].keys())

    def dependents(self, node):
        """Nodes with an edge pointing *at* node (reverse neighbors)."""
        return set(self.rev_adj.get(node, {}).keys())

    # ----------------------------
    # CYCLE DETECTION (IMPROVED)
    # ----------------------------
    def find_cycles(self):
        """Return distinct cycles found during one DFS sweep of the graph.

        Each cycle is a list of node IDs rotated so the smallest node is
        first (see normalize_cycle), which deduplicates rotations.
        NOTE(review): recursive DFS — may hit Python's recursion limit on
        very deep graphs; confirm acceptable for repo-sized inputs.
        """
        visited = set()
        stack = []
        stack_set = set()
        cycles = set()

        def normalize_cycle(cycle):
            """
            Normalize cycle to avoid duplicates:
            rotate so smallest node is first
            """
            min_node = min(cycle)
            min_index = cycle.index(min_node)
            rotated = cycle[min_index:] + cycle[:min_index]
            return tuple(rotated)

        def dfs(node):
            visited.add(node)
            stack.append(node)
            stack_set.add(node)

            for neighbor in self.adj.get(node, {}):
                if neighbor not in visited:
                    dfs(neighbor)
                elif neighbor in stack_set:
                    # Back edge: everything on the stack from the first
                    # occurrence of `neighbor` onward forms a cycle.
                    idx = stack.index(neighbor)
                    cycle = stack[idx:]
                    cycles.add(normalize_cycle(cycle))

            stack.pop()
            stack_set.remove(node)

        for node in self.nodes():
            if node not in visited:
                dfs(node)

        return [list(c) for c in cycles]

    # ----------------------------
    # ADVANCED ANALYSIS
    # ----------------------------
    def find_dependency_chain(self, start, end, max_depth=10):
        """
        Find path from start → end.

        Returns every simple path (no repeated nodes) of length at most
        *max_depth*, as lists of node IDs including both endpoints.
        """
        paths = []

        def dfs(node, path):
            if len(path) > max_depth:
                return

            if node == end:
                paths.append(path[:])
                return

            for neighbor in self.adj.get(node, {}):
                if neighbor not in path:
                    dfs(neighbor, path + [neighbor])

        dfs(start, [start])
        return paths

    def subgraph(self, nodes):
        """
        Extract subgraph.

        Keeps only edges whose both endpoints are in *nodes*, preserving
        relation types.
        """
        g = DependencyGraph()
        for src in nodes:
            for dst, types in self.adj.get(src, {}).items():
                if dst in nodes:
                    for t in types:
                        g.add_edge(src, dst, t)
        return g
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from typing import Dict, List, Set, Tuple, Optional
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
# ----------------------------
|
|
7
|
+
# ENTITY
|
|
8
|
+
# ----------------------------
|
|
9
|
+
@dataclass
class Entity:
    """One code element (node) in the architecture model."""
    id: str    # unique id, e.g. "module:pkg.mod", "class:pkg.mod.C"
    type: str  # "module" | "class" | "function" | "method" | "external"
    name: str  # human-readable qualified name
    file: str  # source file path, or "external" for unresolved targets
    line: Optional[int] = None  # NEW (useful later)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# ----------------------------
|
|
19
|
+
# RELATION
|
|
20
|
+
# ----------------------------
|
|
21
|
+
@dataclass
class Relation:
    """A typed, directed edge between two entities."""
    src: str   # source entity id
    dst: str   # destination entity id
    type: str  # "imports" | "contains" | "inherits" | "calls"
    weight: int = 1  # NEW: useful for call frequency
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# ----------------------------
|
|
30
|
+
# ARCHITECTURE MODEL
|
|
31
|
+
# ----------------------------
|
|
32
|
+
@dataclass
class ArchitectureModel:
    """Container for all entities and relations discovered in a repo.

    Relations are deduplicated on (src, dst, type); a duplicate bumps
    the existing relation's weight instead of being appended.
    """

    entities: Dict[str, Entity] = field(default_factory=dict)
    relations: List[Relation] = field(default_factory=list)

    # Deduplication + O(1) lookup: (src, dst, type) -> Relation.
    # FIX: replaces the old private `_relation_set` — the set only told us
    # a duplicate existed, forcing an O(n) scan over `relations` to find
    # the matching object and bump its weight.
    _relation_map: Dict[Tuple[str, str, str], Relation] = field(default_factory=dict, init=False)
    _forward_index: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set), init=False)
    _reverse_index: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set), init=False)

    # ----------------------------
    # ENTITY METHODS
    # ----------------------------
    def add_entity(self, entity: Entity):
        """
        Add or update an entity.
        """
        self.entities[entity.id] = entity

    def get_entity(self, entity_id: str) -> Optional[Entity]:
        """Return the entity with *entity_id*, or None."""
        return self.entities.get(entity_id)

    def ensure_entity(self, entity_id: str, entity_type="external", name=None, file="external"):
        """
        Create entity if it doesn't exist.
        """
        if entity_id not in self.entities:
            self.entities[entity_id] = Entity(
                id=entity_id,
                type=entity_type,
                name=name or entity_id.split(":", 1)[-1],
                file=file
            )

    # ----------------------------
    # RELATION METHODS
    # ----------------------------
    def add_relation(self, relation: Relation):
        """Add *relation*, or bump the weight of an existing duplicate."""
        key = (relation.src, relation.dst, relation.type)
        existing = self._relation_map.get(key)

        if existing is None:
            self.relations.append(relation)
            self._relation_map[key] = relation

            # build graph indexes
            self._forward_index[relation.src].add(relation.dst)
            self._reverse_index[relation.dst].add(relation.src)
        else:
            # weight approximates call/import frequency
            existing.weight += 1

    def dependencies(self, node_id: str) -> Set[str]:
        """IDs this node points at (outgoing edges)."""
        return self._forward_index.get(node_id, set())

    def dependents(self, node_id: str) -> Set[str]:
        """IDs pointing at this node (incoming edges)."""
        return self._reverse_index.get(node_id, set())

    def get_relations(self, src=None, dst=None, type=None) -> List[Relation]:
        """
        Flexible query API (VERY useful later).

        Any combination of src / dst / type filters may be supplied;
        omitted filters match everything.
        """
        results = self.relations

        if src:
            results = [r for r in results if r.src == src]
        if dst:
            results = [r for r in results if r.dst == dst]
        if type:
            results = [r for r in results if r.type == type]

        return results
|
|
File without changes
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from collections import defaultdict
|
|
3
|
+
from soft_archmap.core.model import ArchitectureModel
|
|
4
|
+
|
|
5
|
+
def sanitize_id(value: str) -> str:
    """Replace every character outside [A-Za-z0-9_] with an underscore."""
    # \W with re.ASCII is exactly the complement of [a-zA-Z0-9_].
    return re.sub(r"\W", "_", value, flags=re.ASCII)
|
|
7
|
+
|
|
8
|
+
def escape_label(value: str) -> str:
    """Escape double quotes and newlines for use inside a DOT label string."""
    escaped = value.replace('"', '\\"')
    escaped = escaped.replace("\n", "\\n")
    return escaped
|
|
10
|
+
|
|
11
|
+
def node_style(entity_type: str):
    """Return (shape, fill color) for a Graphviz node of *entity_type*.

    Unknown types fall back to a plain white box.
    """
    if entity_type == "module":
        return ("folder", "#ECEFF1")
    if entity_type == "class":
        return ("box", "#E3F2FD")
    if entity_type == "function":
        return ("oval", "#E8F5E9")
    if entity_type == "method":
        return ("oval", "#FFF3E0")
    return ("box", "#FFFFFF")
|
|
19
|
+
|
|
20
|
+
def edge_style(relation_type: str):
    """Return extra DOT edge-attribute text for *relation_type*.

    Unknown relation types get no extra styling (empty string).
    """
    if relation_type == "imports":
        return 'style=dashed color="#546E7A"'
    if relation_type == "calls":
        return 'color="#2E7D32"'
    if relation_type == "inherits":
        return 'arrowhead=empty color="#1565C0"'
    if relation_type == "contains":
        return 'arrowhead=none color="#616161"'
    return ""
|
|
28
|
+
|
|
29
|
+
def export_graphviz(model, path="architecture.dot", group_by_file=True):
    """
    Export ArchitectureModel to Graphviz DOT format.

    Args:
        model: object exposing ``entities`` (dict of id -> entity with
            ``id``/``type``/``name``/``file`` attributes) and ``relations``
            (iterable of objects with ``src``/``dst``/``type``).
        path: output path for the generated ``.dot`` text file.
        group_by_file: when True, wrap each file's entities in a
            ``subgraph cluster_...`` so Graphviz draws them as one box.
    """
    lines = []
    lines.append("digraph Architecture {")
    # Left-to-right layout reads better for dependency chains.
    lines.append(" rankdir=LR;")
    lines.append(" fontname=\"Helvetica\";")
    # style=filled is what makes the per-node fillcolor visible.
    lines.append(" node [fontname=\"Helvetica\", style=filled];")
    lines.append(" edge [fontname=\"Helvetica\"];")
    lines.append("")

    # Group by file — sorted so the output is deterministic across runs.
    entities_by_file = defaultdict(list)
    for e in sorted(model.entities.values(), key=lambda x: (x.file, x.type, x.name)):
        entities_by_file[e.file].append(e)

    for file_name, entities in sorted(entities_by_file.items()):
        # The "cluster_" name prefix is what makes Graphviz render the
        # subgraph as a visual cluster.
        cluster_name = sanitize_id(f"cluster_{file_name}")
        if group_by_file:
            lines.append(f' subgraph {cluster_name} {{')
            lines.append(f' label="module: {escape_label(file_name)}";')
            lines.append(" style=rounded;")

        for e in entities:
            safe_id = sanitize_id(e.id)
            # "\\n" stays a literal backslash-n in the DOT source; Graphviz
            # renders it as a line break inside the node label.
            label = f"{e.type}\\n{escape_label(e.name)}"
            shape, color = node_style(e.type)
            lines.append(
                f' "{safe_id}" [label="{label}", shape={shape}, fillcolor="{color}"];'
            )

        if group_by_file:
            lines.append(" }")
        lines.append("")

    # Add edges — sorted for deterministic output.  NOTE(review): an edge
    # endpoint with no matching entity becomes an unstyled implicit node.
    for r in sorted(model.relations, key=lambda x: (x.type, x.src, x.dst)):
        src_id = sanitize_id(r.src)
        dst_id = sanitize_id(r.dst)
        style = edge_style(r.type)
        # Attribute lists may be space-separated in DOT, so no comma is
        # needed between the label and the extra style attributes.
        lines.append(
            f' "{src_id}" -> "{dst_id}" [label="{r.type}" {style}];'
        )

    # Legend — a dashed cluster showing one sample node per entity type.
    lines.append("""
    subgraph cluster_legend {
        label="Legend";
        fontsize=12;
        style=dashed;

        key_module [label="module", shape=folder, fillcolor="#ECEFF1"];
        key_class [label="class", shape=box, fillcolor="#E3F2FD"];
        key_function [label="function", shape=oval, fillcolor="#E8F5E9"];
        key_method [label="method", shape=oval, fillcolor="#FFF3E0"];
    }
    """)
    lines.append("}")

    with open(path, "w", encoding="utf-8") as f:
        f.write("\n".join(lines))

    print(f"✅ Graphviz DOT exported to {path}")
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from soft_archmap.core.model import ArchitectureModel
|
|
3
|
+
|
|
4
|
+
def export_json(model, path="architecture.json"):
    """
    Export ArchitectureModel to JSON.

    Serializes every entity (id, type, name, file) and every relation
    (src, dst, type) into a two-key JSON document and writes it to *path*.
    """
    payload = {
        "entities": [
            {"id": e.id, "type": e.type, "name": e.name, "file": e.file}
            for e in model.entities.values()
        ],
        "relations": [
            {"src": r.src, "dst": r.dst, "type": r.type}
            for r in model.relations
        ],
    }

    with open(path, "w", encoding="utf-8") as f:
        json.dump(payload, f, indent=2)

    print(f"✅ JSON exported to {path}")
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: soft-archmap
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Python tool to analyze architecture and dependencies in Python projects.
|
|
5
|
+
Author-email: Excited Nuclei Tech Labs <excitednuclei.techlabs@gmail.com>
|
|
6
|
+
Project-URL: Homepage, https://github.com/EN-Tech-Labs/ArchMap
|
|
7
|
+
Project-URL: Repository, https://github.com/EN-Tech-Labs/ArchMap
|
|
8
|
+
Project-URL: Documentation, https://github.com/EN-Tech-Labs/ArchMap
|
|
9
|
+
Keywords: architecture,dependency,python,analysis
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Classifier: License :: Other/Proprietary License
|
|
14
|
+
Requires-Python: >=3.10
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
License-File: LICENSE
|
|
17
|
+
Requires-Dist: graphviz>=0.21
|
|
18
|
+
Requires-Dist: networkx>=3.6.1
|
|
19
|
+
Requires-Dist: pyvis>=0.3.2
|
|
20
|
+
Requires-Dist: rich>=15.0.0
|
|
21
|
+
Requires-Dist: requests>=2.33.1
|
|
22
|
+
Provides-Extra: dev
|
|
23
|
+
Requires-Dist: asttokens==3.0.1; extra == "dev"
|
|
24
|
+
Requires-Dist: build==1.5.0; extra == "dev"
|
|
25
|
+
Requires-Dist: certifi==2026.4.22; extra == "dev"
|
|
26
|
+
Requires-Dist: charset-normalizer==3.4.7; extra == "dev"
|
|
27
|
+
Requires-Dist: colorama==0.4.6; extra == "dev"
|
|
28
|
+
Requires-Dist: comm==0.2.3; extra == "dev"
|
|
29
|
+
Requires-Dist: debugpy==1.8.20; extra == "dev"
|
|
30
|
+
Requires-Dist: decorator==5.2.1; extra == "dev"
|
|
31
|
+
Requires-Dist: docutils==0.22.4; extra == "dev"
|
|
32
|
+
Requires-Dist: executing==2.2.1; extra == "dev"
|
|
33
|
+
Requires-Dist: id==1.6.1; extra == "dev"
|
|
34
|
+
Requires-Dist: idna==3.13; extra == "dev"
|
|
35
|
+
Requires-Dist: ipykernel==7.2.0; extra == "dev"
|
|
36
|
+
Requires-Dist: ipython==9.13.0; extra == "dev"
|
|
37
|
+
Requires-Dist: ipython_pygments_lexers==1.1.1; extra == "dev"
|
|
38
|
+
Requires-Dist: jaraco.classes==3.4.0; extra == "dev"
|
|
39
|
+
Requires-Dist: jaraco.context==6.1.2; extra == "dev"
|
|
40
|
+
Requires-Dist: jaraco.functools==4.4.0; extra == "dev"
|
|
41
|
+
Requires-Dist: jedi==0.19.2; extra == "dev"
|
|
42
|
+
Requires-Dist: Jinja2==3.1.6; extra == "dev"
|
|
43
|
+
Requires-Dist: jsonpickle==4.1.1; extra == "dev"
|
|
44
|
+
Requires-Dist: jupyter_client==8.8.0; extra == "dev"
|
|
45
|
+
Requires-Dist: jupyter_core==5.9.1; extra == "dev"
|
|
46
|
+
Requires-Dist: keyring==25.7.0; extra == "dev"
|
|
47
|
+
Requires-Dist: markdown-it-py==4.0.0; extra == "dev"
|
|
48
|
+
Requires-Dist: MarkupSafe==3.0.3; extra == "dev"
|
|
49
|
+
Requires-Dist: matplotlib-inline==0.2.1; extra == "dev"
|
|
50
|
+
Requires-Dist: mdurl==0.1.2; extra == "dev"
|
|
51
|
+
Requires-Dist: more-itertools==11.0.2; extra == "dev"
|
|
52
|
+
Requires-Dist: nest-asyncio==1.6.0; extra == "dev"
|
|
53
|
+
Requires-Dist: nh3==0.3.5; extra == "dev"
|
|
54
|
+
Requires-Dist: packaging==26.1; extra == "dev"
|
|
55
|
+
Requires-Dist: parso==0.8.6; extra == "dev"
|
|
56
|
+
Requires-Dist: platformdirs==4.9.6; extra == "dev"
|
|
57
|
+
Requires-Dist: prompt_toolkit==3.0.52; extra == "dev"
|
|
58
|
+
Requires-Dist: psutil==7.2.2; extra == "dev"
|
|
59
|
+
Requires-Dist: pure_eval==0.2.3; extra == "dev"
|
|
60
|
+
Requires-Dist: Pygments==2.20.0; extra == "dev"
|
|
61
|
+
Requires-Dist: pyproject_hooks==1.2.0; extra == "dev"
|
|
62
|
+
Requires-Dist: python-dateutil==2.9.0.post0; extra == "dev"
|
|
63
|
+
Requires-Dist: pywin32-ctypes==0.2.3; extra == "dev"
|
|
64
|
+
Requires-Dist: pyzmq==27.1.0; extra == "dev"
|
|
65
|
+
Requires-Dist: readme_renderer==44.0; extra == "dev"
|
|
66
|
+
Requires-Dist: requests-toolbelt==1.0.0; extra == "dev"
|
|
67
|
+
Requires-Dist: rfc3986==2.0.0; extra == "dev"
|
|
68
|
+
Requires-Dist: six==1.17.0; extra == "dev"
|
|
69
|
+
Requires-Dist: stack-data==0.6.3; extra == "dev"
|
|
70
|
+
Requires-Dist: tornado==6.5.5; extra == "dev"
|
|
71
|
+
Requires-Dist: traitlets==5.14.3; extra == "dev"
|
|
72
|
+
Requires-Dist: twine==6.2.0; extra == "dev"
|
|
73
|
+
Requires-Dist: urllib3==2.6.3; extra == "dev"
|
|
74
|
+
Requires-Dist: wcwidth==0.6.0; extra == "dev"
|
|
75
|
+
Dynamic: license-file
|
|
76
|
+
|
|
77
|
+
# ArchMap-Python
|
|
78
|
+
|
|
79
|
+
ArchMap-Python is a CLI tool by **Excited Nuclei Tech Labs** for analyzing Python software architecture. It helps developers understand dependencies, detect cycles, measure module health, compute risk, and visualize the architecture.
|
|
80
|
+
|
|
81
|
+
## Features
|
|
82
|
+
|
|
83
|
+
- Dependency graph generation
|
|
84
|
+
- Cycle detection
|
|
85
|
+
- Health metrics
|
|
86
|
+
- Risk scoring of modules/functions
|
|
87
|
+
- Impact analysis
|
|
88
|
+
- Graphviz visualization and JSON export
|
|
89
|
+
|
|
90
|
+
## Installation
|
|
91
|
+
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
soft_archmap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
soft_archmap/cli.py,sha256=9mhwXfKcZzupEf0YSg8il30RKqRw0YlOpqLjdLANkWw,7288
|
|
3
|
+
soft_archmap/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
soft_archmap/adapters/python_adapter.py,sha256=N7IKYi4DBPVCT9t7vmB6_Wxc74YIelGjcf2ARWSvGdg,6780
|
|
5
|
+
soft_archmap/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
|
+
soft_archmap/analysis/cycles.py,sha256=1Usg5shzRJdlwV9BHPIQpjlYppdGmWKQdNc_HrDKsjE,342
|
|
7
|
+
soft_archmap/analysis/health.py,sha256=G8FWtqY4z0OfPrp5SAB_STyTQKCI3M2pgu0oMbQScJc,495
|
|
8
|
+
soft_archmap/analysis/impact.py,sha256=xTesRJ3D-HZVC35lQhXpc76LUkNAtmAwxS_gMcxa70A,640
|
|
9
|
+
soft_archmap/analysis/metrics.py,sha256=fx31_xuR_cXpt3tES9BOEMqFlQ65uhG3NE7AIip1eZE,791
|
|
10
|
+
soft_archmap/analysis/risk.py,sha256=n-MtdCJmoqs7bQYsdG_vpgPnauApDj9wmsXI4vZNy0Q,958
|
|
11
|
+
soft_archmap/analysis/top_risk.py,sha256=fGBAYdEorCEb_dlNBdJQYL8oxeGv5mB74jbvakLlmB0,1221
|
|
12
|
+
soft_archmap/analysis/visualize.py,sha256=3UMdq6l1FjHzfj_QwvCVXp7cNjeZLVAjSVat7rpaapc,751
|
|
13
|
+
soft_archmap/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
14
|
+
soft_archmap/core/graph.py,sha256=5s8BTkvTAE4_TfGCW57quqngQ88xW5Z3eInEFJnhfWM,3647
|
|
15
|
+
soft_archmap/core/model.py,sha256=o359dQJF7fcrz5ZASyY61P8-afL6BXblcH9j24DKhlY,3389
|
|
16
|
+
soft_archmap/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
17
|
+
soft_archmap/export/graphviz.py,sha256=JTDUzWqCQdlNXzXCMrQG7Mqcr9g-UpeC3M6a6bM8F3M,3122
|
|
18
|
+
soft_archmap/export/json_export.py,sha256=Wxzxg6EHByGoVgqWikvOMKLQi9vmXmPl92eiYg06yTE,743
|
|
19
|
+
soft_archmap-0.1.0.dist-info/licenses/LICENSE,sha256=lqGthu8C6vpsctKYU8Ut1VKkLyZIH35aH-LRPLLtmdE,385
|
|
20
|
+
soft_archmap-0.1.0.dist-info/METADATA,sha256=A0P5rxIA-ad7KMxPHk3j9shQf2GHH1USgNylVV5CqIk,4012
|
|
21
|
+
soft_archmap-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
|
|
22
|
+
soft_archmap-0.1.0.dist-info/entry_points.txt,sha256=lE_xF1fhLvsWsncW62Vrw0RTK2FJb9vnn9GHMacyyV0,55
|
|
23
|
+
soft_archmap-0.1.0.dist-info/top_level.txt,sha256=HbBX86q0aJSaO6AF0dG9cNbJQmVEQf3qM2ogQGPNYcw,13
|
|
24
|
+
soft_archmap-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
Copyright (c) 2026 Excited Nuclei Tech Labs
|
|
2
|
+
All rights reserved.
|
|
3
|
+
|
|
4
|
+
No part of this software may be reproduced, distributed, or transmitted in any form
|
|
5
|
+
or by any means, including photocopying, recording, or other electronic or mechanical
|
|
6
|
+
methods, without the prior written permission of Excited Nuclei Tech Labs.
|
|
7
|
+
|
|
8
|
+
For permission requests, contact: excitednuclei.techlabs@gmail.com
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
soft_archmap
|