ai-coding-assistant 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ai_coding_assistant-0.5.0.dist-info/METADATA +226 -0
- ai_coding_assistant-0.5.0.dist-info/RECORD +89 -0
- ai_coding_assistant-0.5.0.dist-info/WHEEL +4 -0
- ai_coding_assistant-0.5.0.dist-info/entry_points.txt +3 -0
- ai_coding_assistant-0.5.0.dist-info/licenses/LICENSE +21 -0
- coding_assistant/__init__.py +3 -0
- coding_assistant/__main__.py +19 -0
- coding_assistant/cli/__init__.py +1 -0
- coding_assistant/cli/app.py +158 -0
- coding_assistant/cli/commands/__init__.py +19 -0
- coding_assistant/cli/commands/ask.py +178 -0
- coding_assistant/cli/commands/config.py +438 -0
- coding_assistant/cli/commands/diagram.py +267 -0
- coding_assistant/cli/commands/document.py +410 -0
- coding_assistant/cli/commands/explain.py +192 -0
- coding_assistant/cli/commands/fix.py +249 -0
- coding_assistant/cli/commands/index.py +162 -0
- coding_assistant/cli/commands/refactor.py +245 -0
- coding_assistant/cli/commands/search.py +182 -0
- coding_assistant/cli/commands/serve_docs.py +128 -0
- coding_assistant/cli/repl.py +381 -0
- coding_assistant/cli/theme.py +90 -0
- coding_assistant/codebase/__init__.py +1 -0
- coding_assistant/codebase/crawler.py +93 -0
- coding_assistant/codebase/parser.py +266 -0
- coding_assistant/config/__init__.py +25 -0
- coding_assistant/config/config_manager.py +615 -0
- coding_assistant/config/settings.py +82 -0
- coding_assistant/context/__init__.py +19 -0
- coding_assistant/context/chunker.py +443 -0
- coding_assistant/context/enhanced_retriever.py +322 -0
- coding_assistant/context/hybrid_search.py +311 -0
- coding_assistant/context/ranker.py +355 -0
- coding_assistant/context/retriever.py +119 -0
- coding_assistant/context/window.py +362 -0
- coding_assistant/documentation/__init__.py +23 -0
- coding_assistant/documentation/agents/__init__.py +27 -0
- coding_assistant/documentation/agents/coordinator.py +510 -0
- coding_assistant/documentation/agents/module_documenter.py +111 -0
- coding_assistant/documentation/agents/synthesizer.py +139 -0
- coding_assistant/documentation/agents/task_delegator.py +100 -0
- coding_assistant/documentation/decomposition/__init__.py +21 -0
- coding_assistant/documentation/decomposition/context_preserver.py +477 -0
- coding_assistant/documentation/decomposition/module_detector.py +302 -0
- coding_assistant/documentation/decomposition/partitioner.py +621 -0
- coding_assistant/documentation/generators/__init__.py +14 -0
- coding_assistant/documentation/generators/dataflow_generator.py +440 -0
- coding_assistant/documentation/generators/diagram_generator.py +511 -0
- coding_assistant/documentation/graph/__init__.py +13 -0
- coding_assistant/documentation/graph/dependency_builder.py +468 -0
- coding_assistant/documentation/graph/module_analyzer.py +475 -0
- coding_assistant/documentation/writers/__init__.py +11 -0
- coding_assistant/documentation/writers/markdown_writer.py +322 -0
- coding_assistant/embeddings/__init__.py +0 -0
- coding_assistant/embeddings/generator.py +89 -0
- coding_assistant/embeddings/store.py +187 -0
- coding_assistant/exceptions/__init__.py +50 -0
- coding_assistant/exceptions/base.py +110 -0
- coding_assistant/exceptions/llm.py +249 -0
- coding_assistant/exceptions/recovery.py +263 -0
- coding_assistant/exceptions/storage.py +213 -0
- coding_assistant/exceptions/validation.py +230 -0
- coding_assistant/llm/__init__.py +1 -0
- coding_assistant/llm/client.py +277 -0
- coding_assistant/llm/gemini_client.py +181 -0
- coding_assistant/llm/groq_client.py +160 -0
- coding_assistant/llm/prompts.py +98 -0
- coding_assistant/llm/together_client.py +160 -0
- coding_assistant/operations/__init__.py +13 -0
- coding_assistant/operations/differ.py +369 -0
- coding_assistant/operations/generator.py +347 -0
- coding_assistant/operations/linter.py +430 -0
- coding_assistant/operations/validator.py +406 -0
- coding_assistant/storage/__init__.py +9 -0
- coding_assistant/storage/database.py +363 -0
- coding_assistant/storage/session.py +231 -0
- coding_assistant/utils/__init__.py +31 -0
- coding_assistant/utils/cache.py +477 -0
- coding_assistant/utils/hardware.py +132 -0
- coding_assistant/utils/keystore.py +206 -0
- coding_assistant/utils/logger.py +32 -0
- coding_assistant/utils/progress.py +311 -0
- coding_assistant/validation/__init__.py +13 -0
- coding_assistant/validation/files.py +305 -0
- coding_assistant/validation/inputs.py +335 -0
- coding_assistant/validation/params.py +280 -0
- coding_assistant/validation/sanitizers.py +243 -0
- coding_assistant/vcs/__init__.py +5 -0
- coding_assistant/vcs/git.py +269 -0
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
"""Preserve architectural context during decomposition.
|
|
2
|
+
|
|
3
|
+
This module ensures that even when a repository is partitioned into smaller pieces,
|
|
4
|
+
the documentation maintains coherence by preserving architectural context, shared
|
|
5
|
+
interfaces, design patterns, and relationships between partitions.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, List, Set, Tuple, Optional
|
|
9
|
+
from dataclasses import dataclass, field
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
import networkx as nx
|
|
12
|
+
from collections import defaultdict, Counter
|
|
13
|
+
|
|
14
|
+
from coding_assistant.documentation.decomposition.partitioner import Partition
|
|
15
|
+
from coding_assistant.utils.logger import get_logger
|
|
16
|
+
|
|
17
|
+
logger = get_logger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
|
|
21
|
+
class ModuleContext:
|
|
22
|
+
"""
|
|
23
|
+
Context information for a module partition.
|
|
24
|
+
|
|
25
|
+
Preserves architectural understanding across partition boundaries.
|
|
26
|
+
"""
|
|
27
|
+
partition_name: str
|
|
28
|
+
related_partitions: List[str] = field(default_factory=list) # Connected partitions
|
|
29
|
+
shared_interfaces: List[str] = field(default_factory=list) # Shared APIs/interfaces
|
|
30
|
+
common_patterns: List[str] = field(default_factory=list) # Design patterns used
|
|
31
|
+
architectural_role: str = "" # e.g., "data layer", "API layer"
|
|
32
|
+
key_files: List[str] = field(default_factory=list) # Most important files
|
|
33
|
+
exports: List[str] = field(default_factory=list) # Public interfaces
|
|
34
|
+
imports_from: Dict[str, List[str]] = field(default_factory=dict) # What it imports
|
|
35
|
+
metadata: Dict = field(default_factory=dict)
|
|
36
|
+
|
|
37
|
+
def __repr__(self):
|
|
38
|
+
return f"ModuleContext({self.partition_name}, role={self.architectural_role})"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class ContextPreserver:
|
|
42
|
+
"""
|
|
43
|
+
Preserve architectural context across partitions.
|
|
44
|
+
|
|
45
|
+
Ensures documentation remains coherent despite decomposition by:
|
|
46
|
+
- Identifying related partitions and their relationships
|
|
47
|
+
- Extracting shared interfaces and APIs
|
|
48
|
+
- Detecting design patterns within partitions
|
|
49
|
+
- Inferring architectural roles (data layer, API layer, UI, etc.)
|
|
50
|
+
- Preserving cross-partition dependencies
|
|
51
|
+
"""
|
|
52
|
+
|
|
53
|
+
def __init__(self):
|
|
54
|
+
"""Initialize the context preserver."""
|
|
55
|
+
pass
|
|
56
|
+
|
|
57
|
+
def extract_context(self,
|
|
58
|
+
partition: Partition,
|
|
59
|
+
all_partitions: List[Partition],
|
|
60
|
+
dependency_graph: nx.DiGraph,
|
|
61
|
+
parsed_files: Optional[Dict[str, Dict]] = None) -> ModuleContext:
|
|
62
|
+
"""
|
|
63
|
+
Extract architectural context for a partition.
|
|
64
|
+
|
|
65
|
+
Args:
|
|
66
|
+
partition: The partition to analyze
|
|
67
|
+
all_partitions: All partitions in the repository
|
|
68
|
+
dependency_graph: Full dependency graph
|
|
69
|
+
parsed_files: Optional dictionary of parsed file information
|
|
70
|
+
|
|
71
|
+
Returns:
|
|
72
|
+
ModuleContext with extracted architectural information
|
|
73
|
+
"""
|
|
74
|
+
logger.debug(f"Extracting context for partition: {partition.name}")
|
|
75
|
+
|
|
76
|
+
# Identify related partitions
|
|
77
|
+
related = self._find_related_partitions(partition, all_partitions, dependency_graph)
|
|
78
|
+
|
|
79
|
+
# Extract shared interfaces
|
|
80
|
+
interfaces = self._extract_shared_interfaces(
|
|
81
|
+
partition, related, dependency_graph, parsed_files
|
|
82
|
+
)
|
|
83
|
+
|
|
84
|
+
# Detect design patterns
|
|
85
|
+
patterns = self._detect_patterns(partition, parsed_files)
|
|
86
|
+
|
|
87
|
+
# Infer architectural role
|
|
88
|
+
role = self._infer_architectural_role(partition, all_partitions, dependency_graph)
|
|
89
|
+
|
|
90
|
+
# Find key files (most central within partition)
|
|
91
|
+
key_files = self._identify_key_files(partition, dependency_graph)
|
|
92
|
+
|
|
93
|
+
# Extract exports (public interfaces)
|
|
94
|
+
exports = self._extract_exports(partition, parsed_files)
|
|
95
|
+
|
|
96
|
+
# Extract import relationships
|
|
97
|
+
imports_from = self._extract_import_relationships(
|
|
98
|
+
partition, all_partitions, dependency_graph
|
|
99
|
+
)
|
|
100
|
+
|
|
101
|
+
context = ModuleContext(
|
|
102
|
+
partition_name=partition.name,
|
|
103
|
+
related_partitions=[p.name for p in related],
|
|
104
|
+
shared_interfaces=interfaces,
|
|
105
|
+
common_patterns=patterns,
|
|
106
|
+
architectural_role=role,
|
|
107
|
+
key_files=key_files,
|
|
108
|
+
exports=exports,
|
|
109
|
+
imports_from=imports_from,
|
|
110
|
+
metadata={
|
|
111
|
+
'file_count': len(partition.files),
|
|
112
|
+
'loc': partition.size_loc,
|
|
113
|
+
'cohesion': partition.cohesion_score,
|
|
114
|
+
'level': partition.level
|
|
115
|
+
}
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
logger.debug(f"Context extracted: role={role}, patterns={len(patterns)}, related={len(related)}")
|
|
119
|
+
|
|
120
|
+
return context
|
|
121
|
+
|
|
122
|
+
def _find_related_partitions(self,
|
|
123
|
+
partition: Partition,
|
|
124
|
+
all_partitions: List[Partition],
|
|
125
|
+
graph: nx.DiGraph) -> List[Partition]:
|
|
126
|
+
"""
|
|
127
|
+
Find partitions that are closely related to this one.
|
|
128
|
+
|
|
129
|
+
Criteria:
|
|
130
|
+
- Direct dependencies (imports/exports)
|
|
131
|
+
- High coupling score
|
|
132
|
+
- Shared file patterns
|
|
133
|
+
"""
|
|
134
|
+
related = []
|
|
135
|
+
partition_files = set(partition.files)
|
|
136
|
+
|
|
137
|
+
for other in all_partitions:
|
|
138
|
+
if other.name == partition.name:
|
|
139
|
+
continue
|
|
140
|
+
|
|
141
|
+
# Check if there's a dependency relationship
|
|
142
|
+
if partition.name in other.dependencies or other.name in partition.dependencies:
|
|
143
|
+
related.append(other)
|
|
144
|
+
continue
|
|
145
|
+
|
|
146
|
+
# Compute coupling
|
|
147
|
+
coupling = self._compute_coupling(partition_files, set(other.files), graph)
|
|
148
|
+
|
|
149
|
+
# Consider related if coupling > threshold
|
|
150
|
+
if coupling > 0.05: # 5% coupling threshold
|
|
151
|
+
related.append(other)
|
|
152
|
+
|
|
153
|
+
return related
|
|
154
|
+
|
|
155
|
+
def _compute_coupling(self,
                      files1: Set[str],
                      files2: Set[str],
                      graph: nx.DiGraph) -> float:
    """
    Compute the coupling ratio between two file sets.

    Returns the fraction of possible directed edges between the two sets
    that actually exist in *graph*, in [0.0, 1.0].

    Fix: the previous version counted at most one edge per file pair
    (``has_edge(a, b) or has_edge(b, a)``) while dividing by the number of
    *directed* slots (``2 * |files1| * |files2|``), so the ratio could never
    exceed 0.5 and pairs linked in both directions were undercounted. Each
    direction is now counted separately, matching the denominator's units.
    """
    if not files1 or not files2:
        return 0.0  # No files on one side => no possible coupling.

    edges_between = 0
    for file1 in files1:
        for file2 in files2:
            # Count each direction on its own so numerator and denominator
            # both measure directed edges.
            if graph.has_edge(file1, file2):
                edges_between += 1
            if graph.has_edge(file2, file1):
                edges_between += 1

    possible_edges = len(files1) * len(files2) * 2
    return edges_between / possible_edges if possible_edges > 0 else 0.0
|
|
173
|
+
|
|
174
|
+
def _extract_shared_interfaces(self,
                               partition: Partition,
                               related: List[Partition],
                               graph: nx.DiGraph,
                               parsed_files: Optional[Dict[str, Dict]]) -> List[str]:
    """
    Extract candidate shared APIs/interfaces from the partition.

    Lists public (non-underscore-prefixed) functions and classes defined in
    the partition as "file_stem.symbol" strings, capped at 20 entries.

    NOTE(review): despite the name, this does not verify that the symbols
    are actually consumed by *related* partitions — it only collects this
    partition's public surface. *related* and *graph* are currently unused
    and retained for interface stability.

    Fix: deduplication previously used ``list(set(...))[:20]``, whose
    ordering is non-deterministic, so repeated runs could report a different
    subset of interfaces. Order-preserving dedup (dict.fromkeys) makes the
    output stable. Symbols with an empty name are now skipped instead of
    producing dangling "stem." entries.
    """
    if not parsed_files:
        return []

    interfaces: List[str] = []
    for file_path in partition.files:
        parsed = parsed_files.get(file_path)
        if not parsed:
            continue

        stem = Path(file_path).stem
        # Names not starting with "_" are treated as public interface.
        for symbol in parsed.get('functions', []) + parsed.get('classes', []):
            name = symbol.get('name', '')
            if name and not name.startswith('_'):
                interfaces.append(f"{stem}.{name}")

    # Deterministic, order-preserving dedup, capped at 20 entries.
    return list(dict.fromkeys(interfaces))[:20]
|
|
220
|
+
|
|
221
|
+
def _detect_patterns(self,
                     partition: Partition,
                     parsed_files: Optional[Dict[str, Dict]]) -> List[str]:
    """
    Detect common design patterns in the partition via naming heuristics.

    Scans lowercased function/class names for pattern keywords (Factory,
    Singleton, Builder, Strategy, Observer, Repository, Service, MVC,
    Dependency Injection); each pattern is reported at most once.

    Returns:
        Sorted list of detected pattern names (empty without parsed data).
    """
    if not parsed_files:
        return []

    # Lowercased symbol names from every parsed file in the partition.
    symbol_names = [
        entry.get('name', '').lower()
        for file_path in partition.files
        if file_path in parsed_files
        for entry in (parsed_files[file_path].get('functions', [])
                      + parsed_files[file_path].get('classes', []))
    ]

    # Keyword heuristics per pattern; a substring hit in any name counts.
    pattern_keywords = {
        'Factory Pattern': ['factory', 'create', 'builder'],
        'Singleton Pattern': ['singleton', 'getinstance', 'instance'],
        'Builder Pattern': ['builder', 'build'],
        'Strategy Pattern': ['strategy', 'algorithm'],
        'Observer Pattern': ['observer', 'listener', 'subscriber', 'event'],
        'Repository Pattern': ['repository', 'repo', 'store'],
        'Service Pattern': ['service', 'handler', 'processor'],
        'MVC Pattern': ['model', 'view', 'controller'],
        'Dependency Injection': ['inject', 'provider', 'container'],
    }

    detected = {
        pattern
        for pattern, keywords in pattern_keywords.items()
        if any(kw in name for kw in keywords for name in symbol_names)
    }

    return sorted(detected)
|
|
275
|
+
|
|
276
|
+
def _infer_architectural_role(self,
                              partition: Partition,
                              all_partitions: List[Partition],
                              graph: nx.DiGraph) -> str:
    """
    Infer the architectural role of this partition from naming conventions.

    Scores role keywords against the partition name (weight 3) and each
    file stem (weight 1); the highest-scoring role wins. Falls back to
    "General Purpose" when nothing matches.

    Possible roles: Data Layer, Business Logic, API Layer, UI Layer,
    Infrastructure, Integration, Testing, CLI, Documentation.
    """
    partition_label = partition.name.lower()
    stems = [Path(f).stem.lower() for f in partition.files]

    role_keywords = {
        'Data Layer': ['model', 'entity', 'database', 'db', 'repository', 'dao', 'schema'],
        'Business Logic': ['service', 'processor', 'handler', 'manager', 'business', 'logic'],
        'API Layer': ['api', 'controller', 'route', 'endpoint', 'rest', 'graphql'],
        'UI Layer': ['view', 'component', 'template', 'ui', 'frontend', 'page'],
        'Infrastructure': ['config', 'util', 'helper', 'common', 'core', 'base'],
        'Integration': ['client', 'adapter', 'integration', 'external', 'connector'],
        'Testing': ['test', 'spec', 'mock', 'fixture'],
        'CLI': ['cli', 'command', 'console'],
        'Documentation': ['doc', 'documentation'],
    }

    scores = {}
    for role, keywords in role_keywords.items():
        # Partition-name hits weigh 3; each matching file stem adds 1.
        score = sum(3 for kw in keywords if kw in partition_label)
        score += sum(1 for kw in keywords for stem in stems if kw in stem)
        if score:
            scores[role] = score

    if not scores:
        return "General Purpose"

    # Ties resolve to the earliest role in role_keywords order, since max
    # returns the first maximal item in insertion order.
    best_role, _ = max(scores.items(), key=lambda item: item[1])
    return best_role
|
|
329
|
+
|
|
330
|
+
def _identify_key_files(self,
                        partition: Partition,
                        graph: nx.DiGraph,
                        top_n: int = 5) -> List[str]:
    """
    Identify the most important files within a partition.

    Ranks files by betweenness centrality within the partition's subgraph
    and returns the top *top_n*; partitions with at most *top_n* files are
    returned whole.

    Fix: the fallback used a bare ``except:``, which also swallows
    KeyboardInterrupt/SystemExit; it now catches ``Exception`` only and
    logs the failure instead of silently hiding it.
    """
    if len(partition.files) <= top_n:
        return partition.files

    try:
        subgraph = graph.subgraph(partition.files)
        centrality = nx.betweenness_centrality(subgraph)
    except Exception as exc:  # e.g. partition files missing from the graph
        logger.debug(f"Centrality computation failed for {partition.name}: {exc}")
        # Fallback: return first N files.
        return partition.files[:top_n]

    ranked = sorted(centrality.items(), key=lambda item: item[1], reverse=True)
    return [file_path for file_path, _ in ranked[:top_n]]
|
|
357
|
+
|
|
358
|
+
def _extract_exports(self,
                     partition: Partition,
                     parsed_files: Optional[Dict[str, Dict]]) -> List[str]:
    """
    Extract the partition's public interface (exported symbols).

    Collects non-underscore-prefixed functions and classes from each parsed
    file as "file_stem.symbol" strings, capped at 30 entries.
    """
    if not parsed_files:
        return []

    exports: List[str] = []
    for file_path in partition.files:
        if file_path not in parsed_files:
            continue

        parsed = parsed_files[file_path]
        stem = Path(file_path).stem

        # Public functions first, then public classes, in file order.
        exports.extend(
            f"{stem}.{name}"
            for section in ('functions', 'classes')
            for name in (item.get('name', '') for item in parsed.get(section, []))
            if not name.startswith('_')
        )

    return exports[:30]  # Limit to top 30 exports
|
|
391
|
+
|
|
392
|
+
def _extract_import_relationships(self,
                                  partition: Partition,
                                  all_partitions: List[Partition],
                                  graph: nx.DiGraph) -> Dict[str, List[str]]:
    """
    Extract what this partition imports from other partitions.

    Walks outgoing dependency edges from each of the partition's files and
    groups the external targets by the partition that owns them.

    Returns:
        Dictionary mapping partition names to lists of imported files.

    Fix: deduplication previously used ``list(set(files))``, whose ordering
    is non-deterministic across runs; order-preserving dedup
    (dict.fromkeys) keeps the output stable for documentation diffs.
    """
    # Map every file to its owning partition for O(1) lookups.
    file_to_partition = {
        f: p.name
        for p in all_partitions
        for f in p.files
    }

    imports_from = defaultdict(list)

    for file in partition.files:
        if not graph.has_node(file):
            continue

        # Outgoing edges model imports: file -> target.
        for _, target in graph.out_edges(file):
            owner = file_to_partition.get(target)
            if owner is not None and owner != partition.name:
                imports_from[owner].append(target)

    # Deterministic order-preserving dedup, returned as a plain dict.
    return {
        partition_name: list(dict.fromkeys(files))
        for partition_name, files in imports_from.items()
    }
|
|
426
|
+
|
|
427
|
+
def generate_context_summary(self, context: ModuleContext) -> str:
    """
    Generate a human-readable summary of module context.

    Args:
        context: ModuleContext to summarize

    Returns:
        Formatted multi-line summary string
    """
    separator = "=" * 60
    meta = context.metadata

    # Header and always-present metadata section.
    lines = [
        f"Module Context: {context.partition_name}",
        separator,
        f"Architectural Role: {context.architectural_role}",
        f"Files: {meta.get('file_count', 0)}",
        f"Lines of Code: {meta.get('loc', 0)}",
        f"Cohesion: {meta.get('cohesion', 0):.2f}",
        "",
        "Related Partitions:",
    ]
    lines.extend(f"  - {name}" for name in context.related_partitions[:5])

    # Optional sections, each capped the same way as the data they show.
    if context.common_patterns:
        lines.append("")
        lines.append("Design Patterns:")
        lines.extend(f"  - {pattern}" for pattern in context.common_patterns)

    if context.key_files:
        lines.append("")
        lines.append("Key Files:")
        lines.extend(f"  - {Path(file_path).name}" for file_path in context.key_files)

    if context.exports:
        lines.append("")
        lines.append(f"Public Interfaces ({len(context.exports)}):")
        lines.extend(f"  - {export}" for export in context.exports[:10])

    lines.append(separator)
    return "\n".join(lines)
|