ai_coding_assistant-0.5.0-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (89)
  1. ai_coding_assistant-0.5.0.dist-info/METADATA +226 -0
  2. ai_coding_assistant-0.5.0.dist-info/RECORD +89 -0
  3. ai_coding_assistant-0.5.0.dist-info/WHEEL +4 -0
  4. ai_coding_assistant-0.5.0.dist-info/entry_points.txt +3 -0
  5. ai_coding_assistant-0.5.0.dist-info/licenses/LICENSE +21 -0
  6. coding_assistant/__init__.py +3 -0
  7. coding_assistant/__main__.py +19 -0
  8. coding_assistant/cli/__init__.py +1 -0
  9. coding_assistant/cli/app.py +158 -0
  10. coding_assistant/cli/commands/__init__.py +19 -0
  11. coding_assistant/cli/commands/ask.py +178 -0
  12. coding_assistant/cli/commands/config.py +438 -0
  13. coding_assistant/cli/commands/diagram.py +267 -0
  14. coding_assistant/cli/commands/document.py +410 -0
  15. coding_assistant/cli/commands/explain.py +192 -0
  16. coding_assistant/cli/commands/fix.py +249 -0
  17. coding_assistant/cli/commands/index.py +162 -0
  18. coding_assistant/cli/commands/refactor.py +245 -0
  19. coding_assistant/cli/commands/search.py +182 -0
  20. coding_assistant/cli/commands/serve_docs.py +128 -0
  21. coding_assistant/cli/repl.py +381 -0
  22. coding_assistant/cli/theme.py +90 -0
  23. coding_assistant/codebase/__init__.py +1 -0
  24. coding_assistant/codebase/crawler.py +93 -0
  25. coding_assistant/codebase/parser.py +266 -0
  26. coding_assistant/config/__init__.py +25 -0
  27. coding_assistant/config/config_manager.py +615 -0
  28. coding_assistant/config/settings.py +82 -0
  29. coding_assistant/context/__init__.py +19 -0
  30. coding_assistant/context/chunker.py +443 -0
  31. coding_assistant/context/enhanced_retriever.py +322 -0
  32. coding_assistant/context/hybrid_search.py +311 -0
  33. coding_assistant/context/ranker.py +355 -0
  34. coding_assistant/context/retriever.py +119 -0
  35. coding_assistant/context/window.py +362 -0
  36. coding_assistant/documentation/__init__.py +23 -0
  37. coding_assistant/documentation/agents/__init__.py +27 -0
  38. coding_assistant/documentation/agents/coordinator.py +510 -0
  39. coding_assistant/documentation/agents/module_documenter.py +111 -0
  40. coding_assistant/documentation/agents/synthesizer.py +139 -0
  41. coding_assistant/documentation/agents/task_delegator.py +100 -0
  42. coding_assistant/documentation/decomposition/__init__.py +21 -0
  43. coding_assistant/documentation/decomposition/context_preserver.py +477 -0
  44. coding_assistant/documentation/decomposition/module_detector.py +302 -0
  45. coding_assistant/documentation/decomposition/partitioner.py +621 -0
  46. coding_assistant/documentation/generators/__init__.py +14 -0
  47. coding_assistant/documentation/generators/dataflow_generator.py +440 -0
  48. coding_assistant/documentation/generators/diagram_generator.py +511 -0
  49. coding_assistant/documentation/graph/__init__.py +13 -0
  50. coding_assistant/documentation/graph/dependency_builder.py +468 -0
  51. coding_assistant/documentation/graph/module_analyzer.py +475 -0
  52. coding_assistant/documentation/writers/__init__.py +11 -0
  53. coding_assistant/documentation/writers/markdown_writer.py +322 -0
  54. coding_assistant/embeddings/__init__.py +0 -0
  55. coding_assistant/embeddings/generator.py +89 -0
  56. coding_assistant/embeddings/store.py +187 -0
  57. coding_assistant/exceptions/__init__.py +50 -0
  58. coding_assistant/exceptions/base.py +110 -0
  59. coding_assistant/exceptions/llm.py +249 -0
  60. coding_assistant/exceptions/recovery.py +263 -0
  61. coding_assistant/exceptions/storage.py +213 -0
  62. coding_assistant/exceptions/validation.py +230 -0
  63. coding_assistant/llm/__init__.py +1 -0
  64. coding_assistant/llm/client.py +277 -0
  65. coding_assistant/llm/gemini_client.py +181 -0
  66. coding_assistant/llm/groq_client.py +160 -0
  67. coding_assistant/llm/prompts.py +98 -0
  68. coding_assistant/llm/together_client.py +160 -0
  69. coding_assistant/operations/__init__.py +13 -0
  70. coding_assistant/operations/differ.py +369 -0
  71. coding_assistant/operations/generator.py +347 -0
  72. coding_assistant/operations/linter.py +430 -0
  73. coding_assistant/operations/validator.py +406 -0
  74. coding_assistant/storage/__init__.py +9 -0
  75. coding_assistant/storage/database.py +363 -0
  76. coding_assistant/storage/session.py +231 -0
  77. coding_assistant/utils/__init__.py +31 -0
  78. coding_assistant/utils/cache.py +477 -0
  79. coding_assistant/utils/hardware.py +132 -0
  80. coding_assistant/utils/keystore.py +206 -0
  81. coding_assistant/utils/logger.py +32 -0
  82. coding_assistant/utils/progress.py +311 -0
  83. coding_assistant/validation/__init__.py +13 -0
  84. coding_assistant/validation/files.py +305 -0
  85. coding_assistant/validation/inputs.py +335 -0
  86. coding_assistant/validation/params.py +280 -0
  87. coding_assistant/validation/sanitizers.py +243 -0
  88. coding_assistant/vcs/__init__.py +5 -0
  89. coding_assistant/vcs/git.py +269 -0
coding_assistant/documentation/decomposition/module_detector.py
@@ -0,0 +1,302 @@
+ """Module detection for identifying logical code groupings.
+
+ This module provides enhanced module detection capabilities beyond basic
+ community detection, incorporating file structure, naming conventions,
+ and architectural patterns.
+ """
+
+ from typing import Dict, List, Set, Optional
+ from pathlib import Path
+ import networkx as nx
+ from collections import defaultdict
+
+ from coding_assistant.utils.logger import get_logger
+
+ logger = get_logger(__name__)
+
+
+ class ModuleDetector:
+     """
+     Enhanced module detection using multiple strategies.
+
+     Combines:
+     - Directory structure analysis
+     - Community detection (from Phase 1)
+     - Naming convention patterns
+     - File relationship analysis
+     """
+
+     def __init__(self, dependency_graph: nx.DiGraph):
+         """
+         Initialize module detector.
+
+         Args:
+             dependency_graph: File dependency graph
+         """
+         self.graph = dependency_graph
+
+     def detect_modules(self,
+                        strategy: str = 'hybrid',
+                        min_module_size: int = 2) -> Dict[str, List[str]]:
+         """
+         Detect logical modules using specified strategy.
+
+         Args:
+             strategy: Detection strategy ('directory', 'community', 'hybrid')
+             min_module_size: Minimum files per module
+
+         Returns:
+             Dictionary mapping module names to file lists
+         """
+         logger.info(f"Detecting modules using '{strategy}' strategy")
+
+         if strategy == 'directory':
+             modules = self._detect_by_directory()
+         elif strategy == 'community':
+             modules = self._detect_by_community()
+         elif strategy == 'hybrid':
+             modules = self._detect_hybrid()
+         else:
+             raise ValueError(f"Unknown strategy: {strategy}")
+
+         # Filter out small modules
+         filtered = {
+             name: files for name, files in modules.items()
+             if len(files) >= min_module_size
+         }
+
+         logger.info(f"Detected {len(filtered)} modules (filtered from {len(modules)})")
+
+         return filtered
+
+     def _detect_by_directory(self) -> Dict[str, List[str]]:
+         """
+         Detect modules based on directory structure.
+
+         Groups files by their parent directory.
+         """
+         modules = defaultdict(list)
+
+         for node in self.graph.nodes():
+             path = Path(node)
+
+             # Get parent directory as module name
+             if len(path.parts) > 1:
+                 module_name = '.'.join(path.parts[:-1])
+             else:
+                 module_name = 'root'
+
+             modules[module_name].append(node)
+
+         return dict(modules)
+
+     def _detect_by_community(self) -> Dict[str, List[str]]:
+         """
+         Detect modules using community detection (Louvain algorithm).
+
+         Delegates to ModuleAnalyzer from Phase 1.
+         """
+         from coding_assistant.documentation.graph.module_analyzer import ModuleAnalyzer
+
+         analyzer = ModuleAnalyzer(self.graph)
+         modules = analyzer.detect_modules()
+
+         return modules
+
+     def _detect_hybrid(self) -> Dict[str, List[str]]:
+         """
+         Hybrid approach combining directory structure and community detection.
+
+         Algorithm:
+         1. Start with directory-based grouping
+         2. Apply community detection within each directory group
+         3. Merge highly coupled cross-directory modules
+         """
+         # Step 1: Directory-based grouping
+         dir_modules = self._detect_by_directory()
+
+         # Step 2: Refine each directory module with community detection
+         refined_modules = {}
+
+         for dir_module_name, files in dir_modules.items():
+             if len(files) < 3:
+                 # Too small for community detection
+                 refined_modules[dir_module_name] = files
+                 continue
+
+             # Create subgraph
+             try:
+                 subgraph = self.graph.subgraph(files)
+
+                 # Apply community detection
+                 import community as community_louvain
+                 undirected = subgraph.to_undirected()
+
+                 partition = community_louvain.best_partition(undirected)
+
+                 # Group by community
+                 communities = defaultdict(list)
+                 for node, community_id in partition.items():
+                     communities[community_id].append(node)
+
+                 # Create refined module names
+                 if len(communities) == 1:
+                     # Single community, keep original name
+                     refined_modules[dir_module_name] = files
+                 else:
+                     # Multiple communities, create sub-modules
+                     for community_id, community_files in communities.items():
+                         module_name = f"{dir_module_name}.sub{community_id}"
+                         refined_modules[module_name] = community_files
+
+             except Exception as e:
+                 logger.warning(f"Community detection failed for {dir_module_name}: {e}")
+                 refined_modules[dir_module_name] = files
+
+         return refined_modules
+
+     def compute_module_quality(self, modules: Dict[str, List[str]]) -> Dict[str, float]:
+         """
+         Compute quality scores for detected modules.
+
+         Quality considers:
+         - Cohesion (internal connectivity)
+         - Size balance
+         - Clear boundaries
+
+         Args:
+             modules: Dictionary of module -> files
+
+         Returns:
+             Dictionary of module -> quality score (0-1)
+         """
+         quality_scores = {}
+
+         for module_name, files in modules.items():
+             cohesion = self._compute_cohesion(files)
+             size_score = self._compute_size_score(len(files), len(modules))
+             boundary_score = self._compute_boundary_score(files, modules)
+
+             # Weighted average
+             quality = (
+                 0.5 * cohesion +
+                 0.3 * size_score +
+                 0.2 * boundary_score
+             )
+
+             quality_scores[module_name] = quality
+
+         return quality_scores
+
+     def _compute_cohesion(self, files: List[str]) -> float:
+         """Compute internal cohesion of a module."""
+         if len(files) < 2:
+             return 1.0
+
+         try:
+             subgraph = self.graph.subgraph(files)
+             edges = subgraph.number_of_edges()
+             possible = len(files) * (len(files) - 1)
+
+             return edges / possible if possible > 0 else 0.0
+
+         except:
+             return 0.0
+
+     def _compute_size_score(self, module_size: int, total_modules: int) -> float:
+         """
+         Compute size balance score.
+
+         Prefers modules that are not too large or too small.
+         """
+         ideal_size = max(3, len(list(self.graph.nodes())) // total_modules)
+
+         # Penalty for deviation from ideal
+         deviation = abs(module_size - ideal_size) / ideal_size
+         score = max(0, 1.0 - deviation)
+
+         return score
+
+     def _compute_boundary_score(self,
+                                 files: List[str],
+                                 all_modules: Dict[str, List[str]]) -> float:
+         """
+         Compute how well-defined the module boundaries are.
+
+         Lower external coupling = higher score.
+         """
+         external_edges = 0
+         internal_edges = 0
+
+         files_set = set(files)
+
+         for file in files:
+             if not self.graph.has_node(file):
+                 continue
+
+             for _, target in self.graph.out_edges(file):
+                 if target in files_set:
+                     internal_edges += 1
+                 else:
+                     external_edges += 1
+
+         total_edges = internal_edges + external_edges
+
+         if total_edges == 0:
+             return 1.0
+
+         # Higher score for more internal edges
+         return internal_edges / total_edges
+
+     def suggest_module_improvements(self,
+                                     modules: Dict[str, List[str]]) -> List[str]:
+         """
+         Suggest improvements to module structure.
+
+         Args:
+             modules: Current module grouping
+
+         Returns:
+             List of improvement suggestions
+         """
+         suggestions = []
+
+         quality_scores = self.compute_module_quality(modules)
+
+         # Identify low-quality modules
+         low_quality = [
+             name for name, score in quality_scores.items()
+             if score < 0.5
+         ]
+
+         if low_quality:
+             suggestions.append(
+                 f"Consider refactoring {len(low_quality)} low-quality modules: "
+                 f"{', '.join(low_quality[:3])}"
+             )
+
+         # Check for very small modules
+         small_modules = [
+             name for name, files in modules.items()
+             if len(files) == 1
+         ]
+
+         if small_modules:
+             suggestions.append(
+                 f"Merge {len(small_modules)} single-file modules with related modules"
+             )
+
+         # Check for very large modules
+         avg_size = sum(len(files) for files in modules.values()) / len(modules)
+         large_modules = [
+             name for name, files in modules.items()
+             if len(files) > avg_size * 3
+         ]
+
+         if large_modules:
+             suggestions.append(
+                 f"Consider splitting {len(large_modules)} large modules: "
+                 f"{', '.join(large_modules[:3])}"
+             )
+
+         return suggestions
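
For readers skimming the diff, the following is a minimal usage sketch of the ModuleDetector API shown above. It is not part of the wheel: the file paths and dependency edges are hypothetical, and it assumes the package is installed under the top-level import name coding_assistant (per the file list) together with networkx. The 'directory' strategy is used here because the default 'hybrid' strategy additionally imports the third-party community (python-louvain) module at runtime.

import networkx as nx

from coding_assistant.documentation.decomposition.module_detector import ModuleDetector

# Hypothetical file dependency graph: an edge A -> B means "A imports B".
graph = nx.DiGraph()
graph.add_edges_from([
    ("app/cli/main.py", "app/cli/commands.py"),
    ("app/cli/commands.py", "app/core/engine.py"),
    ("app/core/engine.py", "app/core/models.py"),
    ("app/core/models.py", "app/core/utils.py"),
])

detector = ModuleDetector(graph)

# Group files by parent directory; keep only modules with >= 2 files.
modules = detector.detect_modules(strategy="directory", min_module_size=2)
# -> {"app.cli": [...2 files...], "app.core": [...3 files...]}

scores = detector.compute_module_quality(modules)        # module -> score in [0, 1]
suggestions = detector.suggest_module_improvements(modules)

for name, files in modules.items():
    print(f"{name}: {len(files)} files, quality={scores[name]:.2f}")
for tip in suggestions:
    print(f"- {tip}")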