ai-coding-assistant 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ai_coding_assistant-0.5.0.dist-info/METADATA +226 -0
- ai_coding_assistant-0.5.0.dist-info/RECORD +89 -0
- ai_coding_assistant-0.5.0.dist-info/WHEEL +4 -0
- ai_coding_assistant-0.5.0.dist-info/entry_points.txt +3 -0
- ai_coding_assistant-0.5.0.dist-info/licenses/LICENSE +21 -0
- coding_assistant/__init__.py +3 -0
- coding_assistant/__main__.py +19 -0
- coding_assistant/cli/__init__.py +1 -0
- coding_assistant/cli/app.py +158 -0
- coding_assistant/cli/commands/__init__.py +19 -0
- coding_assistant/cli/commands/ask.py +178 -0
- coding_assistant/cli/commands/config.py +438 -0
- coding_assistant/cli/commands/diagram.py +267 -0
- coding_assistant/cli/commands/document.py +410 -0
- coding_assistant/cli/commands/explain.py +192 -0
- coding_assistant/cli/commands/fix.py +249 -0
- coding_assistant/cli/commands/index.py +162 -0
- coding_assistant/cli/commands/refactor.py +245 -0
- coding_assistant/cli/commands/search.py +182 -0
- coding_assistant/cli/commands/serve_docs.py +128 -0
- coding_assistant/cli/repl.py +381 -0
- coding_assistant/cli/theme.py +90 -0
- coding_assistant/codebase/__init__.py +1 -0
- coding_assistant/codebase/crawler.py +93 -0
- coding_assistant/codebase/parser.py +266 -0
- coding_assistant/config/__init__.py +25 -0
- coding_assistant/config/config_manager.py +615 -0
- coding_assistant/config/settings.py +82 -0
- coding_assistant/context/__init__.py +19 -0
- coding_assistant/context/chunker.py +443 -0
- coding_assistant/context/enhanced_retriever.py +322 -0
- coding_assistant/context/hybrid_search.py +311 -0
- coding_assistant/context/ranker.py +355 -0
- coding_assistant/context/retriever.py +119 -0
- coding_assistant/context/window.py +362 -0
- coding_assistant/documentation/__init__.py +23 -0
- coding_assistant/documentation/agents/__init__.py +27 -0
- coding_assistant/documentation/agents/coordinator.py +510 -0
- coding_assistant/documentation/agents/module_documenter.py +111 -0
- coding_assistant/documentation/agents/synthesizer.py +139 -0
- coding_assistant/documentation/agents/task_delegator.py +100 -0
- coding_assistant/documentation/decomposition/__init__.py +21 -0
- coding_assistant/documentation/decomposition/context_preserver.py +477 -0
- coding_assistant/documentation/decomposition/module_detector.py +302 -0
- coding_assistant/documentation/decomposition/partitioner.py +621 -0
- coding_assistant/documentation/generators/__init__.py +14 -0
- coding_assistant/documentation/generators/dataflow_generator.py +440 -0
- coding_assistant/documentation/generators/diagram_generator.py +511 -0
- coding_assistant/documentation/graph/__init__.py +13 -0
- coding_assistant/documentation/graph/dependency_builder.py +468 -0
- coding_assistant/documentation/graph/module_analyzer.py +475 -0
- coding_assistant/documentation/writers/__init__.py +11 -0
- coding_assistant/documentation/writers/markdown_writer.py +322 -0
- coding_assistant/embeddings/__init__.py +0 -0
- coding_assistant/embeddings/generator.py +89 -0
- coding_assistant/embeddings/store.py +187 -0
- coding_assistant/exceptions/__init__.py +50 -0
- coding_assistant/exceptions/base.py +110 -0
- coding_assistant/exceptions/llm.py +249 -0
- coding_assistant/exceptions/recovery.py +263 -0
- coding_assistant/exceptions/storage.py +213 -0
- coding_assistant/exceptions/validation.py +230 -0
- coding_assistant/llm/__init__.py +1 -0
- coding_assistant/llm/client.py +277 -0
- coding_assistant/llm/gemini_client.py +181 -0
- coding_assistant/llm/groq_client.py +160 -0
- coding_assistant/llm/prompts.py +98 -0
- coding_assistant/llm/together_client.py +160 -0
- coding_assistant/operations/__init__.py +13 -0
- coding_assistant/operations/differ.py +369 -0
- coding_assistant/operations/generator.py +347 -0
- coding_assistant/operations/linter.py +430 -0
- coding_assistant/operations/validator.py +406 -0
- coding_assistant/storage/__init__.py +9 -0
- coding_assistant/storage/database.py +363 -0
- coding_assistant/storage/session.py +231 -0
- coding_assistant/utils/__init__.py +31 -0
- coding_assistant/utils/cache.py +477 -0
- coding_assistant/utils/hardware.py +132 -0
- coding_assistant/utils/keystore.py +206 -0
- coding_assistant/utils/logger.py +32 -0
- coding_assistant/utils/progress.py +311 -0
- coding_assistant/validation/__init__.py +13 -0
- coding_assistant/validation/files.py +305 -0
- coding_assistant/validation/inputs.py +335 -0
- coding_assistant/validation/params.py +280 -0
- coding_assistant/validation/sanitizers.py +243 -0
- coding_assistant/vcs/__init__.py +5 -0
- coding_assistant/vcs/git.py +269 -0
|
@@ -0,0 +1,615 @@
|
|
|
1
|
+
"""Configuration management system."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import yaml
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, Dict, Optional, List
|
|
7
|
+
from dataclasses import dataclass, asdict, field
|
|
8
|
+
|
|
9
|
+
from coding_assistant.exceptions.validation import ConfigurationError
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass
class ProjectConfig:
    """Settings describing the project being analyzed."""

    name: str = "my-project"
    root: str = "."
    # Directory/glob patterns skipped while crawling the codebase.
    exclude: List[str] = field(
        default_factory=lambda: [
            "node_modules",
            "venv",
            ".venv",
            "__pycache__",
            ".git",
            ".pytest_cache",
            "dist",
            "build",
            "*.egg-info",
        ]
    )
    include_hidden: bool = False
    max_file_size_mb: int = 10

    def __post_init__(self) -> None:
        """Reject configurations with an empty project name."""
        if self.name:
            return
        raise ConfigurationError(
            Path("config"),
            "Project name cannot be empty"
        )
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class LLMConfig:
    """LLM provider configuration.

    ``__post_init__`` fills missing values: the API key falls back to the
    provider's environment variable, and the endpoint falls back to the
    provider's well-known default URL.
    """

    provider: str = "ollama"
    model: str = "qwen2.5-coder:7b"
    endpoint: Optional[str] = None
    api_key: Optional[str] = None
    max_tokens: int = 200000
    temperature: float = 0.7
    timeout_seconds: int = 120

    # Environment variable holding each provider's API key.
    # "gemini" is included for consistency with llm/gemini_client.py and
    # settings.py, which both resolve GEMINI_API_KEY.
    _API_KEY_ENV = {
        "openai": "OPENAI_API_KEY",
        "claude": "ANTHROPIC_API_KEY",
        "groq": "GROQ_API_KEY",
        "together": "TOGETHER_API_KEY",
        "gemini": "GEMINI_API_KEY",
    }

    # Default endpoint per provider. Providers absent from this map (mock,
    # gemini — whose client presumably manages its own URL) keep endpoint=None.
    _DEFAULT_ENDPOINTS = {
        "ollama": "http://localhost:11434",
        "openai": "https://api.openai.com/v1",
        "claude": "https://api.anthropic.com/v1",
        "groq": "https://api.groq.com/openai/v1",
        "together": "https://api.together.xyz/v1",
    }

    VALID_PROVIDERS = ["ollama", "openai", "claude", "groq", "together", "gemini", "mock"]

    def __post_init__(self):
        """Validate the config and fill provider-dependent defaults.

        Raises:
            ConfigurationError: If the provider is unknown or the
                temperature is outside [0.0, 2.0].
        """
        # Get API key from environment if not set explicitly.
        if not self.api_key:
            env_var = self._API_KEY_ENV.get(self.provider)
            if env_var:
                self.api_key = os.getenv(env_var)

        # Set default endpoint if not provided.
        if not self.endpoint:
            self.endpoint = self._DEFAULT_ENDPOINTS.get(self.provider)

        # Validate provider.
        if self.provider not in self.VALID_PROVIDERS:
            raise ConfigurationError(
                Path("config"),
                f"Invalid LLM provider: {self.provider}. "
                f"Must be one of: {', '.join(self.VALID_PROVIDERS)}"
            )

        # Validate temperature.
        if not 0.0 <= self.temperature <= 2.0:
            raise ConfigurationError(
                Path("config"),
                f"Temperature must be between 0.0 and 2.0, got {self.temperature}"
            )
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@dataclass
class EmbeddingConfig:
    """Embedding model configuration."""

    model: str = "all-MiniLM-L6-v2"
    cache_dir: str = ".assistant/cache"
    batch_size: int = 32
    device: str = "cpu"  # one of: cpu, cuda, mps

    def __post_init__(self) -> None:
        """Reject unknown compute devices."""
        allowed = ["cpu", "cuda", "mps"]
        if self.device in allowed:
            return
        raise ConfigurationError(
            Path("config"),
            f"Invalid device: {self.device}. "
            f"Must be one of: {', '.join(allowed)}"
        )
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@dataclass
class SearchConfig:
    """Search and retrieval configuration."""

    top_k: int = 10
    # Weight for vector search: 0.0 = keyword only, 1.0 = vector only.
    hybrid_alpha: float = 0.7
    min_similarity: float = 0.3
    max_context_chunks: int = 20

    def __post_init__(self) -> None:
        """Reject out-of-range values."""
        if not 1 <= self.top_k <= 100:
            raise ConfigurationError(
                Path("config"),
                f"top_k must be between 1 and 100, got {self.top_k}"
            )

        # Both ratio fields share the same [0.0, 1.0] constraint.
        for attr in ("hybrid_alpha", "min_similarity"):
            value = getattr(self, attr)
            if not 0.0 <= value <= 1.0:
                raise ConfigurationError(
                    Path("config"),
                    f"{attr} must be between 0.0 and 1.0, got {value}"
                )
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
@dataclass
class CacheConfig:
    """Caching configuration."""

    enabled: bool = True
    max_size_mb: int = 500
    ttl_seconds: int = 3600
    embedding_cache_enabled: bool = True
    response_cache_enabled: bool = True

    def __post_init__(self):
        """Validate configuration.

        Raises:
            ConfigurationError: If a size or TTL value is negative.
        """
        # The check permits 0, so the message says "non-negative" — the
        # previous "must be positive" text did not match the `< 0` test.
        if self.max_size_mb < 0:
            raise ConfigurationError(
                Path("config"),
                f"max_size_mb must be non-negative, got {self.max_size_mb}"
            )

        if self.ttl_seconds < 0:
            raise ConfigurationError(
                Path("config"),
                f"ttl_seconds must be non-negative, got {self.ttl_seconds}"
            )
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
@dataclass
class UIConfig:
    """User interface configuration."""

    color: bool = True
    progress_bars: bool = True
    verbose: bool = False
    editor: Optional[str] = None
    pager: Optional[str] = None

    def __post_init__(self) -> None:
        """Fill editor/pager from the environment when unset."""
        self.editor = self.editor or os.getenv("EDITOR", "vim")
        self.pager = self.pager or os.getenv("PAGER", "less")
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
@dataclass
class StorageConfig:
    """Storage and database configuration."""

    # Session database file, relative to the working directory.
    # NOTE(review): presumably SQLite, given the .db suffix and the sibling
    # storage/database.py module — confirm against that module.
    db_path: str = ".assistant/sessions.db"
    # Whether sessions are persisted automatically.
    auto_save: bool = True
    # Cap on the number of retained sessions.
    max_sessions: int = 100
    # Whether sessions beyond the cap are compressed rather than kept verbatim.
    compress_old_sessions: bool = True
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
@dataclass
class Config:
    """
    Main configuration class.

    Aggregates all configuration sections and provides
    methods for loading, saving, merging, and validating config.
    """

    project: ProjectConfig = field(default_factory=ProjectConfig)
    llm: LLMConfig = field(default_factory=LLMConfig)
    embeddings: EmbeddingConfig = field(default_factory=EmbeddingConfig)
    search: SearchConfig = field(default_factory=SearchConfig)
    cache: CacheConfig = field(default_factory=CacheConfig)
    ui: UIConfig = field(default_factory=UIConfig)
    storage: StorageConfig = field(default_factory=StorageConfig)

    @classmethod
    def from_file(cls, config_file: Path) -> 'Config':
        """
        Load configuration from YAML file.

        Args:
            config_file: Path to config file (.assistant.yml)

        Returns:
            Config object (defaults if the file does not exist)

        Raises:
            ConfigurationError: If config file is invalid
        """
        if not config_file.exists():
            # Return default config if file doesn't exist
            return cls.default()

        try:
            with open(config_file, 'r') as f:
                data = yaml.safe_load(f)

            # Delegate section construction to from_dict so file-based and
            # dict-based loading cannot drift apart.
            return cls.from_dict(data if data is not None else {})

        except yaml.YAMLError as e:
            raise ConfigurationError(
                config_file,
                f"Invalid YAML syntax: {e}"
            ) from e
        except TypeError as e:
            # e.g. a section mapping carries keys unknown to its dataclass
            raise ConfigurationError(
                config_file,
                f"Invalid configuration structure: {e}"
            ) from e
        except ConfigurationError:
            # Section validators raise a specific ConfigurationError;
            # re-raise it instead of re-wrapping, which would bury the
            # original message inside a generic "Failed to load" one.
            raise
        except Exception as e:
            raise ConfigurationError(
                config_file,
                f"Failed to load configuration: {e}"
            ) from e

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'Config':
        """
        Create config from dictionary.

        Args:
            data: Configuration dictionary (missing sections get defaults)

        Returns:
            Config object
        """
        return cls(
            project=ProjectConfig(**data.get('project', {})),
            llm=LLMConfig(**data.get('llm', {})),
            embeddings=EmbeddingConfig(**data.get('embeddings', {})),
            search=SearchConfig(**data.get('search', {})),
            cache=CacheConfig(**data.get('cache', {})),
            ui=UIConfig(**data.get('ui', {})),
            storage=StorageConfig(**data.get('storage', {}))
        )

    @classmethod
    def default(cls) -> 'Config':
        """
        Create default configuration.

        Returns:
            Config with default values
        """
        return cls()

    def save(self, config_file: Path, create_dirs: bool = True) -> None:
        """
        Save configuration to YAML file.

        Args:
            config_file: Path to save config
            create_dirs: Whether to create parent directories

        Raises:
            ConfigurationError: If cannot save config
        """
        try:
            # Create parent directories if needed
            if create_dirs:
                config_file.parent.mkdir(parents=True, exist_ok=True)

            # Write the dict form as YAML, preserving section order.
            with open(config_file, 'w') as f:
                yaml.dump(
                    self.to_dict(),
                    f,
                    default_flow_style=False,
                    sort_keys=False,
                    indent=2
                )

        except Exception as e:
            raise ConfigurationError(
                config_file,
                f"Failed to save configuration: {e}"
            ) from e

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert config to dictionary.

        Returns:
            Dictionary representation with one key per section
        """
        return {
            'project': asdict(self.project),
            'llm': asdict(self.llm),
            'embeddings': asdict(self.embeddings),
            'search': asdict(self.search),
            'cache': asdict(self.cache),
            'ui': asdict(self.ui),
            'storage': asdict(self.storage)
        }

    def merge(self, other: 'Config') -> 'Config':
        """
        Merge with another config (other takes precedence).

        Note: `other` is a complete Config, so every one of its field
        values — including defaults — overwrites the corresponding value
        of `self`.

        Args:
            other: Config to merge with

        Returns:
            New merged config
        """
        merged_dict = self.to_dict()

        for section, values in other.to_dict().items():
            if section in merged_dict:
                merged_dict[section].update(values)

        return Config.from_dict(merged_dict)

    def validate(self) -> List[str]:
        """
        Validate configuration against the filesystem.

        Side effect: creates the cache and database directories if missing.

        Returns:
            List of validation errors (empty if valid)
        """
        errors = []

        # Validate project root exists
        project_root = Path(self.project.root)
        if not project_root.exists():
            errors.append(f"Project root does not exist: {project_root}")

        # Validate cache directory is writable
        cache_dir = Path(self.embeddings.cache_dir)
        try:
            cache_dir.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            errors.append(f"Cannot create cache directory: {e}")

        # Validate database directory is writable
        db_path = Path(self.storage.db_path)
        try:
            db_path.parent.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            errors.append(f"Cannot create database directory: {e}")

        return errors
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
class ConfigManager:
|
|
396
|
+
"""
|
|
397
|
+
Manages configuration loading and saving.
|
|
398
|
+
|
|
399
|
+
Handles config file discovery, environment variables,
|
|
400
|
+
and command-line overrides.
|
|
401
|
+
"""
|
|
402
|
+
|
|
403
|
+
DEFAULT_CONFIG_FILES = [
|
|
404
|
+
".assistant.yml",
|
|
405
|
+
".assistant.yaml",
|
|
406
|
+
"assistant.yml",
|
|
407
|
+
"assistant.yaml"
|
|
408
|
+
]
|
|
409
|
+
|
|
410
|
+
@staticmethod
|
|
411
|
+
def find_config_file(start_path: Path = None) -> Optional[Path]:
|
|
412
|
+
"""
|
|
413
|
+
Find configuration file in current or parent directories.
|
|
414
|
+
|
|
415
|
+
Args:
|
|
416
|
+
start_path: Starting directory (default: current directory)
|
|
417
|
+
|
|
418
|
+
Returns:
|
|
419
|
+
Path to config file or None
|
|
420
|
+
"""
|
|
421
|
+
if start_path is None:
|
|
422
|
+
start_path = Path.cwd()
|
|
423
|
+
|
|
424
|
+
# Check current directory and parents
|
|
425
|
+
current = start_path.resolve()
|
|
426
|
+
|
|
427
|
+
while True:
|
|
428
|
+
for config_name in ConfigManager.DEFAULT_CONFIG_FILES:
|
|
429
|
+
config_file = current / config_name
|
|
430
|
+
if config_file.exists():
|
|
431
|
+
return config_file
|
|
432
|
+
|
|
433
|
+
# Move to parent
|
|
434
|
+
parent = current.parent
|
|
435
|
+
if parent == current: # Reached root
|
|
436
|
+
break
|
|
437
|
+
current = parent
|
|
438
|
+
|
|
439
|
+
return None
|
|
440
|
+
|
|
441
|
+
@staticmethod
|
|
442
|
+
def load(config_file: Optional[Path] = None) -> Config:
|
|
443
|
+
"""
|
|
444
|
+
Load configuration.
|
|
445
|
+
|
|
446
|
+
Search order:
|
|
447
|
+
1. Provided config_file
|
|
448
|
+
2. Found config file (search up from cwd)
|
|
449
|
+
3. Default config
|
|
450
|
+
|
|
451
|
+
Args:
|
|
452
|
+
config_file: Optional explicit config file path
|
|
453
|
+
|
|
454
|
+
Returns:
|
|
455
|
+
Loaded configuration
|
|
456
|
+
"""
|
|
457
|
+
# Use provided file or search for one
|
|
458
|
+
if config_file is None:
|
|
459
|
+
config_file = ConfigManager.find_config_file()
|
|
460
|
+
|
|
461
|
+
# Load from file or use default
|
|
462
|
+
if config_file and config_file.exists():
|
|
463
|
+
config = Config.from_file(config_file)
|
|
464
|
+
else:
|
|
465
|
+
config = Config.default()
|
|
466
|
+
|
|
467
|
+
# Apply environment variable overrides
|
|
468
|
+
ConfigManager._apply_env_overrides(config)
|
|
469
|
+
|
|
470
|
+
return config
|
|
471
|
+
|
|
472
|
+
@staticmethod
|
|
473
|
+
def _apply_env_overrides(config: Config) -> None:
|
|
474
|
+
"""
|
|
475
|
+
Apply environment variable overrides to config.
|
|
476
|
+
|
|
477
|
+
Environment variables follow pattern:
|
|
478
|
+
ASSISTANT_SECTION_KEY=value
|
|
479
|
+
|
|
480
|
+
Examples:
|
|
481
|
+
- ASSISTANT_LLM_PROVIDER=ollama
|
|
482
|
+
- ASSISTANT_UI_VERBOSE=true
|
|
483
|
+
- ASSISTANT_CACHE_ENABLED=false
|
|
484
|
+
|
|
485
|
+
Args:
|
|
486
|
+
config: Config to modify
|
|
487
|
+
"""
|
|
488
|
+
prefix = "ASSISTANT_"
|
|
489
|
+
|
|
490
|
+
for env_var, value in os.environ.items():
|
|
491
|
+
if not env_var.startswith(prefix):
|
|
492
|
+
continue
|
|
493
|
+
|
|
494
|
+
# Parse env var name
|
|
495
|
+
parts = env_var[len(prefix):].lower().split('_', 1)
|
|
496
|
+
if len(parts) != 2:
|
|
497
|
+
continue
|
|
498
|
+
|
|
499
|
+
section, key = parts
|
|
500
|
+
|
|
501
|
+
# Get config section
|
|
502
|
+
if section == "project":
|
|
503
|
+
obj = config.project
|
|
504
|
+
elif section == "llm":
|
|
505
|
+
obj = config.llm
|
|
506
|
+
elif section == "embeddings":
|
|
507
|
+
obj = config.embeddings
|
|
508
|
+
elif section == "search":
|
|
509
|
+
obj = config.search
|
|
510
|
+
elif section == "cache":
|
|
511
|
+
obj = config.cache
|
|
512
|
+
elif section == "ui":
|
|
513
|
+
obj = config.ui
|
|
514
|
+
elif section == "storage":
|
|
515
|
+
obj = config.storage
|
|
516
|
+
else:
|
|
517
|
+
continue
|
|
518
|
+
|
|
519
|
+
# Set value if attribute exists
|
|
520
|
+
if hasattr(obj, key):
|
|
521
|
+
# Parse value based on current type
|
|
522
|
+
current_value = getattr(obj, key)
|
|
523
|
+
|
|
524
|
+
if isinstance(current_value, bool):
|
|
525
|
+
parsed = value.lower() in ('true', '1', 'yes', 'on')
|
|
526
|
+
elif isinstance(current_value, int):
|
|
527
|
+
parsed = int(value)
|
|
528
|
+
elif isinstance(current_value, float):
|
|
529
|
+
parsed = float(value)
|
|
530
|
+
else:
|
|
531
|
+
parsed = value
|
|
532
|
+
|
|
533
|
+
setattr(obj, key, parsed)
|
|
534
|
+
|
|
535
|
+
@staticmethod
|
|
536
|
+
def generate_default_config(output_file: Path) -> None:
|
|
537
|
+
"""
|
|
538
|
+
Generate default configuration file with comments.
|
|
539
|
+
|
|
540
|
+
Args:
|
|
541
|
+
output_file: Where to save config file
|
|
542
|
+
"""
|
|
543
|
+
config = Config.default()
|
|
544
|
+
|
|
545
|
+
# Format exclude list properly (quote items with special chars)
|
|
546
|
+
def format_item(item):
|
|
547
|
+
if any(char in item for char in ['*', '?', '[', ']', '{', '}', '!', '@', '#', '$', '%', '^', '&']):
|
|
548
|
+
return f" - '{item}'"
|
|
549
|
+
return f' - {item}'
|
|
550
|
+
|
|
551
|
+
exclude_list = '\n'.join(format_item(item) for item in config.project.exclude)
|
|
552
|
+
|
|
553
|
+
# Create commented YAML
|
|
554
|
+
content = f"""# AI Coding Assistant Configuration
|
|
555
|
+
# Generated by: assistant config --generate
|
|
556
|
+
|
|
557
|
+
# Project Settings
|
|
558
|
+
project:
|
|
559
|
+
name: {config.project.name}
|
|
560
|
+
root: {config.project.root}
|
|
561
|
+
exclude:
|
|
562
|
+
{exclude_list}
|
|
563
|
+
include_hidden: {str(config.project.include_hidden).lower()}
|
|
564
|
+
max_file_size_mb: {config.project.max_file_size_mb}
|
|
565
|
+
|
|
566
|
+
# LLM Provider Settings
|
|
567
|
+
llm:
|
|
568
|
+
provider: {config.llm.provider} # Options: ollama, openai, claude, mock
|
|
569
|
+
model: {config.llm.model}
|
|
570
|
+
# endpoint: http://localhost:11434 # Optional: custom endpoint
|
|
571
|
+
# api_key: your-api-key-here # Or set OPENAI_API_KEY / ANTHROPIC_API_KEY env var
|
|
572
|
+
max_tokens: {config.llm.max_tokens}
|
|
573
|
+
temperature: {config.llm.temperature} # 0.0 = deterministic, 1.0 = creative
|
|
574
|
+
timeout_seconds: {config.llm.timeout_seconds}
|
|
575
|
+
|
|
576
|
+
# Embedding Model Settings
|
|
577
|
+
embeddings:
|
|
578
|
+
model: {config.embeddings.model}
|
|
579
|
+
cache_dir: {config.embeddings.cache_dir}
|
|
580
|
+
batch_size: {config.embeddings.batch_size}
|
|
581
|
+
device: {config.embeddings.device} # cpu, cuda, or mps
|
|
582
|
+
|
|
583
|
+
# Search Settings
|
|
584
|
+
search:
|
|
585
|
+
top_k: {config.search.top_k} # Number of results to return
|
|
586
|
+
hybrid_alpha: {config.search.hybrid_alpha} # 0.0 = keyword only, 1.0 = vector only
|
|
587
|
+
min_similarity: {config.search.min_similarity}
|
|
588
|
+
max_context_chunks: {config.search.max_context_chunks}
|
|
589
|
+
|
|
590
|
+
# Cache Settings
|
|
591
|
+
cache:
|
|
592
|
+
enabled: {str(config.cache.enabled).lower()}
|
|
593
|
+
max_size_mb: {config.cache.max_size_mb}
|
|
594
|
+
ttl_seconds: {config.cache.ttl_seconds} # Time to live for cached items
|
|
595
|
+
embedding_cache_enabled: {str(config.cache.embedding_cache_enabled).lower()}
|
|
596
|
+
response_cache_enabled: {str(config.cache.response_cache_enabled).lower()}
|
|
597
|
+
|
|
598
|
+
# UI Settings
|
|
599
|
+
ui:
|
|
600
|
+
color: {str(config.ui.color).lower()}
|
|
601
|
+
progress_bars: {str(config.ui.progress_bars).lower()}
|
|
602
|
+
verbose: {str(config.ui.verbose).lower()}
|
|
603
|
+
# editor: vim # Or use $EDITOR env var
|
|
604
|
+
# pager: less # Or use $PAGER env var
|
|
605
|
+
|
|
606
|
+
# Storage Settings
|
|
607
|
+
storage:
|
|
608
|
+
db_path: {config.storage.db_path}
|
|
609
|
+
auto_save: {str(config.storage.auto_save).lower()}
|
|
610
|
+
max_sessions: {config.storage.max_sessions}
|
|
611
|
+
compress_old_sessions: {str(config.storage.compress_old_sessions).lower()}
|
|
612
|
+
"""
|
|
613
|
+
|
|
614
|
+
output_file.parent.mkdir(parents=True, exist_ok=True)
|
|
615
|
+
output_file.write_text(content)
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"""Configuration management for the assistant."""
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Optional
|
|
4
|
+
import os
|
|
5
|
+
from dotenv import load_dotenv
|
|
6
|
+
|
|
7
|
+
# Load environment variables from .env file
|
|
8
|
+
load_dotenv()
|
|
9
|
+
|
|
10
|
+
# Import keystore for API key loading
|
|
11
|
+
from coding_assistant.utils.keystore import KeyStore
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Settings:
    """Application settings.

    Each value is resolved once, at construction time, with the priority:
    environment variable > keystore / config file > built-in default.
    """

    def __init__(self):
        # Project paths
        self.project_path: Path = Path.cwd()
        self.data_dir: Path = Path.home() / ".coding_assistant"
        self.data_dir.mkdir(exist_ok=True)

        # Keystore supplies persisted API keys when env vars are absent.
        keystore = KeyStore()

        # LLM settings
        # Priority: env var > config file > auto-detect
        self.llm_provider: Optional[str] = os.getenv("LLM_PROVIDER")
        if not self.llm_provider:
            # Try to load from config file
            config_file = Path(".assistant.yml")
            if config_file.exists():
                try:
                    from coding_assistant.config.config_manager import Config
                    config = Config.from_file(config_file)
                    # "ollama" is the config default, so treat it as "unset"
                    # and leave llm_provider as None to allow auto-detection.
                    self.llm_provider = (
                        config.llm.provider if config.llm.provider != "ollama" else None
                    )
                except Exception:
                    # Deliberate best-effort: a broken config file must not
                    # prevent startup; fall back to auto-detect. (Narrowed
                    # from a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit.)
                    pass
        self.ollama_base_url: str = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
        self.ollama_model: str = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:7b")

        # Groq settings (free tier cloud provider)
        # Priority: env var > keystore
        self.groq_api_key: Optional[str] = (
            os.getenv("GROQ_API_KEY") or keystore.get_key("groq")
        )
        self.groq_model: str = os.getenv("GROQ_MODEL", "llama-3.3-70b-versatile")

        # Together AI settings (free trial cloud provider)
        # Priority: env var > keystore
        self.together_api_key: Optional[str] = (
            os.getenv("TOGETHER_API_KEY") or keystore.get_key("together")
        )
        self.together_model: str = os.getenv("TOGETHER_MODEL", "Qwen/Qwen2.5-Coder-32B-Instruct")

        # Google Gemini settings (free tier cloud provider)
        # Priority: env var > keystore
        self.gemini_api_key: Optional[str] = (
            os.getenv("GEMINI_API_KEY") or keystore.get_key("gemini")
        )
        self.gemini_model: str = os.getenv("GEMINI_MODEL", "gemini-2.0-flash-exp")

        # API keys (optional - for future expansion)
        # Priority: env var > keystore; "claude" checked before the
        # "anthropic" alias for backward compatibility.
        self.anthropic_api_key: Optional[str] = (
            os.getenv("ANTHROPIC_API_KEY")
            or keystore.get_key("claude")
            or keystore.get_key("anthropic")
        )
        self.openai_api_key: Optional[str] = (
            os.getenv("OPENAI_API_KEY") or keystore.get_key("openai")
        )

        # General settings
        self.verbose: bool = False
        self.max_context_files: int = 10

    def set_project_path(self, path: Path) -> None:
        """Set the project path, resolving it to an absolute path."""
        self.project_path = path.resolve()
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
# Global settings instance, shared by importers of this module.
# NOTE(review): constructed at import time, so importing this module reads env
# vars, the keystore, and .assistant.yml as a side effect — confirm intended.
settings = Settings()
|