cognautic-cli 1.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognautic/__init__.py +7 -0
- cognautic/ai_engine.py +2213 -0
- cognautic/auto_continuation.py +196 -0
- cognautic/cli.py +1064 -0
- cognautic/config.py +245 -0
- cognautic/file_tagger.py +194 -0
- cognautic/memory.py +419 -0
- cognautic/provider_endpoints.py +424 -0
- cognautic/rules.py +246 -0
- cognautic/tools/__init__.py +19 -0
- cognautic/tools/base.py +59 -0
- cognautic/tools/code_analysis.py +391 -0
- cognautic/tools/command_runner.py +292 -0
- cognautic/tools/file_operations.py +394 -0
- cognautic/tools/registry.py +115 -0
- cognautic/tools/response_control.py +48 -0
- cognautic/tools/web_search.py +336 -0
- cognautic/utils.py +297 -0
- cognautic/websocket_server.py +485 -0
- cognautic_cli-1.1.1.dist-info/METADATA +604 -0
- cognautic_cli-1.1.1.dist-info/RECORD +25 -0
- cognautic_cli-1.1.1.dist-info/WHEEL +5 -0
- cognautic_cli-1.1.1.dist-info/entry_points.txt +2 -0
- cognautic_cli-1.1.1.dist-info/licenses/LICENSE +21 -0
- cognautic_cli-1.1.1.dist-info/top_level.txt +1 -0
cognautic/utils.py
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Utility functions for Cognautic CLI
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Dict, Any, List, Optional
|
|
9
|
+
import json
|
|
10
|
+
import yaml
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
from rich.table import Table
|
|
13
|
+
from rich.progress import Progress, SpinnerColumn, TextColumn
|
|
14
|
+
|
|
15
|
+
console = Console()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def ensure_directory(path: str) -> Path:
    """Create *path* (including any missing parents) and return it as a Path.

    Existing directories are left untouched.
    """
    result = Path(path)
    result.mkdir(parents=True, exist_ok=True)
    return result
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def load_json_file(file_path: str) -> Dict[str, Any]:
    """Read a JSON file and return its parsed contents.

    A missing file yields an empty dict; invalid JSON is reported on
    the console and also yields an empty dict.
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as handle:
            return json.load(handle)
    except FileNotFoundError:
        return {}
    except json.JSONDecodeError as err:
        console.print(f"❌ Invalid JSON in {file_path}: {err}", style="red")
        return {}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def save_json_file(file_path: str, data: Dict[str, Any], indent: int = 2):
    """Write *data* to *file_path* as pretty-printed UTF-8 JSON.

    Failures are reported on the console rather than raised.
    """
    try:
        with open(file_path, 'w', encoding='utf-8') as out:
            json.dump(data, out, ensure_ascii=False, indent=indent)
    except Exception as err:
        console.print(f"❌ Error saving JSON to {file_path}: {err}", style="red")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def load_yaml_file(file_path: str) -> Dict[str, Any]:
    """Read a YAML file and return its parsed contents.

    A missing file yields an empty dict; a YAML syntax error is
    reported on the console and also yields an empty dict. An empty
    document (safe_load returns None) maps to {} as well.
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as handle:
            loaded = yaml.safe_load(handle)
    except FileNotFoundError:
        return {}
    except yaml.YAMLError as err:
        console.print(f"❌ Invalid YAML in {file_path}: {err}", style="red")
        return {}
    return loaded or {}
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def save_yaml_file(file_path: str, data: Dict[str, Any]):
    """Write *data* to *file_path* as block-style YAML.

    Failures are reported on the console rather than raised.
    """
    try:
        with open(file_path, 'w', encoding='utf-8') as out:
            yaml.dump(data, out, default_flow_style=False, indent=2)
    except Exception as err:
        console.print(f"❌ Error saving YAML to {file_path}: {err}", style="red")
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def detect_project_type(project_path: str) -> Optional[str]:
    """Detect project type based on files present.

    First looks for well-known marker files (manifests/lock files); if
    none match, falls back to mapping the source-file extensions found
    in the tree. Returns None when nothing recognizable is found.
    """
    path = Path(project_path)

    # Marker files that reliably identify a project type.
    # Dict order matters: earlier entries win when several match.
    indicators = {
        'python': ['requirements.txt', 'setup.py', 'pyproject.toml', 'Pipfile'],
        'node': ['package.json', 'yarn.lock', 'package-lock.json'],
        'java': ['pom.xml', 'build.gradle', 'build.xml'],
        'go': ['go.mod', 'go.sum'],
        'rust': ['Cargo.toml', 'Cargo.lock'],
        'php': ['composer.json', 'composer.lock'],
        'ruby': ['Gemfile', 'Gemfile.lock'],
        'dotnet': ['*.csproj', '*.sln', 'project.json'],
        'docker': ['Dockerfile', 'docker-compose.yml', 'docker-compose.yaml']
    }

    for project_type, files in indicators.items():
        for file_pattern in files:
            if '*' in file_pattern:
                # Glob patterns such as '*.csproj' — any match wins.
                if any(path.glob(file_pattern)):
                    return project_type
            elif (path / file_pattern).exists():
                return project_type

    # Fallback: collect source-file extensions, skipping hidden files and
    # directories *within* the project. BUG FIX: the original inspected the
    # components of the absolute path, so a project living under a dotted
    # directory (e.g. ~/.config/proj) had every file skipped.
    extensions = set()
    for file_path in path.rglob('*'):
        if not file_path.is_file():
            continue
        relative_parts = file_path.relative_to(path).parts
        if any(part.startswith('.') for part in relative_parts):
            continue
        extensions.add(file_path.suffix.lower())

    # Dict order matters here too: '.py' is preferred over '.js', etc.
    extension_map = {
        '.py': 'python',
        '.js': 'javascript',
        '.ts': 'typescript',
        '.java': 'java',
        '.go': 'go',
        '.rs': 'rust',
        '.php': 'php',
        '.rb': 'ruby',
        '.cs': 'csharp',
        '.cpp': 'cpp',
        '.c': 'c'
    }

    for ext, lang in extension_map.items():
        if ext in extensions:
            return lang

    return None
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def format_file_size(size_bytes: int) -> str:
    """Render a byte count as a human-readable string, e.g. '1.5 KB'."""
    if size_bytes == 0:
        return "0 B"

    units = ("B", "KB", "MB", "GB", "TB")
    value = float(size_bytes)
    unit_index = 0
    # Scale down by 1024 until the value fits, capping at the last unit.
    while value >= 1024 and unit_index < len(units) - 1:
        value /= 1024.0
        unit_index += 1

    return f"{value:.1f} {units[unit_index]}"
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def create_progress_bar(description: str = "Processing..."):
    """Build a spinner-style Progress renderer bound to the shared console.

    NOTE(review): *description* is accepted but not used here — callers
    supply task descriptions via Progress.add_task(). Kept for interface
    compatibility.
    """
    columns = (
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
    )
    return Progress(*columns, console=console)
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def display_table(data: List[Dict[str, Any]], title: str = None, columns: List[str] = None):
    """Render a list of row dicts as a rich table on the console.

    When *columns* is omitted, the keys of the first row are used.
    Missing cell values render as empty strings. An empty *data* list
    prints a notice instead of a table.
    """
    if not data:
        console.print("No data to display", style="yellow")
        return

    # Derive the column list from the first row when not given explicitly.
    headers = columns if columns else list(data[0].keys())

    table = Table(title=title)
    for header in headers:
        table.add_column(header.replace('_', ' ').title())

    for entry in data:
        cells = [str(entry.get(header, '')) for header in headers]
        table.add_row(*cells)

    console.print(table)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def validate_api_key(api_key: str, provider: str) -> bool:
    """Check whether *api_key* looks plausible for *provider*.

    Only coarse format checks (expected prefix and minimum length) are
    applied — the key is never verified against the provider's API.
    Unknown providers fall back to a simple length check.
    """
    if not api_key:
        return False

    key_length = len(api_key)

    # Providers whose keys carry a well-known prefix.
    prefix_rules = {
        'openai': 'sk-',
        'anthropic': 'sk-ant-',
        'openrouter': 'sk-or-',
    }
    # Providers whose key format varies; only length is checked.
    length_only_providers = {'google', 'together'}

    provider_name = provider.lower()
    if provider_name in prefix_rules:
        return api_key.startswith(prefix_rules[provider_name]) and key_length > 20
    if provider_name in length_only_providers:
        return key_length > 20

    # Unknown provider: accept anything that is not trivially short.
    return key_length > 10
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def get_system_info() -> Dict[str, Any]:
    """Collect a snapshot of interpreter, path and environment details.

    NOTE(review): 'environment_variables' is a copy of the *entire*
    environment, which may contain secrets (API keys, tokens). Avoid
    logging or transmitting this dict verbatim.
    """
    info: Dict[str, Any] = {
        'platform': sys.platform,
        'python_version': sys.version,
        'python_executable': sys.executable,
        'working_directory': os.getcwd(),
        'home_directory': str(Path.home()),
        'environment_variables': dict(os.environ),
    }
    return info
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def sanitize_filename(filename: str) -> str:
    """Sanitize filename for safe file system usage.

    Replaces characters that are invalid on common file systems with
    underscores, trims leading/trailing spaces and dots, and truncates
    the result to 255 characters. ASCII control characters (0x00-0x1F),
    which are invalid in Windows filenames, are replaced as well — the
    previous version let them through.
    """
    # '<>:"/\|?*' are reserved on Windows; '/' also separates POSIX paths.
    # A single translate() pass replaces them all with '_'.
    invalid_chars = '<>:"/\\|?*' + ''.join(map(chr, range(32)))
    translation = str.maketrans({char: '_' for char in invalid_chars})
    filename = filename.translate(translation)

    # Leading/trailing spaces and dots confuse Windows and shell tools.
    filename = filename.strip(' .')

    # Most file systems cap a single name component at 255 characters.
    return filename[:255]
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def parse_requirements_txt(file_path: str) -> List[Dict[str, Optional[str]]]:
    """Parse a requirements.txt file into package/version/operator dicts.

    Each entry has the keys 'package', 'version' and 'operator'; the
    latter two are None for unpinned requirements. Inline comments
    ('pkg==1.0  # note') and PEP 508 environment markers ('; python_version
    ...') are stripped. All PEP 440 comparison operators are recognized —
    the previous version handled only ==, >= and <=, so e.g. 'pkg~=1.0'
    was misread as an unpinned package. A missing file or a read error is
    reported on the console and yields a (possibly partial) list.
    """
    requirements: List[Dict[str, Optional[str]]] = []

    # Multi-char operators must be tested before their single-char
    # prefixes ('>=' before '>', '===' before '==').
    operators = ('===', '==', '~=', '!=', '>=', '<=', '>', '<')

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            for raw_line in f:
                line = raw_line.strip()

                # Skip blank lines and full-line comments.
                if not line or line.startswith('#'):
                    continue

                # Drop inline comments (a '#' preceded by whitespace) and
                # environment markers (everything after ';').
                line = line.split(' #', 1)[0].split(';', 1)[0].strip()
                if not line:
                    continue

                for op in operators:
                    if op in line:
                        package, version = line.split(op, 1)
                        requirements.append({
                            'package': package.strip(),
                            'version': version.strip(),
                            'operator': op
                        })
                        break
                else:
                    # No version specifier present.
                    requirements.append({
                        'package': line,
                        'version': None,
                        'operator': None
                    })

    except FileNotFoundError:
        console.print(f"❌ Requirements file not found: {file_path}", style="red")
    except Exception as e:
        console.print(f"❌ Error parsing requirements: {e}", style="red")

    return requirements
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def check_dependencies() -> Dict[str, Dict[str, bool]]:
    """Check whether required and optional dependencies are installed.

    Returns {'required': {pkg: bool, ...}, 'optional': {pkg: bool, ...}}
    keyed by distribution name. Distribution names are mapped to the
    module names they actually install (e.g. 'beautifulsoup4' installs
    'bs4') — the previous version fed distribution names straight to
    __import__, so beautifulsoup4, google-generativeai and gitpython
    always reported False. Uses importlib.util.find_spec so nothing is
    actually imported while probing.
    """
    import importlib.util

    # distribution name -> importable module name
    required_packages = {
        'click': 'click',
        'asyncio': 'asyncio',  # stdlib; key kept for backward compatibility
        'websockets': 'websockets',
        'aiohttp': 'aiohttp',
        'pydantic': 'pydantic',
        'rich': 'rich',
        'requests': 'requests',
        'beautifulsoup4': 'bs4',
        'cryptography': 'cryptography',
        'keyring': 'keyring',
    }

    optional_packages = {
        'openai': 'openai',
        'anthropic': 'anthropic',
        'google-generativeai': 'google.generativeai',
        'together': 'together',
        'gitpython': 'git',
        'psutil': 'psutil',
    }

    def _available(module_name: str) -> bool:
        # find_spec returns None for unknown modules but can raise
        # ModuleNotFoundError when a dotted name's parent is missing.
        try:
            return importlib.util.find_spec(module_name) is not None
        except (ImportError, ValueError):
            return False

    status: Dict[str, Dict[str, bool]] = {'required': {}, 'optional': {}}
    for dist_name, module_name in required_packages.items():
        status['required'][dist_name] = _available(module_name)
    for dist_name, module_name in optional_packages.items():
        status['optional'][dist_name] = _available(module_name)

    return status
|