cognautic-cli 1.1.1__py3-none-any.whl
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- cognautic/__init__.py +7 -0
- cognautic/ai_engine.py +2213 -0
- cognautic/auto_continuation.py +196 -0
- cognautic/cli.py +1064 -0
- cognautic/config.py +245 -0
- cognautic/file_tagger.py +194 -0
- cognautic/memory.py +419 -0
- cognautic/provider_endpoints.py +424 -0
- cognautic/rules.py +246 -0
- cognautic/tools/__init__.py +19 -0
- cognautic/tools/base.py +59 -0
- cognautic/tools/code_analysis.py +391 -0
- cognautic/tools/command_runner.py +292 -0
- cognautic/tools/file_operations.py +394 -0
- cognautic/tools/registry.py +115 -0
- cognautic/tools/response_control.py +48 -0
- cognautic/tools/web_search.py +336 -0
- cognautic/utils.py +297 -0
- cognautic/websocket_server.py +485 -0
- cognautic_cli-1.1.1.dist-info/METADATA +604 -0
- cognautic_cli-1.1.1.dist-info/RECORD +25 -0
- cognautic_cli-1.1.1.dist-info/WHEEL +5 -0
- cognautic_cli-1.1.1.dist-info/entry_points.txt +2 -0
- cognautic_cli-1.1.1.dist-info/licenses/LICENSE +21 -0
- cognautic_cli-1.1.1.dist-info/top_level.txt +1 -0
cognautic/tools/base.py
ADDED
@@ -0,0 +1,59 @@
"""
Base classes for tools
"""

from abc import ABC, abstractmethod
from typing import Dict, Any, List
from enum import Enum


class PermissionLevel(Enum):
    """Permission levels for tool execution"""
    READ_ONLY = "read_only"
    SAFE_OPERATIONS = "safe_operations"
    SYSTEM_OPERATIONS = "system_operations"
    UNRESTRICTED = "unrestricted"


class ToolResult:
    """Result of a tool execution"""

    def __init__(self, success: bool, data: Any = None, error: str = None):
        self.success = success
        self.data = data
        self.error = error

    def to_dict(self) -> Dict[str, Any]:
        return {
            'success': self.success,
            'data': self.data,
            'error': self.error
        }


class BaseTool(ABC):
    """Base class for all tools"""

    def __init__(self, name: str, description: str, permission_level: PermissionLevel):
        self.name = name
        self.description = description
        self.permission_level = permission_level

    @abstractmethod
    async def execute(self, **kwargs) -> ToolResult:
        """Execute the tool with given parameters"""
        pass

    @abstractmethod
    def get_capabilities(self) -> List[str]:
        """Get list of capabilities this tool provides"""
        pass

    def get_info(self) -> Dict[str, Any]:
        """Get tool information"""
        return {
            'name': self.name,
            'description': self.description,
            'permission_level': self.permission_level.value,
            'capabilities': self.get_capabilities()
        }
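For orientation, the sketch below shows how a concrete tool is expected to subclass BaseTool: implement the two abstract methods and pass a PermissionLevel to the base constructor. This example is not part of the wheel; the EchoTool class, its "text" argument, and its behaviour are hypothetical, and it assumes the package is importable as cognautic.tools.base.

# Hypothetical example, not shipped in the wheel: a minimal BaseTool subclass.
import asyncio
from typing import List

from cognautic.tools.base import BaseTool, ToolResult, PermissionLevel


class EchoTool(BaseTool):
    """Toy tool that echoes its input back."""

    def __init__(self):
        super().__init__(
            name="echo",
            description="Echo the given text back to the caller",
            permission_level=PermissionLevel.READ_ONLY,
        )

    async def execute(self, **kwargs) -> ToolResult:
        # Return the 'text' argument, or a failed ToolResult if it is missing.
        if "text" not in kwargs:
            return ToolResult(success=False, error="missing 'text' argument")
        return ToolResult(success=True, data=kwargs["text"])

    def get_capabilities(self) -> List[str]:
        return ["echo"]


if __name__ == "__main__":
    tool = EchoTool()
    print(tool.get_info())                          # metadata assembled by BaseTool.get_info
    print(asyncio.run(tool.execute(text="hi")).to_dict())

Errors are reported through ToolResult rather than raised, which matches how the tools below behave.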
cognautic/tools/code_analysis.py
ADDED
@@ -0,0 +1,391 @@
"""
Code analysis tool for understanding code structure
"""

import ast
import os
from pathlib import Path
from typing import List, Dict, Any, Optional
import subprocess
import json

from .base import BaseTool, ToolResult, PermissionLevel


class CodeAnalysisTool(BaseTool):
    """Tool for analyzing and understanding code structure"""

    def __init__(self):
        super().__init__(
            name="code_analysis",
            description="Analyze and understand code structure",
            permission_level=PermissionLevel.READ_ONLY
        )

    def get_capabilities(self) -> List[str]:
        return [
            "parse_ast",
            "analyze_dependencies",
            "find_functions",
            "find_classes",
            "get_code_metrics"
        ]

    async def execute(self, operation: str, **kwargs) -> ToolResult:
        """Execute code analysis operation"""

        operations = {
            'parse_ast': self._parse_ast,
            'analyze_dependencies': self._analyze_dependencies,
            'find_functions': self._find_functions,
            'find_classes': self._find_classes,
            'get_code_metrics': self._get_code_metrics
        }

        if operation not in operations:
            return ToolResult(
                success=False,
                error=f"Unknown operation: {operation}"
            )

        try:
            result = await operations[operation](**kwargs)
            return ToolResult(success=True, data=result)
        except Exception as e:
            return ToolResult(success=False, error=str(e))

    async def _parse_ast(self, file_path: str) -> Dict[str, Any]:
        """Parse Python file and return AST information"""

        path = Path(file_path)
        if not path.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        if path.suffix != '.py':
            raise ValueError(f"File is not a Python file: {file_path}")

        with open(path, 'r', encoding='utf-8') as f:
            source_code = f.read()

        try:
            tree = ast.parse(source_code)
        except SyntaxError as e:
            raise ValueError(f"Syntax error in Python file: {str(e)}")

        # Extract information from AST
        info = {
            'file_path': str(path),
            'imports': [],
            'functions': [],
            'classes': [],
            'variables': [],
            'docstring': ast.get_docstring(tree)
        }

        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    info['imports'].append({
                        'type': 'import',
                        'name': alias.name,
                        'alias': alias.asname,
                        'line': node.lineno
                    })

            elif isinstance(node, ast.ImportFrom):
                for alias in node.names:
                    info['imports'].append({
                        'type': 'from_import',
                        'module': node.module,
                        'name': alias.name,
                        'alias': alias.asname,
                        'line': node.lineno
                    })

            elif isinstance(node, ast.FunctionDef):
                func_info = {
                    'name': node.name,
                    'line': node.lineno,
                    'args': [arg.arg for arg in node.args.args],
                    'docstring': ast.get_docstring(node),
                    'decorators': [ast.unparse(dec) for dec in node.decorator_list],
                    'is_async': False
                }
                info['functions'].append(func_info)

            elif isinstance(node, ast.AsyncFunctionDef):
                func_info = {
                    'name': node.name,
                    'line': node.lineno,
                    'args': [arg.arg for arg in node.args.args],
                    'docstring': ast.get_docstring(node),
                    'decorators': [ast.unparse(dec) for dec in node.decorator_list],
                    'is_async': True
                }
                info['functions'].append(func_info)

            elif isinstance(node, ast.ClassDef):
                class_info = {
                    'name': node.name,
                    'line': node.lineno,
                    'bases': [ast.unparse(base) for base in node.bases],
                    'docstring': ast.get_docstring(node),
                    'decorators': [ast.unparse(dec) for dec in node.decorator_list],
                    'methods': []
                }

                # Find methods in the class
                for item in node.body:
                    if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
                        method_info = {
                            'name': item.name,
                            'line': item.lineno,
                            'args': [arg.arg for arg in item.args.args],
                            'is_async': isinstance(item, ast.AsyncFunctionDef)
                        }
                        class_info['methods'].append(method_info)

                info['classes'].append(class_info)

            elif isinstance(node, ast.Assign):
                for target in node.targets:
                    if isinstance(target, ast.Name):
                        info['variables'].append({
                            'name': target.id,
                            'line': node.lineno,
                            'type': 'assignment'
                        })

        return info

    async def _analyze_dependencies(self, project_path: str) -> Dict[str, Any]:
        """Analyze project dependencies"""

        path = Path(project_path)
        if not path.exists():
            raise FileNotFoundError(f"Project path not found: {project_path}")

        dependencies = {
            'project_path': str(path),
            'requirements_files': [],
            'package_files': [],
            'imports': set(),
            'external_packages': set(),
            'internal_modules': set()
        }

        # Find requirements files
        req_files = ['requirements.txt', 'requirements-dev.txt', 'Pipfile', 'pyproject.toml', 'setup.py']
        for req_file in req_files:
            req_path = path / req_file
            if req_path.exists():
                dependencies['requirements_files'].append(str(req_path))

        # Find package files
        for package_file in path.rglob('*.py'):
            if not any(part.startswith('.') for part in package_file.parts):
                dependencies['package_files'].append(str(package_file))

                # Analyze imports in each file
                try:
                    ast_info = await self._parse_ast(str(package_file))
                    for imp in ast_info['imports']:
                        if imp['type'] == 'import':
                            dependencies['imports'].add(imp['name'])
                        elif imp['type'] == 'from_import' and imp['module']:
                            dependencies['imports'].add(imp['module'])
                except Exception:
                    continue

        # Categorize imports
        stdlib_modules = self._get_stdlib_modules()

        for imp in dependencies['imports']:
            root_module = imp.split('.')[0]
            if root_module in stdlib_modules:
                continue  # Skip standard library
            elif any(root_module in str(f) for f in dependencies['package_files']):
                dependencies['internal_modules'].add(imp)
            else:
                dependencies['external_packages'].add(imp)

        # Convert sets to lists for JSON serialization
        dependencies['imports'] = list(dependencies['imports'])
        dependencies['external_packages'] = list(dependencies['external_packages'])
        dependencies['internal_modules'] = list(dependencies['internal_modules'])

        return dependencies

    async def _find_functions(
        self,
        project_path: str,
        function_name: str = None,
        include_methods: bool = True
    ) -> List[Dict[str, Any]]:
        """Find functions in a project"""

        path = Path(project_path)
        if not path.exists():
            raise FileNotFoundError(f"Project path not found: {project_path}")

        functions = []

        for py_file in path.rglob('*.py'):
            if any(part.startswith('.') for part in py_file.parts):
                continue

            try:
                ast_info = await self._parse_ast(str(py_file))

                # Add standalone functions
                for func in ast_info['functions']:
                    if not function_name or function_name.lower() in func['name'].lower():
                        func['file'] = str(py_file)
                        func['type'] = 'function'
                        functions.append(func)

                # Add class methods if requested
                if include_methods:
                    for cls in ast_info['classes']:
                        for method in cls['methods']:
                            if not function_name or function_name.lower() in method['name'].lower():
                                method['file'] = str(py_file)
                                method['class'] = cls['name']
                                method['type'] = 'method'
                                functions.append(method)

            except Exception:
                continue

        return functions

    async def _find_classes(
        self,
        project_path: str,
        class_name: str = None
    ) -> List[Dict[str, Any]]:
        """Find classes in a project"""

        path = Path(project_path)
        if not path.exists():
            raise FileNotFoundError(f"Project path not found: {project_path}")

        classes = []

        for py_file in path.rglob('*.py'):
            if any(part.startswith('.') for part in py_file.parts):
                continue

            try:
                ast_info = await self._parse_ast(str(py_file))

                for cls in ast_info['classes']:
                    if not class_name or class_name.lower() in cls['name'].lower():
                        cls['file'] = str(py_file)
                        classes.append(cls)

            except Exception:
                continue

        return classes

    async def _get_code_metrics(self, project_path: str) -> Dict[str, Any]:
        """Get code metrics for a project"""

        path = Path(project_path)
        if not path.exists():
            raise FileNotFoundError(f"Project path not found: {project_path}")

        metrics = {
            'project_path': str(path),
            'total_files': 0,
            'python_files': 0,
            'total_lines': 0,
            'code_lines': 0,
            'comment_lines': 0,
            'blank_lines': 0,
            'functions': 0,
            'classes': 0,
            'imports': 0,
            'files_by_extension': {},
            'largest_files': []
        }

        file_sizes = []

        for file_path in path.rglob('*'):
            if file_path.is_file() and not any(part.startswith('.') for part in file_path.parts):
                metrics['total_files'] += 1

                # Count by extension
                ext = file_path.suffix.lower()
                metrics['files_by_extension'][ext] = metrics['files_by_extension'].get(ext, 0) + 1

                # Analyze Python files
                if ext == '.py':
                    metrics['python_files'] += 1

                    try:
                        with open(file_path, 'r', encoding='utf-8') as f:
                            lines = f.readlines()

                        file_lines = len(lines)
                        file_code_lines = 0
                        file_comment_lines = 0
                        file_blank_lines = 0

                        for line in lines:
                            stripped = line.strip()
                            if not stripped:
                                file_blank_lines += 1
                            elif stripped.startswith('#'):
                                file_comment_lines += 1
                            else:
                                file_code_lines += 1

                        metrics['total_lines'] += file_lines
                        metrics['code_lines'] += file_code_lines
                        metrics['comment_lines'] += file_comment_lines
                        metrics['blank_lines'] += file_blank_lines

                        file_sizes.append({
                            'file': str(file_path),
                            'lines': file_lines,
                            'code_lines': file_code_lines
                        })

                        # Count functions and classes
                        try:
                            ast_info = await self._parse_ast(str(file_path))
                            metrics['functions'] += len(ast_info['functions'])
                            metrics['classes'] += len(ast_info['classes'])
                            metrics['imports'] += len(ast_info['imports'])
                        except Exception:
                            pass

                    except Exception:
                        continue

        # Get largest files
        file_sizes.sort(key=lambda x: x['lines'], reverse=True)
        metrics['largest_files'] = file_sizes[:10]

        return metrics

    def _get_stdlib_modules(self) -> set:
        """Get set of Python standard library modules"""
        # This is a simplified list of common stdlib modules
        # In a full implementation, you might want to use a more comprehensive approach
        return {
            'os', 'sys', 'json', 'datetime', 'time', 'random', 'math', 'collections',
            'itertools', 'functools', 'operator', 're', 'string', 'io', 'pathlib',
            'urllib', 'http', 'email', 'html', 'xml', 'csv', 'sqlite3', 'pickle',
            'hashlib', 'hmac', 'secrets', 'uuid', 'base64', 'binascii', 'struct',
            'codecs', 'unicodedata', 'stringprep', 'readline', 'rlcompleter',
            'subprocess', 'threading', 'multiprocessing', 'concurrent', 'queue',
            'sched', 'asyncio', 'socket', 'ssl', 'select', 'selectors', 'signal',
            'mmap', 'ctypes', 'array', 'weakref', 'types', 'copy', 'pprint',
            'reprlib', 'enum', 'numbers', 'cmath', 'decimal', 'fractions',
            'statistics', 'logging', 'getopt', 'argparse', 'fileinput', 'filecmp',
            'tempfile', 'glob', 'fnmatch', 'linecache', 'shutil', 'macpath'
        }
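To show how the operation dispatch in execute() is meant to be driven, here is a short usage sketch. It is not part of the package: the file path passed to parse_ast is illustrative and must exist locally, and the rename_symbol operation name is deliberately unknown to demonstrate the error path.

# Hypothetical usage sketch, not shipped in the wheel.
import asyncio
from cognautic.tools.code_analysis import CodeAnalysisTool


async def main() -> None:
    tool = CodeAnalysisTool()

    # Parse a single module; the path here is illustrative.
    parsed = await tool.execute(operation="parse_ast", file_path="cognautic/tools/base.py")
    if parsed.success:
        print([f["name"] for f in parsed.data["functions"]])

    # Unknown operations come back as a failed ToolResult instead of an exception.
    bad = await tool.execute(operation="rename_symbol")
    print(bad.to_dict())  # {'success': False, 'data': None, 'error': 'Unknown operation: rename_symbol'}


asyncio.run(main())

All keyword arguments after operation are forwarded to the matching private method, so each operation's parameters mirror that method's signature (file_path for parse_ast, project_path for the project-wide analyses).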