akitallm 0.1.1__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- akita/cli/main.py +75 -10
- akita/core/ast_utils.py +77 -0
- akita/core/indexing.py +94 -0
- akita/core/plugins.py +81 -0
- akita/core/trace.py +18 -0
- akita/plugins/__init__.py +1 -0
- akita/plugins/files.py +34 -0
- akita/reasoning/engine.py +44 -18
- akita/reasoning/session.py +15 -0
- akita/tools/base.py +6 -1
- akita/tools/context.py +54 -9
- akita/tools/diff.py +100 -25
- {akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/METADATA +8 -11
- akitallm-1.0.3.dist-info/RECORD +22 -0
- akitallm-1.0.3.dist-info/entry_points.txt +5 -0
- akitallm-0.1.1.dist-info/RECORD +0 -15
- akitallm-0.1.1.dist-info/entry_points.txt +0 -2
- {akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/WHEEL +0 -0
- {akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/top_level.txt +0 -0
akita/cli/main.py
CHANGED
@@ -2,12 +2,14 @@ import typer
 from rich.console import Console
 from rich.panel import Panel
 from akita.reasoning.engine import ReasoningEngine
+from akita.core.indexing import CodeIndexer
 from akita.models.base import get_model
 from akita.core.config import load_config, save_config, reset_config, CONFIG_FILE
 from rich.table import Table
 from rich.markdown import Markdown
 from rich.syntax import Syntax
 from dotenv import load_dotenv
+from akita.tools.diff import DiffApplier
 
 # Load environment variables from .env file
 load_dotenv()
@@ -119,27 +121,53 @@ def review(
 @app.command()
 def solve(
     query: str,
+    interactive: bool = typer.Option(False, "--interactive", "-i", help="Run in interactive mode to refine the solution."),
+    trace: bool = typer.Option(False, "--trace", help="Show the internal reasoning trace."),
     dry_run: bool = typer.Option(False, "--dry-run", help="Run in dry-run mode.")
 ):
     """
-    Generate a solution for the given query.
+    Generate and apply a solution for the given query.
     """
     model = get_model()
     engine = ReasoningEngine(model)
     console.print(Panel(f"[bold blue]Akita[/] is thinking about: [italic]{query}[/]", title="Solve Mode"))
 
+    current_query = query
+    session = None
+
     try:
-
-
-
-
-
-
+        while True:
+            diff_output = engine.run_solve(current_query, session=session)
+            session = engine.session
+
+            if trace:
+                console.print(Panel(str(engine.trace), title="[bold cyan]Reasoning Trace[/]", border_style="cyan"))
+            console.print(Panel("[bold green]Suggested Code Changes (Unified Diff):[/]"))
+            syntax = Syntax(diff_output, "diff", theme="monokai", line_numbers=True)
+            console.print(syntax)
+
+            if interactive:
+                action = typer.prompt("\n[A]pprove, [R]efine with feedback, or [C]ancel?", default="A").upper()
+                if action == "A":
+                    break
+                elif action == "R":
+                    current_query = typer.prompt("Enter your feedback/refinement")
+                    continue
+                else:
+                    console.print("[yellow]Operation cancelled.[/]")
+                    return
+            else:
+                break
+
         if not dry_run:
             confirm = typer.confirm("\nDo you want to apply these changes?")
             if confirm:
-                console.print("[bold yellow]Applying changes...
-
+                console.print("[bold yellow]🚀 Applying changes...[/]")
+                success = DiffApplier.apply_unified_diff(diff_output)
+                if success:
+                    console.print("[bold green]✅ Changes applied successfully![/]")
+                else:
+                    console.print("[bold red]❌ Failed to apply changes.[/]")
             else:
                 console.print("[bold yellow]Changes discarded.[/]")
     except Exception as e:
@@ -165,12 +193,29 @@ def plan(
     console.print(f"[bold red]Planning failed:[/] {e}")
     raise typer.Exit(code=1)
 
+@app.command()
+def index(
+    path: str = typer.Argument(".", help="Path to index for RAG.")
+):
+    """
+    Build a local vector index (RAG) for the project.
+    """
+    console.print(Panel(f"🔍 [bold blue]Akita[/] is indexing: [yellow]{path}[/]", title="Index Mode"))
+    try:
+        indexer = CodeIndexer(path)
+        with console.status("[bold green]Indexing project files..."):
+            indexer.index_project()
+        console.print("[bold green]✅ Indexing complete! Semantic search is now active.[/]")
+    except Exception as e:
+        console.print(f"[bold red]Indexing failed:[/] {e}")
+        raise typer.Exit(code=1)
+
 @app.command()
 def test():
     """
     Run automated tests in the project.
     """
-    console.print(Panel("
+    console.print(Panel("[bold blue]Akita[/] is running tests...", title="Test Mode"))
     from akita.tools.base import ShellTools
     result = ShellTools.execute("pytest")
     if result.success:
@@ -180,6 +225,26 @@ def test():
     console.print("[bold red]Tests failed![/]")
     console.print(result.error or result.output)
 
+@app.command()
+def docs():
+    """
+    Start the local documentation server.
+    """
+    import subprocess
+    import sys
+
+    console.print(Panel("[bold blue]Akita[/] Documentation", title="Docs Mode"))
+    console.print("[dim]Starting MkDocs server...[/]")
+    console.print("[bold green]Open your browser at: http://127.0.0.1:8000[/]")
+
+    try:
+        subprocess.run([sys.executable, "-m", "mkdocs", "serve"], check=True)
+    except FileNotFoundError:
+        console.print("[red]MkDocs not found. Install it with: pip install mkdocs-material[/]")
+        raise typer.Exit(code=1)
+    except KeyboardInterrupt:
+        console.print("[yellow]Documentation server stopped.[/]")
+
 # Config Command Group
 config_app = typer.Typer(help="Manage AkitaLLM configuration.")
 app.add_typer(config_app, name="config")
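Note: taken together, the new flags and commands above support a workflow like the following (a sketch; the command names come from the diff, exact output will vary):

    akita index .
    akita solve "Handle a missing config file gracefully" --interactive --trace
    akita docs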
akita/core/ast_utils.py
ADDED
@@ -0,0 +1,77 @@
+import tree_sitter_python as tspython
+from tree_sitter import Language, Parser
+import pathlib
+from typing import List, Dict, Any, Optional
+
+class ASTParser:
+    def __init__(self):
+        self.language = Language(tspython.language())
+        self.parser = Parser(self.language)
+
+    def parse_file(self, file_path: str) -> Optional[Any]:
+        path = pathlib.Path(file_path)
+        if not path.exists():
+            return None
+
+        with open(path, "rb") as f:
+            content = f.read()
+
+        return self.parser.parse(content)
+
+    def get_definitions(self, file_path: str) -> List[Dict[str, Any]]:
+        """Extract classes and functions with their line ranges using recursive traversal."""
+        tree = self.parse_file(file_path)
+        if not tree:
+            return []
+
+        with open(file_path, "rb") as f:
+            content = f.read()
+
+        definitions = []
+
+        def explore(node):
+            # Check for definitions
+            if node.type in ["class_definition", "function_definition", "decorated_definition"]:
+                # Find name
+                name_node = node.child_by_field_name("name")
+                if not name_node and node.type == "decorated_definition":
+                    # For decorated definitions, the name is in the class/function child
+                    inner = node.child_by_field_name("definition")
+                    if inner:
+                        name_node = inner.child_by_field_name("name")
+
+                name = content[name_node.start_byte:name_node.end_byte].decode("utf-8") if name_node else "anonymous"
+
+                # Docstring extraction
+                docstring = None
+                body = node.child_by_field_name("body")
+                if body and body.children:
+                    for stmt in body.children:
+                        if stmt.type == "expression_statement":
+                            child = stmt.children[0]
+                            if child.type == "string":
+                                docstring = content[child.start_byte:child.end_byte].decode("utf-8").strip('"\' \n')
+                            break  # Only first statement
+
+                definitions.append({
+                    "name": name,
+                    "type": "class" if "class" in node.type else "function",
+                    "start_line": node.start_point[0] + 1,
+                    "end_line": node.end_point[0] + 1,
+                    "docstring": docstring
+                })
+
+            # Always explore children regardless of current node type
+            for child in node.children:
+                explore(child)
+
+        explore(tree.root_node)
+        return definitions
+
+    def get_source_segment(self, file_path: str, start_line: int, end_line: int) -> str:
+        """Extract a segment of code from a file by line numbers."""
+        with open(file_path, "r", encoding="utf-8") as f:
+            lines = f.readlines()
+
+        # Lines are 1-indexed in our definitions, but 0-indexed in the list
+        return "".join(lines[start_line-1 : end_line])
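Note: a minimal usage sketch for the new ASTParser (assumes the tree-sitter and tree-sitter-python dependencies that 1.0.3's METADATA declares; the target path is illustrative):

    from akita.core.ast_utils import ASTParser

    parser = ASTParser()
    # Each definition dict carries name, type, start_line, end_line and docstring.
    for d in parser.get_definitions("akita/core/indexing.py"):
        print(f"{d['type']} {d['name']} (L{d['start_line']}-L{d['end_line']})")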
akita/core/indexing.py
ADDED
@@ -0,0 +1,94 @@
+import os
+import json
+import re
+from pathlib import Path
+from typing import List, Dict, Any, Optional
+from akita.core.ast_utils import ASTParser
+
+class CodeIndexer:
+    """
+    A lightweight, zero-dependency semantic-keyword indexer.
+    Uses basic TF-IDF principles and AST-aware keyword weighting.
+    Works perfectly even in restricted environments like Python 3.14.
+    """
+    def __init__(self, project_path: str):
+        self.project_path = Path(project_path)
+        self.index_file = self.project_path / ".akita" / "index.json"
+        self.ast_parser = ASTParser()
+        self.data: List[Dict[str, Any]] = []
+        self.load_index()
+
+    def load_index(self):
+        if self.index_file.exists():
+            try:
+                with open(self.index_file, "r", encoding="utf-8") as f:
+                    self.data = json.load(f)
+            except Exception:
+                self.data = []
+
+    def save_index(self):
+        self.index_file.parent.mkdir(parents=True, exist_ok=True)
+        with open(self.index_file, "w", encoding="utf-8") as f:
+            json.dump(self.data, f, indent=2)
+
+    def index_project(self):
+        """Index all Python files in the project."""
+        self.data = []
+        for root, _, files in os.walk(self.project_path):
+            if ".akita" in root or ".git" in root or "__pycache__" in root:
+                continue
+
+            for file in files:
+                if file.endswith(".py"):
+                    full_path = Path(root) / file
+                    rel_path = full_path.relative_to(self.project_path)
+                    self._index_file(full_path, str(rel_path))
+        self.save_index()
+
+    def _index_file(self, file_path: Path, rel_path: str):
+        try:
+            definitions = self.ast_parser.get_definitions(str(file_path))
+            for d in definitions:
+                source = self.ast_parser.get_source_segment(
+                    str(file_path), d["start_line"], d["end_line"]
+                )
+
+                # Create a searchable representation (keyword rich)
+                # We normalize case and extract meaningful words
+                search_blob = f"{d['name']} {d['type']} {d['docstring'] or ''} {source}"
+                keywords = set(re.findall(r'\w+', search_blob.lower()))
+
+                self.data.append({
+                    "path": rel_path,
+                    "name": d["name"],
+                    "type": d["type"],
+                    "start_line": d["start_line"],
+                    "end_line": d["end_line"],
+                    "keywords": list(keywords),
+                    "content": source[:500]  # Store snippet preview
+                })
+        except Exception:
+            pass
+
+    def search(self, query: str, n_results: int = 5) -> List[Dict[str, Any]]:
+        """Search using Jaccard Similarity on keywords (Lite Contextual Search)."""
+        query_keywords = set(re.findall(r'\w+', query.lower()))
+        if not query_keywords:
+            return []
+
+        scores = []
+        for item in self.data:
+            item_keywords = set(item["keywords"])
+            intersection = query_keywords.intersection(item_keywords)
+            # Simple intersection count as score, weighted by name match
+            score = len(intersection)
+            if any(qk in item["name"].lower() for qk in query_keywords):
+                score += 5  # Boost explicit name matches
+
+            if score > 0:
+                scores.append((score, item))
+
+        # Sort by score descending
+        scores.sort(key=lambda x: x[0], reverse=True)
+
+        return [s[1] for s in scores[:n_results]]
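Note: the index is persisted to .akita/index.json and can be queried directly; a small sketch (the query text is illustrative):

    from akita.core.indexing import CodeIndexer

    indexer = CodeIndexer(".")
    indexer.index_project()  # walks *.py files, skipping .akita, .git and __pycache__
    for hit in indexer.search("apply unified diff", n_results=3):
        print(hit["path"], hit["name"], hit["start_line"])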
akita/core/plugins.py
ADDED
@@ -0,0 +1,81 @@
+import abc
+import importlib
+import importlib.metadata
+import inspect
+import pkgutil
+from pathlib import Path
+from typing import List, Dict, Any, Type, Optional
+
+class AkitaPlugin(abc.ABC):
+    """Base class for all AkitaLLM plugins."""
+
+    @property
+    @abc.abstractmethod
+    def name(self) -> str:
+        """Unique name of the plugin."""
+        pass
+
+    @property
+    @abc.abstractmethod
+    def description(self) -> str:
+        """Brief description of what the plugin does."""
+        pass
+
+    @abc.abstractmethod
+    def get_tools(self) -> List[Dict[str, Any]]:
+        """Return a list of tools (functions) provided by this plugin."""
+        pass
+
+class PluginManager:
+    def __init__(self, internal_plugins_path: Optional[str] = None):
+        self.plugins: Dict[str, AkitaPlugin] = {}
+        self.internal_path = internal_plugins_path or str(Path(__file__).parent.parent / "plugins")
+
+    def discover_all(self):
+        """Discover both internal and external plugins."""
+        self._discover_internal()
+        self._discover_external()
+
+    def _discover_internal(self):
+        """Load plugins from the akita/plugins directory."""
+        path = Path(self.internal_path)
+        if not path.exists():
+            return
+
+        for loader, module_name, is_pkg in pkgutil.iter_modules([str(path)]):
+            full_module_name = f"akita.plugins.{module_name}"
+            try:
+                module = importlib.import_module(full_module_name)
+                self._load_from_module(module)
+            except Exception as e:
+                print(f"Error loading internal plugin {module_name}: {e}")
+
+    def _discover_external(self):
+        """Load plugins registered via entry_points (akitallm.plugins)."""
+        try:
+            # Python 3.10+
+            eps = importlib.metadata.entry_points(group='akitallm.plugins')
+            for entry_point in eps:
+                try:
+                    plugin_class = entry_point.load()
+                    if inspect.isclass(plugin_class) and issubclass(plugin_class, AkitaPlugin):
+                        instance = plugin_class()
+                        self.plugins[instance.name] = instance
+                except Exception as e:
+                    print(f"Error loading external plugin {entry_point.name}: {e}")
+        except Exception:
+            pass
+
+    def _load_from_module(self, module):
+        """Extract AkitaPlugin classes from a module and instantiate them."""
+        for name, obj in inspect.getmembers(module):
+            if inspect.isclass(obj) and issubclass(obj, AkitaPlugin) and obj is not AkitaPlugin:
+                instance = obj()
+                self.plugins[instance.name] = instance
+
+    def get_all_tools(self) -> List[Dict[str, Any]]:
+        """Collect all tools from all loaded plugins."""
+        all_tools = []
+        for plugin in self.plugins.values():
+            all_tools.extend(plugin.get_tools())
+        return all_tools
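Note: a third-party plugin only needs to subclass AkitaPlugin and, per _discover_external above, register itself under the akitallm.plugins entry-point group. A hypothetical sketch (class name, tool name, and the stand-in callable are placeholders):

    from typing import Any, Dict, List
    from akita.core.plugins import AkitaPlugin

    class GitPlugin(AkitaPlugin):
        @property
        def name(self) -> str:
            return "git"

        @property
        def description(self) -> str:
            return "Basic git operations."

        def get_tools(self) -> List[Dict[str, Any]]:
            # The callable is a stand-in for a real implementation.
            return [{"name": "git_status", "description": "Show git status.",
                     "parameters": {}, "func": lambda: "clean"}]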
akita/core/trace.py
ADDED
@@ -0,0 +1,18 @@
+from typing import List, Dict, Any
+from datetime import datetime
+from pydantic import BaseModel, Field
+
+class TraceStep(BaseModel):
+    timestamp: datetime = Field(default_factory=datetime.now)
+    action: str
+    details: str
+    metadata: Dict[str, Any] = Field(default_factory=dict)
+
+class ReasoningTrace(BaseModel):
+    steps: List[TraceStep] = Field(default_factory=list)
+
+    def add_step(self, action: str, details: str, metadata: Dict[str, Any] = None):
+        self.steps.append(TraceStep(action=action, details=details, metadata=metadata or {}))
+
+    def __str__(self):
+        return "\n".join([f"[{s.timestamp.strftime('%H:%M:%S')}] {s.action}: {s.details}" for s in self.steps])
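Note: the trace is an ordinary pydantic model whose __str__ prints one timestamped line per step; a quick sketch:

    from akita.core.trace import ReasoningTrace

    trace = ReasoningTrace()
    trace.add_step("Context", "Building context for .")
    trace.add_step("LLM Response", "Received solution from model")
    print(trace)  # e.g. "[14:02:11] Context: Building context for ." plus one more line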
akita/plugins/__init__.py
ADDED
@@ -0,0 +1 @@
+# Official AkitaLLM Plugins
akita/plugins/files.py
ADDED
@@ -0,0 +1,34 @@
+from akita.core.plugins import AkitaPlugin
+from akita.tools.base import FileSystemTools
+from typing import List, Dict, Any
+
+class FilesPlugin(AkitaPlugin):
+    @property
+    def name(self) -> str:
+        return "files"
+
+    @property
+    def description(self) -> str:
+        return "Standard filesystem operations (read, write, list)."
+
+    def get_tools(self) -> List[Dict[str, Any]]:
+        return [
+            {
+                "name": "read_file",
+                "description": "Read content from a file.",
+                "parameters": {"path": "string"},
+                "func": FileSystemTools.read_file
+            },
+            {
+                "name": "write_file",
+                "description": "Write content to a file.",
+                "parameters": {"path": "string", "content": "string"},
+                "func": FileSystemTools.write_file
+            },
+            {
+                "name": "list_dir",
+                "description": "List files in a directory.",
+                "parameters": {"path": "string"},
+                "func": FileSystemTools.list_dir
+            }
+        ]
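Note: once discovered, the bundled files plugin is reachable through PluginManager; a sketch (the file being read is illustrative):

    from akita.core.plugins import PluginManager

    manager = PluginManager()
    manager.discover_all()
    tools = {t["name"]: t for t in manager.get_all_tools()}
    # tools now contains read_file, write_file and list_dir from FilesPlugin.
    print(tools["read_file"]["func"]("pyproject.toml")[:80])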
akita/reasoning/engine.py
CHANGED
@@ -1,16 +1,23 @@
 from typing import List, Dict, Any, Optional
 from akita.models.base import AIModel, get_model
-from akita.tools.base import ShellTools
+from akita.tools.base import ShellTools
+from akita.core.plugins import PluginManager
 from akita.tools.context import ContextBuilder
 from akita.schemas.review import ReviewResult
+from akita.core.trace import ReasoningTrace
+from akita.reasoning.session import ConversationSession
 import json
 from rich.console import Console
 
 console = Console()
-
+
 class ReasoningEngine:
     def __init__(self, model: AIModel):
         self.model = model
+        self.plugin_manager = PluginManager()
+        self.plugin_manager.discover_all()
+        self.trace = ReasoningTrace()
+        self.session: Optional[ConversationSession] = None
 
     def run_review(self, path: str) -> ReviewResult:
         """
@@ -91,27 +98,46 @@ class ReasoningEngine:
         ])
         return response.content
 
-    def run_solve(self, query: str, path: str = ".") -> str:
+    def run_solve(self, query: str, path: str = ".", session: Optional[ConversationSession] = None) -> str:
         """
         Generates a Unified Diff solution for the given query.
+        Supports iterative refinement if a session is provided.
         """
-
-        builder = ContextBuilder(path)
-        snapshot = builder.build()
-
-        files_str = "\n---\n".join([f"FILE: {f.path}\nCONTENT:\n{f.content}" for f in snapshot.files[:10]])  # Limit for solve
+        self.trace.add_step("Solve", f"Starting solve for query: {query}")
 
-
-        "
-
-
-
+        if not session:
+            self.trace.add_step("Context", f"Building context for {path}")
+            builder = ContextBuilder(path)
+            snapshot = builder.build(query=query)
+
+            files_str = "\n---\n".join([f"FILE: {f.path}\nCONTENT:\n{f.content}" for f in snapshot.files[:10]])
+
+            rag_str = ""
+            if snapshot.rag_snippets:
+                rag_str = "\n\nRELEVANT SNIPPETS (RAG):\n" + "\n".join([
+                    f"- {s['path']} ({s['name']}):\n{s['content']}" for s in snapshot.rag_snippets
+                ])
+
+            tools_info = "\n".join([f"- {t['name']}: {t['description']}" for t in self.plugin_manager.get_all_tools()])
+
+            system_prompt = (
+                "You are an Expert Programmer. Solve the requested task by providing code changes in Unified Diff format. "
+                "Respond ONLY with the Diff block. Use +++ and --- with file paths relative to project root.\n\n"
+                f"Available Tools:\n{tools_info}"
+            )
+
+            session = ConversationSession()
+            session.add_message("system", system_prompt)
+            session.add_message("user", f"Task: {query}\n\nContext:\n{files_str}{rag_str}")
+            self.session = session
+        else:
+            session.add_message("user", query)
+
+        console.print("🤖 [bold green]Thinking...[/]")
+        response = self.model.chat(session.get_messages_dict())
+        session.add_message("assistant", response.content)
 
-
-        response = self.model.chat([
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": user_prompt}
-        ])
+        self.trace.add_step("LLM Response", "Received solution from model")
         return response.content
 
     def run_pipeline(self, task: str):
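Note: the session-aware signature is what drives the CLI refine loop above; a condensed sketch of the calling pattern (queries are illustrative):

    from akita.models.base import get_model
    from akita.reasoning.engine import ReasoningEngine

    engine = ReasoningEngine(get_model())
    first = engine.run_solve("Add retries to the HTTP client")
    # Passing the stored session makes the second call a refinement, not a restart.
    refined = engine.run_solve("Also add a timeout", session=engine.session)
    print(engine.trace)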
@@ -0,0 +1,15 @@
|
|
|
1
|
+
from typing import List, Dict, Any
|
|
2
|
+
from pydantic import BaseModel, Field
|
|
3
|
+
|
|
4
|
+
class ChatMessage(BaseModel):
|
|
5
|
+
role: str
|
|
6
|
+
content: str
|
|
7
|
+
|
|
8
|
+
class ConversationSession(BaseModel):
|
|
9
|
+
messages: List[ChatMessage] = Field(default_factory=list)
|
|
10
|
+
|
|
11
|
+
def add_message(self, role: str, content: str):
|
|
12
|
+
self.messages.append(ChatMessage(role=role, content=content))
|
|
13
|
+
|
|
14
|
+
def get_messages_dict(self) -> List[Dict[str, str]]:
|
|
15
|
+
return [m.model_dump() for m in self.messages]
|
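Note: a minimal sketch of the session container on its own:

    from akita.reasoning.session import ConversationSession

    session = ConversationSession()
    session.add_message("system", "You are an Expert Programmer.")
    session.add_message("user", "Task: rename the config module.")
    # get_messages_dict() yields the [{'role': ..., 'content': ...}] shape a chat API expects.
    print(session.get_messages_dict())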
akita/tools/base.py
CHANGED
@@ -15,7 +15,12 @@ class FileSystemTools:
         return f.read()
 
     @staticmethod
-    def
+    def write_file(path: str, content: str):
+        with open(path, 'w', encoding='utf-8') as f:
+            f.write(content)
+
+    @staticmethod
+    def list_dir(path: str) -> List[str]:
         return os.listdir(path)
 
 class ShellTools:
akita/tools/context.py
CHANGED
@@ -1,16 +1,18 @@
 import os
 from pathlib import Path
-from typing import List, Dict, Optional
+from typing import Any, List, Dict, Optional
 from pydantic import BaseModel
 
 class FileContext(BaseModel):
     path: str
     content: str
     extension: str
+    summary: Optional[str] = None  # New field for semantic summary
 
 class ContextSnapshot(BaseModel):
     files: List[FileContext]
     project_structure: List[str]
+    rag_snippets: Optional[List[Dict[str, Any]]] = None
 
 class ContextBuilder:
     def __init__(
@@ -19,26 +21,49 @@ class ContextBuilder:
         extensions: Optional[List[str]] = None,
         exclude_dirs: Optional[List[str]] = None,
         max_file_size_kb: int = 50,
-        max_files: int = 50
+        max_files: int = 50,
+        use_semantical_context: bool = True
     ):
         self.base_path = Path(base_path)
         self.extensions = extensions or [".py", ".js", ".ts", ".cpp", ".h", ".toml", ".md", ".json"]
         self.exclude_dirs = exclude_dirs or [".git", ".venv", "node_modules", "__pycache__", "dist", "build"]
         self.max_file_size_kb = max_file_size_kb
         self.max_files = max_files
+        self.use_semantical_context = use_semantical_context
+
+        if self.use_semantical_context:
+            try:
+                from akita.core.ast_utils import ASTParser
+                from akita.core.indexing import CodeIndexer
+                self.ast_parser = ASTParser()
+                self.indexer = CodeIndexer(str(self.base_path))
+            except ImportError:
+                self.ast_parser = None
+                self.indexer = None
 
-    def build(self) -> ContextSnapshot:
-        """
+    def build(self, query: Optional[str] = None) -> ContextSnapshot:
+        """
+        Scan the path and build a context snapshot.
+        If a query is provided and indexer is available, it includes RAG snippets.
+        """
         files_context = []
         project_structure = []
+        rag_snippets = None
 
+        if query and self.indexer:
+            try:
+                # Ensure index exists (lazy indexing for now)
+                # In production, we'd have a separate command or check timestamps
+                rag_snippets = self.indexer.search(query, n_results=10)
+            except Exception:
+                pass
+
         if self.base_path.is_file():
             if self._should_include_file(self.base_path):
                 files_context.append(self._read_file(self.base_path))
             project_structure.append(str(self.base_path.name))
         else:
             for root, dirs, files in os.walk(self.base_path):
-                # Filter out excluded directories
                 dirs[:] = [d for d in dirs if d not in self.exclude_dirs]
 
                 rel_root = os.path.relpath(root, self.base_path)
@@ -54,7 +79,11 @@ class ContextBuilder:
                 files_context.append(context)
                 project_structure.append(os.path.join(rel_root, file))
 
-        return ContextSnapshot(
+        return ContextSnapshot(
+            files=files_context,
+            project_structure=project_structure,
+            rag_snippets=rag_snippets
+        )
 
     def _should_include_file(self, path: Path) -> bool:
         if path.name == ".env" or path.suffix == ".env":
@@ -66,8 +95,12 @@ class ContextBuilder:
         if not path.exists():
             return False
 
-        # Check size
-
+        # Check size (we can be more lenient if using semantic summaries)
+        size_limit = self.max_file_size_kb * 1024
+        if self.use_semantical_context:
+            size_limit *= 2  # Allow larger files if we can summarize them
+
+        if path.stat().st_size > size_limit:
             return False
 
         return True
@@ -76,10 +109,22 @@ class ContextBuilder:
         try:
             with open(path, 'r', encoding='utf-8') as f:
                 content = f.read()
+
+            summary = None
+            if self.use_semantical_context and self.ast_parser and path.suffix == ".py":
+                try:
+                    defs = self.ast_parser.get_definitions(str(path))
+                    if defs:
+                        summary_lines = [f"{d['type'].upper()} {d['name']} (L{d['start_line']}-L{d['end_line']})" for d in defs]
+                        summary = "\n".join(summary_lines)
+                except Exception:
+                    pass
+
             return FileContext(
                 path=str(path.relative_to(self.base_path) if self.base_path.is_dir() else path.name),
                 content=content,
-                extension=path.suffix
+                extension=path.suffix,
+                summary=summary
             )
         except Exception:
            return None
akita/tools/diff.py
CHANGED
@@ -1,35 +1,110 @@
 import os
+import shutil
+import pathlib
 from pathlib import Path
-import
+import whatthepatch
+from typing import List, Tuple, Optional
 
 class DiffApplier:
     @staticmethod
-    def apply_unified_diff(diff_text: str, base_path: str = "."):
+    def apply_unified_diff(diff_text: str, base_path: str = ".") -> bool:
         """
-
-
-        For AkitaLLM, we keep it simple for now.
+        Applies a unified diff to files in the base_path.
+        Includes backup and rollback logic for atomicity.
         """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        patches = list(whatthepatch.parse_patch(diff_text))
+        if not patches:
+            print("ERROR: No valid patches found in the diff text.")
+            return False
+
+        backups: List[Tuple[Path, Path]] = []
+        base = Path(base_path)
+        backup_dir = base / ".akita" / "backups"
+        backup_dir.mkdir(parents=True, exist_ok=True)
+
+        try:
+            for patch in patches:
+                if not patch.header:
+                    continue
+
+                # whatthepatch identifies the target file in the header
+                # We usually want the 'new' filename (the +++ part)
+                rel_path = patch.header.new_path
+                is_new = (patch.header.old_path == "/dev/null")
+                is_delete = (patch.header.new_path == "/dev/null")
+
+                if is_new:
+                    rel_path = patch.header.new_path
+                elif is_delete:
+                    rel_path = patch.header.old_path
+                else:
+                    rel_path = patch.header.new_path or patch.header.old_path
+
+                if not rel_path or rel_path == "/dev/null":
+                    continue
+
+                # Clean up path (sometimes they have a/ or b/ prefixes)
+                if rel_path.startswith("a/") or rel_path.startswith("b/"):
+                    rel_path = rel_path[2:]
+
+                target_file = (base / rel_path).resolve()
+
+                if not is_new and not target_file.exists():
+                    print(f"ERROR: Target file {target_file} does not exist for patching.")
+                    return False
+
+                # 1. Create backup
+                if target_file.exists():
+                    backup_file = backup_dir / f"{target_file.name}.bak"
+                    shutil.copy2(target_file, backup_file)
+                    backups.append((target_file, backup_file))
+                else:
+                    backups.append((target_file, None))  # Mark for deletion on rollback if it's a new file
+
+                # 2. Apply patch
+                content = ""
+                if target_file.exists():
+                    with open(target_file, "r", encoding="utf-8") as f:
+                        content = f.read()
+
+                lines = content.splitlines()
+                # whatthepatch apply_diff returns a generator of lines
+                patched_lines = whatthepatch.apply_diff(patch, lines)
+
+                if patched_lines is None:
+                    print(f"ERROR: Failed to apply patch to {rel_path}.")
+                    raise Exception(f"Patch failure on {rel_path}")
+
+                # 3. Write new content
+                target_file.parent.mkdir(parents=True, exist_ok=True)
+                with open(target_file, "w", encoding="utf-8") as f:
+                    f.write("\n".join(patched_lines) + "\n")
+
+            print(f"SUCCESS: Applied {len(patches)} patches successfully.")
+
+            # 4. Pre-flight Validation
+            # Run tests to ensure the patch didn't break anything
+            if (base / "tests").exists():
+                print("🧪 Running pre-flight validation (pytest)...")
+                import subprocess
+                # Run pytest in the base_path
+                result = subprocess.run(["pytest"], cwd=str(base), capture_output=True, text=True)
+                if result.returncode != 0:
+                    print(f"❌ Validation FAILED:\n{result.stdout}")
+                    raise Exception("Pre-flight validation failed. Tests are broken.")
+                else:
+                    print("✅ Pre-flight validation passed!")
+
+            return True
+
+        except Exception as e:
+            print(f"CRITICAL ERROR: {e}. Starting rollback...")
+            for target, backup in backups:
+                if backup and backup.exists():
+                    shutil.move(str(backup), str(target))
+                elif not backup and target.exists():
+                    target.unlink()  # Delete newly created file
+            return False
 
     @staticmethod
     def apply_whole_file(file_path: str, content: str):
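Note: apply_unified_diff now returns a bool and restores backups from .akita/backups on failure; a sketch (assumes a hello.py with the matching line exists):

    from akita.tools.diff import DiffApplier

    # A tiny illustrative patch; in practice the text comes from the model.
    diff_text = "\n".join([
        "--- a/hello.py",
        "+++ b/hello.py",
        "@@ -1 +1 @@",
        '-print("hello")',
        '+print("hello, world")',
    ])
    ok = DiffApplier.apply_unified_diff(diff_text, base_path=".")
    print("applied" if ok else "rolled back")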
{akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: akitallm
-Version: 0.1.1
+Version: 1.0.3
 Summary: AkitaLLM: An open-source local-first AI system for programming.
 Author: KerubinDev
 License: MIT
@@ -28,17 +28,11 @@ Requires-Dist: pytest-mock
 Requires-Dist: gitpython
 Requires-Dist: tomli-w
 Requires-Dist: tomli
+Requires-Dist: whatthepatch>=1.0.5
+Requires-Dist: tree-sitter>=0.21.3
+Requires-Dist: tree-sitter-python>=0.21.0
 Dynamic: license-file
 
-```text
-[ASCII-art "AkitaLLM" banner; exact spacing lost in extraction]
-```
-
 # AkitaLLM
 ### A deterministic, local-first AI orchestrator for software engineers.
 
@@ -132,7 +126,10 @@ akita solve "Improve error handling in the reasoning engine to prevent silent fa
 ---
 
-
+### 🔌 Extensibility
+AkitaLLM is built to be extended. You can create your own tools and plugins. Check the [Plugin Development Guide](PLUGINS.md) for more details.
+
+## 🤝 Contributing
 
 We are looking for engineers, not just coders. If you value robust abstractions, clean code, and predictable systems, your contribution is welcome.
akitallm-1.0.3.dist-info/RECORD
ADDED
@@ -0,0 +1,22 @@
+akita/__init__.py,sha256=rnObPjuBcEStqSO0S6gsdS_ot8ITOQjVj_-P1LUUYpg,22
+akita/cli/main.py,sha256=WOr-Z_PurjLxpHGT7gNpXwCv8Ghr0_vGjz4hbF6QunQ,10206
+akita/core/ast_utils.py,sha256=8JrTZgfWjIvbzY5KzV2G9PuyOi8IxVdLMjDCPPLiz_I,3127
+akita/core/config.py,sha256=GsfkKqg0SlMhsQ2fHsRATjzDy6BzVSX8efIyh8o8DZw,1312
+akita/core/indexing.py,sha256=2j_NK8buZ1ugH3foa9KFQEtGOD-Lgoo2Se3Lx6Q7ZO4,3686
+akita/core/plugins.py,sha256=P3azOFJ-yTw-kDdvjmHfNiU7nfvXQFadVPRnp1O7h-c,2951
+akita/core/trace.py,sha256=AxXUVZ7P8a0l5QTK5w9iSnUncUe62FGfRzDNN9xG5dg,692
+akita/models/base.py,sha256=eZGCT-R9WEBaf8WaiMAkpQTdACewl-1F2uDEhs6ocQ4,1584
+akita/plugins/__init__.py,sha256=kfjmQqBhzhqQrH-Rd0jh0KxXyIT9T5DtEh-BETQD0FM,28
+akita/plugins/files.py,sha256=Ha4YxmCz2G7iafqdr2TRE_xRlq1oeOBo6By3_S86jkE,1113
+akita/reasoning/engine.py,sha256=w1gB-Y_Tzoan66T-EFd04X7V5mDcVOkSP2G6X3lmvxU,8634
+akita/reasoning/session.py,sha256=rcJxcJXNjObjRwfuCY8NQKpKCqxeIppqkUpN-3mVRpE,472
+akita/schemas/review.py,sha256=zzjLzTuiEpJfu4etS0NUBWfS3wyNobNDmDMhb5amWTI,905
+akita/tools/base.py,sha256=jDA3jTP2qo6TjoTF6BSIb71BSfCJGSqbueIQz6lxuCM,1235
+akita/tools/context.py,sha256=i6QjKMsKCZMIdCx82hkhMUzBQJolrcch2v1x-6nLy8U,5008
+akita/tools/diff.py,sha256=bVH6_vHWoC9oYoS1RU4eOEnZHh6eFNtt6HCCzeGb6wY,4805
+akitallm-1.0.3.dist-info/licenses/LICENSE,sha256=WE7_tfGR-IzkulSh6Pos02gucCXKboaXguAdr0bI9V0,1067
+akitallm-1.0.3.dist-info/METADATA,sha256=RT2JGCtgRaip7AMajYySH9hIvAKnTCDq5TyvFBgUBHc,5500
+akitallm-1.0.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+akitallm-1.0.3.dist-info/entry_points.txt,sha256=Au1aAXCO2lX4kgElgknSVDpq7BcN5xAJJ0WvOAkhLzU,105
+akitallm-1.0.3.dist-info/top_level.txt,sha256=duGU-i6qCRLqjo_b1XUqfhlSQky3QIO0Hlvfn2OV3hU,6
+akitallm-1.0.3.dist-info/RECORD,,
akitallm-0.1.1.dist-info/RECORD
DELETED
@@ -1,15 +0,0 @@
-akita/__init__.py,sha256=rnObPjuBcEStqSO0S6gsdS_ot8ITOQjVj_-P1LUUYpg,22
-akita/cli/main.py,sha256=BRtB4klB1y2zFeoYIQfwUdcg91mvsmPf3EV1A73T89s,7582
-akita/core/config.py,sha256=GsfkKqg0SlMhsQ2fHsRATjzDy6BzVSX8efIyh8o8DZw,1312
-akita/models/base.py,sha256=eZGCT-R9WEBaf8WaiMAkpQTdACewl-1F2uDEhs6ocQ4,1584
-akita/reasoning/engine.py,sha256=b436nuhli-87ADtgMK2VJCv1WE5LBtGFFJaNS5chiYw,7389
-akita/schemas/review.py,sha256=zzjLzTuiEpJfu4etS0NUBWfS3wyNobNDmDMhb5amWTI,905
-akita/tools/base.py,sha256=cufLJv8CtmzzNdmMlu-d8iC5QYXWXxc-X4mCYisChkU,1091
-akita/tools/context.py,sha256=Gryy9SjAIXkujWJdue5WH1tB_nSPc7ZAxC5o_r2Gnlg,3084
-akita/tools/diff.py,sha256=gPDOPCqxgjT8DWRc-x4GgNEtt2LOZvICtYyUaf_aSWM,1422
-akitallm-0.1.1.dist-info/licenses/LICENSE,sha256=WE7_tfGR-IzkulSh6Pos02gucCXKboaXguAdr0bI9V0,1067
-akitallm-0.1.1.dist-info/METADATA,sha256=X7n2ohb9aBeiYp9Z6o8xrAbeFJJcQu_oZi_NWGpT2TM,5515
-akitallm-0.1.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-akitallm-0.1.1.dist-info/entry_points.txt,sha256=JGjCc_PusPfOXSVmY5U5VxDDG-yMNcXySluxVKjn_QA,45
-akitallm-0.1.1.dist-info/top_level.txt,sha256=duGU-i6qCRLqjo_b1XUqfhlSQky3QIO0Hlvfn2OV3hU,6
-akitallm-0.1.1.dist-info/RECORD,,
{akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/WHEEL
File without changes

{akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/licenses/LICENSE
File without changes

{akitallm-0.1.1.dist-info → akitallm-1.0.3.dist-info}/top_level.txt
File without changes