codegraph-cli 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. codegraph_cli/__init__.py +4 -0
  2. codegraph_cli/agents.py +191 -0
  3. codegraph_cli/bug_detector.py +386 -0
  4. codegraph_cli/chat_agent.py +352 -0
  5. codegraph_cli/chat_session.py +220 -0
  6. codegraph_cli/cli.py +330 -0
  7. codegraph_cli/cli_chat.py +367 -0
  8. codegraph_cli/cli_diagnose.py +133 -0
  9. codegraph_cli/cli_refactor.py +230 -0
  10. codegraph_cli/cli_setup.py +470 -0
  11. codegraph_cli/cli_test.py +177 -0
  12. codegraph_cli/cli_v2.py +267 -0
  13. codegraph_cli/codegen_agent.py +265 -0
  14. codegraph_cli/config.py +31 -0
  15. codegraph_cli/config_manager.py +341 -0
  16. codegraph_cli/context_manager.py +500 -0
  17. codegraph_cli/crew_agents.py +123 -0
  18. codegraph_cli/crew_chat.py +159 -0
  19. codegraph_cli/crew_tools.py +497 -0
  20. codegraph_cli/diff_engine.py +265 -0
  21. codegraph_cli/embeddings.py +241 -0
  22. codegraph_cli/graph_export.py +144 -0
  23. codegraph_cli/llm.py +642 -0
  24. codegraph_cli/models.py +47 -0
  25. codegraph_cli/models_v2.py +185 -0
  26. codegraph_cli/orchestrator.py +49 -0
  27. codegraph_cli/parser.py +800 -0
  28. codegraph_cli/performance_analyzer.py +223 -0
  29. codegraph_cli/project_context.py +230 -0
  30. codegraph_cli/rag.py +200 -0
  31. codegraph_cli/refactor_agent.py +452 -0
  32. codegraph_cli/security_scanner.py +366 -0
  33. codegraph_cli/storage.py +390 -0
  34. codegraph_cli/templates/graph_interactive.html +257 -0
  35. codegraph_cli/testgen_agent.py +316 -0
  36. codegraph_cli/validation_engine.py +285 -0
  37. codegraph_cli/vector_store.py +293 -0
  38. codegraph_cli-2.0.0.dist-info/METADATA +318 -0
  39. codegraph_cli-2.0.0.dist-info/RECORD +43 -0
  40. codegraph_cli-2.0.0.dist-info/WHEEL +5 -0
  41. codegraph_cli-2.0.0.dist-info/entry_points.txt +2 -0
  42. codegraph_cli-2.0.0.dist-info/licenses/LICENSE +21 -0
  43. codegraph_cli-2.0.0.dist-info/top_level.txt +1 -0
codegraph_cli/cli.py ADDED
@@ -0,0 +1,330 @@
1
+ """Typer-based CLI for CodeGraph local code intelligence."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+ from typing import Optional
7
+
8
+ import typer
9
+
10
+ from . import __version__, config
11
+ from .cli_chat import chat_app
12
+ from .cli_setup import setup as setup_wizard, set_llm, unset_llm, show_llm
13
+ from .cli_v2 import v2_app
14
+ from .graph_export import export_dot, export_html
15
+ from .orchestrator import MCPOrchestrator
16
+ from .storage import GraphStore, ProjectManager
17
+
18
+ app = typer.Typer(
19
+ help="🧠 CodeGraph CLI — AI-powered code intelligence & multi-agent assistant.",
20
+ no_args_is_help=True,
21
+ rich_markup_mode="rich",
22
+ )
23
+
24
+ # Register v2 commands
25
+ app.add_typer(v2_app, name="v2")
26
+
27
+ # Register chat commands
28
+ app.add_typer(chat_app, name="chat")
29
+
30
+ # Register setup wizard as direct command
31
+ app.command("setup")(setup_wizard)
32
+
33
+ # Register LLM management commands
34
+ app.command("set-llm")(set_llm)
35
+ app.command("unset-llm")(unset_llm)
36
+ app.command("show-llm")(show_llm)
37
+
38
+
39
def version_callback(value: bool):
    """Eager Typer option callback: print the version string and terminate."""
    if not value:
        return
    typer.echo(f"CodeGraph CLI v{__version__}")
    raise typer.Exit()
44
+
45
+
46
@app.callback()
def main(
    # Eager flag: version_callback runs (and exits) before any subcommand,
    # so `cg --version` works without a loaded project.
    version: Optional[bool] = typer.Option(
        None,
        "--version",
        "-v",
        help="Show version and exit.",
        callback=version_callback,
        is_eager=True,
    )
):
    """CodeGraph CLI: Local-first code intelligence with AI-powered impact analysis."""
    pass
59
+
60
+
61
+ def _project_name_from_path(project_path: Path) -> str:
62
+ return project_path.resolve().name.replace(" ", "_")
63
+
64
+
65
def _open_current_store(pm: ProjectManager) -> GraphStore:
    """Return a GraphStore for the active project.

    Raises typer.BadParameter when no project is loaded, or when the
    loaded project's on-disk memory directory is missing.
    """
    active = pm.get_current_project()
    if not active:
        raise typer.BadParameter("No project loaded. Use 'cg load-project <name>' or run 'cg index <path>'.")
    memory_dir = pm.project_dir(active)
    if not memory_dir.exists():
        raise typer.BadParameter(f"Loaded project '{active}' does not exist in memory.")
    return GraphStore(memory_dir)
73
+
74
+
75
@app.command("index")
def index_project(
    project_path: Path = typer.Argument(..., exists=True, file_okay=False, help="Path to source project."),
    project_name: Optional[str] = typer.Option(None, "--name", "-n", help="Explicit memory name for project."),
    llm_model: str = typer.Option("qwen2.5-coder:7b", help="Local LLM model name for reasoning operations."),
    llm_provider: str = typer.Option("ollama", help="LLM provider: ollama, groq, openai, anthropic."),
    llm_api_key: Optional[str] = typer.Option(None, help="API key for cloud LLM providers."),
):
    """Parse and index a project into local semantic memory."""
    from datetime import datetime

    manager = ProjectManager()
    source_root = project_path.resolve()
    memory_name = project_name or _project_name_from_path(source_root)
    memory_dir = manager.create_or_get_project(memory_name)

    store = GraphStore(memory_dir)
    orchestrator = MCPOrchestrator(
        store,
        llm_model=llm_model,
        llm_provider=llm_provider,
        llm_api_key=llm_api_key,
    )
    stats = orchestrator.index(source_root)

    # Persist project metadata (name, source path, timestamp) on top of
    # whatever the indexer already stored.
    merged_metadata = dict(store.get_metadata())
    merged_metadata.update(
        project_name=memory_name,
        source_path=str(source_root),
        indexed_at=datetime.now().isoformat(),
    )
    store.set_metadata(merged_metadata)

    manager.set_current_project(memory_name)
    store.close()

    typer.echo(f"Indexed '{source_root}' as project '{memory_name}'.")
    typer.echo(f"Nodes: {stats['nodes']} | Edges: {stats['edges']}")
113
+
114
+
115
@app.command("list-projects")
def list_projects():
    """List all persisted project memories."""
    manager = ProjectManager()
    names = manager.list_projects()
    active = manager.get_current_project()

    if not names:
        typer.echo("No projects indexed yet.")
        raise typer.Exit(code=0)

    # The active project is flagged with a leading asterisk.
    for name in names:
        prefix = "*" if name == active else " "
        typer.echo(f"{prefix} {name}")
129
+
130
+
131
@app.command("load-project")
def load_project(project_name: str = typer.Argument(..., help="Name of project memory to load.")):
    """Switch active project memory."""
    manager = ProjectManager()
    known = manager.list_projects()
    if project_name not in known:
        raise typer.BadParameter(f"Project '{project_name}' not found.")
    manager.set_current_project(project_name)
    typer.echo(f"Loaded project '{project_name}'.")
139
+
140
+
141
@app.command("unload-project")
def unload_project():
    """Unload active project memory without deleting data."""
    ProjectManager().unload_project()
    typer.echo("Unloaded active project.")
147
+
148
+
149
@app.command("delete-project")
def delete_project(project_name: str = typer.Argument(..., help="Project memory to delete.")):
    """Delete persisted project memory."""
    manager = ProjectManager()
    if not manager.delete_project(project_name):
        raise typer.BadParameter(f"Project '{project_name}' not found.")
    # Clear the active pointer if it referenced the project we just removed.
    if manager.get_current_project() == project_name:
        manager.unload_project()
    typer.echo(f"Deleted project '{project_name}'.")
159
+
160
+
161
@app.command("merge-projects")
def merge_projects(
    source_project: str = typer.Argument(..., help="Project to merge from."),
    target_project: str = typer.Argument(..., help="Project to merge into."),
):
    """Merge one project memory into another.

    Fix: both GraphStores were leaked when `merge_from` raised; they are
    now closed on every exit path via nested try/finally.
    """
    pm = ProjectManager()
    if source_project not in pm.list_projects() or target_project not in pm.list_projects():
        raise typer.BadParameter("Both source and target projects must exist.")

    source_store = GraphStore(pm.project_dir(source_project))
    try:
        target_store = GraphStore(pm.project_dir(target_project))
        try:
            target_store.merge_from(source_store, source_project)
        finally:
            target_store.close()
    finally:
        source_store.close()

    typer.echo(f"Merged '{source_project}' into '{target_project}'.")
178
+
179
+
180
@app.command("search")
def search(
    query: str = typer.Argument(..., help="Semantic query for code discovery."),
    top_k: int = typer.Option(5, min=1, max=30, help="Maximum number of matches."),
):
    """Run semantic search across currently loaded project memory.

    Fix: the store was leaked when orchestrator.search raised; it is now
    closed on every exit path (results, no results, exceptions).
    """
    pm = ProjectManager()
    store = _open_current_store(pm)
    try:
        orchestrator = MCPOrchestrator(store)
        results = orchestrator.search(query, top_k=top_k)

        if not results:
            typer.echo("No semantic matches found.")
            raise typer.Exit(code=0)

        for item in results:
            typer.echo(f"[{item.node_type}] {item.qualname} score={item.score:.3f}")
            typer.echo(f" {item.file_path}:{item.start_line}-{item.end_line}")
            snippet = item.snippet.strip().splitlines()
            if snippet:
                typer.echo(f" {snippet[0][:120]}")
    finally:
        store.close()
204
+
205
+
206
@app.command("impact")
def impact(
    symbol: str = typer.Argument(..., help="Function/class/module symbol to analyze."),
    hops: int = typer.Option(2, min=1, max=6, help="Dependency traversal depth."),
    show_graph: bool = typer.Option(True, "--show-graph/--no-graph", help="Include ASCII graph output."),
    llm_provider: str = typer.Option(
        config.LLM_PROVIDER,
        help="LLM provider: ollama, groq, openai, anthropic, gemini, openrouter.",
    ),
    llm_api_key: Optional[str] = typer.Option(
        config.LLM_API_KEY or None,
        help="API key for cloud LLM providers.",
    ),
    llm_model: str = typer.Option(
        config.LLM_MODEL,
        help="LLM model name.",
    ),
):
    """Run multi-hop impact analysis using graph + RAG + local LLM.

    Fix: the store was leaked when impact/search raised mid-command; it is
    now closed on every exit path via try/finally (including the
    symbol-not-found error exit).
    """
    pm = ProjectManager()
    store = _open_current_store(pm)
    try:
        orchestrator = MCPOrchestrator(
            store,
            llm_model=llm_model,
            llm_provider=llm_provider,
            llm_api_key=llm_api_key,
        )

        report = orchestrator.impact(symbol, hops=hops)

        # Heuristic: the orchestrator signals an unknown symbol through its
        # explanation text plus an empty impact list.
        if "not found" in report.explanation.lower() and not report.impacted:
            typer.echo(f"❌ Symbol '{symbol}' not found in current project.", err=True)

            # Suggest close matches via semantic search before bailing out.
            search_results = orchestrator.search(symbol, top_k=3)
            if search_results:
                typer.echo("\n💡 Did you mean one of these?", err=True)
                for result in search_results:
                    typer.echo(f" - {result.qualname} ({result.node_type})", err=True)
            typer.echo(f"\n💡 Tip: Use 'cg search {symbol}' to find similar symbols", err=True)
            raise typer.Exit(code=1)

        typer.echo(f"Root: {report.root}")
        if report.impacted:
            typer.echo("Impacted symbols:")
            for impacted in report.impacted:
                typer.echo(f"- {impacted}")
        else:
            typer.echo("Impacted symbols: none found")

        if show_graph:
            typer.echo("\nASCII graph:")
            typer.echo(report.ascii_graph)

        typer.echo("\nExplanation:")
        typer.echo(report.explanation)
    finally:
        store.close()
265
+
266
+
267
@app.command("graph")
def graph(
    symbol: str = typer.Argument(..., help="Function/class/module symbol to inspect."),
    depth: int = typer.Option(2, min=1, max=6, help="Traversal depth."),
):
    """Show lightweight ASCII dependency graph around a symbol.

    Fix: the store was leaked if graph traversal raised; it is now closed
    on every exit path via try/finally.
    """
    pm = ProjectManager()
    store = _open_current_store(pm)
    try:
        orchestrator = MCPOrchestrator(store)
        typer.echo(orchestrator.graph(symbol, depth=depth))
    finally:
        store.close()
279
+
280
+
281
@app.command("export-graph")
def export_graph(
    symbol: str = typer.Argument("", help="Optional focus symbol to export local subgraph."),
    fmt: str = typer.Option("html", "--format", "-f", help="Export format: html or dot."),
    output: Optional[Path] = typer.Option(None, "--output", "-o", help="Output file path."),
):
    """Export graph to standalone HTML or Graphviz DOT.

    Fix: the store was leaked when export_html/export_dot raised; it is
    now closed on every exit path via try/finally.
    """
    fmt = fmt.lower()
    if fmt not in {"html", "dot"}:
        raise typer.BadParameter("Format must be one of: html, dot")

    pm = ProjectManager()
    store = _open_current_store(pm)
    try:
        current = pm.get_current_project() or "project"

        # Default to <project>_graph.<fmt> in the working directory.
        if output is None:
            output = Path.cwd() / f"{current}_graph.{fmt}"

        if fmt == "html":
            export_html(store, output, focus=symbol)
        else:
            export_dot(store, output, focus=symbol)
    finally:
        store.close()

    typer.echo(f"Exported graph to {output}")
306
+
307
+
308
@app.command("current-project")
def current_project():
    """Print active project memory name."""
    active = ProjectManager().get_current_project()
    typer.echo(active if active else "No project loaded")
314
+
315
+
316
@app.command("rag-context")
def rag_context(
    query: str = typer.Argument(..., help="Query to retrieve code context without analysis."),
    top_k: int = typer.Option(6, min=1, max=30, help="Number of snippets to fetch."),
):
    """Retrieve top semantic snippets to inspect RAG context directly.

    Fix: the store was leaked when retrieval raised; it is now closed on
    every exit path via try/finally.
    """
    pm = ProjectManager()
    store = _open_current_store(pm)
    try:
        orchestrator = MCPOrchestrator(store)
        typer.echo(orchestrator.rag_context(query, top_k=top_k))
    finally:
        store.close()
327
+
328
+
329
# Allow direct execution (`python -m codegraph_cli.cli`) in addition to the
# installed console-script entry point.
if __name__ == "__main__":
    app()
@@ -0,0 +1,367 @@
1
+ """Interactive chat CLI for conversational coding assistance."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import shutil
7
+ from datetime import datetime
8
+ from pathlib import Path
9
+ from typing import Optional
10
+
11
+ import typer
12
+
13
+ from . import config
14
+ from .chat_agent import ChatAgent
15
+ from .chat_session import SessionManager
16
+ from .crew_chat import CrewChatAgent
17
+ from .llm import LocalLLM
18
+ from .orchestrator import MCPOrchestrator
19
+ from .rag import RAGRetriever
20
+ from .storage import GraphStore, ProjectManager
21
+
22
+
23
# ── Theme colors ──────────────────────────────────────────────
# Raw ANSI SGR escape sequences used throughout the chat UI.
C_RESET = "\033[0m"  # reset all attributes
C_BOLD = "\033[1m"
C_DIM = "\033[2m"
C_CYAN = "\033[36m"
C_GREEN = "\033[32m"
C_YELLOW = "\033[33m"
C_MAGENTA = "\033[35m"
C_RED = "\033[31m"
C_BLUE = "\033[34m"
C_WHITE = "\033[97m"  # bright white
C_BG_DARK = "\033[48;5;235m"  # 256-color dark-gray background
35
+
36
+
37
+ def _term_width() -> int:
38
+ """Get terminal width, default 80."""
39
+ return shutil.get_terminal_size((80, 24)).columns
40
+
41
+
42
def _box(text: str, color: str = C_CYAN, width: int = 0) -> str:
    """Render *text* inside a rounded Unicode box, one row per input line.

    Rows are padded/clipped to the inner width; total width defaults to
    the terminal width capped at 70 columns.
    """
    box_w = width or min(_term_width(), 70)
    inner = box_w - 4
    rows = [f"{color}╭{'─' * (box_w - 2)}╮{C_RESET}"]
    for raw in text.split("\n"):
        clipped = raw.ljust(inner)[:inner]
        rows.append(f"{color}│{C_RESET} {clipped} {color}│{C_RESET}")
    rows.append(f"{color}╰{'─' * (box_w - 2)}╯{C_RESET}")
    return "\n".join(rows)
53
+
54
+
55
def _divider(char: str = "─", color: str = C_DIM) -> str:
    """Return a horizontal rule sized to the terminal, capped at 70 columns."""
    rule_w = min(_term_width(), 70)
    return f"{color}{char * rule_w}{C_RESET}"
58
+
59
+
60
def _print_welcome(project_name: str, use_crew: bool, provider: str, model: str):
    """Print the modern welcome banner.

    Fix: removed the dead `banner` local — a multi-line string that was
    built but never printed (the boxed banner below supersedes it).
    """
    mode = "CrewAI Multi-Agent" if use_crew else "Chat"

    print(f"\n{_box(f'⚡ CodeGraph {mode}', C_CYAN)}")
    print(f" {C_DIM}Project {C_RESET}{C_WHITE}{project_name}{C_RESET}")
    print(f" {C_DIM}LLM {C_RESET}{C_WHITE}{provider}/{model}{C_RESET}")
    print(f" {C_DIM}Type {C_YELLOW}/help{C_DIM} for commands, {C_YELLOW}/exit{C_DIM} to quit{C_RESET}")
    print(_divider())
    print()
75
+
76
+
77
def _print_help(use_crew: bool):
    """Print the command reference, adapted to the active agent mode."""
    print(f"\n {C_BOLD}{C_CYAN}📖 Commands{C_RESET}")
    print(_divider("─", C_DIM))
    entries = [
        ("/exit", "Exit chat session"),
        ("/clear", "Clear conversation history & start fresh"),
        ("/new", "Start a brand new session"),
        ("/help", "Show this help"),
    ]
    # Crew mode exposes backup management; plain chat exposes proposals.
    if use_crew:
        mode_entries = [
            ("/backups", "List all file backups"),
            ("/rollback <file>", "Rollback a file to its last backup"),
            ("/undo <file>", "Alias for /rollback"),
        ]
    else:
        mode_entries = [
            ("/apply", "Apply pending code proposal"),
            ("/preview", "Preview pending changes"),
        ]
    for name, blurb in entries + mode_entries:
        print(f" {C_YELLOW}{name:<22}{C_RESET}{C_DIM}{blurb}{C_RESET}")
    print()
101
+
102
+
103
def _print_response(text: str):
    """Echo an assistant reply under a styled header, each line indented."""
    print(f"\n {C_GREEN}●{C_RESET} {C_BOLD}Assistant{C_RESET}")
    # One-space indent per line keeps the reply visually grouped.
    for reply_line in text.split("\n"):
        print(f" {reply_line}")
    print()
110
+
111
+
112
def _print_status(emoji: str, msg: str, color: str = C_GREEN):
    """Emit a single colored status line prefixed with an emoji."""
    status_line = f" {color}{emoji} {msg}{C_RESET}"
    print(status_line)
115
+
116
+
117
def start_chat_repl(
    agent,  # Can be ChatAgent or CrewChatAgent
    session_manager: SessionManager,
    project_name: str,
    session_id: Optional[str] = None,
    use_crew: bool = False,
    provider: str = "",
    model: str = "",
):
    """Run the interactive chat REPL until the user exits.

    Resumes `session_id` when given, otherwise the project's latest
    session (creating a new one when none exists). Slash commands are
    dispatched before any message is sent to the agent; every turn is
    persisted via `session_manager`, and Ctrl-C / EOF save and exit.

    NOTE(review): if `get_latest_session` returns an id whose file can no
    longer be loaded, `load_session` presumably returns None and the
    following `session.message_count` access would fail — confirm the
    manager guarantees loadability of the latest id.
    """
    # Load or create session
    if session_id:
        session = session_manager.load_session(session_id)
        if not session:
            _print_status("🆕", "Session not found. Starting new session.", C_YELLOW)
            session = session_manager.create_session(project_name)
        else:
            _print_status("📂", f"Resumed session ({session.message_count} messages)")
    else:
        # Try to load latest session for this project
        latest_id = session_manager.get_latest_session(project_name)
        if latest_id:
            session = session_manager.load_session(latest_id)
            _print_status("📂", f"Resumed session ({session.message_count} messages)")
        else:
            session = session_manager.create_session(project_name)
            _print_status("🆕", "Started new chat session")

    # Welcome
    _print_welcome(project_name, use_crew, provider, model)

    # REPL loop
    while True:
        try:
            # Prompt
            try:
                user_input = input(f" {C_BLUE}●{C_RESET} {C_BOLD}You ›{C_RESET} ").strip()
            except EOFError:
                print(f"\n {C_DIM}👋 Goodbye! Session saved.{C_RESET}\n")
                break

            if not user_input:
                continue

            # ── Handle commands ──────────────────────────────
            # Commands are matched on the first whitespace-separated token,
            # lower-cased; unmatched commands fall through to the warning
            # at the bottom of this branch.
            if user_input.startswith("/"):
                cmd = user_input.lower().split()[0]

                if cmd == "/exit":
                    session_manager.save_session(session)
                    print(f"\n {C_DIM}👋 Goodbye! Session saved.{C_RESET}\n")
                    break

                elif cmd == "/clear":
                    session.clear_history()
                    session.clear_proposals()
                    session_manager.save_session(session)
                    _print_status("🧹", "Conversation cleared. Fresh start!", C_GREEN)
                    print()
                    continue

                elif cmd == "/new":
                    session_manager.save_session(session)
                    session = session_manager.create_session(project_name)
                    _print_status("🆕", "New session started.", C_GREEN)
                    print()
                    continue

                elif cmd == "/help":
                    _print_help(use_crew)
                    continue

                # Plain-chat-only commands (proposal workflow).
                elif not use_crew:
                    if cmd == "/apply":
                        if hasattr(agent, 'apply_pending_proposal'):
                            result = agent.apply_pending_proposal(session)
                            _print_status("📋", result)
                        else:
                            _print_status("📋", "No pending proposals.", C_YELLOW)
                        continue
                    elif cmd == "/preview":
                        if session.pending_proposals:
                            for i, prop in enumerate(session.pending_proposals):
                                print(f"\n {C_BOLD}Proposal {i+1}:{C_RESET} {prop.description}")
                                for ch in prop.changes:
                                    icon = {"create": "🆕", "modify": "✏️", "delete": "🗑️"}.get(ch.change_type, "📄")
                                    print(f" {icon} {ch.file_path}")
                        else:
                            _print_status("📋", "No pending proposals.", C_YELLOW)
                        print()
                        continue

                # Crew-only commands (backup/rollback workflow).
                elif use_crew:
                    if cmd in ("/rollback", "/undo"):
                        parts = user_input.split(maxsplit=1)
                        if len(parts) < 2:
                            _print_status("❓", "Usage: /rollback <file_path> [timestamp]", C_YELLOW)
                            continue
                        args = parts[1].split()
                        file_path = args[0]
                        ts = args[1] if len(args) > 1 else None
                        result = agent.rollback(file_path, ts)
                        _print_status("⏪", result)
                        print()
                        continue

                    elif cmd == "/backups":
                        backups = agent.list_all_backups()
                        if not backups:
                            _print_status("📦", "No backups found.", C_YELLOW)
                        else:
                            print(f"\n {C_BOLD}{C_CYAN}📦 File Backups{C_RESET}")
                            print(_divider("─", C_DIM))
                            for b in backups:
                                ts = b["timestamp"]
                                fp = b["original_path"]
                                print(f" {C_WHITE}{ts}{C_RESET} {C_DIM}{fp}{C_RESET}")
                            print(f"\n {C_DIM}Use /rollback <file_path> to restore{C_RESET}")
                        print()
                        continue

                _print_status("❓", f"Unknown command: {cmd}. Type /help", C_YELLOW)
                continue

            # ── Process message ──────────────────────────────
            session.add_message("user", user_input, datetime.now().isoformat())

            # Show thinking indicator
            print(f"\n {C_DIM}⏳ Thinking...{C_RESET}", end="", flush=True)

            # Crew agents are stateless per call; the plain agent takes
            # the session for conversational context.
            if use_crew:
                response = agent.process_message(user_input)
            else:
                response = agent.process_message(user_input, session)

            # Clear thinking indicator
            print(f"\r{' ' * 30}\r", end="")

            # Save & display
            session.add_message("assistant", response, datetime.now().isoformat())
            session_manager.save_session(session)

            _print_response(response)

        except KeyboardInterrupt:
            session_manager.save_session(session)
            print(f"\n\n {C_DIM}👋 Goodbye! Session saved.{C_RESET}\n")
            break
        except Exception as e:
            print(f"\n {C_RED}❌ Error: {str(e)}{C_RESET}\n")
267
+
268
+
269
# ── Typer app ────────────────────────────────────────────────
# Sub-application mounted under the top-level CLI (as `cg chat …`).
chat_app = typer.Typer(help="💬 Interactive chat with AI agents")
271
+
272
+
273
@chat_app.command("start")
def start_chat(
    session_id: Optional[str] = typer.Option(None, "--session", "-s", help="Resume specific session ID"),
    llm_model: str = typer.Option(config.LLM_MODEL, help="LLM model to use"),
    llm_provider: str = typer.Option(config.LLM_PROVIDER, help="LLM provider"),
    llm_api_key: Optional[str] = typer.Option(config.LLM_API_KEY, help="API key for cloud providers"),
    llm_endpoint: Optional[str] = typer.Option(config.LLM_ENDPOINT, help="LLM endpoint URL"),
    use_crew: bool = typer.Option(False, "--crew", help="Use CrewAI multi-agent system"),
    new_session: bool = typer.Option(False, "--new", "-n", help="Force start a new session"),
):
    """Start interactive chat session.

    Wires up the project context, embeddings, LLM, and RAG retriever,
    builds either a CrewChatAgent (--crew) or a plain ChatAgent, then
    hands control to the blocking REPL. Requires an active project.
    """
    # Deferred imports keep non-chat subcommands fast to start.
    from .embeddings import HashEmbeddingModel
    from .project_context import ProjectContext

    pm = ProjectManager()
    project = pm.get_current_project()

    # A chat session is always bound to an indexed project.
    if not project:
        print(f"\n {C_RED}❌ No project loaded.{C_RESET}")
        print(f" {C_DIM}Use: cg load-project <name> or cg index <path>{C_RESET}\n")
        raise typer.Exit(1)

    # Initialize components
    context = ProjectContext(project, pm)
    embedding_model = HashEmbeddingModel()
    llm = LocalLLM(model=llm_model, provider=llm_provider, api_key=llm_api_key, endpoint=llm_endpoint)
    rag_retriever = RAGRetriever(context.store, embedding_model)

    if use_crew:
        print(f"\n {C_MAGENTA}🤖 Initializing CrewAI multi-agent system...{C_RESET}")
        agent = CrewChatAgent(context, llm, rag_retriever)
    else:
        # Plain chat additionally needs the orchestrator for graph/RAG ops.
        orchestrator = MCPOrchestrator(
            context.store,
            llm_model=llm_model,
            llm_provider=llm_provider,
            llm_api_key=llm_api_key,
            llm_endpoint=llm_endpoint
        )
        agent = ChatAgent(context, llm, orchestrator, rag_retriever)

    session_manager = SessionManager()

    # Force new session if requested
    effective_session_id = None if new_session else session_id

    try:
        start_chat_repl(
            agent, session_manager, project, effective_session_id,
            use_crew=use_crew,
            provider=llm_provider,
            model=llm_model,
        )
    finally:
        # Always release the project context, even if the REPL crashes.
        context.close()
328
+
329
+
330
@chat_app.command("list")
def list_sessions(
    project: Optional[str] = typer.Option(None, "--project", "-p", help="Filter by project")
):
    """List all chat sessions."""
    sessions = SessionManager().list_sessions(project_name=project)

    if not sessions:
        print(f"\n {C_DIM}No chat sessions found.{C_RESET}\n")
        return

    print(f"\n {C_BOLD}{C_CYAN}📋 Chat Sessions ({len(sessions)}){C_RESET}")
    print(_divider())

    # One summary row per session: project, message count, creation time,
    # and a truncated session id for use with --session.
    for idx, record in enumerate(sessions, 1):
        created = record['created_at'][:16].replace("T", " ")
        msgs = record['message_count']
        proj = record['project_name']
        sid = record['id'][:8]
        print(f" {C_WHITE}{idx}.{C_RESET} {C_BOLD}{proj}{C_RESET} {C_DIM}({msgs} msgs, {created}){C_RESET} {C_DIM}id:{sid}…{C_RESET}")

    print()
354
+
355
+
356
@chat_app.command("delete")
def delete_session(
    session_id: str = typer.Argument(..., help="Session ID to delete")
):
    """Delete a chat session."""
    manager = SessionManager()
    removed = manager.delete_session(session_id)

    if not removed:
        print(f" {C_RED}❌ Session not found{C_RESET}")
        raise typer.Exit(1)

    _print_status("✅", f"Deleted session {session_id[:8]}…")