nexus-dev 3.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nexus_dev/__init__.py +4 -0
- nexus_dev/agent_templates/__init__.py +26 -0
- nexus_dev/agent_templates/api_designer.yaml +26 -0
- nexus_dev/agent_templates/code_reviewer.yaml +26 -0
- nexus_dev/agent_templates/debug_detective.yaml +26 -0
- nexus_dev/agent_templates/doc_writer.yaml +26 -0
- nexus_dev/agent_templates/performance_optimizer.yaml +26 -0
- nexus_dev/agent_templates/refactor_architect.yaml +26 -0
- nexus_dev/agent_templates/security_auditor.yaml +26 -0
- nexus_dev/agent_templates/test_engineer.yaml +26 -0
- nexus_dev/agents/__init__.py +20 -0
- nexus_dev/agents/agent_config.py +97 -0
- nexus_dev/agents/agent_executor.py +197 -0
- nexus_dev/agents/agent_manager.py +104 -0
- nexus_dev/agents/prompt_factory.py +91 -0
- nexus_dev/chunkers/__init__.py +168 -0
- nexus_dev/chunkers/base.py +202 -0
- nexus_dev/chunkers/docs_chunker.py +291 -0
- nexus_dev/chunkers/java_chunker.py +343 -0
- nexus_dev/chunkers/javascript_chunker.py +312 -0
- nexus_dev/chunkers/python_chunker.py +308 -0
- nexus_dev/cli.py +2017 -0
- nexus_dev/config.py +261 -0
- nexus_dev/database.py +569 -0
- nexus_dev/embeddings.py +703 -0
- nexus_dev/gateway/__init__.py +10 -0
- nexus_dev/gateway/connection_manager.py +348 -0
- nexus_dev/github_importer.py +247 -0
- nexus_dev/mcp_client.py +281 -0
- nexus_dev/mcp_config.py +184 -0
- nexus_dev/schemas/mcp_config_schema.json +166 -0
- nexus_dev/server.py +1866 -0
- nexus_dev/templates/pre-commit-hook +56 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/__init__.py +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/api_designer.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/code_reviewer.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/debug_detective.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/doc_writer.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/performance_optimizer.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/refactor_architect.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/security_auditor.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/agent_templates/test_engineer.yaml +26 -0
- nexus_dev-3.3.1.data/data/nexus_dev/templates/pre-commit-hook +56 -0
- nexus_dev-3.3.1.dist-info/METADATA +668 -0
- nexus_dev-3.3.1.dist-info/RECORD +48 -0
- nexus_dev-3.3.1.dist-info/WHEEL +4 -0
- nexus_dev-3.3.1.dist-info/entry_points.txt +14 -0
- nexus_dev-3.3.1.dist-info/licenses/LICENSE +21 -0
nexus_dev/cli.py
ADDED
|
@@ -0,0 +1,2017 @@
|
|
|
1
|
+
"""Nexus-Dev CLI commands.
|
|
2
|
+
|
|
3
|
+
Provides commands for:
|
|
4
|
+
- nexus-init: Initialize Nexus-Dev in a project
|
|
5
|
+
- nexus-index: Manually index files
|
|
6
|
+
- nexus-status: Show project statistics
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import asyncio
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import shutil
|
|
15
|
+
import stat
|
|
16
|
+
from collections import defaultdict
|
|
17
|
+
from collections.abc import Coroutine
|
|
18
|
+
from datetime import datetime
|
|
19
|
+
from fnmatch import fnmatch
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Any, Literal
|
|
22
|
+
|
|
23
|
+
import click
|
|
24
|
+
import yaml
|
|
25
|
+
|
|
26
|
+
from .chunkers import ChunkerRegistry
|
|
27
|
+
from .config import NexusConfig
|
|
28
|
+
from .database import Document, DocumentType, NexusDatabase, generate_document_id
|
|
29
|
+
from .embeddings import create_embedder
|
|
30
|
+
from .github_importer import GitHubImporter
|
|
31
|
+
from .mcp_client import MCPClientManager, MCPServerConnection
|
|
32
|
+
from .mcp_config import MCPConfig, MCPServerConfig
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _find_project_root(start_path: Path | None = None) -> Path | None:
|
|
36
|
+
"""Find project root by walking up to find nexus_config.json.
|
|
37
|
+
|
|
38
|
+
Args:
|
|
39
|
+
start_path: Starting directory (defaults to cwd)
|
|
40
|
+
|
|
41
|
+
Returns:
|
|
42
|
+
Path to project root if found, None otherwise.
|
|
43
|
+
"""
|
|
44
|
+
current = (start_path or Path.cwd()).resolve()
|
|
45
|
+
|
|
46
|
+
for parent in [current] + list(current.parents):
|
|
47
|
+
if (parent / "nexus_config.json").exists():
|
|
48
|
+
return parent
|
|
49
|
+
if parent == parent.parent: # Reached filesystem root
|
|
50
|
+
break
|
|
51
|
+
|
|
52
|
+
return None
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _run_async(coro: Coroutine[Any, Any, Any]) -> Any:
|
|
56
|
+
"""Run async function in sync context."""
|
|
57
|
+
return asyncio.get_event_loop().run_until_complete(coro)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@click.group()
# Resolve the version from the installed package metadata instead of a
# hard-coded string: the previous literal "0.1.0" had drifted from the
# actual distribution version (3.3.1) and would keep going stale.
@click.version_option(package_name="nexus-dev", prog_name="nexus-dev")
def cli() -> None:
    """Nexus-Dev CLI - Local RAG for AI coding agents.

    Nexus-Dev provides persistent memory for AI coding assistants by indexing
    your code and documentation into a local vector database.
    """
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@cli.command("init")
@click.option(
    "--project-name",
    help="Human-readable name for the project",
)
@click.option(
    "--embedding-provider",
    type=click.Choice(["openai", "ollama"]),
    default="openai",
    help="Embedding provider to use (default: openai)",
)
@click.option(
    "--install-hook/--no-hook",
    default=False,
    help="Install pre-commit hook for automatic indexing",
)
@click.option(
    "--link-hook",
    is_flag=True,
    default=False,
    help="Install hook linked to parent project configuration (for multi-repo projects)",
)
@click.option(
    "--discover-repos",
    is_flag=True,
    default=False,
    help="Auto-discover git repositories and offer to install hooks",
)
def init_command(
    project_name: str | None,
    embedding_provider: Literal["openai", "ollama"],
    install_hook: bool,
    link_hook: bool,
    discover_repos: bool,
) -> None:
    """Initialize Nexus-Dev in the current repository.

    Creates configuration file, lessons directory, and optionally installs
    the pre-commit hook for automatic indexing.

    Multi-repository projects:
    - Use --link-hook to install a hook in a sub-repository that links to parent config
    - Use --discover-repos to auto-find all git repos and install hooks
    """
    cwd = Path.cwd()

    # Handle --link-hook: Install hook in sub-repo linked to parent config
    if link_hook:
        git_dir = cwd / ".git"
        if not git_dir.exists():
            click.echo("❌ Not a git repository. Cannot install hook.", err=True)
            return

        # Find parent project root
        # NOTE(review): the search starts at cwd.parent, so a
        # nexus_config.json in the current repo itself is deliberately
        # skipped — the link must point to an *ancestor* project.
        project_root = _find_project_root(cwd.parent)
        if not project_root:
            click.echo("❌ No parent nexus_config.json found.", err=True)
            click.echo(
                " Run 'nexus-init' in the parent directory first, "
                "or use 'nexus-init' without --link-hook to create a new project."
            )
            return

        # Load parent config to display project info
        parent_config = NexusConfig.load(project_root / "nexus_config.json")

        # Install hook
        _install_hook(cwd, project_root)

        click.echo("")
        click.echo(f"✅ Linked to parent project: {parent_config.project_name}")
        click.echo(f" Project ID: {parent_config.project_id}")
        click.echo(f" Project Root: {project_root}")
        return

    # Handle --discover-repos: Find and install hooks in all sub-repositories
    if discover_repos:
        # Ensure we have a config in current directory
        config_path = cwd / "nexus_config.json"
        if not config_path.exists():
            click.echo("❌ No nexus_config.json in current directory.", err=True)
            click.echo(" Run 'nexus-init' first to create project configuration.")
            return

        config = NexusConfig.load(config_path)

        # Find all .git directories
        git_repos = []
        for root, dirs, _ in os.walk(cwd):
            # Skip the root .git if there is one
            if ".git" in dirs:
                repo_path = Path(root)
                if repo_path != cwd:  # Don't include parent directory itself
                    git_repos.append(repo_path)
                # Don't traverse into .git directories
                # (mutating dirs in place prunes os.walk's descent)
                dirs.remove(".git")

        if not git_repos:
            click.echo("No git repositories found in subdirectories.")
            return

        click.echo(f"Found {len(git_repos)} git repositor{'y' if len(git_repos) == 1 else 'ies'}:")
        for repo in git_repos:
            rel_path = repo.relative_to(cwd)
            click.echo(f" 📁 {rel_path}")

        click.echo("")
        if not click.confirm("Install hooks in all repositories?"):
            click.echo("Aborted.")
            return

        # Install hooks
        # Per-repo failures are reported but do not stop the batch.
        installed = 0
        for repo in git_repos:
            try:
                _install_hook(repo, cwd)
                installed += 1
                rel_path = repo.relative_to(cwd)
                click.echo(f" ✅ {rel_path}")
            except Exception as e:
                rel_path = repo.relative_to(cwd)
                click.echo(f" ❌ {rel_path}: {e}")

        click.echo("")
        click.echo(f"✅ Installed hooks in {installed}/{len(git_repos)} repositories")
        click.echo(f" All repositories linked to project: {config.project_name}")
        return

    # Normal initialization flow
    config_path = cwd / "nexus_config.json"

    # Prompt for project name if not provided
    if not project_name:
        project_name = click.prompt("Project name")

    # Check if already initialized
    if config_path.exists():
        click.echo("⚠️ nexus_config.json already exists.")
        if not click.confirm("Overwrite existing configuration?"):
            click.echo("Aborted.")
            return

    # Create configuration
    config = NexusConfig.create_new(
        project_name=project_name,
        embedding_provider=embedding_provider,
    )
    config.save(config_path)
    click.echo("✅ Created nexus_config.json")

    # Create .nexus/lessons directory
    lessons_dir = cwd / ".nexus" / "lessons"
    lessons_dir.mkdir(parents=True, exist_ok=True)
    click.echo("✅ Created .nexus/lessons/")

    # Create .gitkeep so the directory is tracked
    gitkeep = lessons_dir / ".gitkeep"
    gitkeep.touch(exist_ok=True)

    # Create database directory
    db_path = config.get_db_path()
    db_path.mkdir(parents=True, exist_ok=True)
    click.echo(f"✅ Created database directory at {db_path}")

    # Optionally install pre-commit hook
    if install_hook:
        _install_hook(cwd, cwd)

    # Configure .gitignore
    click.echo("")
    ignore_choice = click.prompt(
        "Configure .gitignore for .nexus folder?",
        type=click.Choice(["allow-lessons", "ignore-all", "skip"]),
        default="allow-lessons",
        show_default=True,
    )

    if ignore_choice != "skip":
        _update_gitignore(cwd, ignore_choice)

    click.echo("")
    click.echo(f"Project ID: {config.project_id}")

    if embedding_provider == "openai":
        click.echo("")
        click.echo("⚠️ Using OpenAI embeddings. Ensure OPENAI_API_KEY is set.")

    # Print a ready-to-paste system-prompt snippet for AI agents.
    click.echo("")
    click.echo("----------------------------------------------------------------")
    click.echo("🤖 COPY-PASTE THIS INTO YOUR AGENT'S SYSTEM PROMPT OR RULES:")
    click.echo("----------------------------------------------------------------")
    click.echo(f"""
## Nexus-Dev Knowledge Base

You have access to a local RAG system for this project.

**Project ID:** {config.project_id}

**MANDATORY**: You MUST use `nexus-dev` tools BEFORE answering questions about this code.
1. `search_knowledge("{config.project_name} <query>")` - Search code, docs, and lessons
2. `search_code("<class/function_name>")` - Find specific code definitions
3. `search_lessons("<error/problem>")` - Check for past solutions
4. `record_lesson(...)` - Save solutions after fixing non-trivial bugs

**Best Practice:**
- Start every session with `get_project_context()`
- Search before writing code
- Record insights with `record_insight()`
""")
    click.echo("----------------------------------------------------------------")
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def _install_hook(git_dir_parent: Path, project_root: Path | None = None) -> None:
    """Install pre-commit hook.

    Copies the packaged hook template into ``.git/hooks/pre-commit`` when
    available, otherwise writes an inline fallback script, then marks the
    hook executable.  Existing hooks are never overwritten.

    Args:
        git_dir_parent: Directory containing .git/
        project_root: Optional project root for multi-repo setups.
            If None, assumes git_dir_parent is the project root.
    """
    git_dir = git_dir_parent / ".git"
    if not git_dir.exists():
        click.echo("⚠️ Not a git repository. Skipping hook installation.")
        return

    hooks_dir = git_dir / "hooks"
    hooks_dir.mkdir(exist_ok=True)

    hook_path = hooks_dir / "pre-commit"

    # Check if hook already exists
    if hook_path.exists():
        click.echo("⚠️ pre-commit hook already exists. Skipping.")
        return

    # Copy template
    template_path = Path(__file__).parent / "templates" / "pre-commit-hook"
    if template_path.exists():
        shutil.copy(template_path, hook_path)
    else:
        # Write inline
        # NOTE: inside this non-raw string, "\\." becomes a literal "\."
        # in the written script, and a bare "\" at end-of-line is a
        # Python line continuation that joins the shell pipeline onto
        # one line — both produce valid bash.
        # NOTE(review): the fallback invokes "python"; on systems that
        # ship only "python3" the hook will silently no-op — confirm.
        hook_content = """#!/bin/bash
# Nexus-Dev Pre-commit Hook

set -e

echo "🧠 Nexus-Dev: Checking for files to index..."

MODIFIED_FILES=$(git diff --cached --name-only --diff-filter=ACM | \
    grep -E '\\.(py|js|jsx|ts|tsx|java)$' || true)

if [ -n "$MODIFIED_FILES" ]; then
    echo "📁 Indexing modified code files..."
    for file in $MODIFIED_FILES; do
        if [ -f "$file" ]; then
            python -m nexus_dev.cli index "$file" --quiet 2>/dev/null || true
        fi
    done
fi

LESSON_FILES=$(git diff --cached --name-only --diff-filter=A | \
    grep -E '^\\.nexus/lessons/.*\\.md$' || true)

if [ -n "$LESSON_FILES" ]; then
    echo "📚 Indexing new lessons..."
    for file in $LESSON_FILES; do
        if [ -f "$file" ]; then
            python -m nexus_dev.cli index-lesson "$file" --quiet 2>/dev/null || true
        fi
    done
fi

echo "✅ Nexus-Dev indexing complete"
"""
        hook_path.write_text(hook_content)

    # Make executable
    # Preserve existing mode bits and add execute for user/group/other.
    current_mode = hook_path.stat().st_mode
    hook_path.chmod(current_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    if project_root and project_root != git_dir_parent:
        click.echo(f"✅ Installed pre-commit hook (linked to {project_root.name}/)")
    else:
        click.echo("✅ Installed pre-commit hook")
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
def _update_gitignore(cwd: Path, choice: str) -> None:
    """Update .gitignore based on user choice.

    Args:
        cwd: Project directory whose .gitignore is created or appended.
        choice: Either "allow-lessons" (ignore .nexus/ but keep lessons
            tracked) or "ignore-all" (ignore the whole .nexus/ folder);
            any other value is a no-op.
    """
    gitignore_path = cwd / ".gitignore"

    # define mapping for content
    # NOTE(review): the entries ignore ".nexus_config.json" (leading
    # dot), but init_command writes "nexus_config.json" without one —
    # this pattern may never match anything; confirm which name is
    # intended before changing either side.
    content_map = {
        "allow-lessons": [
            "\n# Nexus-Dev",
            ".nexus_config.json",
            ".nexus/*",
            "!.nexus/lessons/",
            "",
        ],
        "ignore-all": ["\n# Nexus-Dev", ".nexus_config.json", ".nexus/", ""],
    }

    new_lines = content_map.get(choice, [])
    if not new_lines:
        return

    # Create if doesn't exist
    if not gitignore_path.exists():
        gitignore_path.write_text("\n".join(new_lines), encoding="utf-8")
        click.echo("✅ Created .gitignore")
        return

    # Append if exists
    current_content = gitignore_path.read_text(encoding="utf-8")

    # simple check to avoid duplication (imperfect but sufficient for init)
    if ".nexus" in current_content:
        click.echo("⚠️ .nexus already in .gitignore, skipping update.")
        return

    with gitignore_path.open("a", encoding="utf-8") as f:
        f.write("\n".join(new_lines))

    click.echo(f"✅ Updated .gitignore ({choice})")
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
@cli.command("index")
@click.argument("paths", nargs=-1, required=True)
@click.option(
    "-r",
    "--recursive",
    is_flag=True,
    help="Index directories recursively",
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help="Suppress output",
)
def index_command(paths: tuple[str, ...], recursive: bool, quiet: bool) -> None:
    """Manually index files or directories.

    PATHS can be files or directories. Use -r to recursively index directories.

    Examples:
        nexus-index src/
        nexus-index docs/ -r
        nexus-index main.py utils.py
    """
    # Load config
    config_path = Path.cwd() / "nexus_config.json"
    if not config_path.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    config = NexusConfig.load(config_path)
    embedder = create_embedder(config)
    database = NexusDatabase(config, embedder)
    database.connect()

    # Collect files to index
    files_to_index: list[Path] = []

    for path_str in paths:
        path = Path(path_str)
        if not path.is_absolute():
            path = Path.cwd() / path

        if not path.exists():
            if not quiet:
                click.echo(f"⚠️ Path not found: {path_str}")
            continue

        if path.is_file():
            files_to_index.append(path)
        elif path.is_dir():
            if recursive:
                # Recursively find files using os.walk to prune ignored directories
                for root, dirs, files in os.walk(path):
                    root_path = Path(root)

                    # Compute relative path for pattern matching
                    # NOTE(review): rel_root is computed but never used
                    # below — check_path is derived per-directory instead;
                    # confirm this can be removed.
                    rel_root = str(root_path.relative_to(Path.cwd()))
                    if rel_root == ".":
                        rel_root = ""

                    # Filter directories to prevent traversal into ignored paths
                    # We must modify dirs in-place to prune the walk
                    i = 0
                    while i < len(dirs):
                        d = dirs[i]
                        d_path = root_path / d
                        # We construct a mock path string for the directory check
                        # (relative path + directory name)
                        check_path = str(d_path.relative_to(Path.cwd()))

                        # Use a simpler check: if the directory ITSELF matches exclude pattern
                        # we should remove it.
                        should_exclude = False

                        # Check excludes for this directory
                        # We treat the directory string as match target for exclude patterns
                        # excluding the trailing slash for fnmatch
                        for pattern in config.exclude_patterns:
                            # Normalize pattern: remove trailing slash for directory matching
                            clean_pat = pattern.rstrip("/")
                            if clean_pat.endswith("/**"):
                                clean_pat = clean_pat[:-3]

                            # Simple fnmatch on the logic
                            if fnmatch(check_path, pattern) or fnmatch(check_path, clean_pat):
                                should_exclude = True
                                break

                            # Handle recursive wildcard start (e.g. **/node_modules)
                            if clean_pat.startswith("**/"):
                                suffix = clean_pat[3:]
                                if (
                                    check_path == suffix
                                    or check_path.endswith("/" + suffix)
                                    or fnmatch(check_path, suffix)
                                ):
                                    should_exclude = True
                                    break

                        if should_exclude:
                            if not quiet:
                                # Optional: debug output if needed, but keeping it clean for now
                                pass
                            # Deleting at index i (no increment) keeps the
                            # scan position correct after removal.
                            del dirs[i]
                        else:
                            i += 1

                    # Add files
                    for file in files:
                        file_path = root_path / file
                        if _should_index(file_path, config):
                            files_to_index.append(file_path)
            else:
                # Only immediate children
                for file_path in path.iterdir():
                    if file_path.is_file():
                        # For explicit paths/directories, we check excludes but ignore
                        # include patterns to allow indexing "anything I point at"
                        # unless specifically excluded
                        is_excluded = _is_excluded(file_path, config)
                        if not is_excluded:
                            files_to_index.append(file_path)

    if not files_to_index:
        if not quiet:
            click.echo("No files to index.")
        return

    # In interactive mode, show a summary and ask before doing work.
    if not quiet:
        _print_file_summary(files_to_index)
        if not click.confirm("Proceed with indexing?"):
            click.echo("Aborted.")
            return

    # Index files
    total_chunks = 0
    errors = 0

    for file_path in files_to_index:
        try:
            # Read file
            content = file_path.read_text(encoding="utf-8")

            # Detect smart type from frontmatter
            detected_type, metadata = _detect_document_type_and_metadata(content)

            # Determine type
            if detected_type:
                doc_type = detected_type
            else:
                ext = file_path.suffix.lower()
                doc_type = (
                    DocumentType.DOCUMENTATION
                    if ext in (".md", ".markdown", ".rst", ".txt")
                    else DocumentType.CODE
                )

            # Delete existing
            # (re-indexing a file replaces its previous chunks)
            _run_async(database.delete_by_file(str(file_path), config.project_id))

            # Chunk file
            chunks = ChunkerRegistry.chunk_file(file_path, content)

            if chunks:
                # Generate embeddings and store
                chunk_count = _run_async(
                    _index_chunks_sync(
                        chunks,
                        config.project_id,
                        doc_type,
                        embedder,
                        database,
                        metadata=metadata,
                    )
                )
                total_chunks += chunk_count

                if not quiet:
                    click.echo(f" ✅ {file_path.name}: {chunk_count} chunks")

        except Exception as e:
            # A single bad file must not abort the whole run.
            errors += 1
            if not quiet:
                click.echo(f" ❌ {file_path.name}: {e!s}")

    if not quiet:
        click.echo("")
        click.echo(f"✅ Indexed {total_chunks} chunks from {len(files_to_index) - errors} files")
        if errors:
            click.echo(f"⚠️ {errors} file(s) failed")
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
async def _index_chunks_sync(
    chunks: list[Any],
    project_id: str,
    doc_type: DocumentType,
    embedder: Any,
    database: NexusDatabase,
    metadata: dict[str, Any] | None = None,
) -> int:
    """Embed the given chunks and upsert them into the database.

    Args:
        chunks: Chunk objects exposing get_searchable_text(), file_path,
            name, start_line, end_line, chunk_type and language.
        project_id: Identifier of the owning project.
        doc_type: Document type assigned to every stored chunk.
        embedder: Embedding backend with an async embed_batch() method.
        database: Target database for the upsert.
        metadata: Optional frontmatter metadata; only a parseable
            "timestamp" entry is applied to the documents.

    Returns:
        The number of documents written.
    """
    if not chunks:
        return 0

    # Embed every chunk's searchable text in one batch call.
    searchable = [piece.get_searchable_text() for piece in chunks]
    vectors = await embedder.embed_batch(searchable)

    # Resolve an optional timestamp override from the frontmatter once,
    # up front — it is identical for every chunk of the file.
    ts_override = None
    if metadata and "timestamp" in metadata:
        raw_ts = metadata["timestamp"]
        try:
            if isinstance(raw_ts, str):
                # ISO-formatted string from an export.
                ts_override = datetime.fromisoformat(raw_ts)
            elif isinstance(raw_ts, datetime):
                ts_override = raw_ts
        except Exception:
            # Unparseable timestamp: keep the document's default.
            ts_override = None

    docs: list[Document] = []
    for piece, vec in zip(chunks, vectors, strict=True):
        kwargs: dict[str, Any] = {
            "id": generate_document_id(
                project_id,
                piece.file_path,
                piece.name,
                piece.start_line,
            ),
            "text": piece.get_searchable_text(),
            "vector": vec,
            "project_id": project_id,
            "file_path": piece.file_path,
            "doc_type": doc_type,
            "chunk_type": piece.chunk_type.value,
            "language": piece.language,
            "name": piece.name,
            "start_line": piece.start_line,
            "end_line": piece.end_line,
        }
        if ts_override is not None:
            kwargs["timestamp"] = ts_override
        docs.append(Document(**kwargs))

    await database.upsert_documents(docs)
    return len(docs)
|
|
645
|
+
|
|
646
|
+
|
|
647
|
+
def _should_index(file_path: Path, config: NexusConfig) -> bool:
|
|
648
|
+
"""Check if file should be indexed based on config patterns."""
|
|
649
|
+
rel_path = str(file_path.relative_to(Path.cwd()))
|
|
650
|
+
|
|
651
|
+
# Check exclude patterns
|
|
652
|
+
for pattern in config.exclude_patterns:
|
|
653
|
+
if fnmatch(rel_path, pattern):
|
|
654
|
+
return False
|
|
655
|
+
|
|
656
|
+
# Also check without leading **/ if present (for root matches)
|
|
657
|
+
if pattern.startswith("**/") and fnmatch(rel_path, pattern[3:]):
|
|
658
|
+
return False
|
|
659
|
+
|
|
660
|
+
# Check include patterns
|
|
661
|
+
for pattern in config.include_patterns:
|
|
662
|
+
if fnmatch(rel_path, pattern):
|
|
663
|
+
return True
|
|
664
|
+
|
|
665
|
+
# Also include docs folders
|
|
666
|
+
for docs_folder in config.docs_folders:
|
|
667
|
+
if rel_path.startswith(docs_folder) or rel_path == docs_folder.rstrip("/"):
|
|
668
|
+
return True
|
|
669
|
+
|
|
670
|
+
return False
|
|
671
|
+
|
|
672
|
+
|
|
673
|
+
def _is_excluded(file_path: Path, config: NexusConfig) -> bool:
|
|
674
|
+
"""Check if file is explicitly excluded by config patterns."""
|
|
675
|
+
rel_path = str(file_path.relative_to(Path.cwd()))
|
|
676
|
+
|
|
677
|
+
# Check exclude patterns
|
|
678
|
+
for pattern in config.exclude_patterns:
|
|
679
|
+
if fnmatch(rel_path, pattern):
|
|
680
|
+
return True
|
|
681
|
+
|
|
682
|
+
# Also check without leading **/ if present (for root matches)
|
|
683
|
+
if pattern.startswith("**/") and fnmatch(rel_path, pattern[3:]):
|
|
684
|
+
return True
|
|
685
|
+
|
|
686
|
+
return False
|
|
687
|
+
|
|
688
|
+
|
|
689
|
+
def _detect_document_type_and_metadata(
|
|
690
|
+
content: str,
|
|
691
|
+
) -> tuple[DocumentType | None, dict[str, Any]]:
|
|
692
|
+
"""Detect document type and metadata from frontmatter."""
|
|
693
|
+
if not content.startswith("---\n"):
|
|
694
|
+
return None, {}
|
|
695
|
+
|
|
696
|
+
try:
|
|
697
|
+
# Extract frontmatter
|
|
698
|
+
_, frontmatter, _ = content.split("---", 2)
|
|
699
|
+
data = yaml.safe_load(frontmatter)
|
|
700
|
+
|
|
701
|
+
if not isinstance(data, dict):
|
|
702
|
+
return None, {}
|
|
703
|
+
|
|
704
|
+
# Detect type based on keys/values
|
|
705
|
+
if data.get("category") in ["discovery", "mistake", "backtrack", "optimization"]:
|
|
706
|
+
return DocumentType.INSIGHT, data
|
|
707
|
+
|
|
708
|
+
if "problem" in data and "solution" in data:
|
|
709
|
+
return DocumentType.LESSON, data
|
|
710
|
+
|
|
711
|
+
if (
|
|
712
|
+
"summary" in data
|
|
713
|
+
and "approach" in data
|
|
714
|
+
and ("files_changed" in data or "design_decisions" in data)
|
|
715
|
+
):
|
|
716
|
+
return DocumentType.IMPLEMENTATION, data
|
|
717
|
+
|
|
718
|
+
if data.get("type") == "github_issue":
|
|
719
|
+
return DocumentType.GITHUB_ISSUE, data
|
|
720
|
+
|
|
721
|
+
if data.get("type") == "github_pr":
|
|
722
|
+
return DocumentType.GITHUB_PR, data
|
|
723
|
+
|
|
724
|
+
return None, data
|
|
725
|
+
|
|
726
|
+
except Exception:
|
|
727
|
+
return None, {}
|
|
728
|
+
|
|
729
|
+
|
|
730
|
+
def _print_file_summary(files: list[Path]) -> None:
    """Echo a per-directory breakdown of the files queued for indexing."""
    if not files:
        click.echo("No files to index.")
        return

    # Tally files per parent directory (cwd-relative for absolute paths).
    counts: dict[str, int] = defaultdict(int)
    for entry in files:
        if entry.is_absolute():
            folder = str(entry.parent.relative_to(Path.cwd()))
        else:
            folder = str(entry.parent)
        counts["Root" if folder == "." else folder] += 1

    click.echo(f" Found {len(files)} files to index:")
    click.echo("")

    # Directories listed alphabetically.
    for folder, total in sorted(counts.items()):
        click.echo(f" 📁 {folder:<40} {total} files")

    click.echo("")
|
|
752
|
+
|
|
753
|
+
|
|
754
|
+
@cli.command("index-lesson")
@click.argument("lesson_file")
@click.option("-q", "--quiet", is_flag=True, help="Suppress output")
def index_lesson_command(lesson_file: str, quiet: bool) -> None:
    """Index a lesson file from .nexus/lessons/.

    The whole file is stored as a single LESSON document (no chunking),
    embedded in one call.
    """
    path = Path(lesson_file)
    if not path.is_absolute():
        path = Path.cwd() / path

    if not path.exists():
        if not quiet:
            click.echo(f"❌ Lesson file not found: {lesson_file}", err=True)
        return

    # Load config
    config_path = Path.cwd() / "nexus_config.json"
    if not config_path.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    config = NexusConfig.load(config_path)
    embedder = create_embedder(config)
    database = NexusDatabase(config, embedder)
    database.connect()

    try:
        content = path.read_text(encoding="utf-8")

        # Generate embedding
        embedding = _run_async(embedder.embed(content))

        # Create document
        # Line 0 marks a whole-file document (not a code chunk).
        doc_id = generate_document_id(
            config.project_id,
            str(path),
            path.stem,
            0,
        )

        doc = Document(
            id=doc_id,
            text=content,
            vector=embedding,
            project_id=config.project_id,
            file_path=str(path),
            doc_type=DocumentType.LESSON,
            chunk_type="lesson",
            language="markdown",
            name=path.stem,
            start_line=0,
            end_line=0,
        )

        _run_async(database.upsert_document(doc))

        if not quiet:
            click.echo(f"✅ Indexed lesson: {path.name}")

    except Exception as e:
        # Intended for pre-commit hooks: failures are reported but never
        # raised, and --quiet suppresses even the error message.
        if not quiet:
            click.echo(f"❌ Failed to index lesson: {e!s}", err=True)
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
@cli.command("export")
@click.option("--project-id", help="Project ID to export (defaults to current config)")
@click.option(
    "--output",
    "-o",
    type=click.Path(path_type=Path),
    help="Output directory (default: ./nexus-export)",
)
def export_command(project_id: str | None, output: Path | None) -> None:
    """Export project knowledge to markdown files."""
    from .config import NexusConfig
    from .database import DocumentType, NexusDatabase
    from .embeddings import create_embedder

    async def _export() -> None:
        # Load config best-effort; export can still run with just --project-id.
        config = None
        try:
            config_path = Path.cwd() / "nexus_config.json"
            if config_path.exists():
                config = NexusConfig.load(config_path)
        except Exception:
            pass

        effective_project_id = project_id
        if not effective_project_id and config:
            effective_project_id = config.project_id

        if not effective_project_id:
            click.secho("Error: No project-id provided and no nexus_config.json found.", fg="red")
            return

        # Initialize DB
        if not config:
            # Create temporary config for DB access
            config = NexusConfig.create_new("temp")

        try:
            embedder = create_embedder(config)
            db = NexusDatabase(config, embedder)
            db.connect()

            click.echo(f"Exporting knowledge for project: {effective_project_id}")

            # Only the structured knowledge types are exported.
            types_to_export = [
                (DocumentType.LESSON, "lessons"),
                (DocumentType.INSIGHT, "insights"),
                (DocumentType.IMPLEMENTATION, "implementations"),
            ]

            base_dir = output or Path.cwd() / "nexus-export"
            base_dir.mkdir(parents=True, exist_ok=True)

            total_count = 0

            for doc_type, dirname in types_to_export:
                # "*" is a workaround: vector search has no "list all"
                # operation, so a broad query plus a high limit is used to
                # fetch (up to 1000 of) everything of this type.
                results = await db.search(
                    query="*",  # Some vector DBs verify query length
                    project_id=effective_project_id,
                    doc_type=doc_type,
                    limit=1000,
                )

                if not results:
                    continue

                type_dir = base_dir / dirname
                type_dir.mkdir(exist_ok=True)

                click.echo(f" - Found {len(results)} {dirname}")

                for res in results:
                    # Strip filesystem-unsafe characters from the name.
                    safe_name = "".join(c for c in res.name if c.isalnum() or c in "-_")
                    if not safe_name:
                        # The name had no safe characters at all; previously
                        # this produced a bare ".md" filename that every such
                        # document silently overwrote. Use a positional
                        # fallback instead.
                        safe_name = f"document-{total_count + 1}"
                    filename = f"{safe_name}.md"

                    file_path = type_dir / filename
                    file_path.write_text(res.text, encoding="utf-8")
                    total_count += 1

            click.secho(f"\nSuccessfully exported {total_count} files to {base_dir}", fg="green")

        except Exception as e:
            click.secho(f"Export failed: {e}", fg="red")

    _run_async(_export())
|
|
911
|
+
@cli.command("status")
@click.option("-v", "--verbose", is_flag=True, help="Show detailed debug information")
def status_command(verbose: bool) -> None:
    """Show Nexus-Dev status and statistics."""
    cfg_file = Path.cwd() / "nexus_config.json"

    if not cfg_file.exists():
        click.echo("❌ Nexus-Dev not initialized in this directory.")
        click.echo(" Run 'nexus-init' to get started.")
        return

    cfg = NexusConfig.load(cfg_file)

    # Static project/config overview.
    click.echo("📊 Nexus-Dev Status")
    click.echo("")
    for line in (
        f"Project: {cfg.project_name}",
        f"Project ID: {cfg.project_id}",
        f"Embedding Provider: {cfg.embedding_provider}",
        f"Embedding Model: {cfg.embedding_model}",
        f"Database: {cfg.get_db_path()}",
    ):
        click.echo(line)
    click.echo("")

    try:
        db = NexusDatabase(cfg, create_embedder(cfg))
        db.connect()

        if verbose:
            click.echo("🔍 Debug Info:")
            click.echo(f" Database path exists: {cfg.get_db_path().exists()}")
            click.echo(f" Querying for project_id: {cfg.project_id}")
            click.echo("")

        counts = _run_async(db.get_project_stats(cfg.project_id))

        click.echo("📈 Statistics:")
        click.echo(f" Total chunks: {counts.get('total', 0)}")
        click.echo(f" Code: {counts.get('code', 0)}")
        click.echo(f" Documentation: {counts.get('documentation', 0)}")
        click.echo(f" Lessons: {counts.get('lesson', 0)}")

        # Full per-type breakdown only in verbose mode, and only when
        # there is anything indexed at all.
        if verbose and counts.get("total", 0) > 0:
            click.echo("")
            click.echo(" Document type breakdown:")
            for kind, n in counts.items():
                if kind == "total":
                    continue
                click.echo(f" - {kind}: {n}")

    except Exception as e:
        click.echo(f"⚠️ Could not connect to database: {e!s}")
        if verbose:
            import traceback

            click.echo("")
            click.echo("Full traceback:")
            click.echo(traceback.format_exc())
|
|
969
|
+
@cli.command("inspect")
@click.option("--project-id", help="Filter by project ID (default: current project)")
@click.option("--limit", default=5, help="Number of sample documents to show")
@click.option("--all-projects", is_flag=True, help="Show all projects in database")
def inspect_command(project_id: str | None, limit: int, all_projects: bool) -> None:
    """Inspect database contents for debugging."""
    # Resolve a default project id from the local config, unless the caller
    # explicitly asked for a specific project or for everything.
    local_config = None
    if not all_projects and not project_id:
        cfg_file = Path.cwd() / "nexus_config.json"
        if cfg_file.exists():
            local_config = NexusConfig.load(cfg_file)
            project_id = local_config.project_id

    # Open the shared database via the project config when present,
    # otherwise via a throwaway config.
    if local_config:
        active_config = local_config
    else:
        active_config = NexusConfig.create_new("temp")
    db = NexusDatabase(active_config, create_embedder(active_config))

    db.connect()

    click.echo("🔍 Nexus-Dev Database Inspection")
    click.echo("")

    try:
        db_path = db.config.get_db_path()
        click.echo(f"Database location: {db_path}")

        if db_path.exists():
            # On-disk footprint of the whole database directory.
            size_bytes = sum(p.stat().st_size for p in db_path.rglob("*") if p.is_file())
            click.echo(f"Database size: {size_bytes / 1024 / 1024:.2f} MB")
            click.echo("")

        all_stats = _run_async(db.get_project_stats(None))
        click.echo(f"📊 Total documents across all projects: {all_stats.get('total', 0)}")
        click.echo("")

        # NOTE(review): reaches into a private API to get the raw table and
        # materializes the whole database into a DataFrame — fine for a
        # debug command, but not cheap on large databases.
        frame = db._ensure_connected().to_pandas()

        if len(frame) == 0:
            click.echo("⚠️ Database is empty")
            return

        # Per-project chunk counts, largest first.
        per_project = frame.groupby("project_id").size().sort_values(ascending=False)

        click.echo("📁 Projects in database:")
        for pid, n in per_project.items():
            prefix = "👉" if pid == project_id else " "
            click.echo(f"{prefix} {pid}: {n} chunks")
        click.echo("")

        if project_id:
            scoped = frame[frame["project_id"] == project_id]
            if len(scoped) == 0:
                click.echo(f"⚠️ No documents found for project: {project_id}")
                return

            click.echo(f"📈 Document types for project {project_id}:")
            for kind, n in scoped.groupby("doc_type").size().items():
                click.echo(f" {kind}: {n}")
            click.echo("")

            # A small sample of documents for eyeballing.
            click.echo(f"📄 Sample documents (limit: {limit}):")
            for _i, row in scoped.head(limit).iterrows():
                click.echo(f" - [{row['doc_type']}] {row['name']}")
                click.echo(f" File: {row['file_path']}")
                if row["start_line"] > 0:
                    click.echo(f" Lines: {row['start_line']}-{row['end_line']}")
            click.echo("")
        else:
            click.echo("📈 Document type breakdown (all projects):")
            for kind, n in frame.groupby("doc_type").size().items():
                click.echo(f" {kind}: {n}")

    except Exception as e:
        click.echo(f"❌ Error inspecting database: {e!s}", err=True)
        import traceback

        click.echo(traceback.format_exc(), err=True)
|
|
1067
|
+
@cli.command("clean")
@click.option("--project-id", help="Project ID to clean (default: current project)")
@click.option("--all", "clean_all", is_flag=True, help="Delete ALL projects (dangerous!)")
@click.option("--dry-run", is_flag=True, help="Show what would be deleted without deleting")
def clean_command(project_id: str | None, clean_all: bool, dry_run: bool) -> None:
    """Delete indexed data for a project."""
    # --all and --project-id are mutually exclusive.
    if clean_all and project_id:
        click.echo("❌ Cannot use both --all and --project-id", err=True)
        return

    # Without --all, a target project is required; fall back to the one
    # configured in the current directory.
    if not clean_all and not project_id:
        config_path = Path.cwd() / "nexus_config.json"
        if config_path.exists():
            project_id = NexusConfig.load(config_path).project_id
        else:
            click.echo("❌ No project-id specified and no nexus_config.json found", err=True)
            click.echo(" Use --project-id or run from a project directory", err=True)
            return

    # Open the shared database via the local config when present, otherwise
    # via a throwaway config.
    config_path = Path.cwd() / "nexus_config.json"
    if config_path.exists():
        cfg = NexusConfig.load(config_path)
    else:
        cfg = NexusConfig.create_new("temp")

    db = NexusDatabase(cfg, create_embedder(cfg))
    db.connect()

    try:
        if clean_all:
            overall = _run_async(db.get_project_stats(None))
            total = overall.get("total", 0)

            if total == 0:
                click.echo("⚠️ Database is already empty")
                return

            click.echo(f"⚠️ WARNING: This will delete ALL {total} documents from the database!")
            click.echo("")

            if dry_run:
                click.echo("[DRY RUN] Would delete entire database")
                return

            if not click.confirm("Are you absolutely sure?"):
                click.echo("Aborted.")
                return

            # A full reset drops every project's documents at once.
            db.reset()
            click.echo(f"✅ Deleted all {total} documents")

        else:
            summary = _run_async(db.get_project_stats(project_id))
            doc_count = summary.get("total", 0)

            if doc_count == 0:
                click.echo(f"⚠️ No documents found for project: {project_id}")
                return

            click.echo(f"Found {doc_count} documents for project: {project_id}")
            click.echo("")
            click.echo("Document types:")
            for kind, n in summary.items():
                if kind == "total":
                    continue
                click.echo(f" - {kind}: {n}")
            click.echo("")

            if dry_run:
                click.echo(f"[DRY RUN] Would delete {doc_count} documents for project {project_id}")
                return

            if not click.confirm(f"Delete {doc_count} documents?"):
                click.echo("Aborted.")
                return

            # project_id is guaranteed to be set by validation logic above
            assert project_id is not None
            removed = _run_async(db.delete_by_project(project_id))
            click.echo(f"✅ Deleted {removed} documents for project {project_id}")

    except Exception as e:
        click.echo(f"❌ Error during cleanup: {e!s}", err=True)
+
|
|
1159
|
+
@cli.command("reindex")
def reindex_command() -> None:
    """Re-index entire project (clear and rebuild)."""
    config_path = Path.cwd() / "nexus_config.json"

    if not config_path.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    config = NexusConfig.load(config_path)

    # Collect files first to show summary
    click.echo("🔍 Scanning files...")

    cwd = Path.cwd()
    files_to_index: list[Path] = []

    for pattern in config.include_patterns:
        for file_path in cwd.glob(pattern):
            if file_path.is_file() and _should_index(file_path, config):
                files_to_index.append(file_path)

    # Also index docs folders
    for docs_folder in config.docs_folders:
        docs_path = cwd / docs_folder
        if docs_path.is_file():
            files_to_index.append(docs_path)
        elif docs_path.is_dir():
            # Docs trees are collected without pruning; docs folders are
            # assumed to be curated content.
            for root, _, files in os.walk(docs_path):
                for file in files:
                    files_to_index.append(Path(root) / file)

    # Deduplicate, then sort so the summary and indexing order are
    # deterministic between runs (a bare list(set(...)) produced an
    # arbitrary, hash-dependent order).
    files_to_index = sorted(set(files_to_index))

    # Show summary and ask for confirmation
    _print_file_summary(files_to_index)

    if not click.confirm("This will CLEAR the database and re-index the above files. Continue?"):
        click.echo("Aborted.")
        return

    # Proceed with DB operations
    embedder = create_embedder(config)
    database = NexusDatabase(config, embedder)
    database.connect()

    click.echo("🗑️ Clearing existing index...")
    # Reset database to handle schema changes
    database.reset()
    # Re-connect to create new table with updated schema
    database.connect()
    click.echo(" Index cleared and schema updated")

    click.echo("")
    click.echo("📁 Re-indexing project...")

    # Index all files
    total_chunks = 0
    for file_path in files_to_index:
        try:
            content = file_path.read_text(encoding="utf-8")
            ext = file_path.suffix.lower()
            # Plain-text/markup extensions go in as documentation,
            # everything else as code.
            doc_type = (
                DocumentType.DOCUMENTATION
                if ext in (".md", ".markdown", ".rst", ".txt")
                else DocumentType.CODE
            )

            chunks = ChunkerRegistry.chunk_file(file_path, content)
            if chunks:
                count = _run_async(
                    _index_chunks_sync(chunks, config.project_id, doc_type, embedder, database)
                )
                total_chunks += count
                click.echo(f" ✅ {file_path.name}: {count} chunks")

        except Exception as e:
            click.echo(f" ❌ Failed to index {file_path.name}: {e!s}", err=True)

    click.echo("")
    click.echo(f"✅ Re-indexed {total_chunks} chunks from {len(files_to_index)} files")
|
1245
|
+
@cli.command("import-github")
@click.option("--repo", required=True, help="Repository name")
@click.option("--owner", required=True, help="Repository owner")
@click.option("--limit", default=20, help="Maximum number of issues to import")
@click.option("--state", default="all", help="Issue state (open, closed, all)")
def import_github_command(repo: str, owner: str, limit: int, state: str) -> None:
    """Import GitHub issues and PRs."""
    # Load config
    config_path = Path.cwd() / "nexus_config.json"
    if not config_path.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    config = NexusConfig.load(config_path)
    embedder = create_embedder(config)
    database = NexusDatabase(config, embedder)
    # Connect exactly once (the original called connect() twice in a row).
    database.connect()

    client_manager = MCPClientManager()

    # Load the project's MCP config so the importer can locate the GitHub
    # MCP server; import proceeds (with a warning) when it is missing.
    mcp_config_path = Path.cwd() / ".nexus" / "mcp_config.json"
    mcp_config = None
    if mcp_config_path.exists():
        try:
            mcp_config = MCPConfig.load(mcp_config_path)
        except Exception as e:
            click.echo(f"⚠️ Failed to load MCP config: {e}", err=True)

    if not mcp_config:
        click.echo("⚠️ No MCP config found. GitHub import may fail if server not found.")

    importer = GitHubImporter(database, config.project_id, client_manager, mcp_config)

    click.echo(f"📥 Importing issues from {owner}/{repo}...")

    try:
        count = _run_async(importer.import_issues(owner, repo, limit, state))
        click.echo(f"✅ Imported {count} issues/PRs")
    except Exception as e:
        click.echo(f"❌ Import failed: {e}", err=True)
|
|
1291
|
+
@cli.command("search")
@click.argument("query")
@click.option("--type", "content_type", help="Content type to filter by")
@click.option("--limit", default=5, help="Number of results")
def search_command(query: str, content_type: str | None, limit: int) -> None:
    """Search the knowledge base."""
    cfg_file = Path.cwd() / "nexus_config.json"
    if not cfg_file.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    cfg = NexusConfig.load(cfg_file)
    db = NexusDatabase(cfg, create_embedder(cfg))
    db.connect()

    click.echo(f"🔍 Searching for '{query}'...")

    # Translate the optional --type string into the enum; a bad value is
    # reported and simply ignored rather than aborting the search.
    type_filter = None
    if content_type:
        try:
            type_filter = DocumentType(content_type)
        except ValueError:
            click.echo(f"⚠️ Invalid type '{content_type}'. Ignoring filter.")

    hits = _run_async(db.search(query, limit=limit, doc_type=type_filter))

    if not hits:
        click.echo("No results found.")
        return

    click.echo(f"\nFound {len(hits)} results:\n")

    for rank, hit in enumerate(hits, 1):
        click.echo(f"{rank}. [{hit.doc_type.upper()}] {hit.name} (Score: {hit.score:.3f})")
        click.echo(f" path: {hit.file_path}")
        # One-line preview of the document, truncated to 100 characters.
        preview = hit.text.replace("\n", " ").strip()
        if len(preview) > 100:
            preview = preview[:97] + "..."
        click.echo(f' "{preview}"')
        click.echo("")
|
|
1336
|
+
@cli.command("index-mcp")
@click.option("--server", "-s", help="Server name to index (from MCP config)")
@click.option(
    "--config",
    "-c",
    type=click.Path(exists=True),
    help="Path to MCP config file (default: ~/.config/mcp/config.json)",
)
@click.option("--all", "-a", "index_all", is_flag=True, help="Index all configured servers")
def index_mcp_command(server: str | None, config: str | None, index_all: bool) -> None:
    """Index MCP tool documentation into the knowledge base.

    This command reads tool schemas from MCP servers and indexes them
    for semantic search via the search_tools command.

    Examples:
        nexus-index-mcp --server github
        nexus-index-mcp --all
        nexus-index-mcp --config ~/my-mcp-config.json --all
    """
    # Resolve which config file to read: explicit --config wins, then the
    # project-local .nexus config, then the global user config.
    mcp_config_data: dict[str, Any] | MCPConfig
    if config:
        cfg_path = Path(config)
    else:
        project_cfg = Path.cwd() / ".nexus" / "mcp_config.json"
        if project_cfg.exists():
            cfg_path = project_cfg
        else:
            cfg_path = Path.home() / ".config" / "mcp" / "config.json"

    if not cfg_path.exists():
        click.echo(f"MCP config not found: {cfg_path}")
        click.echo("Specify --config or create ~/.config/mcp/config.json")
        return

    # The project-local file uses the structured MCPConfig format; anything
    # else is treated as a raw JSON dict (global/custom configs).
    try:
        is_project_cfg = cfg_path.name == "mcp_config.json" and cfg_path.parent.name == ".nexus"
        if is_project_cfg:
            mcp_config_data = MCPConfig.load(cfg_path)
        else:
            mcp_config_data = json.loads(cfg_path.read_text())
    except json.JSONDecodeError as e:
        click.echo(f"❌ Invalid JSON in MCP config: {e}", err=True)
        return
    except Exception as e:
        click.echo(f"❌ Failed to load MCP config: {e}", err=True)
        return

    # Server names available in whichever config format we loaded.
    if isinstance(mcp_config_data, MCPConfig):
        known_servers = list(mcp_config_data.servers.keys())
    else:
        known_servers = list(mcp_config_data.get("mcpServers", {}).keys())

    if index_all:
        targets = known_servers
    elif server:
        targets = [server]
    else:
        click.echo("Specify --server or --all")
        return

    # Index each server
    asyncio.run(_index_mcp_servers(mcp_config_data, targets))
|
+
|
|
1406
|
+
def _mcp_connection_for(
    name: str, mcp_config: dict[str, Any] | MCPConfig
) -> MCPServerConnection | None:
    """Build an MCPServerConnection for *name* from either config format.

    Supports both the structured MCPConfig and the raw ``mcpServers`` dict
    layout. Returns None when the server is not present in the config.
    """
    if isinstance(mcp_config, MCPConfig):
        server_config = mcp_config.servers.get(name)
        if not server_config:
            return None
        # Convert to internal connection format
        return MCPServerConnection(
            name=name,
            command=server_config.command or "",
            args=server_config.args,
            env=server_config.env,
            transport=server_config.transport,
            url=server_config.url,
            headers=server_config.headers,
            timeout=server_config.timeout,
        )

    server_dict = mcp_config.get("mcpServers", {}).get(name)
    if not server_dict:
        return None
    return MCPServerConnection(
        name=name,
        command=server_dict.get("command", ""),
        args=server_dict.get("args", []),
        env=server_dict.get("env"),
        transport=server_dict.get("transport", "stdio"),
        url=server_dict.get("url"),
        headers=server_dict.get("headers"),
        timeout=server_dict.get("timeout", 30.0),
    )


async def _index_mcp_servers(
    mcp_config: dict[str, Any] | MCPConfig, server_names: list[str]
) -> None:
    """Index tools from specified MCP servers."""
    # Load config
    config_path = Path.cwd() / "nexus_config.json"
    if not config_path.exists():
        click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
        return

    config = NexusConfig.load(config_path)
    client = MCPClientManager()
    embedder = create_embedder(config)
    database = NexusDatabase(config, embedder)
    database.connect()

    for name in server_names:
        # The connection-building logic is shared for both config formats
        # (previously duplicated inline for each branch).
        connection = _mcp_connection_for(name, mcp_config)
        if connection is None:
            click.echo(f"Server not found: {name}")
            continue

        # Connect and index
        click.echo(f"Indexing tools from: {name}")

        try:
            tools = await client.get_tools(connection)
            click.echo(f" Found {len(tools)} tools")

            # Create one searchable document per tool.
            for tool in tools:
                text = f"{name}.{tool.name}: {tool.description}"
                vector = await embedder.embed(text)

                doc = Document(
                    id=f"{name}:{tool.name}",
                    text=text,
                    vector=vector,
                    project_id=f"{config.project_id}_mcp_tools",
                    file_path=f"mcp://{name}/{tool.name}",
                    doc_type=DocumentType.TOOL,
                    chunk_type="tool",
                    language="mcp",
                    name=tool.name,
                    start_line=0,
                    end_line=0,
                    server_name=name,
                    parameters_schema=json.dumps(tool.input_schema),
                )

                await database.upsert_document(doc)

            click.echo(f" ✅ Indexed {len(tools)} tools from {name}")

        except Exception as e:
            # Handle ExceptionGroup from anyio/TaskGroup
            if hasattr(e, "exceptions"):
                for sub_e in e.exceptions:
                    click.echo(f" ❌ Failed to index {name}: {sub_e}")
            else:
                click.echo(f" ❌ Failed to index {name}: {e}")

    click.echo("Done!")
|
|
1499
|
+
@cli.group("mcp")
def mcp_group() -> None:
    """Manage MCP server configurations."""
    # Click group entry point: intentionally empty. Subcommands are
    # attached via @mcp_group.command(...) (e.g. "mcp init" below).
|
|
1504
|
+
@mcp_group.command("init")
|
|
1505
|
+
@click.option(
|
|
1506
|
+
"--from-global",
|
|
1507
|
+
is_flag=True,
|
|
1508
|
+
help="Import servers from ~/.config/mcp/config.json",
|
|
1509
|
+
)
|
|
1510
|
+
def mcp_init_command(from_global: bool) -> None:
|
|
1511
|
+
"""Initialize MCP configuration for this project.
|
|
1512
|
+
|
|
1513
|
+
Creates .nexus/mcp_config.json with an empty configuration
|
|
1514
|
+
or imports from your global MCP config.
|
|
1515
|
+
|
|
1516
|
+
Examples:
|
|
1517
|
+
nexus-mcp init
|
|
1518
|
+
nexus-mcp init --from-global
|
|
1519
|
+
"""
|
|
1520
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1521
|
+
|
|
1522
|
+
if config_path.exists() and not click.confirm("MCP config exists. Overwrite?"):
|
|
1523
|
+
click.echo("Aborted.")
|
|
1524
|
+
return
|
|
1525
|
+
|
|
1526
|
+
# Ensure .nexus directory exists
|
|
1527
|
+
config_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1528
|
+
|
|
1529
|
+
if from_global:
|
|
1530
|
+
# Import from global config
|
|
1531
|
+
global_path = Path.home() / ".config" / "mcp" / "config.json"
|
|
1532
|
+
if not global_path.exists():
|
|
1533
|
+
click.echo(f"Global config not found: {global_path}")
|
|
1534
|
+
return
|
|
1535
|
+
|
|
1536
|
+
try:
|
|
1537
|
+
global_config = json.loads(global_path.read_text())
|
|
1538
|
+
except json.JSONDecodeError as e:
|
|
1539
|
+
click.echo(f"❌ Invalid JSON in global config: {e}")
|
|
1540
|
+
return
|
|
1541
|
+
|
|
1542
|
+
servers = {}
|
|
1543
|
+
|
|
1544
|
+
for name, cfg in global_config.get("mcpServers", {}).items():
|
|
1545
|
+
servers[name] = MCPServerConfig(
|
|
1546
|
+
command=cfg.get("command", ""),
|
|
1547
|
+
args=cfg.get("args", []),
|
|
1548
|
+
env=cfg.get("env", {}),
|
|
1549
|
+
enabled=True,
|
|
1550
|
+
)
|
|
1551
|
+
|
|
1552
|
+
mcp_config = MCPConfig(
|
|
1553
|
+
version="1.0",
|
|
1554
|
+
servers=servers,
|
|
1555
|
+
profiles={},
|
|
1556
|
+
)
|
|
1557
|
+
click.echo(f"Imported {len(servers)} servers from global config")
|
|
1558
|
+
else:
|
|
1559
|
+
# Create empty config
|
|
1560
|
+
mcp_config = MCPConfig(
|
|
1561
|
+
version="1.0",
|
|
1562
|
+
servers={},
|
|
1563
|
+
profiles={"default": []},
|
|
1564
|
+
)
|
|
1565
|
+
|
|
1566
|
+
mcp_config.save(config_path)
|
|
1567
|
+
click.echo(f"✅ Created {config_path}")
|
|
1568
|
+
click.echo("")
|
|
1569
|
+
click.echo("Configuration initialized successfully!")
|
|
1570
|
+
click.echo("You can manually edit the config file to add MCP servers.")
|
|
1571
|
+
|
|
1572
|
+
|
|
1573
|
+
@mcp_group.command("add")
|
|
1574
|
+
@click.argument("name")
|
|
1575
|
+
@click.option("--command", "-c", required=True, help="Command to run MCP server")
|
|
1576
|
+
@click.option("--args", "-a", multiple=True, help="Arguments for the command")
|
|
1577
|
+
@click.option("--env", "-e", multiple=True, help="Environment vars (KEY=value or KEY=${VAR})")
|
|
1578
|
+
@click.option("--profile", "-p", default="default", help="Add to profile (default: default)")
|
|
1579
|
+
def mcp_add_command(
|
|
1580
|
+
name: str, command: str, args: tuple[str, ...], env: tuple[str, ...], profile: str
|
|
1581
|
+
) -> None:
|
|
1582
|
+
"""Add an MCP server to the configuration.
|
|
1583
|
+
|
|
1584
|
+
Examples:
|
|
1585
|
+
nexus-mcp add github --command "npx" --args "-y" \\
|
|
1586
|
+
--args "@modelcontextprotocol/server-github"
|
|
1587
|
+
nexus-mcp add myserver --command "my-mcp" --env "API_KEY=${MY_API_KEY}"
|
|
1588
|
+
"""
|
|
1589
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1590
|
+
if not config_path.exists():
|
|
1591
|
+
click.echo("Run 'nexus-mcp init' first")
|
|
1592
|
+
return
|
|
1593
|
+
|
|
1594
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1595
|
+
|
|
1596
|
+
# Parse environment variables
|
|
1597
|
+
env_dict = {}
|
|
1598
|
+
for e in env:
|
|
1599
|
+
if "=" in e:
|
|
1600
|
+
k, v = e.split("=", 1)
|
|
1601
|
+
env_dict[k] = v
|
|
1602
|
+
|
|
1603
|
+
# Add server
|
|
1604
|
+
mcp_config.servers[name] = MCPServerConfig(
|
|
1605
|
+
command=command,
|
|
1606
|
+
args=list(args),
|
|
1607
|
+
env=env_dict,
|
|
1608
|
+
enabled=True,
|
|
1609
|
+
)
|
|
1610
|
+
|
|
1611
|
+
# Add to profile
|
|
1612
|
+
if profile not in mcp_config.profiles:
|
|
1613
|
+
mcp_config.profiles[profile] = []
|
|
1614
|
+
if name not in mcp_config.profiles[profile]:
|
|
1615
|
+
mcp_config.profiles[profile].append(name)
|
|
1616
|
+
|
|
1617
|
+
mcp_config.save(config_path)
|
|
1618
|
+
click.echo(f"Added {name} to profile '{profile}'")
|
|
1619
|
+
|
|
1620
|
+
|
|
1621
|
+
@mcp_group.command("list")
|
|
1622
|
+
@click.option(
|
|
1623
|
+
"--all", "-a", "show_all", is_flag=True, help="Show all servers, not just active profile"
|
|
1624
|
+
)
|
|
1625
|
+
def mcp_list_command(show_all: bool) -> None:
|
|
1626
|
+
"""List configured MCP servers.
|
|
1627
|
+
|
|
1628
|
+
Examples:
|
|
1629
|
+
nexus-mcp list
|
|
1630
|
+
nexus-mcp list --all
|
|
1631
|
+
"""
|
|
1632
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1633
|
+
if not config_path.exists():
|
|
1634
|
+
click.echo("No MCP config. Run 'nexus-mcp init' first")
|
|
1635
|
+
return
|
|
1636
|
+
|
|
1637
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1638
|
+
|
|
1639
|
+
click.echo(f"Active profile: {mcp_config.active_profile}")
|
|
1640
|
+
click.echo("")
|
|
1641
|
+
|
|
1642
|
+
if show_all:
|
|
1643
|
+
click.echo("All servers:")
|
|
1644
|
+
servers_to_show = list(mcp_config.servers.items())
|
|
1645
|
+
else:
|
|
1646
|
+
click.echo("Active servers:")
|
|
1647
|
+
# Get active profile server names
|
|
1648
|
+
if mcp_config.active_profile in mcp_config.profiles:
|
|
1649
|
+
active_server_names = mcp_config.profiles[mcp_config.active_profile]
|
|
1650
|
+
# Filter to only enabled servers
|
|
1651
|
+
servers_to_show = [
|
|
1652
|
+
(name, mcp_config.servers[name])
|
|
1653
|
+
for name in active_server_names
|
|
1654
|
+
if name in mcp_config.servers and mcp_config.servers[name].enabled
|
|
1655
|
+
]
|
|
1656
|
+
else:
|
|
1657
|
+
# If no active profile, show all enabled servers
|
|
1658
|
+
servers_to_show = [
|
|
1659
|
+
(name, server) for name, server in mcp_config.servers.items() if server.enabled
|
|
1660
|
+
]
|
|
1661
|
+
|
|
1662
|
+
for name, server in servers_to_show:
|
|
1663
|
+
status = "✓" if server.enabled else "✗"
|
|
1664
|
+
click.echo(f" {status} {name}")
|
|
1665
|
+
click.echo(f" Command: {server.command} {' '.join(server.args)}")
|
|
1666
|
+
if server.env:
|
|
1667
|
+
click.echo(f" Env: {', '.join(server.env.keys())}")
|
|
1668
|
+
|
|
1669
|
+
click.echo("")
|
|
1670
|
+
click.echo(f"Profiles: {', '.join(mcp_config.profiles.keys())}")
|
|
1671
|
+
|
|
1672
|
+
|
|
1673
|
+
@mcp_group.command("profile")
|
|
1674
|
+
@click.argument("name", required=False)
|
|
1675
|
+
@click.option("--add", "-a", multiple=True, help="Add server to profile")
|
|
1676
|
+
@click.option("--remove", "-r", multiple=True, help="Remove server from profile")
|
|
1677
|
+
@click.option("--create", is_flag=True, help="Create new profile")
|
|
1678
|
+
def mcp_profile_command(
|
|
1679
|
+
name: str | None, add: tuple[str, ...], remove: tuple[str, ...], create: bool
|
|
1680
|
+
) -> None:
|
|
1681
|
+
"""Manage MCP profiles.
|
|
1682
|
+
|
|
1683
|
+
Without arguments, shows current profile. With name, switches to that profile.
|
|
1684
|
+
|
|
1685
|
+
Examples:
|
|
1686
|
+
nexus-mcp profile # Show current
|
|
1687
|
+
nexus-mcp profile dev # Switch to 'dev'
|
|
1688
|
+
nexus-mcp profile dev --create # Create new 'dev' profile
|
|
1689
|
+
nexus-mcp profile default --add homeassistant
|
|
1690
|
+
nexus-mcp profile default --remove github
|
|
1691
|
+
"""
|
|
1692
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1693
|
+
if not config_path.exists():
|
|
1694
|
+
click.echo("Run 'nexus-mcp init' first")
|
|
1695
|
+
return
|
|
1696
|
+
|
|
1697
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1698
|
+
|
|
1699
|
+
if not name:
|
|
1700
|
+
# Show current profile
|
|
1701
|
+
click.echo(f"Active: {mcp_config.active_profile}")
|
|
1702
|
+
servers = mcp_config.profiles.get(mcp_config.active_profile, [])
|
|
1703
|
+
click.echo(f"Servers: {', '.join(servers) or '(none)'}")
|
|
1704
|
+
return
|
|
1705
|
+
|
|
1706
|
+
if create:
|
|
1707
|
+
if name in mcp_config.profiles:
|
|
1708
|
+
click.echo(f"Profile '{name}' exists")
|
|
1709
|
+
return
|
|
1710
|
+
mcp_config.profiles[name] = []
|
|
1711
|
+
click.echo(f"Created profile: {name}")
|
|
1712
|
+
|
|
1713
|
+
if name not in mcp_config.profiles:
|
|
1714
|
+
click.echo(f"Profile '{name}' not found")
|
|
1715
|
+
return
|
|
1716
|
+
|
|
1717
|
+
# Add servers
|
|
1718
|
+
for server in add:
|
|
1719
|
+
if server not in mcp_config.profiles[name]:
|
|
1720
|
+
mcp_config.profiles[name].append(server)
|
|
1721
|
+
click.echo(f"Added {server} to {name}")
|
|
1722
|
+
# Warn if server not defined yet
|
|
1723
|
+
if server not in mcp_config.servers:
|
|
1724
|
+
click.echo(f" ⚠️ Server '{server}' not defined. Add it with 'nexus-mcp add'")
|
|
1725
|
+
else:
|
|
1726
|
+
click.echo(f"Server {server} already in {name}")
|
|
1727
|
+
|
|
1728
|
+
# Remove servers
|
|
1729
|
+
for server in remove:
|
|
1730
|
+
if server in mcp_config.profiles[name]:
|
|
1731
|
+
mcp_config.profiles[name].remove(server)
|
|
1732
|
+
click.echo(f"Removed {server} from {name}")
|
|
1733
|
+
|
|
1734
|
+
# Switch profile
|
|
1735
|
+
if not add and not remove and not create:
|
|
1736
|
+
mcp_config.active_profile = name
|
|
1737
|
+
click.echo(f"Switched to profile: {name}")
|
|
1738
|
+
|
|
1739
|
+
mcp_config.save(config_path)
|
|
1740
|
+
|
|
1741
|
+
|
|
1742
|
+
def _set_server_enabled(name: str, enabled: bool) -> None:
    """Flip the enabled flag of server *name* and persist the config.

    Shared implementation behind the `enable` and `disable` subcommands.
    Prints a message and returns early when the project has no MCP config
    or the server is unknown.
    """
    cfg_path = Path.cwd() / ".nexus" / "mcp_config.json"
    if not cfg_path.exists():
        click.echo("Run 'nexus-mcp init' first")
        return

    cfg = MCPConfig.load(cfg_path)

    if name not in cfg.servers:
        click.echo(f"Server not found: {name}")
        return

    cfg.servers[name].enabled = enabled
    cfg.save(cfg_path)

    if enabled:
        status = "enabled"
    else:
        status = "disabled"
    click.echo(f"{name}: {status}")
@mcp_group.command("enable")
|
|
1763
|
+
@click.argument("name")
|
|
1764
|
+
def mcp_enable_command(name: str) -> None:
|
|
1765
|
+
"""Enable an MCP server."""
|
|
1766
|
+
_set_server_enabled(name, True)
|
|
1767
|
+
|
|
1768
|
+
|
|
1769
|
+
@mcp_group.command("disable")
|
|
1770
|
+
@click.argument("name")
|
|
1771
|
+
def mcp_disable_command(name: str) -> None:
|
|
1772
|
+
"""Disable an MCP server."""
|
|
1773
|
+
_set_server_enabled(name, False)
|
|
1774
|
+
|
|
1775
|
+
|
|
1776
|
+
# Agent management commands — parent click group for all `nexus-agent ...`
# subcommands (templates/init/list) registered via @agent_group.command(...).
@cli.group("agent")
def agent_group() -> None:
    """Manage custom agents."""
@agent_group.command("templates")
|
|
1783
|
+
def agent_templates_command() -> None:
|
|
1784
|
+
"""List available agent templates."""
|
|
1785
|
+
from .agent_templates import list_templates
|
|
1786
|
+
|
|
1787
|
+
templates = list_templates()
|
|
1788
|
+
|
|
1789
|
+
if not templates:
|
|
1790
|
+
click.echo("No templates found.")
|
|
1791
|
+
return
|
|
1792
|
+
|
|
1793
|
+
click.echo("📋 Available Agent Templates:")
|
|
1794
|
+
click.echo("")
|
|
1795
|
+
|
|
1796
|
+
# Load and display each template
|
|
1797
|
+
import yaml
|
|
1798
|
+
|
|
1799
|
+
from .agent_templates import get_template_path
|
|
1800
|
+
|
|
1801
|
+
for template_name in sorted(templates):
|
|
1802
|
+
try:
|
|
1803
|
+
template_path = get_template_path(template_name)
|
|
1804
|
+
with open(template_path, encoding="utf-8") as f:
|
|
1805
|
+
data = yaml.safe_load(f)
|
|
1806
|
+
|
|
1807
|
+
display_name = data.get("display_name", template_name)
|
|
1808
|
+
role = data.get("profile", {}).get("role", "Unknown")
|
|
1809
|
+
model = data.get("llm_config", {}).get("model_hint", "auto")
|
|
1810
|
+
|
|
1811
|
+
click.echo(f" • {display_name} ({template_name})")
|
|
1812
|
+
click.echo(f" Role: {role}")
|
|
1813
|
+
click.echo(f" Model: {model}")
|
|
1814
|
+
click.echo("")
|
|
1815
|
+
except Exception as e:
|
|
1816
|
+
click.echo(f" ⚠️ {template_name}: Failed to load ({e})")
|
|
1817
|
+
|
|
1818
|
+
|
|
1819
|
+
@agent_group.command("init")
|
|
1820
|
+
@click.argument("name")
|
|
1821
|
+
@click.option("--from-template", "-t", "template_name", help="Create from template")
|
|
1822
|
+
@click.option("--model", "-m", "custom_model", help="Override template model")
|
|
1823
|
+
@click.option("--role", prompt=False, default=None)
|
|
1824
|
+
@click.option("--goal", prompt=False, default=None)
|
|
1825
|
+
@click.option(
|
|
1826
|
+
"--backstory",
|
|
1827
|
+
prompt=False,
|
|
1828
|
+
default=None,
|
|
1829
|
+
)
|
|
1830
|
+
def agent_init_command(
|
|
1831
|
+
name: str,
|
|
1832
|
+
template_name: str | None,
|
|
1833
|
+
custom_model: str | None,
|
|
1834
|
+
role: str | None,
|
|
1835
|
+
goal: str | None,
|
|
1836
|
+
backstory: str | None,
|
|
1837
|
+
) -> None:
|
|
1838
|
+
"""Create a new agent configuration.
|
|
1839
|
+
|
|
1840
|
+
NAME is the agent identifier (lowercase with underscores).
|
|
1841
|
+
|
|
1842
|
+
Examples:
|
|
1843
|
+
nexus-agent init my_reviewer --from-template code_reviewer
|
|
1844
|
+
nexus-agent init security_check -t security_auditor --model claude-opus-4.5
|
|
1845
|
+
nexus-agent init my_custom_agent
|
|
1846
|
+
"""
|
|
1847
|
+
import re
|
|
1848
|
+
|
|
1849
|
+
import yaml
|
|
1850
|
+
|
|
1851
|
+
from .agent_templates import get_template_path, list_templates
|
|
1852
|
+
|
|
1853
|
+
agents_dir = Path.cwd() / "agents"
|
|
1854
|
+
agents_dir.mkdir(exist_ok=True)
|
|
1855
|
+
|
|
1856
|
+
# Normalize name
|
|
1857
|
+
agent_name = name.lower().replace(" ", "_").replace("-", "_")
|
|
1858
|
+
|
|
1859
|
+
# Validate name format
|
|
1860
|
+
if not re.match(r"^[a-z][a-z0-9_]*$", agent_name):
|
|
1861
|
+
click.echo(f"❌ Invalid agent name: {agent_name}", err=True)
|
|
1862
|
+
click.echo(
|
|
1863
|
+
" Name must start with a letter and contain only lowercase letters, "
|
|
1864
|
+
"numbers, and underscores."
|
|
1865
|
+
)
|
|
1866
|
+
return
|
|
1867
|
+
|
|
1868
|
+
# Load from template if specified
|
|
1869
|
+
if template_name:
|
|
1870
|
+
available_templates = list_templates()
|
|
1871
|
+
if template_name not in available_templates:
|
|
1872
|
+
click.echo(f"❌ Template '{template_name}' not found.", err=True)
|
|
1873
|
+
click.echo(f" Available templates: {', '.join(available_templates)}")
|
|
1874
|
+
return
|
|
1875
|
+
|
|
1876
|
+
template_path = get_template_path(template_name)
|
|
1877
|
+
with open(template_path, encoding="utf-8") as f:
|
|
1878
|
+
config = yaml.safe_load(f)
|
|
1879
|
+
|
|
1880
|
+
# Customize the template
|
|
1881
|
+
config["name"] = agent_name
|
|
1882
|
+
config["display_name"] = name.replace("_", " ").title()
|
|
1883
|
+
config["description"] = f"Delegate tasks to the {name.replace('_', ' ').title()} agent."
|
|
1884
|
+
|
|
1885
|
+
# Override model if specified
|
|
1886
|
+
if custom_model:
|
|
1887
|
+
config["llm_config"]["model_hint"] = custom_model
|
|
1888
|
+
|
|
1889
|
+
click.echo(f"✅ Created agent from template: {template_name}")
|
|
1890
|
+
else:
|
|
1891
|
+
# Interactive mode
|
|
1892
|
+
if not role:
|
|
1893
|
+
role = click.prompt("Agent role (e.g., 'Code Reviewer')")
|
|
1894
|
+
if not goal:
|
|
1895
|
+
goal = click.prompt("Agent goal (e.g., 'Review code for best practices')")
|
|
1896
|
+
if not backstory:
|
|
1897
|
+
backstory = click.prompt(
|
|
1898
|
+
"Agent backstory", default="Expert developer with years of experience."
|
|
1899
|
+
)
|
|
1900
|
+
|
|
1901
|
+
# Generate YAML content
|
|
1902
|
+
config = {
|
|
1903
|
+
"name": agent_name,
|
|
1904
|
+
"display_name": name.replace("_", " ").title(),
|
|
1905
|
+
"description": f"Delegate tasks to the {name.replace('_', ' ').title()} agent.",
|
|
1906
|
+
"profile": {
|
|
1907
|
+
"role": role,
|
|
1908
|
+
"goal": goal,
|
|
1909
|
+
"backstory": backstory,
|
|
1910
|
+
"tone": "Professional and helpful",
|
|
1911
|
+
},
|
|
1912
|
+
"memory": {
|
|
1913
|
+
"enabled": True,
|
|
1914
|
+
"rag_limit": 5,
|
|
1915
|
+
"search_types": ["code", "documentation", "lesson"],
|
|
1916
|
+
},
|
|
1917
|
+
"tools": [],
|
|
1918
|
+
"llm_config": {
|
|
1919
|
+
"model_hint": custom_model or "claude-sonnet-4.5",
|
|
1920
|
+
"fallback_hints": ["auto"],
|
|
1921
|
+
"temperature": 0.5,
|
|
1922
|
+
"max_tokens": 4000,
|
|
1923
|
+
},
|
|
1924
|
+
}
|
|
1925
|
+
|
|
1926
|
+
output_file = agents_dir / f"{agent_name}.yaml"
|
|
1927
|
+
|
|
1928
|
+
if output_file.exists() and not click.confirm(
|
|
1929
|
+
f"Agent {agent_name}.yaml already exists. Overwrite?"
|
|
1930
|
+
):
|
|
1931
|
+
click.echo("Aborted.")
|
|
1932
|
+
return
|
|
1933
|
+
|
|
1934
|
+
with open(output_file, "w", encoding="utf-8") as f:
|
|
1935
|
+
yaml.dump(config, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
|
|
1936
|
+
|
|
1937
|
+
click.echo(f"✅ Created agent: {output_file}")
|
|
1938
|
+
click.echo("")
|
|
1939
|
+
click.echo("Next steps:")
|
|
1940
|
+
click.echo(f" 1. Edit {output_file} to customize your agent")
|
|
1941
|
+
click.echo(" 2. Restart the MCP server to activate this agent")
|
|
1942
|
+
click.echo(f" 3. Use the 'ask_{agent_name}' tool in your IDE")
|
|
1943
|
+
|
|
1944
|
+
|
|
1945
|
+
@agent_group.command("list")
|
|
1946
|
+
def agent_list_command() -> None:
|
|
1947
|
+
"""List all configured agents."""
|
|
1948
|
+
agents_dir = Path.cwd() / "agents"
|
|
1949
|
+
|
|
1950
|
+
if not agents_dir.exists():
|
|
1951
|
+
click.echo("No agents directory found.")
|
|
1952
|
+
click.echo("Create an agent with: nexus-agent init <name>")
|
|
1953
|
+
return
|
|
1954
|
+
|
|
1955
|
+
yaml_files = list(agents_dir.glob("*.yaml")) + list(agents_dir.glob("*.yml"))
|
|
1956
|
+
|
|
1957
|
+
if not yaml_files:
|
|
1958
|
+
click.echo("No agents found.")
|
|
1959
|
+
click.echo("Create an agent with: nexus-agent init <name>")
|
|
1960
|
+
return
|
|
1961
|
+
|
|
1962
|
+
click.echo("📋 Custom Agents:")
|
|
1963
|
+
click.echo("")
|
|
1964
|
+
|
|
1965
|
+
import yaml
|
|
1966
|
+
|
|
1967
|
+
for yaml_file in sorted(yaml_files):
|
|
1968
|
+
try:
|
|
1969
|
+
with open(yaml_file, encoding="utf-8") as f:
|
|
1970
|
+
data = yaml.safe_load(f)
|
|
1971
|
+
name = data.get("name", yaml_file.stem)
|
|
1972
|
+
display_name = data.get("display_name", name)
|
|
1973
|
+
role = data.get("profile", {}).get("role", "Unknown")
|
|
1974
|
+
click.echo(f" • {display_name} (ask_{name})")
|
|
1975
|
+
click.echo(f" Role: {role}")
|
|
1976
|
+
click.echo("")
|
|
1977
|
+
except Exception as e:
|
|
1978
|
+
click.echo(f" ⚠️ {yaml_file.name}: Failed to load ({e})")
|
|
1979
|
+
|
|
1980
|
+
|
|
1981
|
+
# Entry points for pyproject.toml scripts
|
|
1982
|
+
def init_command_entry() -> None:
    """Entry point for nexus-init.

    Fix: forward command-line arguments to the `init` subcommand.
    Previously this entry point dropped sys.argv entirely, unlike every
    sibling entry point (nexus-index, nexus-mcp, nexus-agent), so options
    passed to `nexus-init` were silently ignored.
    """
    import sys

    cli(["init"] + sys.argv[1:])
def index_command_entry() -> None:
    """Entry point for nexus-index."""
    import sys

    # Forward everything after the program name to the `index` subcommand.
    forwarded = sys.argv[1:]
    cli(["index", *forwarded])
def index_mcp_command_entry() -> None:
    """Entry point for nexus-index-mcp."""
    import sys

    # Forward everything after the program name to the `index-mcp` subcommand.
    forwarded = sys.argv[1:]
    cli(["index-mcp", *forwarded])
def mcp_command_entry() -> None:
    """Entry point for nexus-mcp."""
    import sys

    # Forward everything after the program name to the `mcp` command group.
    forwarded = sys.argv[1:]
    cli(["mcp", *forwarded])
def agent_command_entry() -> None:
    """Entry point for nexus-agent."""
    import sys

    # Forward everything after the program name to the `agent` command group.
    forwarded = sys.argv[1:]
    cli(["agent", *forwarded])
if __name__ == "__main__":
|
|
2017
|
+
cli()
|