nexus-dev 3.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of nexus-dev might be problematic.
- nexus_dev/__init__.py +4 -0
- nexus_dev/agent_templates/__init__.py +26 -0
- nexus_dev/agent_templates/api_designer.yaml +26 -0
- nexus_dev/agent_templates/code_reviewer.yaml +26 -0
- nexus_dev/agent_templates/debug_detective.yaml +26 -0
- nexus_dev/agent_templates/doc_writer.yaml +26 -0
- nexus_dev/agent_templates/performance_optimizer.yaml +26 -0
- nexus_dev/agent_templates/refactor_architect.yaml +26 -0
- nexus_dev/agent_templates/security_auditor.yaml +26 -0
- nexus_dev/agent_templates/test_engineer.yaml +26 -0
- nexus_dev/agents/__init__.py +20 -0
- nexus_dev/agents/agent_config.py +97 -0
- nexus_dev/agents/agent_executor.py +197 -0
- nexus_dev/agents/agent_manager.py +104 -0
- nexus_dev/agents/prompt_factory.py +91 -0
- nexus_dev/chunkers/__init__.py +168 -0
- nexus_dev/chunkers/base.py +202 -0
- nexus_dev/chunkers/docs_chunker.py +291 -0
- nexus_dev/chunkers/java_chunker.py +343 -0
- nexus_dev/chunkers/javascript_chunker.py +312 -0
- nexus_dev/chunkers/python_chunker.py +308 -0
- nexus_dev/cli.py +1673 -0
- nexus_dev/config.py +253 -0
- nexus_dev/database.py +558 -0
- nexus_dev/embeddings.py +585 -0
- nexus_dev/gateway/__init__.py +10 -0
- nexus_dev/gateway/connection_manager.py +348 -0
- nexus_dev/github_importer.py +247 -0
- nexus_dev/mcp_client.py +281 -0
- nexus_dev/mcp_config.py +184 -0
- nexus_dev/schemas/mcp_config_schema.json +166 -0
- nexus_dev/server.py +1866 -0
- nexus_dev/templates/pre-commit-hook +33 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/__init__.py +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/api_designer.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/code_reviewer.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/debug_detective.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/doc_writer.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/performance_optimizer.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/refactor_architect.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/security_auditor.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/agent_templates/test_engineer.yaml +26 -0
- nexus_dev-3.2.0.data/data/nexus_dev/templates/pre-commit-hook +33 -0
- nexus_dev-3.2.0.dist-info/METADATA +636 -0
- nexus_dev-3.2.0.dist-info/RECORD +48 -0
- nexus_dev-3.2.0.dist-info/WHEEL +4 -0
- nexus_dev-3.2.0.dist-info/entry_points.txt +12 -0
- nexus_dev-3.2.0.dist-info/licenses/LICENSE +21 -0
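The bulk of the release is the `nexus_dev/cli.py` module shown in the diff below, which defines a `click` command group (`init`, `index`, `index-lesson`, `export`, `status`, `reindex`, `import-github`, `search`, `index-mcp`, plus `mcp` and `agent` subgroups). As a minimal sketch, assuming the wheel is installed, the group can be exercised in-process with click's test runner; the command names mirror the `@cli.command(...)` registrations in the diff:

```python
# Minimal sketch: drive the packaged CLI in-process via click's test runner.
# Assumes the nexus-dev wheel is installed in the current environment.
from click.testing import CliRunner

from nexus_dev.cli import cli

runner = CliRunner()

# "status" prints project statistics, or a "not initialized" hint when
# nexus_config.json is missing from the working directory.
result = runner.invoke(cli, ["status"])
print(result.output)

# "search" queries the local knowledge base (requires a prior init/index).
result = runner.invoke(cli, ["search", "embedding provider", "--limit", "3"])
print(result.output)
```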
nexus_dev/cli.py
ADDED
|
@@ -0,0 +1,1673 @@
|
|
|
1
|
+
"""Nexus-Dev CLI commands.
|
|
2
|
+
|
|
3
|
+
Provides commands for:
|
|
4
|
+
- nexus-init: Initialize Nexus-Dev in a project
|
|
5
|
+
- nexus-index: Manually index files
|
|
6
|
+
- nexus-status: Show project statistics
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import asyncio
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import shutil
|
|
15
|
+
import stat
|
|
16
|
+
from collections import defaultdict
|
|
17
|
+
from collections.abc import Coroutine
|
|
18
|
+
from datetime import datetime
|
|
19
|
+
from fnmatch import fnmatch
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Any, Literal
|
|
22
|
+
|
|
23
|
+
import click
|
|
24
|
+
import yaml
|
|
25
|
+
|
|
26
|
+
from .chunkers import ChunkerRegistry
|
|
27
|
+
from .config import NexusConfig
|
|
28
|
+
from .database import Document, DocumentType, NexusDatabase, generate_document_id
|
|
29
|
+
from .embeddings import create_embedder
|
|
30
|
+
from .github_importer import GitHubImporter
|
|
31
|
+
from .mcp_client import MCPClientManager, MCPServerConnection
|
|
32
|
+
from .mcp_config import MCPConfig, MCPServerConfig
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _run_async(coro: Coroutine[Any, Any, Any]) -> Any:
|
|
36
|
+
"""Run async function in sync context."""
|
|
37
|
+
return asyncio.get_event_loop().run_until_complete(coro)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@click.group()
|
|
41
|
+
@click.version_option(version="0.1.0", prog_name="nexus-dev")
|
|
42
|
+
def cli() -> None:
|
|
43
|
+
"""Nexus-Dev CLI - Local RAG for AI coding agents.
|
|
44
|
+
|
|
45
|
+
Nexus-Dev provides persistent memory for AI coding assistants by indexing
|
|
46
|
+
your code and documentation into a local vector database.
|
|
47
|
+
"""
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@cli.command("init")
|
|
51
|
+
@click.option(
|
|
52
|
+
"--project-name",
|
|
53
|
+
prompt="Project name",
|
|
54
|
+
help="Human-readable name for the project",
|
|
55
|
+
)
|
|
56
|
+
@click.option(
|
|
57
|
+
"--embedding-provider",
|
|
58
|
+
type=click.Choice(["openai", "ollama"]),
|
|
59
|
+
default="openai",
|
|
60
|
+
help="Embedding provider to use (default: openai)",
|
|
61
|
+
)
|
|
62
|
+
@click.option(
|
|
63
|
+
"--install-hook/--no-hook",
|
|
64
|
+
default=False,
|
|
65
|
+
help="Install pre-commit hook for automatic indexing",
|
|
66
|
+
)
|
|
67
|
+
def init_command(
|
|
68
|
+
project_name: str,
|
|
69
|
+
embedding_provider: Literal["openai", "ollama"],
|
|
70
|
+
install_hook: bool,
|
|
71
|
+
) -> None:
|
|
72
|
+
"""Initialize Nexus-Dev in the current repository.
|
|
73
|
+
|
|
74
|
+
Creates configuration file, lessons directory, and optionally installs
|
|
75
|
+
the pre-commit hook for automatic indexing.
|
|
76
|
+
"""
|
|
77
|
+
cwd = Path.cwd()
|
|
78
|
+
config_path = cwd / "nexus_config.json"
|
|
79
|
+
|
|
80
|
+
# Check if already initialized
|
|
81
|
+
if config_path.exists():
|
|
82
|
+
click.echo("⚠️ nexus_config.json already exists.")
|
|
83
|
+
if not click.confirm("Overwrite existing configuration?"):
|
|
84
|
+
click.echo("Aborted.")
|
|
85
|
+
return
|
|
86
|
+
|
|
87
|
+
# Create configuration
|
|
88
|
+
config = NexusConfig.create_new(
|
|
89
|
+
project_name=project_name,
|
|
90
|
+
embedding_provider=embedding_provider,
|
|
91
|
+
)
|
|
92
|
+
config.save(config_path)
|
|
93
|
+
click.echo("✅ Created nexus_config.json")
|
|
94
|
+
|
|
95
|
+
# Create .nexus/lessons directory
|
|
96
|
+
lessons_dir = cwd / ".nexus" / "lessons"
|
|
97
|
+
lessons_dir.mkdir(parents=True, exist_ok=True)
|
|
98
|
+
click.echo("✅ Created .nexus/lessons/")
|
|
99
|
+
|
|
100
|
+
# Create .gitkeep so the directory is tracked
|
|
101
|
+
gitkeep = lessons_dir / ".gitkeep"
|
|
102
|
+
gitkeep.touch(exist_ok=True)
|
|
103
|
+
|
|
104
|
+
# Create database directory
|
|
105
|
+
db_path = config.get_db_path()
|
|
106
|
+
db_path.mkdir(parents=True, exist_ok=True)
|
|
107
|
+
click.echo(f"✅ Created database directory at {db_path}")
|
|
108
|
+
|
|
109
|
+
# Optionally install pre-commit hook
|
|
110
|
+
if install_hook:
|
|
111
|
+
_install_hook(cwd)
|
|
112
|
+
|
|
113
|
+
# Configure .gitignore
|
|
114
|
+
click.echo("")
|
|
115
|
+
ignore_choice = click.prompt(
|
|
116
|
+
"Configure .gitignore for .nexus folder?",
|
|
117
|
+
type=click.Choice(["allow-lessons", "ignore-all", "skip"]),
|
|
118
|
+
default="allow-lessons",
|
|
119
|
+
show_default=True,
|
|
120
|
+
)
|
|
121
|
+
|
|
122
|
+
if ignore_choice != "skip":
|
|
123
|
+
_update_gitignore(cwd, ignore_choice)
|
|
124
|
+
|
|
125
|
+
click.echo("")
|
|
126
|
+
click.echo(f"Project ID: {config.project_id}")
|
|
127
|
+
|
|
128
|
+
if embedding_provider == "openai":
|
|
129
|
+
click.echo("")
|
|
130
|
+
click.echo("⚠️ Using OpenAI embeddings. Ensure OPENAI_API_KEY is set.")
|
|
131
|
+
|
|
132
|
+
click.echo("")
|
|
133
|
+
click.echo("----------------------------------------------------------------")
|
|
134
|
+
click.echo("🤖 COPY-PASTE THIS INTO YOUR AGENT'S SYSTEM PROMPT OR RULES:")
|
|
135
|
+
click.echo("----------------------------------------------------------------")
|
|
136
|
+
click.echo(f"""
|
|
137
|
+
## Nexus-Dev Knowledge Base
|
|
138
|
+
|
|
139
|
+
You have access to a local RAG system for this project.
|
|
140
|
+
|
|
141
|
+
**Project ID:** {config.project_id}
|
|
142
|
+
|
|
143
|
+
**MANDATORY**: You MUST use `nexus-dev` tools BEFORE answering questions about this code.
|
|
144
|
+
1. `search_knowledge("{config.project_name} <query>")` - Search code, docs, and lessons
|
|
145
|
+
2. `search_code("<class/function_name>")` - Find specific code definitions
|
|
146
|
+
3. `search_lessons("<error/problem>")` - Check for past solutions
|
|
147
|
+
4. `record_lesson(...)` - Save solutions after fixing non-trivial bugs
|
|
148
|
+
|
|
149
|
+
**Best Practice:**
|
|
150
|
+
- Start every session with `get_project_context()`
|
|
151
|
+
- Search before writing code
|
|
152
|
+
- Record insights with `record_insight()`
|
|
153
|
+
""")
|
|
154
|
+
click.echo("----------------------------------------------------------------")
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def _install_hook(cwd: Path) -> None:
|
|
158
|
+
"""Install pre-commit hook."""
|
|
159
|
+
git_dir = cwd / ".git"
|
|
160
|
+
if not git_dir.exists():
|
|
161
|
+
click.echo("⚠️ Not a git repository. Skipping hook installation.")
|
|
162
|
+
return
|
|
163
|
+
|
|
164
|
+
hooks_dir = git_dir / "hooks"
|
|
165
|
+
hooks_dir.mkdir(exist_ok=True)
|
|
166
|
+
|
|
167
|
+
hook_path = hooks_dir / "pre-commit"
|
|
168
|
+
|
|
169
|
+
# Check if hook already exists
|
|
170
|
+
if hook_path.exists():
|
|
171
|
+
click.echo("⚠️ pre-commit hook already exists. Skipping.")
|
|
172
|
+
return
|
|
173
|
+
|
|
174
|
+
# Copy template
|
|
175
|
+
template_path = Path(__file__).parent / "templates" / "pre-commit-hook"
|
|
176
|
+
if template_path.exists():
|
|
177
|
+
shutil.copy(template_path, hook_path)
|
|
178
|
+
else:
|
|
179
|
+
# Write inline
|
|
180
|
+
hook_content = """#!/bin/bash
|
|
181
|
+
# Nexus-Dev Pre-commit Hook
|
|
182
|
+
|
|
183
|
+
set -e
|
|
184
|
+
|
|
185
|
+
echo "🧠 Nexus-Dev: Checking for files to index..."
|
|
186
|
+
|
|
187
|
+
MODIFIED_FILES=$(git diff --cached --name-only --diff-filter=ACM | \
|
|
188
|
+
grep -E '\\.(py|js|jsx|ts|tsx|java)$' || true)
|
|
189
|
+
|
|
190
|
+
if [ -n "$MODIFIED_FILES" ]; then
|
|
191
|
+
echo "📁 Indexing modified code files..."
|
|
192
|
+
for file in $MODIFIED_FILES; do
|
|
193
|
+
if [ -f "$file" ]; then
|
|
194
|
+
python -m nexus_dev.cli index "$file" --quiet 2>/dev/null || true
|
|
195
|
+
fi
|
|
196
|
+
done
|
|
197
|
+
fi
|
|
198
|
+
|
|
199
|
+
LESSON_FILES=$(git diff --cached --name-only --diff-filter=A | \
|
|
200
|
+
grep -E '^\\.nexus/lessons/.*\\.md$' || true)
|
|
201
|
+
|
|
202
|
+
if [ -n "$LESSON_FILES" ]; then
|
|
203
|
+
echo "📚 Indexing new lessons..."
|
|
204
|
+
for file in $LESSON_FILES; do
|
|
205
|
+
if [ -f "$file" ]; then
|
|
206
|
+
python -m nexus_dev.cli index-lesson "$file" --quiet 2>/dev/null || true
|
|
207
|
+
fi
|
|
208
|
+
done
|
|
209
|
+
fi
|
|
210
|
+
|
|
211
|
+
echo "✅ Nexus-Dev indexing complete"
|
|
212
|
+
"""
|
|
213
|
+
hook_path.write_text(hook_content)
|
|
214
|
+
|
|
215
|
+
# Make executable
|
|
216
|
+
current_mode = hook_path.stat().st_mode
|
|
217
|
+
hook_path.chmod(current_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
|
218
|
+
|
|
219
|
+
click.echo("✅ Installed pre-commit hook")
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def _update_gitignore(cwd: Path, choice: str) -> None:
|
|
223
|
+
"""Update .gitignore based on user choice."""
|
|
224
|
+
gitignore_path = cwd / ".gitignore"
|
|
225
|
+
|
|
226
|
+
# define mapping for content
|
|
227
|
+
content_map = {
|
|
228
|
+
"allow-lessons": [
|
|
229
|
+
"\n# Nexus-Dev",
|
|
230
|
+
".nexus_config.json",
|
|
231
|
+
".nexus/*",
|
|
232
|
+
"!.nexus/lessons/",
|
|
233
|
+
"",
|
|
234
|
+
],
|
|
235
|
+
"ignore-all": ["\n# Nexus-Dev", ".nexus_config.json", ".nexus/", ""],
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
new_lines = content_map.get(choice, [])
|
|
239
|
+
if not new_lines:
|
|
240
|
+
return
|
|
241
|
+
|
|
242
|
+
# Create if doesn't exist
|
|
243
|
+
if not gitignore_path.exists():
|
|
244
|
+
gitignore_path.write_text("\n".join(new_lines), encoding="utf-8")
|
|
245
|
+
click.echo("✅ Created .gitignore")
|
|
246
|
+
return
|
|
247
|
+
|
|
248
|
+
# Append if exists
|
|
249
|
+
current_content = gitignore_path.read_text(encoding="utf-8")
|
|
250
|
+
|
|
251
|
+
# simple check to avoid duplication (imperfect but sufficient for init)
|
|
252
|
+
if ".nexus" in current_content:
|
|
253
|
+
click.echo("⚠️ .nexus already in .gitignore, skipping update.")
|
|
254
|
+
return
|
|
255
|
+
|
|
256
|
+
with gitignore_path.open("a", encoding="utf-8") as f:
|
|
257
|
+
f.write("\n".join(new_lines))
|
|
258
|
+
|
|
259
|
+
click.echo(f"✅ Updated .gitignore ({choice})")
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
@cli.command("index")
|
|
263
|
+
@click.argument("paths", nargs=-1, required=True)
|
|
264
|
+
@click.option(
|
|
265
|
+
"-r",
|
|
266
|
+
"--recursive",
|
|
267
|
+
is_flag=True,
|
|
268
|
+
help="Index directories recursively",
|
|
269
|
+
)
|
|
270
|
+
@click.option(
|
|
271
|
+
"-q",
|
|
272
|
+
"--quiet",
|
|
273
|
+
is_flag=True,
|
|
274
|
+
help="Suppress output",
|
|
275
|
+
)
|
|
276
|
+
def index_command(paths: tuple[str, ...], recursive: bool, quiet: bool) -> None:
|
|
277
|
+
"""Manually index files or directories.
|
|
278
|
+
|
|
279
|
+
PATHS can be files or directories. Use -r to recursively index directories.
|
|
280
|
+
|
|
281
|
+
Examples:
|
|
282
|
+
nexus-index src/
|
|
283
|
+
nexus-index docs/ -r
|
|
284
|
+
nexus-index main.py utils.py
|
|
285
|
+
"""
|
|
286
|
+
# Load config
|
|
287
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
288
|
+
if not config_path.exists():
|
|
289
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
290
|
+
return
|
|
291
|
+
|
|
292
|
+
config = NexusConfig.load(config_path)
|
|
293
|
+
embedder = create_embedder(config)
|
|
294
|
+
database = NexusDatabase(config, embedder)
|
|
295
|
+
database.connect()
|
|
296
|
+
|
|
297
|
+
# Collect files to index
|
|
298
|
+
files_to_index: list[Path] = []
|
|
299
|
+
|
|
300
|
+
for path_str in paths:
|
|
301
|
+
path = Path(path_str)
|
|
302
|
+
if not path.is_absolute():
|
|
303
|
+
path = Path.cwd() / path
|
|
304
|
+
|
|
305
|
+
if not path.exists():
|
|
306
|
+
if not quiet:
|
|
307
|
+
click.echo(f"⚠️ Path not found: {path_str}")
|
|
308
|
+
continue
|
|
309
|
+
|
|
310
|
+
if path.is_file():
|
|
311
|
+
files_to_index.append(path)
|
|
312
|
+
elif path.is_dir():
|
|
313
|
+
if recursive:
|
|
314
|
+
# Recursively find files using os.walk to prune ignored directories
|
|
315
|
+
for root, dirs, files in os.walk(path):
|
|
316
|
+
root_path = Path(root)
|
|
317
|
+
|
|
318
|
+
# Compute relative path for pattern matching
|
|
319
|
+
rel_root = str(root_path.relative_to(Path.cwd()))
|
|
320
|
+
if rel_root == ".":
|
|
321
|
+
rel_root = ""
|
|
322
|
+
|
|
323
|
+
# Filter directories to prevent traversal into ignored paths
|
|
324
|
+
# We must modify dirs in-place to prune the walk
|
|
325
|
+
i = 0
|
|
326
|
+
while i < len(dirs):
|
|
327
|
+
d = dirs[i]
|
|
328
|
+
d_path = root_path / d
|
|
329
|
+
# We construct a mock path string for the directory check
|
|
330
|
+
# (relative path + directory name)
|
|
331
|
+
check_path = str(d_path.relative_to(Path.cwd()))
|
|
332
|
+
|
|
333
|
+
# Use a simpler check: if the directory ITSELF matches exclude pattern
|
|
334
|
+
# we should remove it.
|
|
335
|
+
should_exclude = False
|
|
336
|
+
|
|
337
|
+
# Check excludes for this directory
|
|
338
|
+
# We treat the directory string as match target for exclude patterns
|
|
339
|
+
# excluding the trailing slash for fnmatch
|
|
340
|
+
for pattern in config.exclude_patterns:
|
|
341
|
+
# Normalize pattern: remove trailing slash for directory matching
|
|
342
|
+
clean_pat = pattern.rstrip("/")
|
|
343
|
+
if clean_pat.endswith("/**"):
|
|
344
|
+
clean_pat = clean_pat[:-3]
|
|
345
|
+
|
|
346
|
+
# Simple fnmatch on the logic
|
|
347
|
+
if fnmatch(check_path, pattern) or fnmatch(check_path, clean_pat):
|
|
348
|
+
should_exclude = True
|
|
349
|
+
break
|
|
350
|
+
|
|
351
|
+
# Handle recursive wildcard start (e.g. **/node_modules)
|
|
352
|
+
if clean_pat.startswith("**/"):
|
|
353
|
+
suffix = clean_pat[3:]
|
|
354
|
+
if (
|
|
355
|
+
check_path == suffix
|
|
356
|
+
or check_path.endswith("/" + suffix)
|
|
357
|
+
or fnmatch(check_path, suffix)
|
|
358
|
+
):
|
|
359
|
+
should_exclude = True
|
|
360
|
+
break
|
|
361
|
+
|
|
362
|
+
if should_exclude:
|
|
363
|
+
if not quiet:
|
|
364
|
+
# Optional: debug output if needed, but keeping it clean for now
|
|
365
|
+
pass
|
|
366
|
+
del dirs[i]
|
|
367
|
+
else:
|
|
368
|
+
i += 1
|
|
369
|
+
|
|
370
|
+
# Add files
|
|
371
|
+
for file in files:
|
|
372
|
+
file_path = root_path / file
|
|
373
|
+
if _should_index(file_path, config):
|
|
374
|
+
files_to_index.append(file_path)
|
|
375
|
+
else:
|
|
376
|
+
# Only immediate children
|
|
377
|
+
for file_path in path.iterdir():
|
|
378
|
+
if file_path.is_file():
|
|
379
|
+
# For explicit paths/directories, we check excludes but ignore
|
|
380
|
+
# include patterns to allow indexing "anything I point at"
|
|
381
|
+
# unless specifically excluded
|
|
382
|
+
is_excluded = _is_excluded(file_path, config)
|
|
383
|
+
if not is_excluded:
|
|
384
|
+
files_to_index.append(file_path)
|
|
385
|
+
|
|
386
|
+
if not files_to_index:
|
|
387
|
+
if not quiet:
|
|
388
|
+
click.echo("No files to index.")
|
|
389
|
+
return
|
|
390
|
+
|
|
391
|
+
if not quiet:
|
|
392
|
+
_print_file_summary(files_to_index)
|
|
393
|
+
if not click.confirm("Proceed with indexing?"):
|
|
394
|
+
click.echo("Aborted.")
|
|
395
|
+
return
|
|
396
|
+
|
|
397
|
+
# Index files
|
|
398
|
+
total_chunks = 0
|
|
399
|
+
errors = 0
|
|
400
|
+
|
|
401
|
+
for file_path in files_to_index:
|
|
402
|
+
try:
|
|
403
|
+
# Read file
|
|
404
|
+
content = file_path.read_text(encoding="utf-8")
|
|
405
|
+
|
|
406
|
+
# Detect smart type from frontmatter
|
|
407
|
+
detected_type, metadata = _detect_document_type_and_metadata(content)
|
|
408
|
+
|
|
409
|
+
# Determine type
|
|
410
|
+
if detected_type:
|
|
411
|
+
doc_type = detected_type
|
|
412
|
+
else:
|
|
413
|
+
ext = file_path.suffix.lower()
|
|
414
|
+
doc_type = (
|
|
415
|
+
DocumentType.DOCUMENTATION
|
|
416
|
+
if ext in (".md", ".markdown", ".rst", ".txt")
|
|
417
|
+
else DocumentType.CODE
|
|
418
|
+
)
|
|
419
|
+
|
|
420
|
+
# Delete existing
|
|
421
|
+
_run_async(database.delete_by_file(str(file_path), config.project_id))
|
|
422
|
+
|
|
423
|
+
# Chunk file
|
|
424
|
+
chunks = ChunkerRegistry.chunk_file(file_path, content)
|
|
425
|
+
|
|
426
|
+
if chunks:
|
|
427
|
+
# Generate embeddings and store
|
|
428
|
+
chunk_count = _run_async(
|
|
429
|
+
_index_chunks_sync(
|
|
430
|
+
chunks,
|
|
431
|
+
config.project_id,
|
|
432
|
+
doc_type,
|
|
433
|
+
embedder,
|
|
434
|
+
database,
|
|
435
|
+
metadata=metadata,
|
|
436
|
+
)
|
|
437
|
+
)
|
|
438
|
+
total_chunks += chunk_count
|
|
439
|
+
|
|
440
|
+
if not quiet:
|
|
441
|
+
click.echo(f" ✅ {file_path.name}: {chunk_count} chunks")
|
|
442
|
+
|
|
443
|
+
except Exception as e:
|
|
444
|
+
errors += 1
|
|
445
|
+
if not quiet:
|
|
446
|
+
click.echo(f" ❌ {file_path.name}: {e!s}")
|
|
447
|
+
|
|
448
|
+
if not quiet:
|
|
449
|
+
click.echo("")
|
|
450
|
+
click.echo(f"✅ Indexed {total_chunks} chunks from {len(files_to_index) - errors} files")
|
|
451
|
+
if errors:
|
|
452
|
+
click.echo(f"⚠️ {errors} file(s) failed")
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
async def _index_chunks_sync(
|
|
456
|
+
chunks: list[Any],
|
|
457
|
+
project_id: str,
|
|
458
|
+
doc_type: DocumentType,
|
|
459
|
+
embedder: Any,
|
|
460
|
+
database: NexusDatabase,
|
|
461
|
+
metadata: dict[str, Any] | None = None,
|
|
462
|
+
) -> int:
|
|
463
|
+
"""Index chunks synchronously."""
|
|
464
|
+
if not chunks:
|
|
465
|
+
return 0
|
|
466
|
+
|
|
467
|
+
texts = [chunk.get_searchable_text() for chunk in chunks]
|
|
468
|
+
embeddings = await embedder.embed_batch(texts)
|
|
469
|
+
|
|
470
|
+
documents = []
|
|
471
|
+
for chunk, embedding in zip(chunks, embeddings, strict=True):
|
|
472
|
+
doc_id = generate_document_id(
|
|
473
|
+
project_id,
|
|
474
|
+
chunk.file_path,
|
|
475
|
+
chunk.name,
|
|
476
|
+
chunk.start_line,
|
|
477
|
+
)
|
|
478
|
+
|
|
479
|
+
# Prepare document kwargs
|
|
480
|
+
doc_kwargs = {
|
|
481
|
+
"id": doc_id,
|
|
482
|
+
"text": chunk.get_searchable_text(),
|
|
483
|
+
"vector": embedding,
|
|
484
|
+
"project_id": project_id,
|
|
485
|
+
"file_path": chunk.file_path,
|
|
486
|
+
"doc_type": doc_type,
|
|
487
|
+
"chunk_type": chunk.chunk_type.value,
|
|
488
|
+
"language": chunk.language,
|
|
489
|
+
"name": chunk.name,
|
|
490
|
+
"start_line": chunk.start_line,
|
|
491
|
+
"end_line": chunk.end_line,
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
# Add metadata if present
|
|
495
|
+
if metadata and "timestamp" in metadata:
|
|
496
|
+
try:
|
|
497
|
+
# Handle ISO format from export
|
|
498
|
+
if isinstance(metadata["timestamp"], str):
|
|
499
|
+
doc_kwargs["timestamp"] = datetime.fromisoformat(metadata["timestamp"])
|
|
500
|
+
elif isinstance(metadata["timestamp"], datetime):
|
|
501
|
+
doc_kwargs["timestamp"] = metadata["timestamp"]
|
|
502
|
+
except Exception:
|
|
503
|
+
# Fallback to current time if parse fails
|
|
504
|
+
pass
|
|
505
|
+
|
|
506
|
+
doc = Document(**doc_kwargs)
|
|
507
|
+
documents.append(doc)
|
|
508
|
+
|
|
509
|
+
await database.upsert_documents(documents)
|
|
510
|
+
return len(documents)
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def _should_index(file_path: Path, config: NexusConfig) -> bool:
|
|
514
|
+
"""Check if file should be indexed based on config patterns."""
|
|
515
|
+
rel_path = str(file_path.relative_to(Path.cwd()))
|
|
516
|
+
|
|
517
|
+
# Check exclude patterns
|
|
518
|
+
for pattern in config.exclude_patterns:
|
|
519
|
+
if fnmatch(rel_path, pattern):
|
|
520
|
+
return False
|
|
521
|
+
|
|
522
|
+
# Also check without leading **/ if present (for root matches)
|
|
523
|
+
if pattern.startswith("**/") and fnmatch(rel_path, pattern[3:]):
|
|
524
|
+
return False
|
|
525
|
+
|
|
526
|
+
# Check include patterns
|
|
527
|
+
for pattern in config.include_patterns:
|
|
528
|
+
if fnmatch(rel_path, pattern):
|
|
529
|
+
return True
|
|
530
|
+
|
|
531
|
+
# Also include docs folders
|
|
532
|
+
for docs_folder in config.docs_folders:
|
|
533
|
+
if rel_path.startswith(docs_folder) or rel_path == docs_folder.rstrip("/"):
|
|
534
|
+
return True
|
|
535
|
+
|
|
536
|
+
return False
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
def _is_excluded(file_path: Path, config: NexusConfig) -> bool:
|
|
540
|
+
"""Check if file is explicitly excluded by config patterns."""
|
|
541
|
+
rel_path = str(file_path.relative_to(Path.cwd()))
|
|
542
|
+
|
|
543
|
+
# Check exclude patterns
|
|
544
|
+
for pattern in config.exclude_patterns:
|
|
545
|
+
if fnmatch(rel_path, pattern):
|
|
546
|
+
return True
|
|
547
|
+
|
|
548
|
+
# Also check without leading **/ if present (for root matches)
|
|
549
|
+
if pattern.startswith("**/") and fnmatch(rel_path, pattern[3:]):
|
|
550
|
+
return True
|
|
551
|
+
|
|
552
|
+
return False
|
|
553
|
+
|
|
554
|
+
|
|
555
|
+
def _detect_document_type_and_metadata(
|
|
556
|
+
content: str,
|
|
557
|
+
) -> tuple[DocumentType | None, dict[str, Any]]:
|
|
558
|
+
"""Detect document type and metadata from frontmatter."""
|
|
559
|
+
if not content.startswith("---\n"):
|
|
560
|
+
return None, {}
|
|
561
|
+
|
|
562
|
+
try:
|
|
563
|
+
# Extract frontmatter
|
|
564
|
+
_, frontmatter, _ = content.split("---", 2)
|
|
565
|
+
data = yaml.safe_load(frontmatter)
|
|
566
|
+
|
|
567
|
+
if not isinstance(data, dict):
|
|
568
|
+
return None, {}
|
|
569
|
+
|
|
570
|
+
# Detect type based on keys/values
|
|
571
|
+
if data.get("category") in ["discovery", "mistake", "backtrack", "optimization"]:
|
|
572
|
+
return DocumentType.INSIGHT, data
|
|
573
|
+
|
|
574
|
+
if "problem" in data and "solution" in data:
|
|
575
|
+
return DocumentType.LESSON, data
|
|
576
|
+
|
|
577
|
+
if (
|
|
578
|
+
"summary" in data
|
|
579
|
+
and "approach" in data
|
|
580
|
+
and ("files_changed" in data or "design_decisions" in data)
|
|
581
|
+
):
|
|
582
|
+
return DocumentType.IMPLEMENTATION, data
|
|
583
|
+
|
|
584
|
+
if data.get("type") == "github_issue":
|
|
585
|
+
return DocumentType.GITHUB_ISSUE, data
|
|
586
|
+
|
|
587
|
+
if data.get("type") == "github_pr":
|
|
588
|
+
return DocumentType.GITHUB_PR, data
|
|
589
|
+
|
|
590
|
+
return None, data
|
|
591
|
+
|
|
592
|
+
except Exception:
|
|
593
|
+
return None, {}
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
def _print_file_summary(files: list[Path]) -> None:
|
|
597
|
+
"""Print a summary of files to be indexed."""
|
|
598
|
+
if not files:
|
|
599
|
+
click.echo("No files to index.")
|
|
600
|
+
return
|
|
601
|
+
|
|
602
|
+
# Group by directory
|
|
603
|
+
by_dir: dict[str, int] = defaultdict(int)
|
|
604
|
+
for f in files:
|
|
605
|
+
parent = str(f.parent.relative_to(Path.cwd()) if f.is_absolute() else f.parent)
|
|
606
|
+
if parent == ".":
|
|
607
|
+
parent = "Root"
|
|
608
|
+
by_dir[parent] += 1
|
|
609
|
+
|
|
610
|
+
click.echo(f" Found {len(files)} files to index:")
|
|
611
|
+
click.echo("")
|
|
612
|
+
|
|
613
|
+
# Sort by directory name
|
|
614
|
+
for directory, count in sorted(by_dir.items()):
|
|
615
|
+
click.echo(f" 📁 {directory:<40} {count} files")
|
|
616
|
+
|
|
617
|
+
click.echo("")
|
|
618
|
+
|
|
619
|
+
|
|
620
|
+
@cli.command("index-lesson")
|
|
621
|
+
@click.argument("lesson_file")
|
|
622
|
+
@click.option("-q", "--quiet", is_flag=True, help="Suppress output")
|
|
623
|
+
def index_lesson_command(lesson_file: str, quiet: bool) -> None:
|
|
624
|
+
"""Index a lesson file from .nexus/lessons/."""
|
|
625
|
+
path = Path(lesson_file)
|
|
626
|
+
if not path.is_absolute():
|
|
627
|
+
path = Path.cwd() / path
|
|
628
|
+
|
|
629
|
+
if not path.exists():
|
|
630
|
+
if not quiet:
|
|
631
|
+
click.echo(f"❌ Lesson file not found: {lesson_file}", err=True)
|
|
632
|
+
return
|
|
633
|
+
|
|
634
|
+
# Load config
|
|
635
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
636
|
+
if not config_path.exists():
|
|
637
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
638
|
+
return
|
|
639
|
+
|
|
640
|
+
config = NexusConfig.load(config_path)
|
|
641
|
+
embedder = create_embedder(config)
|
|
642
|
+
database = NexusDatabase(config, embedder)
|
|
643
|
+
database.connect()
|
|
644
|
+
|
|
645
|
+
try:
|
|
646
|
+
content = path.read_text(encoding="utf-8")
|
|
647
|
+
|
|
648
|
+
# Generate embedding
|
|
649
|
+
embedding = _run_async(embedder.embed(content))
|
|
650
|
+
|
|
651
|
+
# Create document
|
|
652
|
+
doc_id = generate_document_id(
|
|
653
|
+
config.project_id,
|
|
654
|
+
str(path),
|
|
655
|
+
path.stem,
|
|
656
|
+
0,
|
|
657
|
+
)
|
|
658
|
+
|
|
659
|
+
doc = Document(
|
|
660
|
+
id=doc_id,
|
|
661
|
+
text=content,
|
|
662
|
+
vector=embedding,
|
|
663
|
+
project_id=config.project_id,
|
|
664
|
+
file_path=str(path),
|
|
665
|
+
doc_type=DocumentType.LESSON,
|
|
666
|
+
chunk_type="lesson",
|
|
667
|
+
language="markdown",
|
|
668
|
+
name=path.stem,
|
|
669
|
+
start_line=0,
|
|
670
|
+
end_line=0,
|
|
671
|
+
)
|
|
672
|
+
|
|
673
|
+
_run_async(database.upsert_document(doc))
|
|
674
|
+
|
|
675
|
+
if not quiet:
|
|
676
|
+
click.echo(f"✅ Indexed lesson: {path.name}")
|
|
677
|
+
|
|
678
|
+
except Exception as e:
|
|
679
|
+
if not quiet:
|
|
680
|
+
click.echo(f"❌ Failed to index lesson: {e!s}", err=True)
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
@cli.command("export")
|
|
684
|
+
@click.option("--project-id", help="Project ID to export (defaults to current config)")
|
|
685
|
+
@click.option(
|
|
686
|
+
"--output",
|
|
687
|
+
"-o",
|
|
688
|
+
type=click.Path(path_type=Path),
|
|
689
|
+
help="Output directory (default: ./nexus-export)",
|
|
690
|
+
)
|
|
691
|
+
def export_command(project_id: str | None, output: Path | None) -> None:
|
|
692
|
+
"""Export project knowledge to markdown files."""
|
|
693
|
+
from .config import NexusConfig
|
|
694
|
+
from .database import DocumentType, NexusDatabase
|
|
695
|
+
from .embeddings import create_embedder
|
|
696
|
+
|
|
697
|
+
async def _export() -> None:
|
|
698
|
+
# Load config
|
|
699
|
+
config = None
|
|
700
|
+
try:
|
|
701
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
702
|
+
if config_path.exists():
|
|
703
|
+
config = NexusConfig.load(config_path)
|
|
704
|
+
except Exception:
|
|
705
|
+
pass
|
|
706
|
+
|
|
707
|
+
effective_project_id = project_id
|
|
708
|
+
if not effective_project_id and config:
|
|
709
|
+
effective_project_id = config.project_id
|
|
710
|
+
|
|
711
|
+
if not effective_project_id:
|
|
712
|
+
click.secho("Error: No project-id provided and no nexus_config.json found.", fg="red")
|
|
713
|
+
return
|
|
714
|
+
|
|
715
|
+
# Initialize DB
|
|
716
|
+
if not config:
|
|
717
|
+
# Create temporary config for DB access
|
|
718
|
+
config = NexusConfig.create_new("temp")
|
|
719
|
+
|
|
720
|
+
try:
|
|
721
|
+
embedder = create_embedder(config)
|
|
722
|
+
db = NexusDatabase(config, embedder)
|
|
723
|
+
db.connect()
|
|
724
|
+
|
|
725
|
+
click.echo(f"Exporting knowledge for project: {effective_project_id}")
|
|
726
|
+
|
|
727
|
+
# Get all documents for this project
|
|
728
|
+
# searching with empty query returns all items for project/type
|
|
729
|
+
# We fetch strictly structured data types: Lesson, Insight, Implementation
|
|
730
|
+
types_to_export = [
|
|
731
|
+
(DocumentType.LESSON, "lessons"),
|
|
732
|
+
(DocumentType.INSIGHT, "insights"),
|
|
733
|
+
(DocumentType.IMPLEMENTATION, "implementations"),
|
|
734
|
+
]
|
|
735
|
+
|
|
736
|
+
base_dir = output or Path.cwd() / "nexus-export"
|
|
737
|
+
base_dir.mkdir(parents=True, exist_ok=True)
|
|
738
|
+
|
|
739
|
+
total_count = 0
|
|
740
|
+
|
|
741
|
+
for doc_type, dirname in types_to_export:
|
|
742
|
+
# We use a hack: search for " " (space) which usually matches everything
|
|
743
|
+
# or rely on search implementation to support wildcards.
|
|
744
|
+
# Since vector search always returns something, we use a high limit
|
|
745
|
+
results = await db.search(
|
|
746
|
+
query="*", # Some vector DBs verify query length
|
|
747
|
+
project_id=effective_project_id,
|
|
748
|
+
doc_type=doc_type,
|
|
749
|
+
limit=1000,
|
|
750
|
+
)
|
|
751
|
+
|
|
752
|
+
if not results:
|
|
753
|
+
continue
|
|
754
|
+
|
|
755
|
+
type_dir = base_dir / dirname
|
|
756
|
+
type_dir.mkdir(exist_ok=True)
|
|
757
|
+
|
|
758
|
+
click.echo(f" - Found {len(results)} {dirname}")
|
|
759
|
+
|
|
760
|
+
for res in results:
|
|
761
|
+
# Use ID from metadata if available, else generate safe name
|
|
762
|
+
safe_name = "".join(c for c in res.name if c.isalnum() or c in "-_")
|
|
763
|
+
filename = f"{safe_name}.md"
|
|
764
|
+
|
|
765
|
+
file_path = type_dir / filename
|
|
766
|
+
file_path.write_text(res.text, encoding="utf-8")
|
|
767
|
+
total_count += 1
|
|
768
|
+
|
|
769
|
+
click.secho(f"\nSuccessfully exported {total_count} files to {base_dir}", fg="green")
|
|
770
|
+
|
|
771
|
+
except Exception as e:
|
|
772
|
+
click.secho(f"Export failed: {e}", fg="red")
|
|
773
|
+
|
|
774
|
+
_run_async(_export())
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
@cli.command("status")
|
|
778
|
+
def status_command() -> None:
|
|
779
|
+
"""Show Nexus-Dev status and statistics."""
|
|
780
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
781
|
+
|
|
782
|
+
if not config_path.exists():
|
|
783
|
+
click.echo("❌ Nexus-Dev not initialized in this directory.")
|
|
784
|
+
click.echo(" Run 'nexus-init' to get started.")
|
|
785
|
+
return
|
|
786
|
+
|
|
787
|
+
config = NexusConfig.load(config_path)
|
|
788
|
+
|
|
789
|
+
click.echo("📊 Nexus-Dev Status")
|
|
790
|
+
click.echo("")
|
|
791
|
+
click.echo(f"Project: {config.project_name}")
|
|
792
|
+
click.echo(f"Project ID: {config.project_id}")
|
|
793
|
+
click.echo(f"Embedding Provider: {config.embedding_provider}")
|
|
794
|
+
click.echo(f"Embedding Model: {config.embedding_model}")
|
|
795
|
+
click.echo(f"Database: {config.get_db_path()}")
|
|
796
|
+
click.echo("")
|
|
797
|
+
|
|
798
|
+
try:
|
|
799
|
+
embedder = create_embedder(config)
|
|
800
|
+
database = NexusDatabase(config, embedder)
|
|
801
|
+
database.connect()
|
|
802
|
+
|
|
803
|
+
stats = _run_async(database.get_project_stats(config.project_id))
|
|
804
|
+
|
|
805
|
+
click.echo("📈 Statistics:")
|
|
806
|
+
click.echo(f" Total chunks: {stats.get('total', 0)}")
|
|
807
|
+
click.echo(f" Code: {stats.get('code', 0)}")
|
|
808
|
+
click.echo(f" Documentation: {stats.get('documentation', 0)}")
|
|
809
|
+
click.echo(f" Lessons: {stats.get('lesson', 0)}")
|
|
810
|
+
|
|
811
|
+
except Exception as e:
|
|
812
|
+
click.echo(f"⚠️ Could not connect to database: {e!s}")
|
|
813
|
+
|
|
814
|
+
|
|
815
|
+
@cli.command("reindex")
|
|
816
|
+
def reindex_command() -> None:
|
|
817
|
+
"""Re-index entire project (clear and rebuild)."""
|
|
818
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
819
|
+
|
|
820
|
+
if not config_path.exists():
|
|
821
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
822
|
+
return
|
|
823
|
+
|
|
824
|
+
config = NexusConfig.load(config_path)
|
|
825
|
+
|
|
826
|
+
# Collect files first to show summary
|
|
827
|
+
click.echo("🔍 Scanning files...")
|
|
828
|
+
|
|
829
|
+
cwd = Path.cwd()
|
|
830
|
+
files_to_index: list[Path] = []
|
|
831
|
+
|
|
832
|
+
for pattern in config.include_patterns:
|
|
833
|
+
for file_path in cwd.glob(pattern):
|
|
834
|
+
if file_path.is_file() and _should_index(file_path, config):
|
|
835
|
+
files_to_index.append(file_path)
|
|
836
|
+
|
|
837
|
+
# Also index docs folders
|
|
838
|
+
for docs_folder in config.docs_folders:
|
|
839
|
+
docs_path = cwd / docs_folder
|
|
840
|
+
if docs_path.is_file():
|
|
841
|
+
files_to_index.append(docs_path)
|
|
842
|
+
elif docs_path.is_dir():
|
|
843
|
+
for root, _, files in os.walk(docs_path):
|
|
844
|
+
# Apply same pruning logic for docs if needed, though usually docs are safer
|
|
845
|
+
# For consistency let's just collect files
|
|
846
|
+
for file in files:
|
|
847
|
+
files_to_index.append(Path(root) / file)
|
|
848
|
+
|
|
849
|
+
# Remove duplicates
|
|
850
|
+
files_to_index = list(set(files_to_index))
|
|
851
|
+
|
|
852
|
+
# Show summary and ask for confirmation
|
|
853
|
+
_print_file_summary(files_to_index)
|
|
854
|
+
|
|
855
|
+
if not click.confirm("This will CLEAR the database and re-index the above files. Continue?"):
|
|
856
|
+
click.echo("Aborted.")
|
|
857
|
+
return
|
|
858
|
+
|
|
859
|
+
# Proceed with DB operations
|
|
860
|
+
embedder = create_embedder(config)
|
|
861
|
+
database = NexusDatabase(config, embedder)
|
|
862
|
+
database.connect()
|
|
863
|
+
|
|
864
|
+
click.echo("🗑️ Clearing existing index...")
|
|
865
|
+
# Reset database to handle schema changes
|
|
866
|
+
database.reset()
|
|
867
|
+
# Re-connect to create new table with updated schema
|
|
868
|
+
database.connect()
|
|
869
|
+
click.echo(" Index cleared and schema updated")
|
|
870
|
+
|
|
871
|
+
click.echo("")
|
|
872
|
+
click.echo("📁 Re-indexing project...")
|
|
873
|
+
|
|
874
|
+
# Index all files
|
|
875
|
+
total_chunks = 0
|
|
876
|
+
for file_path in files_to_index:
|
|
877
|
+
try:
|
|
878
|
+
content = file_path.read_text(encoding="utf-8")
|
|
879
|
+
ext = file_path.suffix.lower()
|
|
880
|
+
doc_type = (
|
|
881
|
+
DocumentType.DOCUMENTATION
|
|
882
|
+
if ext in (".md", ".markdown", ".rst", ".txt")
|
|
883
|
+
else DocumentType.CODE
|
|
884
|
+
)
|
|
885
|
+
|
|
886
|
+
chunks = ChunkerRegistry.chunk_file(file_path, content)
|
|
887
|
+
if chunks:
|
|
888
|
+
count = _run_async(
|
|
889
|
+
_index_chunks_sync(chunks, config.project_id, doc_type, embedder, database)
|
|
890
|
+
)
|
|
891
|
+
total_chunks += count
|
|
892
|
+
click.echo(f" ✅ {file_path.name}: {count} chunks")
|
|
893
|
+
|
|
894
|
+
except Exception as e:
|
|
895
|
+
click.echo(f" ❌ Failed to index {file_path.name}: {e!s}", err=True)
|
|
896
|
+
|
|
897
|
+
click.echo("")
|
|
898
|
+
click.echo(f"✅ Re-indexed {total_chunks} chunks from {len(files_to_index)} files")
|
|
899
|
+
|
|
900
|
+
|
|
901
|
+
@cli.command("import-github")
|
|
902
|
+
@click.option("--repo", required=True, help="Repository name")
|
|
903
|
+
@click.option("--owner", required=True, help="Repository owner")
|
|
904
|
+
@click.option("--limit", default=20, help="Maximum number of issues to import")
|
|
905
|
+
@click.option("--state", default="all", help="Issue state (open, closed, all)")
|
|
906
|
+
def import_github_command(repo: str, owner: str, limit: int, state: str) -> None:
|
|
907
|
+
"""Import GitHub issues and PRs."""
|
|
908
|
+
# Load config
|
|
909
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
910
|
+
if not config_path.exists():
|
|
911
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
912
|
+
return
|
|
913
|
+
|
|
914
|
+
config = NexusConfig.load(config_path)
|
|
915
|
+
embedder = create_embedder(config)
|
|
916
|
+
database = NexusDatabase(config, embedder)
|
|
917
|
+
database.connect()
|
|
918
|
+
|
|
919
|
+
database.connect()
|
|
920
|
+
|
|
921
|
+
# Load MCP config handled by manager inside generally, but here we can rely on standard init
|
|
922
|
+
client_manager = MCPClientManager()
|
|
923
|
+
|
|
924
|
+
# Load MCP config
|
|
925
|
+
mcp_config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
926
|
+
mcp_config = None
|
|
927
|
+
if mcp_config_path.exists():
|
|
928
|
+
try:
|
|
929
|
+
mcp_config = MCPConfig.load(mcp_config_path)
|
|
930
|
+
except Exception as e:
|
|
931
|
+
click.echo(f"⚠️ Failed to load MCP config: {e}", err=True)
|
|
932
|
+
|
|
933
|
+
if not mcp_config:
|
|
934
|
+
click.echo("⚠️ No MCP config found. GitHub import may fail if server not found.")
|
|
935
|
+
|
|
936
|
+
importer = GitHubImporter(database, config.project_id, client_manager, mcp_config)
|
|
937
|
+
|
|
938
|
+
click.echo(f"📥 Importing issues from {owner}/{repo}...")
|
|
939
|
+
|
|
940
|
+
try:
|
|
941
|
+
count = _run_async(importer.import_issues(owner, repo, limit, state))
|
|
942
|
+
click.echo(f"✅ Imported {count} issues/PRs")
|
|
943
|
+
except Exception as e:
|
|
944
|
+
click.echo(f"❌ Import failed: {e}", err=True)
|
|
945
|
+
|
|
946
|
+
|
|
947
|
+
@cli.command("search")
|
|
948
|
+
@click.argument("query")
|
|
949
|
+
@click.option("--type", "content_type", help="Content type to filter by")
|
|
950
|
+
@click.option("--limit", default=5, help="Number of results")
|
|
951
|
+
def search_command(query: str, content_type: str | None, limit: int) -> None:
|
|
952
|
+
"""Search the knowledge base."""
|
|
953
|
+
# Load config
|
|
954
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
955
|
+
if not config_path.exists():
|
|
956
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
957
|
+
return
|
|
958
|
+
|
|
959
|
+
config = NexusConfig.load(config_path)
|
|
960
|
+
embedder = create_embedder(config)
|
|
961
|
+
database = NexusDatabase(config, embedder)
|
|
962
|
+
database.connect()
|
|
963
|
+
|
|
964
|
+
click.echo(f"🔍 Searching for '{query}'...")
|
|
965
|
+
|
|
966
|
+
doc_type_enum = None
|
|
967
|
+
if content_type:
|
|
968
|
+
try:
|
|
969
|
+
doc_type_enum = DocumentType(content_type)
|
|
970
|
+
except ValueError:
|
|
971
|
+
click.echo(f"⚠️ Invalid type '{content_type}'. Ignoring filter.")
|
|
972
|
+
|
|
973
|
+
results = _run_async(database.search(query, limit=limit, doc_type=doc_type_enum))
|
|
974
|
+
|
|
975
|
+
if not results:
|
|
976
|
+
click.echo("No results found.")
|
|
977
|
+
return
|
|
978
|
+
|
|
979
|
+
click.echo(f"\nFound {len(results)} results:\n")
|
|
980
|
+
|
|
981
|
+
for i, doc in enumerate(results, 1):
|
|
982
|
+
click.echo(f"{i}. [{doc.doc_type.upper()}] {doc.name} (Score: {doc.score:.3f})")
|
|
983
|
+
click.echo(f" path: {doc.file_path}")
|
|
984
|
+
# Preview text
|
|
985
|
+
text = doc.text.replace("\n", " ").strip()
|
|
986
|
+
if len(text) > 100:
|
|
987
|
+
text = text[:97] + "..."
|
|
988
|
+
click.echo(f' "{text}"')
|
|
989
|
+
click.echo("")
|
|
990
|
+
|
|
991
|
+
|
|
992
|
+
@cli.command("index-mcp")
|
|
993
|
+
@click.option("--server", "-s", help="Server name to index (from MCP config)")
|
|
994
|
+
@click.option(
|
|
995
|
+
"--config",
|
|
996
|
+
"-c",
|
|
997
|
+
type=click.Path(exists=True),
|
|
998
|
+
help="Path to MCP config file (default: ~/.config/mcp/config.json)",
|
|
999
|
+
)
|
|
1000
|
+
@click.option("--all", "-a", "index_all", is_flag=True, help="Index all configured servers")
|
|
1001
|
+
def index_mcp_command(server: str | None, config: str | None, index_all: bool) -> None:
|
|
1002
|
+
"""Index MCP tool documentation into the knowledge base.
|
|
1003
|
+
|
|
1004
|
+
This command reads tool schemas from MCP servers and indexes them
|
|
1005
|
+
for semantic search via the search_tools command.
|
|
1006
|
+
|
|
1007
|
+
Examples:
|
|
1008
|
+
nexus-index-mcp --server github
|
|
1009
|
+
nexus-index-mcp --all
|
|
1010
|
+
nexus-index-mcp --config ~/my-mcp-config.json --all
|
|
1011
|
+
"""
|
|
1012
|
+
# Load MCP config
|
|
1013
|
+
mcp_config_data: dict[str, Any] | MCPConfig
|
|
1014
|
+
if config:
|
|
1015
|
+
config_path = Path(config)
|
|
1016
|
+
else:
|
|
1017
|
+
# Prioritize local project config
|
|
1018
|
+
local_config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1019
|
+
if local_config_path.exists():
|
|
1020
|
+
config_path = local_config_path
|
|
1021
|
+
else:
|
|
1022
|
+
config_path = Path.home() / ".config" / "mcp" / "config.json"
|
|
1023
|
+
|
|
1024
|
+
if not config_path.exists():
|
|
1025
|
+
click.echo(f"MCP config not found: {config_path}")
|
|
1026
|
+
click.echo("Specify --config or create ~/.config/mcp/config.json")
|
|
1027
|
+
return
|
|
1028
|
+
|
|
1029
|
+
try:
|
|
1030
|
+
if config_path.name == "mcp_config.json" and config_path.parent.name == ".nexus":
|
|
1031
|
+
# Project-specific config
|
|
1032
|
+
mcp_config_data = MCPConfig.load(config_path)
|
|
1033
|
+
else:
|
|
1034
|
+
# Global config (or custom dict-based config)
|
|
1035
|
+
mcp_config_data = json.loads(config_path.read_text())
|
|
1036
|
+
except json.JSONDecodeError as e:
|
|
1037
|
+
click.echo(f"❌ Invalid JSON in MCP config: {e}", err=True)
|
|
1038
|
+
return
|
|
1039
|
+
except Exception as e:
|
|
1040
|
+
click.echo(f"❌ Failed to load MCP config: {e}", err=True)
|
|
1041
|
+
return
|
|
1042
|
+
|
|
1043
|
+
# Determine which servers to index
|
|
1044
|
+
servers_to_index = []
|
|
1045
|
+
if isinstance(mcp_config_data, MCPConfig):
|
|
1046
|
+
all_servers = list(mcp_config_data.servers.keys())
|
|
1047
|
+
else:
|
|
1048
|
+
all_servers = list(mcp_config_data.get("mcpServers", {}).keys())
|
|
1049
|
+
|
|
1050
|
+
if index_all:
|
|
1051
|
+
servers_to_index = all_servers
|
|
1052
|
+
elif server:
|
|
1053
|
+
servers_to_index = [server]
|
|
1054
|
+
else:
|
|
1055
|
+
click.echo("Specify --server or --all")
|
|
1056
|
+
return
|
|
1057
|
+
|
|
1058
|
+
# Index each server
|
|
1059
|
+
asyncio.run(_index_mcp_servers(mcp_config_data, servers_to_index))
|
|
1060
|
+
|
|
1061
|
+
|
|
1062
|
+
async def _index_mcp_servers(
|
|
1063
|
+
mcp_config: dict[str, Any] | MCPConfig, server_names: list[str]
|
|
1064
|
+
) -> None:
|
|
1065
|
+
"""Index tools from specified MCP servers."""
|
|
1066
|
+
# Load config
|
|
1067
|
+
config_path = Path.cwd() / "nexus_config.json"
|
|
1068
|
+
if not config_path.exists():
|
|
1069
|
+
click.echo("❌ nexus_config.json not found. Run 'nexus-init' first.", err=True)
|
|
1070
|
+
return
|
|
1071
|
+
|
|
1072
|
+
config = NexusConfig.load(config_path)
|
|
1073
|
+
client = MCPClientManager()
|
|
1074
|
+
embedder = create_embedder(config)
|
|
1075
|
+
database = NexusDatabase(config, embedder)
|
|
1076
|
+
database.connect()
|
|
1077
|
+
|
|
1078
|
+
for name in server_names:
|
|
1079
|
+
if isinstance(mcp_config, MCPConfig):
|
|
1080
|
+
server_config = mcp_config.servers.get(name)
|
|
1081
|
+
if not server_config:
|
|
1082
|
+
click.echo(f"Server not found: {name}")
|
|
1083
|
+
continue
|
|
1084
|
+
# Convert to internal connection format
|
|
1085
|
+
connection = MCPServerConnection(
|
|
1086
|
+
name=name,
|
|
1087
|
+
command=server_config.command or "",
|
|
1088
|
+
args=server_config.args,
|
|
1089
|
+
env=server_config.env,
|
|
1090
|
+
transport=server_config.transport,
|
|
1091
|
+
url=server_config.url,
|
|
1092
|
+
headers=server_config.headers,
|
|
1093
|
+
timeout=server_config.timeout,
|
|
1094
|
+
)
|
|
1095
|
+
else:
|
|
1096
|
+
server_dict = mcp_config.get("mcpServers", {}).get(name)
|
|
1097
|
+
if not server_dict:
|
|
1098
|
+
click.echo(f"Server not found: {name}")
|
|
1099
|
+
continue
|
|
1100
|
+
connection = MCPServerConnection(
|
|
1101
|
+
name=name,
|
|
1102
|
+
command=server_dict.get("command", ""),
|
|
1103
|
+
args=server_dict.get("args", []),
|
|
1104
|
+
env=server_dict.get("env"),
|
|
1105
|
+
transport=server_dict.get("transport", "stdio"),
|
|
1106
|
+
url=server_dict.get("url"),
|
|
1107
|
+
headers=server_dict.get("headers"),
|
|
1108
|
+
timeout=server_dict.get("timeout", 30.0),
|
|
1109
|
+
)
|
|
1110
|
+
|
|
1111
|
+
# Connect and index
|
|
1112
|
+
|
|
1113
|
+
click.echo(f"Indexing tools from: {name}")
|
|
1114
|
+
|
|
1115
|
+
try:
|
|
1116
|
+
tools = await client.get_tools(connection)
|
|
1117
|
+
click.echo(f" Found {len(tools)} tools")
|
|
1118
|
+
|
|
1119
|
+
# Create documents and index
|
|
1120
|
+
for tool in tools:
|
|
1121
|
+
text = f"{name}.{tool.name}: {tool.description}"
|
|
1122
|
+
vector = await embedder.embed(text)
|
|
1123
|
+
|
|
1124
|
+
doc = Document(
|
|
1125
|
+
id=f"{name}:{tool.name}",
|
|
1126
|
+
text=text,
|
|
1127
|
+
vector=vector,
|
|
1128
|
+
project_id=f"{config.project_id}_mcp_tools",
|
|
1129
|
+
file_path=f"mcp://{name}/{tool.name}",
|
|
1130
|
+
doc_type=DocumentType.TOOL,
|
|
1131
|
+
chunk_type="tool",
|
|
1132
|
+
language="mcp",
|
|
1133
|
+
name=tool.name,
|
|
1134
|
+
start_line=0,
|
|
1135
|
+
end_line=0,
|
|
1136
|
+
server_name=name,
|
|
1137
|
+
parameters_schema=json.dumps(tool.input_schema),
|
|
1138
|
+
)
|
|
1139
|
+
|
|
1140
|
+
await database.upsert_document(doc)
|
|
1141
|
+
|
|
1142
|
+
click.echo(f" ✅ Indexed {len(tools)} tools from {name}")
|
|
1143
|
+
|
|
1144
|
+
except Exception as e:
|
|
1145
|
+
# Handle ExceptionGroup from anyio/TaskGroup
|
|
1146
|
+
if hasattr(e, "exceptions"):
|
|
1147
|
+
for sub_e in e.exceptions:
|
|
1148
|
+
click.echo(f" ❌ Failed to index {name}: {sub_e}")
|
|
1149
|
+
else:
|
|
1150
|
+
click.echo(f" ❌ Failed to index {name}: {e}")
|
|
1151
|
+
|
|
1152
|
+
click.echo("Done!")
|
|
1153
|
+
|
|
1154
|
+
|
|
1155
|
+
@cli.group("mcp")
|
|
1156
|
+
def mcp_group() -> None:
|
|
1157
|
+
"""Manage MCP server configurations."""
|
|
1158
|
+
|
|
1159
|
+
|
|
1160
|
+
@mcp_group.command("init")
|
|
1161
|
+
@click.option(
|
|
1162
|
+
"--from-global",
|
|
1163
|
+
is_flag=True,
|
|
1164
|
+
help="Import servers from ~/.config/mcp/config.json",
|
|
1165
|
+
)
|
|
1166
|
+
def mcp_init_command(from_global: bool) -> None:
|
|
1167
|
+
"""Initialize MCP configuration for this project.
|
|
1168
|
+
|
|
1169
|
+
Creates .nexus/mcp_config.json with an empty configuration
|
|
1170
|
+
or imports from your global MCP config.
|
|
1171
|
+
|
|
1172
|
+
Examples:
|
|
1173
|
+
nexus-mcp init
|
|
1174
|
+
nexus-mcp init --from-global
|
|
1175
|
+
"""
|
|
1176
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1177
|
+
|
|
1178
|
+
if config_path.exists() and not click.confirm("MCP config exists. Overwrite?"):
|
|
1179
|
+
click.echo("Aborted.")
|
|
1180
|
+
return
|
|
1181
|
+
|
|
1182
|
+
# Ensure .nexus directory exists
|
|
1183
|
+
config_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1184
|
+
|
|
1185
|
+
if from_global:
|
|
1186
|
+
# Import from global config
|
|
1187
|
+
global_path = Path.home() / ".config" / "mcp" / "config.json"
|
|
1188
|
+
if not global_path.exists():
|
|
1189
|
+
click.echo(f"Global config not found: {global_path}")
|
|
1190
|
+
return
|
|
1191
|
+
|
|
1192
|
+
try:
|
|
1193
|
+
global_config = json.loads(global_path.read_text())
|
|
1194
|
+
except json.JSONDecodeError as e:
|
|
1195
|
+
click.echo(f"❌ Invalid JSON in global config: {e}")
|
|
1196
|
+
return
|
|
1197
|
+
|
|
1198
|
+
servers = {}
|
|
1199
|
+
|
|
1200
|
+
for name, cfg in global_config.get("mcpServers", {}).items():
|
|
1201
|
+
servers[name] = MCPServerConfig(
|
|
1202
|
+
command=cfg.get("command", ""),
|
|
1203
|
+
args=cfg.get("args", []),
|
|
1204
|
+
env=cfg.get("env", {}),
|
|
1205
|
+
enabled=True,
|
|
1206
|
+
)
|
|
1207
|
+
|
|
1208
|
+
mcp_config = MCPConfig(
|
|
1209
|
+
version="1.0",
|
|
1210
|
+
servers=servers,
|
|
1211
|
+
profiles={},
|
|
1212
|
+
)
|
|
1213
|
+
click.echo(f"Imported {len(servers)} servers from global config")
|
|
1214
|
+
else:
|
|
1215
|
+
# Create empty config
|
|
1216
|
+
mcp_config = MCPConfig(
|
|
1217
|
+
version="1.0",
|
|
1218
|
+
servers={},
|
|
1219
|
+
profiles={"default": []},
|
|
1220
|
+
)
|
|
1221
|
+
|
|
1222
|
+
mcp_config.save(config_path)
|
|
1223
|
+
click.echo(f"✅ Created {config_path}")
|
|
1224
|
+
click.echo("")
|
|
1225
|
+
click.echo("Configuration initialized successfully!")
|
|
1226
|
+
click.echo("You can manually edit the config file to add MCP servers.")
|
|
1227
|
+
|
|
1228
|
+
|
|
1229
|
+
@mcp_group.command("add")
|
|
1230
|
+
@click.argument("name")
|
|
1231
|
+
@click.option("--command", "-c", required=True, help="Command to run MCP server")
|
|
1232
|
+
@click.option("--args", "-a", multiple=True, help="Arguments for the command")
|
|
1233
|
+
@click.option("--env", "-e", multiple=True, help="Environment vars (KEY=value or KEY=${VAR})")
|
|
1234
|
+
@click.option("--profile", "-p", default="default", help="Add to profile (default: default)")
|
|
1235
|
+
def mcp_add_command(
|
|
1236
|
+
name: str, command: str, args: tuple[str, ...], env: tuple[str, ...], profile: str
|
|
1237
|
+
) -> None:
|
|
1238
|
+
"""Add an MCP server to the configuration.
|
|
1239
|
+
|
|
1240
|
+
Examples:
|
|
1241
|
+
nexus-mcp add github --command "npx" --args "-y" \\
|
|
1242
|
+
--args "@modelcontextprotocol/server-github"
|
|
1243
|
+
nexus-mcp add myserver --command "my-mcp" --env "API_KEY=${MY_API_KEY}"
|
|
1244
|
+
"""
|
|
1245
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1246
|
+
if not config_path.exists():
|
|
1247
|
+
click.echo("Run 'nexus-mcp init' first")
|
|
1248
|
+
return
|
|
1249
|
+
|
|
1250
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1251
|
+
|
|
1252
|
+
# Parse environment variables
|
|
1253
|
+
env_dict = {}
|
|
1254
|
+
for e in env:
|
|
1255
|
+
if "=" in e:
|
|
1256
|
+
k, v = e.split("=", 1)
|
|
1257
|
+
env_dict[k] = v
|
|
1258
|
+
|
|
1259
|
+
# Add server
|
|
1260
|
+
mcp_config.servers[name] = MCPServerConfig(
|
|
1261
|
+
command=command,
|
|
1262
|
+
args=list(args),
|
|
1263
|
+
env=env_dict,
|
|
1264
|
+
enabled=True,
|
|
1265
|
+
)
|
|
1266
|
+
|
|
1267
|
+
# Add to profile
|
|
1268
|
+
if profile not in mcp_config.profiles:
|
|
1269
|
+
mcp_config.profiles[profile] = []
|
|
1270
|
+
if name not in mcp_config.profiles[profile]:
|
|
1271
|
+
mcp_config.profiles[profile].append(name)
|
|
1272
|
+
|
|
1273
|
+
mcp_config.save(config_path)
|
|
1274
|
+
click.echo(f"Added {name} to profile '{profile}'")
|
|
1275
|
+
|
|
1276
|
+
|
|
1277
|
+
@mcp_group.command("list")
|
|
1278
|
+
@click.option(
|
|
1279
|
+
"--all", "-a", "show_all", is_flag=True, help="Show all servers, not just active profile"
|
|
1280
|
+
)
|
|
1281
|
+
def mcp_list_command(show_all: bool) -> None:
|
|
1282
|
+
"""List configured MCP servers.
|
|
1283
|
+
|
|
1284
|
+
Examples:
|
|
1285
|
+
nexus-mcp list
|
|
1286
|
+
nexus-mcp list --all
|
|
1287
|
+
"""
|
|
1288
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1289
|
+
if not config_path.exists():
|
|
1290
|
+
click.echo("No MCP config. Run 'nexus-mcp init' first")
|
|
1291
|
+
return
|
|
1292
|
+
|
|
1293
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1294
|
+
|
|
1295
|
+
click.echo(f"Active profile: {mcp_config.active_profile}")
|
|
1296
|
+
click.echo("")
|
|
1297
|
+
|
|
1298
|
+
if show_all:
|
|
1299
|
+
click.echo("All servers:")
|
|
1300
|
+
servers_to_show = list(mcp_config.servers.items())
|
|
1301
|
+
else:
|
|
1302
|
+
click.echo("Active servers:")
|
|
1303
|
+
# Get active profile server names
|
|
1304
|
+
if mcp_config.active_profile in mcp_config.profiles:
|
|
1305
|
+
active_server_names = mcp_config.profiles[mcp_config.active_profile]
|
|
1306
|
+
# Filter to only enabled servers
|
|
1307
|
+
servers_to_show = [
|
|
1308
|
+
(name, mcp_config.servers[name])
|
|
1309
|
+
for name in active_server_names
|
|
1310
|
+
if name in mcp_config.servers and mcp_config.servers[name].enabled
|
|
1311
|
+
]
|
|
1312
|
+
else:
|
|
1313
|
+
# If no active profile, show all enabled servers
|
|
1314
|
+
servers_to_show = [
|
|
1315
|
+
(name, server) for name, server in mcp_config.servers.items() if server.enabled
|
|
1316
|
+
]
|
|
1317
|
+
|
|
1318
|
+
for name, server in servers_to_show:
|
|
1319
|
+
status = "✓" if server.enabled else "✗"
|
|
1320
|
+
click.echo(f" {status} {name}")
|
|
1321
|
+
click.echo(f" Command: {server.command} {' '.join(server.args)}")
|
|
1322
|
+
if server.env:
|
|
1323
|
+
click.echo(f" Env: {', '.join(server.env.keys())}")
|
|
1324
|
+
|
|
1325
|
+
click.echo("")
|
|
1326
|
+
click.echo(f"Profiles: {', '.join(mcp_config.profiles.keys())}")
|
|
1327
|
+
|
|
1328
|
+
|
|
1329
|
+
@mcp_group.command("profile")
|
|
1330
|
+
@click.argument("name", required=False)
|
|
1331
|
+
@click.option("--add", "-a", multiple=True, help="Add server to profile")
|
|
1332
|
+
@click.option("--remove", "-r", multiple=True, help="Remove server from profile")
|
|
1333
|
+
@click.option("--create", is_flag=True, help="Create new profile")
|
|
1334
|
+
def mcp_profile_command(
|
|
1335
|
+
name: str | None, add: tuple[str, ...], remove: tuple[str, ...], create: bool
|
|
1336
|
+
) -> None:
|
|
1337
|
+
"""Manage MCP profiles.
|
|
1338
|
+
|
|
1339
|
+
Without arguments, shows current profile. With name, switches to that profile.
|
|
1340
|
+
|
|
1341
|
+
Examples:
|
|
1342
|
+
nexus-mcp profile # Show current
|
|
1343
|
+
nexus-mcp profile dev # Switch to 'dev'
|
|
1344
|
+
nexus-mcp profile dev --create # Create new 'dev' profile
|
|
1345
|
+
nexus-mcp profile default --add homeassistant
|
|
1346
|
+
nexus-mcp profile default --remove github
|
|
1347
|
+
"""
|
|
1348
|
+
config_path = Path.cwd() / ".nexus" / "mcp_config.json"
|
|
1349
|
+
if not config_path.exists():
|
|
1350
|
+
click.echo("Run 'nexus-mcp init' first")
|
|
1351
|
+
return
|
|
1352
|
+
|
|
1353
|
+
mcp_config = MCPConfig.load(config_path)
|
|
1354
|
+
|
|
1355
|
+
if not name:
|
|
1356
|
+
# Show current profile
|
|
1357
|
+
click.echo(f"Active: {mcp_config.active_profile}")
|
|
1358
|
+
servers = mcp_config.profiles.get(mcp_config.active_profile, [])
|
|
1359
|
+
click.echo(f"Servers: {', '.join(servers) or '(none)'}")
|
|
1360
|
+
return
|
|
1361
|
+
|
|
1362
|
+
if create:
|
|
1363
|
+
if name in mcp_config.profiles:
|
|
1364
|
+
click.echo(f"Profile '{name}' exists")
|
|
1365
|
+
return
|
|
1366
|
+
mcp_config.profiles[name] = []
|
|
1367
|
+
click.echo(f"Created profile: {name}")
|
|
1368
|
+
|
|
1369
|
+
if name not in mcp_config.profiles:
|
|
1370
|
+
click.echo(f"Profile '{name}' not found")
|
|
1371
|
+
return
|
|
1372
|
+
|
|
1373
|
+
# Add servers
|
|
1374
|
+
for server in add:
|
|
1375
|
+
if server not in mcp_config.profiles[name]:
|
|
1376
|
+
mcp_config.profiles[name].append(server)
|
|
1377
|
+
click.echo(f"Added {server} to {name}")
|
|
1378
|
+
# Warn if server not defined yet
|
|
1379
|
+
if server not in mcp_config.servers:
|
|
1380
|
+
click.echo(f" ⚠️ Server '{server}' not defined. Add it with 'nexus-mcp add'")
|
|
1381
|
+
else:
|
|
1382
|
+
click.echo(f"Server {server} already in {name}")
|
|
1383
|
+
|
|
1384
|
+
# Remove servers
|
|
1385
|
+
for server in remove:
|
|
1386
|
+
if server in mcp_config.profiles[name]:
|
|
1387
|
+
mcp_config.profiles[name].remove(server)
|
|
1388
|
+
click.echo(f"Removed {server} from {name}")
|
|
1389
|
+
|
|
1390
|
+
# Switch profile
|
|
1391
|
+
if not add and not remove and not create:
|
|
1392
|
+
mcp_config.active_profile = name
|
|
1393
|
+
click.echo(f"Switched to profile: {name}")
|
|
1394
|
+
|
|
1395
|
+
mcp_config.save(config_path)


def _set_server_enabled(name: str, enabled: bool) -> None:
    """Set server enabled status."""
    config_path = Path.cwd() / ".nexus" / "mcp_config.json"
    if not config_path.exists():
        click.echo("Run 'nexus-mcp init' first")
        return

    mcp_config = MCPConfig.load(config_path)

    if name not in mcp_config.servers:
        click.echo(f"Server not found: {name}")
        return

    mcp_config.servers[name].enabled = enabled
    mcp_config.save(config_path)

    status = "enabled" if enabled else "disabled"
    click.echo(f"{name}: {status}")


@mcp_group.command("enable")
@click.argument("name")
def mcp_enable_command(name: str) -> None:
    """Enable an MCP server."""
    _set_server_enabled(name, True)


@mcp_group.command("disable")
@click.argument("name")
def mcp_disable_command(name: str) -> None:
    """Disable an MCP server."""
    _set_server_enabled(name, False)
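
# Illustrative usage of the enable/disable commands above (the nexus-mcp script name
# comes from this package's entry points; the server name is hypothetical):
#   nexus-mcp enable github
#   nexus-mcp disable github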


# Agent management commands
@cli.group("agent")
def agent_group() -> None:
    """Manage custom agents."""


@agent_group.command("templates")
def agent_templates_command() -> None:
    """List available agent templates."""
    from .agent_templates import list_templates

    templates = list_templates()

    if not templates:
        click.echo("No templates found.")
        return

    click.echo("📋 Available Agent Templates:")
    click.echo("")

    # Load and display each template
    import yaml

    from .agent_templates import get_template_path

    for template_name in sorted(templates):
        try:
            template_path = get_template_path(template_name)
            with open(template_path, encoding="utf-8") as f:
                data = yaml.safe_load(f)

            display_name = data.get("display_name", template_name)
            role = data.get("profile", {}).get("role", "Unknown")
            model = data.get("llm_config", {}).get("model_hint", "auto")

            click.echo(f" • {display_name} ({template_name})")
            click.echo(f" Role: {role}")
            click.echo(f" Model: {model}")
            click.echo("")
        except Exception as e:
            click.echo(f" ⚠️ {template_name}: Failed to load ({e})")


@agent_group.command("init")
@click.argument("name")
@click.option("--from-template", "-t", "template_name", help="Create from template")
@click.option("--model", "-m", "custom_model", help="Override template model")
@click.option("--role", prompt=False, default=None)
@click.option("--goal", prompt=False, default=None)
@click.option(
    "--backstory",
    prompt=False,
    default=None,
)
def agent_init_command(
    name: str,
    template_name: str | None,
    custom_model: str | None,
    role: str | None,
    goal: str | None,
    backstory: str | None,
) -> None:
    """Create a new agent configuration.

    NAME is the agent identifier (lowercase with underscores).

    Examples:
        nexus-agent init my_reviewer --from-template code_reviewer
        nexus-agent init security_check -t security_auditor --model claude-opus-4.5
        nexus-agent init my_custom_agent
    """
    import re

    import yaml

    from .agent_templates import get_template_path, list_templates

    agents_dir = Path.cwd() / "agents"
    agents_dir.mkdir(exist_ok=True)

    # Normalize name
    agent_name = name.lower().replace(" ", "_").replace("-", "_")
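    # e.g. "Code-Review Bot" -> "code_review_bot" (illustrative input only)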

    # Validate name format
    if not re.match(r"^[a-z][a-z0-9_]*$", agent_name):
        click.echo(f"❌ Invalid agent name: {agent_name}", err=True)
        click.echo(
            " Name must start with a letter and contain only lowercase letters, "
            "numbers, and underscores."
        )
        return

    # Load from template if specified
    if template_name:
        available_templates = list_templates()
        if template_name not in available_templates:
            click.echo(f"❌ Template '{template_name}' not found.", err=True)
            click.echo(f" Available templates: {', '.join(available_templates)}")
            return

        template_path = get_template_path(template_name)
        with open(template_path, encoding="utf-8") as f:
            config = yaml.safe_load(f)

        # Customize the template
        config["name"] = agent_name
        config["display_name"] = name.replace("_", " ").title()
        config["description"] = f"Delegate tasks to the {name.replace('_', ' ').title()} agent."

        # Override model if specified
        if custom_model:
            config["llm_config"]["model_hint"] = custom_model

        click.echo(f"✅ Created agent from template: {template_name}")
    else:
        # Interactive mode
        if not role:
            role = click.prompt("Agent role (e.g., 'Code Reviewer')")
        if not goal:
            goal = click.prompt("Agent goal (e.g., 'Review code for best practices')")
        if not backstory:
            backstory = click.prompt(
                "Agent backstory", default="Expert developer with years of experience."
            )

        # Generate YAML content
        config = {
            "name": agent_name,
            "display_name": name.replace("_", " ").title(),
            "description": f"Delegate tasks to the {name.replace('_', ' ').title()} agent.",
            "profile": {
                "role": role,
                "goal": goal,
                "backstory": backstory,
                "tone": "Professional and helpful",
            },
            "memory": {
                "enabled": True,
                "rag_limit": 5,
                "search_types": ["code", "documentation", "lesson"],
            },
            "tools": [],
            "llm_config": {
                "model_hint": custom_model or "claude-sonnet-4.5",
                "fallback_hints": ["auto"],
                "temperature": 0.5,
                "max_tokens": 4000,
            },
        }

    output_file = agents_dir / f"{agent_name}.yaml"

    if output_file.exists() and not click.confirm(
        f"Agent {agent_name}.yaml already exists. Overwrite?"
    ):
        click.echo("Aborted.")
        return

    with open(output_file, "w", encoding="utf-8") as f:
        yaml.dump(config, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
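
    # With the interactive defaults above, the written agents/<name>.yaml should come
    # out roughly like this (a sketch of yaml.dump applied to the dict built above;
    # the agent name and profile values shown are illustrative):
    #
    #   name: my_reviewer
    #   display_name: My Reviewer
    #   description: Delegate tasks to the My Reviewer agent.
    #   profile:
    #     role: Code Reviewer
    #     goal: Review code for best practices
    #     backstory: Expert developer with years of experience.
    #     tone: Professional and helpful
    #   memory:
    #     enabled: true
    #     rag_limit: 5
    #     search_types:
    #     - code
    #     - documentation
    #     - lesson
    #   tools: []
    #   llm_config:
    #     model_hint: claude-sonnet-4.5
    #     fallback_hints:
    #     - auto
    #     temperature: 0.5
    #     max_tokens: 4000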

    click.echo(f"✅ Created agent: {output_file}")
    click.echo("")
    click.echo("Next steps:")
    click.echo(f" 1. Edit {output_file} to customize your agent")
    click.echo(" 2. Restart the MCP server to activate this agent")
    click.echo(f" 3. Use the 'ask_{agent_name}' tool in your IDE")


@agent_group.command("list")
def agent_list_command() -> None:
    """List all configured agents."""
    agents_dir = Path.cwd() / "agents"

    if not agents_dir.exists():
        click.echo("No agents directory found.")
        click.echo("Create an agent with: nexus-agent init <name>")
        return

    yaml_files = list(agents_dir.glob("*.yaml")) + list(agents_dir.glob("*.yml"))

    if not yaml_files:
        click.echo("No agents found.")
        click.echo("Create an agent with: nexus-agent init <name>")
        return

    click.echo("📋 Custom Agents:")
    click.echo("")

    import yaml

    for yaml_file in sorted(yaml_files):
        try:
            with open(yaml_file, encoding="utf-8") as f:
                data = yaml.safe_load(f)
            name = data.get("name", yaml_file.stem)
            display_name = data.get("display_name", name)
            role = data.get("profile", {}).get("role", "Unknown")
            click.echo(f" • {display_name} (ask_{name})")
            click.echo(f" Role: {role}")
            click.echo("")
        except Exception as e:
            click.echo(f" ⚠️ {yaml_file.name}: Failed to load ({e})")


# Entry points for pyproject.toml scripts
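# The wrappers below are thin adapters so each console script dispatches into the
# single click group. The mapping is expected to look roughly like this in
# pyproject.toml (illustrative sketch; the distribution's entry_points.txt is the
# authoritative list):
#
#   [project.scripts]
#   nexus-init = "nexus_dev.cli:init_command_entry"
#   nexus-index = "nexus_dev.cli:index_command_entry"
#   nexus-index-mcp = "nexus_dev.cli:index_mcp_command_entry"
#   nexus-mcp = "nexus_dev.cli:mcp_command_entry"
#   nexus-agent = "nexus_dev.cli:agent_command_entry"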
def init_command_entry() -> None:
    """Entry point for nexus-init."""
    cli(["init"])


def index_command_entry() -> None:
    """Entry point for nexus-index."""
    # Get args after the command name
    import sys

    cli(["index"] + sys.argv[1:])


def index_mcp_command_entry() -> None:
    """Entry point for nexus-index-mcp."""
    import sys

    cli(["index-mcp"] + sys.argv[1:])


def mcp_command_entry() -> None:
    """Entry point for nexus-mcp."""
    import sys

    cli(["mcp"] + sys.argv[1:])


def agent_command_entry() -> None:
    """Entry point for nexus-agent."""
    import sys

    cli(["agent"] + sys.argv[1:])


if __name__ == "__main__":
    cli()