sol_mcp-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- sol_mcp-0.2.0.dist-info/METADATA +218 -0
- sol_mcp-0.2.0.dist-info/RECORD +20 -0
- sol_mcp-0.2.0.dist-info/WHEEL +4 -0
- sol_mcp-0.2.0.dist-info/entry_points.txt +3 -0
- solana_mcp/__init__.py +3 -0
- solana_mcp/cli.py +527 -0
- solana_mcp/config.py +324 -0
- solana_mcp/expert/__init__.py +5 -0
- solana_mcp/expert/guidance.py +452 -0
- solana_mcp/indexer/__init__.py +8 -0
- solana_mcp/indexer/chunker.py +457 -0
- solana_mcp/indexer/compiler.py +1101 -0
- solana_mcp/indexer/downloader.py +304 -0
- solana_mcp/indexer/embedder.py +755 -0
- solana_mcp/indexer/manifest.py +411 -0
- solana_mcp/logging.py +85 -0
- solana_mcp/models.py +62 -0
- solana_mcp/server.py +746 -0
- solana_mcp/tools/__init__.py +1 -0
- solana_mcp/versions.py +391 -0
solana_mcp/cli.py
ADDED
@@ -0,0 +1,527 @@
"""CLI for solana-mcp.

Commands:
- build: Full pipeline (download + compile + index)
- download: Clone repositories
- compile: Extract Rust to JSON
- index: Build vector embeddings (incremental by default)
- update: Git pull + incremental index
- search: Search the index
- status: Check index status
- models: List available embedding models
"""
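# Usage sketch (illustrative, not part of the released file). The wheel also ships a
# console script via entry_points.txt, but its name is not shown here, so the module
# entry point is used instead:
#
#   python -m solana_mcp.cli build                 # download + compile + index
#   python -m solana_mcp.cli search "vote lockout" -n 3
#   python -m solana_mcp.cli status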
import sys
from pathlib import Path

import click

from .config import DEFAULT_EMBEDDING_MODEL, EMBEDDING_MODELS, get_model_info, load_config
from .indexer.chunker import chunk_all_simds, chunk_content
from .indexer.compiler import (
    compile_c,
    compile_rust,
    load_compiled_constants,
    load_compiled_items,
    lookup_constant,
    lookup_function,
)
from .indexer.downloader import (
    DEFAULT_DATA_DIR,
    REPOS,
    download_repos,
    list_downloaded_repos,
)
from .indexer.embedder import (
    DEPS_AVAILABLE,
    IncrementalEmbedder,
    build_index,
    get_index_stats,
    search,
)
from .indexer.manifest import load_manifest

@click.group()
@click.option(
    "--data-dir",
    type=click.Path(path_type=Path),
    default=DEFAULT_DATA_DIR,
    help="Data directory (default: ~/.solana-mcp)",
)
@click.pass_context
def main(ctx, data_dir: Path):
    """Solana MCP - RAG-powered search for Solana runtime and SIMDs."""
    ctx.ensure_object(dict)
    ctx.obj["data_dir"] = data_dir

@main.command()
@click.option("--full", is_flag=True, help="Force full rebuild")
@click.pass_context
def build(ctx, full: bool):
    """Full build pipeline: download, compile, and index."""
    data_dir = ctx.obj["data_dir"]

    click.echo("=" * 60)
    click.echo("SOLANA MCP BUILD")
    click.echo("=" * 60)

    # Step 1: Download
    click.echo("\n[1/3] Downloading repositories...")
    results = download_repos(data_dir, progress_callback=click.echo)

    failed = [name for name, success in results.items() if not success]
    if failed:
        click.echo(f"Warning: Failed to download: {', '.join(failed)}")

    # Step 2: Compile
    click.echo("\n[2/3] Compiling Rust source...")
    ctx.invoke(compile)

    # Step 3: Index
    click.echo("\n[3/3] Building vector index...")
    ctx.invoke(index, full=full)

    click.echo("\n" + "=" * 60)
    click.echo("BUILD COMPLETE")
    click.echo("=" * 60)

@main.command()
@click.option("--repo", multiple=True, help="Specific repos to download")
@click.pass_context
def download(ctx, repo):
    """Download Solana repositories."""
    data_dir = ctx.obj["data_dir"]

    repos = list(repo) if repo else None

    click.echo("Downloading repositories...")
    results = download_repos(data_dir, repos=repos, progress_callback=click.echo)

    click.echo("\nResults:")
    for name, success in results.items():
        status = click.style("✓", fg="green") if success else click.style("✗", fg="red")
        click.echo(f" {status} {name}")

@main.command()
@click.pass_context
def compile(ctx):
    """Compile source code to JSON extracts."""
    data_dir = ctx.obj["data_dir"]
    compiled_dir = data_dir / "compiled"

    total_stats = {
        "files_processed": 0,
        "items_extracted": 0,
        "constants_extracted": 0,
        "functions": 0,
        "structs": 0,
        "enums": 0,
    }

    # Compile agave (Rust - reference implementation)
    agave_dir = data_dir / "agave"
    if agave_dir.exists():
        click.echo("Compiling agave (Rust)...")

        # Compile key directories
        for subdir in ["programs", "runtime", "svm", "poh", "turbine", "core", "gossip", "ledger"]:
            source = agave_dir / subdir
            if source.exists():
                output = compiled_dir / "agave" / subdir
                click.echo(f" {subdir}...")
                stats = compile_rust(source, output)

                for key in total_stats:
                    total_stats[key] += stats.get(key, 0)
    else:
        click.echo("Warning: agave not found. Run 'download' first.")

    # Compile jito-solana (Rust - MEV fork, ~70% of stake)
    jito_dir = data_dir / "jito-solana"
    if jito_dir.exists():
        click.echo("Compiling jito-solana (Rust)...")

        # Compile Jito-specific directories + core
        jito_subdirs = [
            "core", "runtime", "poh", "turbine", "gossip", "validator",
            "bundle", "tip-distributor", "block-engine",  # Jito-specific
        ]
        for subdir in jito_subdirs:
            source = jito_dir / subdir
            if source.exists():
                output = compiled_dir / "jito-solana" / subdir
                click.echo(f" {subdir}...")
                stats = compile_rust(source, output)

                for key in total_stats:
                    total_stats[key] += stats.get(key, 0)

    # Compile firedancer (C - Jump's independent implementation)
    firedancer_dir = data_dir / "firedancer"
    if firedancer_dir.exists():
        click.echo("Compiling firedancer (C)...")

        # Compile key C source directories
        fd_subdirs = [
            "src/app", "src/ballet", "src/disco", "src/flamenco",
            "src/tango", "src/waltz", "src/choreo",
        ]
        for subdir in fd_subdirs:
            source = firedancer_dir / subdir
            if source.exists():
                output = compiled_dir / "firedancer" / subdir.replace("src/", "")
                click.echo(f" {subdir}...")
                stats = compile_c(source, output)

                for key in total_stats:
                    total_stats[key] += stats.get(key, 0)

    # Compile alpenglow (Rust - future consensus, not yet live)
    alpenglow_dir = data_dir / "alpenglow"
    if alpenglow_dir.exists():
        click.echo("Compiling alpenglow (Rust)...")
        output = compiled_dir / "alpenglow"
        stats = compile_rust(alpenglow_dir, output)

        for key in total_stats:
            total_stats[key] += stats.get(key, 0)

    # Compile jito-programs (Rust - on-chain MEV programs)
    jito_programs_dir = data_dir / "jito-programs"
    if jito_programs_dir.exists():
        click.echo("Compiling jito-programs (Rust)...")

        jito_prog_subdirs = ["mev-programs", "tip-distribution", "tip-payment"]
        for subdir in jito_prog_subdirs:
            source = jito_programs_dir / subdir
            if source.exists():
                output = compiled_dir / "jito-programs" / subdir
                click.echo(f" {subdir}...")
                stats = compile_rust(source, output)

                for key in total_stats:
                    total_stats[key] += stats.get(key, 0)

    click.echo("\nCompilation complete:")
    click.echo(f" Files: {total_stats['files_processed']}")
    click.echo(f" Functions: {total_stats['functions']}")
    click.echo(f" Structs: {total_stats['structs']}")
    click.echo(f" Enums: {total_stats['enums']}")
    click.echo(f" Constants: {total_stats['constants_extracted']}")

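# Inferred output layout (sketch only; exact contents depend on which repos were
# downloaded). Each compiled subdirectory is expected to contain the JSON extracts
# consumed by the `index`, `constant`, and `function` commands below:
#
#   ~/.solana-mcp/compiled/agave/runtime/items.json    # functions/structs/enums
#   ~/.solana-mcp/compiled/agave/runtime/index.json    # name lookup index
#   ~/.solana-mcp/compiled/firedancer/flamenco/...     # C extracts from compile_c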
@main.command()
@click.option("--full", is_flag=True, help="Force full rebuild")
@click.option("--dry-run", is_flag=True, help="Show what would change without indexing")
@click.option("--model", "model_name", help="Embedding model to use")
@click.pass_context
def index(ctx, full: bool, dry_run: bool, model_name: str | None):
    """Build vector embeddings index (incremental by default)."""
    if not DEPS_AVAILABLE:
        click.echo("Error: Embedding dependencies not installed.")
        click.echo("Run: pip install lancedb sentence-transformers")
        sys.exit(1)

    data_dir = ctx.obj["data_dir"]
    compiled_dir = data_dir / "compiled"

    # Load config
    config = load_config(data_dir=data_dir)
    model = model_name or config.embedding.model

    # Validate model
    if model not in EMBEDDING_MODELS:
        click.echo(f"Warning: Unknown model '{model}', using default")
        model = DEFAULT_EMBEDDING_MODEL

    all_chunks = []

    # Load compiled Rust items
    for compiled_subdir in compiled_dir.glob("**"):
        if (compiled_subdir / "items.json").exists():
            click.echo(f"Loading items from {compiled_subdir.relative_to(compiled_dir)}...")
            items = load_compiled_items(compiled_subdir)
            constants = load_compiled_constants(compiled_subdir)

            repo_name = compiled_subdir.parts[-2] if len(compiled_subdir.parts) > 1 else "agave"
            chunks = chunk_content(
                items=items, constants=constants, repo_name=repo_name
            )
            all_chunks.extend(chunks)
            click.echo(f" {len(chunks)} chunks")

    # Load SIMDs
    simd_dir = data_dir / "solana-improvement-documents"
    if simd_dir.exists():
        click.echo("Chunking SIMDs...")
        simd_chunks = chunk_all_simds(simd_dir)
        all_chunks.extend(simd_chunks)
        click.echo(f" {len(simd_chunks)} chunks")

    if not all_chunks:
        click.echo("No content to index. Run 'download' and 'compile' first.")
        return

    # Build file tracking maps
    current_files: dict[str, Path] = {}
    file_types: dict[str, str] = {}

    for chunk in all_chunks:
        if chunk.source_file not in current_files:
            # Try to find actual file path
            abs_path = data_dir / chunk.source_file
            if abs_path.exists():
                current_files[chunk.source_file] = abs_path
                file_types[chunk.source_file] = chunk.source_type

    if dry_run:
        click.echo("\n[DRY RUN] Analyzing changes...")
        embedder = IncrementalEmbedder(
            data_dir=data_dir,
            model_name=model,
        )
        result = embedder.dry_run(current_files, file_types)
        click.echo(result.summary())
        if result.files_to_add:
            click.echo(f" Files to add: {', '.join(result.files_to_add[:5])}")
            if len(result.files_to_add) > 5:
                click.echo(f" ... and {len(result.files_to_add) - 5} more")
        if result.files_to_modify:
            click.echo(f" Files to modify: {', '.join(result.files_to_modify[:5])}")
        if result.files_to_delete:
            click.echo(f" Files to delete: {', '.join(result.files_to_delete[:5])}")
        return

    click.echo(f"\nIndexing {len(all_chunks)} total chunks...")

    # Use legacy build_index for now (chunks are already prepared)
    stats = build_index(
        all_chunks,
        data_dir=data_dir,
        model_name=model,
        progress_callback=click.echo,
    )

    click.echo(f"\nIndex built: {stats['chunks_indexed']} chunks")
    click.echo(f"Database: {stats['db_path']}")

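# Example invocations (illustrative), using the options defined above:
#
#   python -m solana_mcp.cli index --dry-run              # report adds/modifies/deletes only
#   python -m solana_mcp.cli index --model <name> --full
#
# Note: --full is accepted but not referenced in this function body; the non-dry-run
# path always goes through build_index() with the freshly prepared chunks.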
@main.command()
@click.option("--full", is_flag=True, help="Force full rebuild after update")
@click.pass_context
def update(ctx, full: bool):
    """Update repos and re-index incrementally."""
    data_dir = ctx.obj["data_dir"]

    click.echo("Updating repositories...")

    # Pull all repos
    repos = list_downloaded_repos(data_dir)
    updated = []

    for name, info in repos.items():
        if info["exists"]:
            repo_path = data_dir / name
            click.echo(f" Pulling {name}...")
            try:
                import subprocess

                result = subprocess.run(
                    ["git", "pull", "--ff-only"],
                    cwd=repo_path,
                    capture_output=True,
                    text=True,
                )
                if "Already up to date" not in result.stdout:
                    updated.append(name)
                    click.echo(" Updated")
                else:
                    click.echo(" Up to date")
            except Exception as e:
                click.echo(f" Failed: {e}")

    if updated:
        click.echo(f"\nUpdated: {', '.join(updated)}")
        click.echo("Re-compiling and re-indexing...")
        ctx.invoke(compile)
        ctx.invoke(index, full=full)
    else:
        click.echo("\nNo updates found")

@main.command("search")
@click.argument("query")
@click.option("--limit", "-n", default=5, help="Number of results")
@click.option("--type", "source_type", help="Filter by type (rust, simd, docs)")
@click.pass_context
def search_cmd(ctx, query: str, limit: int, source_type: str | None):
    """Search the index."""
    if not DEPS_AVAILABLE:
        click.echo("Error: Search dependencies not installed.")
        sys.exit(1)

    data_dir = ctx.obj["data_dir"]

    results = search(
        query,
        data_dir=data_dir,
        limit=limit,
        source_type=source_type,
    )

    if not results:
        click.echo("No results found.")
        return

    click.echo(f"\nResults for: {query}\n")

    for i, result in enumerate(results):
        score = 1 - result["score"]  # Convert distance to similarity
        click.echo(
            f"{i + 1}. [{result['source_type']}] "
            f"{click.style(result['source_name'], bold=True)} "
            f"(score: {score:.2%})"
        )
        click.echo(f" {result['source_file']}:{result['line_number']}")

        # Show snippet
        content = result["content"]
        if len(content) > 200:
            content = content[:200] + "..."
        for line in content.split("\n")[:3]:
            click.echo(f" {line}")
        click.echo()

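# Example (illustrative): restrict results to SIMD proposals and show more hits.
#
#   python -m solana_mcp.cli search "fee markets" --type simd -n 10
#
# The reported score is 1 - distance, i.e. a rough similarity derived from the
# vector distance returned by the index, as computed in the loop above.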
@main.command()
@click.argument("name")
@click.pass_context
def constant(ctx, name: str):
    """Look up a constant by name."""
    data_dir = ctx.obj["data_dir"]
    compiled_dir = data_dir / "compiled"

    # Search all compiled directories
    for subdir in compiled_dir.glob("**"):
        if (subdir / "index.json").exists():
            result = lookup_constant(name, subdir)
            if result:
                click.echo(f"\n{click.style(result.name, bold=True)}")
                click.echo(f" Value: {result.value}")
                if result.type_annotation:
                    click.echo(f" Type: {result.type_annotation}")
                click.echo(f" File: {result.file_path}:{result.line_number}")
                if result.doc_comment:
                    click.echo(f" Doc: {result.doc_comment}")
                return

    click.echo(f"Constant '{name}' not found.")

@main.command()
@click.argument("name")
@click.pass_context
def function(ctx, name: str):
    """Look up a function by name."""
    data_dir = ctx.obj["data_dir"]
    compiled_dir = data_dir / "compiled"

    for subdir in compiled_dir.glob("**"):
        if (subdir / "index.json").exists():
            result = lookup_function(name, subdir)
            if result:
                click.echo(f"\n{click.style(result.signature, bold=True)}")
                click.echo(f" File: {result.file_path}:{result.line_number}")
                if result.doc_comment:
                    click.echo(f"\n /// {result.doc_comment}\n")
                click.echo(result.body)
                return

    click.echo(f"Function '{name}' not found.")

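# Example lookups (names are hypothetical; anything extracted into index.json by the
# compile step can be queried):
#
#   python -m solana_mcp.cli constant DEFAULT_TICKS_PER_SLOT
#   python -m solana_mcp.cli function calculate_fee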
@main.command()
@click.pass_context
def status(ctx):
    """Check index status."""
    data_dir = ctx.obj["data_dir"]

    click.echo("SOLANA MCP STATUS")
    click.echo("=" * 40)

    # Check repos
    click.echo("\nRepositories:")
    repos = list_downloaded_repos(data_dir)
    for name, info in repos.items():
        repo_config = REPOS.get(name, {})
        is_client = repo_config.get("client", False)
        stake_pct = repo_config.get("stake_pct")

        if info["exists"]:
            status_icon = click.style("✓", fg="green")
            version = info["version"] or "unknown"
            if is_client and stake_pct:
                click.echo(f" {status_icon} {name} ({version}) - CLIENT ~{stake_pct}% stake")
            elif is_client:
                click.echo(f" {status_icon} {name} ({version}) - CLIENT")
            else:
                click.echo(f" {status_icon} {name} ({version})")
        else:
            status_icon = click.style("✗", fg="red")
            if is_client and stake_pct:
                click.echo(f" {status_icon} {name} (not downloaded) - CLIENT ~{stake_pct}% stake")
            else:
                click.echo(f" {status_icon} {name} (not downloaded)")

    # Check compiled
    compiled_dir = data_dir / "compiled"
    click.echo("\nCompiled:")
    if compiled_dir.exists():
        for subdir in compiled_dir.glob("**"):
            items_file = subdir / "items.json"
            if items_file.exists():
                items = load_compiled_items(subdir)
                constants = load_compiled_constants(subdir)
                rel_path = subdir.relative_to(compiled_dir)
                click.echo(f" {rel_path}: {len(items)} items, {len(constants)} constants")
    else:
        click.echo(" Not compiled yet")

    # Check manifest
    click.echo("\nManifest:")
    manifest = load_manifest(data_dir / "manifest.json")
    if manifest:
        click.echo(f" Version: {manifest.version}")
        click.echo(f" Updated: {manifest.updated_at}")
        click.echo(f" Model: {manifest.embedding_model}")
        click.echo(f" Files tracked: {len(manifest.files)}")
    else:
        click.echo(" No manifest (full build needed)")

    # Check index
    click.echo("\nIndex:")
    if DEPS_AVAILABLE:
        stats = get_index_stats(data_dir)
        if stats and "error" not in stats:
            click.echo(f" Total chunks: {stats['total_chunks']}")
            for source_type, count in stats.get("by_source_type", {}).items():
                click.echo(f" {source_type}: {count}")
        else:
            click.echo(" Not indexed yet")
    else:
        click.echo(" Dependencies not installed")

@main.command()
@click.argument("model", required=False)
def models(model: str | None):
    """List available embedding models."""
    click.echo(get_model_info(model))


if __name__ == "__main__":
    main()