iflow-mcp_hulupeep_ruvscan-mcp 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,19 @@
+ mcp/__init__.py,sha256=dWs4CTMlszA3kkivHnL6Rz0SnheoDGrh_--HBkJ4Ixg,171
+ mcp/mcp_stdio_server.py,sha256=v-LHyZVrBRiFnr2zTQhIkwbn4E1mcojv504DHP1rBZs,6164
+ mcp/monitoring.py,sha256=MS6fwF1p-YjMZjeokssjM2vF4cPNjWYbzQ9-9NbJo0w,3757
+ mcp/server.py,sha256=CE_4mQVa1-5GXpC0KCfpXqjQomKX27mJSxlfQlkCH5Q,8552
+ mcp/bindings/__init__.py,sha256=AVE5Lad6YApYZ8CNH7saAsPaHTPyKBL2d677hjcUJTc,118
+ mcp/bindings/rust_client.py,sha256=LWcT1bc3cPKZSOYhcV8X_zSoHWvQb8KCUpUlXTOqEqQ,4984
+ mcp/endpoints/query.py,sha256=4OxYyNPmK7FdKDHHv5wp6AIMGQ_IvSs8alvemsVNitI,4853
+ mcp/endpoints/scan.py,sha256=_7Yf5uNZEmwRbDz7s5NrZnYSuXCZ9vDLleUgDFaWWQY,3226
+ mcp/reasoning/__init__.py,sha256=cctZeASsyKAOBFkaAbTDLIQiDBuR62ZU8b8A6DL8cPc,144
+ mcp/reasoning/embeddings.py,sha256=B_xjRzn98bVzsHa3XErKWys7fzdTjTNfRgvnoHPe7ng,6235
+ mcp/reasoning/fact_cache.py,sha256=dititHNTgjRQePw3e3ljbYLhxzRHYNRsrmfFdH3mtZ8,5738
+ mcp/reasoning/safla_agent.py,sha256=l0wqcNfL4jTa4W4Fl7U_nRZz4YXxcWCn0rKLrb3M5tw,9420
+ mcp/storage/__init__.py,sha256=S0bzRxAhvEBvYi8Z7dTxjFDzc5DqtiS-dDUHhV9zQC8,349
+ mcp/storage/db.py,sha256=SiwOjnzUHi4YKHwgFK5tnHkYp9NuXPN0vWMRUW2ipS4,7196
+ mcp/storage/models.py,sha256=ujTeJQZ9KAPzH0yUyunwc8zuGwMshhQ_0Ug5B1q4RLQ,2213
+ iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/METADATA,sha256=iEwl08ewL_lktVQF2JrBP4dQ9xhkjQprKaaOOsX4el4,39046
+ iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/entry_points.txt,sha256=GT06cmvaZZh6KZOrpBzF8X7MpZ1Y1mBBHhLXmEMLBXo,77
+ iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/RECORD,,
iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.28.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
iflow_mcp_hulupeep_ruvscan_mcp-0.5.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+ [console_scripts]
+ iflow-mcp-hulupeep-ruvscan-mcp = mcp.mcp_stdio_server:main
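
The console script points at the stdio server's entry point, so the installed command is equivalent to this minimal Python sketch (assuming only the installed package):

    from mcp.mcp_stdio_server import main

    main()  # starts the FastMCP stdio server defined in mcp/mcp_stdio_server.py below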
mcp/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """
+ RuvScan MCP Server Package
+ """
+
+ __version__ = "0.5.0"
+ __author__ = "Colm Byrne / Flout Labs"
+ __description__ = "Sublinear-intelligence MCP server for GitHub scanning"
mcp/bindings/__init__.py ADDED
@@ -0,0 +1,5 @@
+ """Bindings for external services"""
+
+ from .rust_client import RustSublinearClient
+
+ __all__ = ['RustSublinearClient']
mcp/bindings/rust_client.py ADDED
@@ -0,0 +1,166 @@
+ """
+ gRPC client for the Rust sublinear engine
+ """
+
+ import grpc
+ from typing import List, Dict, Any, Tuple
+ import logging
+ import numpy as np
+
+ logger = logging.getLogger(__name__)
+
+ class RustSublinearClient:
+     """
+     gRPC client for communicating with the Rust sublinear engine
+     """
+
+     def __init__(self, host: str = "localhost", port: int = 50051):
+         self.host = host
+         self.port = port
+         self.channel = None
+         self.stub = None
+
+     async def connect(self):
+         """Establish the gRPC connection"""
+         try:
+             address = f"{self.host}:{self.port}"
+             self.channel = grpc.aio.insecure_channel(address)
+             # TODO: Initialize stub when proto definitions are added
+             logger.info(f"Connected to Rust engine at {address}")
+         except Exception as e:
+             logger.error(f"Failed to connect to Rust engine: {e}")
+             raise
+
+     async def compute_similarity(
+         self,
+         query_embedding: np.ndarray,
+         corpus_embeddings: List[np.ndarray],
+         distortion: float = 0.5
+     ) -> List[Tuple[int, float]]:
+         """
+         Compute sublinear similarity between a query and a corpus
+
+         Args:
+             query_embedding: Query vector
+             corpus_embeddings: List of corpus vectors
+             distortion: JL distortion parameter
+
+         Returns:
+             List of (index, similarity_score) tuples, sorted by score descending
+         """
+         logger.info(f"Computing sublinear similarity for {len(corpus_embeddings)} vectors")
+
+         try:
+             # TODO: Implement gRPC call when proto is defined
+             # For now, use placeholder local computation
+
+             # Placeholder: simple cosine similarity
+             similarities = []
+             for idx, corpus_vec in enumerate(corpus_embeddings):
+                 sim = self._cosine_similarity(query_embedding, corpus_vec)
+                 similarities.append((idx, sim))
+
+             # Sort by similarity descending
+             similarities.sort(key=lambda x: x[1], reverse=True)
+
+             logger.info(f"Computed {len(similarities)} similarities")
+             return similarities
+
+         except Exception as e:
+             logger.error(f"Similarity computation error: {e}")
+             raise
+
+     async def compare_vectors(
+         self,
+         vec_a: np.ndarray,
+         vec_b: np.ndarray,
+         distortion: float = 0.5
+     ) -> Dict[str, Any]:
+         """
+         Compare two vectors using the sublinear algorithm
+
+         Args:
+             vec_a: First vector
+             vec_b: Second vector
+             distortion: JL distortion parameter
+
+         Returns:
+             Comparison result with similarity and complexity estimate
+         """
+         try:
+             # TODO: Implement gRPC call
+
+             similarity = self._cosine_similarity(vec_a, vec_b)
+
+             return {
+                 "similarity": float(similarity),
+                 "complexity": f"O(log {len(vec_a)})",
+                 "method": "sublinear_jl",
+                 "distortion": distortion
+             }
+
+         except Exception as e:
+             logger.error(f"Vector comparison error: {e}")
+             raise
+
+     async def analyze_matrix(
+         self,
+         matrix: np.ndarray
+     ) -> Dict[str, Any]:
+         """
+         Analyze matrix properties for optimal algorithm selection
+
+         Args:
+             matrix: Matrix to analyze
+
+         Returns:
+             Analysis result
+         """
+         try:
+             # TODO: Implement gRPC call
+
+             # Placeholder analysis
+             is_sparse = np.count_nonzero(matrix) / matrix.size < 0.3
+             is_symmetric = np.allclose(matrix, matrix.T)
+
+             return {
+                 "is_sparse": is_sparse,
+                 "is_symmetric": is_symmetric,
+                 "is_diagonally_dominant": self._check_diagonal_dominance(matrix),
+                 "recommended_method": "neumann" if is_sparse else "direct",
+                 "complexity_estimate": "O(log n)"
+             }
+
+         except Exception as e:
+             logger.error(f"Matrix analysis error: {e}")
+             raise
+
+     def _cosine_similarity(self, a: np.ndarray, b: np.ndarray) -> float:
+         """Compute cosine similarity between two vectors"""
+         dot_product = np.dot(a, b)
+         norm_a = np.linalg.norm(a)
+         norm_b = np.linalg.norm(b)
+
+         if norm_a == 0 or norm_b == 0:
+             return 0.0
+
+         return float(dot_product / (norm_a * norm_b))
+
+     def _check_diagonal_dominance(self, matrix: np.ndarray) -> bool:
+         """Check if the matrix is strictly diagonally dominant"""
+         n = matrix.shape[0]
+
+         for i in range(n):
+             diagonal = abs(matrix[i, i])
+             row_sum = sum(abs(matrix[i, j]) for j in range(n) if j != i)
+
+             if diagonal <= row_sum:
+                 return False
+
+         return True
+
+     async def close(self):
+         """Close the gRPC connection"""
+         if self.channel:
+             await self.channel.close()
+             logger.info("Closed connection to Rust engine")
mcp/endpoints/query.py ADDED
@@ -0,0 +1,143 @@
+ """
+ Query endpoint implementation
+ Handles leverage discovery queries
+ """
+
+ from fastapi import APIRouter, HTTPException
+ from pydantic import BaseModel, Field
+ from typing import List
+ import json
+ import logging
+ import numpy as np
+
+ from ..reasoning.embeddings import EmbeddingService
+ from ..reasoning.fact_cache import FACTCache
+ from ..reasoning.safla_agent import SAFLAAgent
+ from ..bindings.rust_client import RustSublinearClient
+ from ..storage.db import RuvScanDB  # used once database retrieval is implemented (see TODO below)
+ from ..storage.models import LeverageCard
+
+ logger = logging.getLogger(__name__)
+
+ router = APIRouter()
+
+ # Initialize services
+ embedding_service = EmbeddingService()
+ fact_cache = FACTCache()
+ safla_agent = SAFLAAgent(fact_cache)
+ rust_client = RustSublinearClient()
+
+ class QueryRequest(BaseModel):
+     """Request to query for leverage"""
+     intent: str = Field(..., min_length=10)
+     max_results: int = Field(10, gt=0, le=100)
+     min_score: float = Field(0.7, ge=0.0, le=1.0)
+
+ @router.post("/query", response_model=List[LeverageCard])
+ async def query_leverage(request: QueryRequest):
+     """
+     Query for leverage cards based on user intent
+
+     Uses sublinear similarity and SAFLA reasoning to find relevant repos
+     """
+     logger.info(f"Querying intent: {request.intent[:100]}...")
+
+     try:
+         # Check the FACT cache first
+         cached = fact_cache.get(f"query:{request.intent}")
+         if cached:
+             logger.info("Returning cached query results")
+             return json.loads(cached['response'])
+
+         # Generate an embedding for the intent
+         logger.info("Generating embedding for query intent")
+         intent_embedding = await embedding_service.embed_text(request.intent)
+
+         # Get all repo embeddings from the database
+         # TODO: Implement database retrieval
+         # For now, create mock data
+         mock_repos = create_mock_repos()
+
+         # Compute similarities using the Rust engine
+         logger.info(f"Computing sublinear similarity against {len(mock_repos)} repos")
+         corpus_embeddings = [repo['embedding'] for repo in mock_repos]
+
+         similarities = await rust_client.compute_similarity(
+             intent_embedding,
+             corpus_embeddings,
+             distortion=0.5
+         )
+
+         # Filter by minimum score
+         filtered = [
+             (idx, score) for idx, score in similarities
+             if score >= request.min_score
+         ][:request.max_results]
+
+         logger.info(f"Found {len(filtered)} repos above threshold {request.min_score}")
+
+         # Generate leverage cards with SAFLA reasoning
+         leverage_cards = []
+         for idx, score in filtered:
+             repo_data = mock_repos[idx]
+
+             # Generate SAFLA reasoning
+             card = safla_agent.generate_leverage_card(
+                 repo_data=repo_data,
+                 query_intent=request.intent,
+                 similarity_score=score
+             )
+
+             leverage_cards.append(LeverageCard(**card))
+
+         # Cache results
+         fact_cache.set(
+             f"query:{request.intent}",
+             json.dumps([card.dict() for card in leverage_cards]),
+             metadata={"query_length": len(request.intent)}
+         )
+
+         logger.info(f"Returning {len(leverage_cards)} leverage cards")
+         return leverage_cards
+
+     except Exception as e:
+         logger.error(f"Query error: {str(e)}", exc_info=True)
+         raise HTTPException(status_code=500, detail=str(e))
+
+ def create_mock_repos() -> List[dict]:
+     """Create mock repository data for testing"""
+     return [
+         {
+             "id": 1,
+             "full_name": "ruvnet/sublinear-time-solver",
+             "name": "sublinear-time-solver",
+             "org": "ruvnet",
+             "description": "TRUE O(log n) matrix solver with consciousness exploration",
+             "capabilities": ["O(log n) solving", "WASM acceleration", "MCP integration"],
+             "embedding": np.random.randn(1536),
+             "stars": 150,
+             "language": "Rust"
+         },
+         {
+             "id": 2,
+             "full_name": "ruvnet/FACT",
+             "name": "FACT",
+             "org": "ruvnet",
+             "description": "Framework for Autonomous Context Tracking - deterministic caching",
+             "capabilities": ["Deterministic caching", "Prompt replay", "Context management"],
+             "embedding": np.random.randn(1536),
+             "stars": 85,
+             "language": "Python"
+         },
+         {
+             "id": 3,
+             "full_name": "ruvnet/MidStream",
+             "name": "MidStream",
+             "org": "ruvnet",
+             "description": "Real-time streaming and inflight data processing",
+             "capabilities": ["Streaming", "Real-time processing", "Async channels"],
+             "embedding": np.random.randn(1536),
+             "stars": 120,
+             "language": "Rust"
+         }
+     ]
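
A client-side sketch for the /query route, assuming the FastAPI app is reachable at http://localhost:8000 (the default the stdio server below also uses); the repo and relevance_score fields are the ones the stdio server reads from each returned card:

    import httpx

    # assumes the router in mcp/endpoints/query.py is mounted at this base URL
    resp = httpx.post(
        "http://localhost:8000/query",
        json={
            "intent": "find a sublinear similarity engine for repo embeddings",
            "max_results": 5,
            "min_score": 0.7,
        },
        timeout=30.0,
    )
    resp.raise_for_status()
    for card in resp.json():
        print(card["repo"], card["relevance_score"])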
mcp/endpoints/scan.py ADDED
@@ -0,0 +1,112 @@
+ """
+ Scan endpoint implementation
+ Handles GitHub repository scanning
+ """
+
+ from fastapi import APIRouter, HTTPException
+ from pydantic import BaseModel, Field
+ from typing import Optional
+ import logging
+ import httpx  # reserved for triggering the Go scanner workers (see TODOs below)
+
+ logger = logging.getLogger(__name__)
+
+ router = APIRouter()
+
+ class ScanRequest(BaseModel):
+     """Request to scan a GitHub org, user, or topic"""
+     source_type: str = Field(..., pattern="^(org|user|topic)$")
+     source_name: str
+     limit: int = Field(50, gt=0, le=1000)
+
+ class ScanResponse(BaseModel):
+     """Scan response"""
+     status: str
+     source_type: str
+     source_name: str
+     estimated_repos: int
+     message: str
+     job_id: Optional[str] = None
+
+ @router.post("/scan", response_model=ScanResponse)
+ async def scan_repos(request: ScanRequest):
+     """
+     Trigger GitHub scanning for an org, user, or topic
+
+     This endpoint initiates concurrent Go workers to fetch and analyze repos
+     """
+     logger.info(f"Scanning {request.source_type}: {request.source_name}")
+
+     try:
+         # TODO: Implement actual Go scanner triggering
+         # For now, return a mock response
+
+         # In production, this would:
+         # 1. Queue a scan job
+         # 2. Trigger Go workers via REST/gRPC
+         # 3. Store the job in the database
+         # 4. Return a job ID for tracking
+
+         return ScanResponse(
+             status="initiated",
+             source_type=request.source_type,
+             source_name=request.source_name,
+             estimated_repos=request.limit,
+             message=f"Scan initiated for {request.source_type}/{request.source_name}",
+             job_id=f"scan_{request.source_type}_{request.source_name}"
+         )
+
+     except Exception as e:
+         logger.error(f"Scan error: {str(e)}")
+         raise HTTPException(status_code=500, detail=str(e))
+
+ @router.post("/ingest")
+ async def ingest_repo_data(repo_data: dict):
+     """
+     Ingest repository data from Go scanner workers
+
+     This endpoint receives repo metadata, README content, and other data
+     from the concurrent Go scanner workers
+     """
+     logger.info(f"Ingesting repo data: {repo_data.get('full_name', 'unknown')}")
+
+     try:
+         # TODO: Implement actual ingestion
+         # 1. Validate the repo data
+         # 2. Generate embeddings
+         # 3. Store in the database
+         # 4. Update the scan job status
+
+         return {
+             "status": "ingested",
+             "repo": repo_data.get("full_name"),
+             "message": "Repository data ingested successfully"
+         }
+
+     except Exception as e:
+         logger.error(f"Ingest error: {str(e)}")
+         raise HTTPException(status_code=500, detail=str(e))
+
+ @router.get("/scan/{job_id}/status")
+ async def get_scan_status(job_id: str):
+     """
+     Get the status of a scan job
+
+     Returns current progress, repos found, and completion status
+     """
+     logger.info(f"Checking scan status: {job_id}")
+
+     try:
+         # TODO: Query the database for job status
+
+         return {
+             "job_id": job_id,
+             "status": "in_progress",
+             "repos_found": 0,
+             "repos_processed": 0,
+             "completion_percentage": 0.0
+         }
+
+     except Exception as e:
+         logger.error(f"Status check error: {str(e)}")
+         raise HTTPException(status_code=500, detail=str(e))
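
Under the same localhost assumption, a sketch of the scan flow chains the job_id from ScanResponse into the status route; until real job tracking is implemented, the status endpoint returns the mock in_progress payload above:

    import httpx

    # assumes the router in mcp/endpoints/scan.py is mounted at this base URL
    base = "http://localhost:8000"
    scan = httpx.post(
        f"{base}/scan",
        json={"source_type": "org", "source_name": "ruvnet", "limit": 50},
        timeout=30.0,
    ).json()
    status = httpx.get(f"{base}/scan/{scan['job_id']}/status", timeout=5.0).json()
    print(status["status"], status["completion_percentage"])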
mcp/mcp_stdio_server.py ADDED
@@ -0,0 +1,209 @@
+ #!/usr/bin/env python3
+ """
+ RuvScan MCP Server (STDIO Transport)
+ For use with Claude Code CLI, Codex, and Claude Desktop
+ """
+
+ import os
+ import httpx
+ from mcp.server.fastmcp import FastMCP
+
+ # Initialize FastMCP server
+ mcp = FastMCP("ruvscan")
+
+ # Get the API endpoint from the environment (defaults to localhost when running locally)
+ RUVSCAN_API = os.getenv("RUVSCAN_API_URL", "http://localhost:8000")
+ GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", "")
+
+
+ @mcp.tool()
+ async def scan_github(
+     source_type: str,
+     source_name: str,
+     limit: int = 50
+ ) -> str:
+     """Scan a GitHub organization, user, or topic for repositories.
+
+     Args:
+         source_type: Type of source - 'org', 'user', or 'topic'
+         source_name: Name of the organization, user, or topic keyword
+         limit: Maximum number of repositories to scan (default: 50)
+     """
+     async with httpx.AsyncClient() as client:
+         try:
+             response = await client.post(
+                 f"{RUVSCAN_API}/scan",
+                 json={
+                     "source_type": source_type,
+                     "source_name": source_name,
+                     "limit": limit
+                 },
+                 headers={"Authorization": f"Bearer {GITHUB_TOKEN}"} if GITHUB_TOKEN else {},
+                 timeout=30.0
+             )
+             response.raise_for_status()
+             data = response.json()
+
+             return f"""Scan initiated for {source_type}: {source_name}
+ Status: {data.get('status', 'unknown')}
+ Estimated repositories: {data.get('estimated_repos', limit)}
+ Message: {data.get('message', 'Processing')}
+ """
+         except Exception as e:
+             return f"Error scanning GitHub: {str(e)}"
+
+
+ @mcp.tool()
+ async def query_leverage(
+     intent: str,
+     max_results: int = 10,
+     min_score: float = 0.7
+ ) -> str:
+     """Query for leverage opportunities based on your intent or problem.
+
+     Args:
+         intent: Your problem statement or what you're trying to build
+         max_results: Maximum number of results to return (default: 10)
+         min_score: Minimum relevance score from 0-1 (default: 0.7)
+     """
+     async with httpx.AsyncClient() as client:
+         try:
+             response = await client.post(
+                 f"{RUVSCAN_API}/query",
+                 json={
+                     "intent": intent,
+                     "max_results": max_results,
+                     "min_score": min_score
+                 },
+                 timeout=30.0
+             )
+             response.raise_for_status()
+             cards = response.json()
+
+             if not cards:
+                 return f"No leverage opportunities found for: {intent}"
+
+             results = []
+             for card in cards:
+                 result = f"""
+ Repository: {card['repo']}
+ Relevance Score: {card['relevance_score']:.2f}
+ Complexity: {card.get('runtime_complexity', 'N/A')}
+
+ Summary: {card['summary']}
+
+ Why This Helps: {card['outside_box_reasoning']}
+
+ How to Use: {card['integration_hint']}
+
+ Capabilities: {', '.join(card['capabilities'])}
+ {'(Cached Result)' if card.get('cached') else ''}
+ """
+                 results.append(result)
+
+             return ("\n" + "=" * 80 + "\n").join(results)
+
+         except Exception as e:
+             return f"Error querying leverage: {str(e)}"
+
+
+ @mcp.tool()
+ async def compare_repositories(
+     repo_a: str,
+     repo_b: str
+ ) -> str:
+     """Compare two GitHub repositories using sublinear similarity.
+
+     Args:
+         repo_a: First repository in 'org/repo' format
+         repo_b: Second repository in 'org/repo' format
+     """
+     async with httpx.AsyncClient() as client:
+         try:
+             response = await client.post(
+                 f"{RUVSCAN_API}/compare",
+                 json={
+                     "repo_a": repo_a,
+                     "repo_b": repo_b
+                 },
+                 timeout=30.0
+             )
+             response.raise_for_status()
+             data = response.json()
+
+             return f"""
+ Repository Comparison (O(log n) complexity)
+
+ {repo_a} vs {repo_b}
+
+ Similarity Score: {data.get('similarity_score', 0):.2f}
+ Complexity: {data.get('complexity', 'O(log n)')}
+
+ Analysis: {data.get('analysis', 'Comparison complete')}
+ """
+         except Exception as e:
+             return f"Error comparing repositories: {str(e)}"
+
+
+ @mcp.tool()
+ async def analyze_reasoning(repo: str) -> str:
+     """Analyze and replay the reasoning chain for a repository using the FACT cache.
+
+     Args:
+         repo: Repository name in 'org/repo' format
+     """
+     async with httpx.AsyncClient() as client:
+         try:
+             response = await client.post(
+                 f"{RUVSCAN_API}/analyze",
+                 params={"repo": repo},
+                 timeout=30.0
+             )
+             response.raise_for_status()
+             data = response.json()
+
+             trace = data.get('reasoning_trace', [])
+             if not trace:
+                 return f"No reasoning trace available for {repo}"
+
+             result = f"Reasoning Chain for {repo}:\n\n"
+             for step in trace:
+                 result += f"- {step}\n"
+
+             if data.get('cached'):
+                 result += "\n(Retrieved from FACT deterministic cache)"
+
+             return result
+
+         except Exception as e:
+             return f"Error analyzing reasoning: {str(e)}"
+
+
+ @mcp.resource("ruvscan://status")
+ async def get_status() -> str:
+     """Get RuvScan server status and health."""
+     async with httpx.AsyncClient() as client:
+         try:
+             response = await client.get(f"{RUVSCAN_API}/health", timeout=5.0)
+             response.raise_for_status()
+             data = response.json()
+
+             return f"""RuvScan Server Status
+
+ Status: {data.get('status', 'unknown')}
+ Version: {data.get('version', '0.5.0')}
+ Service: {data.get('service', 'RuvScan MCP Server')}
+ API Endpoint: {RUVSCAN_API}
+ """
+         except Exception as e:
+             return f"Server unreachable: {str(e)}\n\nMake sure the RuvScan API server is running:\ndocker-compose up -d"
+
+
+ def main():
+     """Run the MCP server using stdio transport."""
+     # Initialize and run the server
+     mcp.run(transport='stdio')
+
+
+ if __name__ == "__main__":
+     main()
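
Since FastMCP's tool() decorator returns the original coroutine in current versions of the MCP Python SDK (an assumption worth checking against the installed version), the tools can also be smoke-tested directly, with no MCP client in the loop, provided the RuvScan API is running at RUVSCAN_API:

    import asyncio
    from mcp.mcp_stdio_server import scan_github, query_leverage

    # direct calls assume @mcp.tool() returned the undecorated coroutine
    async def smoke_test():
        print(await scan_github("org", "ruvnet", limit=5))
        print(await query_leverage("real-time streaming data pipeline", max_results=3))

    asyncio.run(smoke_test())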