suse-documentation-mcp-server 0.2.0-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- server/search_suse_documentation.py +96 -0
- {suse_documentation_mcp_server-0.2.0.dist-info → suse_documentation_mcp_server-0.3.0.dist-info}/METADATA +1 -1
- suse_documentation_mcp_server-0.3.0.dist-info/RECORD +6 -0
- suse_documentation_mcp_server-0.3.0.dist-info/entry_points.txt +2 -0
- suse_documentation_mcp_server-0.3.0.dist-info/top_level.txt +1 -0
- suse_documentation_mcp_server-0.2.0.dist-info/RECORD +0 -4
- suse_documentation_mcp_server-0.2.0.dist-info/top_level.txt +0 -1
- {suse_documentation_mcp_server-0.2.0.dist-info → suse_documentation_mcp_server-0.3.0.dist-info}/WHEEL +0 -0
server/search_suse_documentation.py
@@ -0,0 +1,96 @@
+import requests
+import re
+import os
+from dotenv import load_dotenv
+from typing import Any
+from mcp.server.fastmcp import FastMCP
+
+load_dotenv()
+
+base_url = os.getenv("OI_API_BASE", "") + "/api/v1/retrieval/process/web"
+authorization_token = os.getenv("OI_API_TOKEN", "")
+
+# Initialize FastMCP server
+mcp = FastMCP("suse-documentation")
+
+def clean_content(content):
+    # Remove excessive blank lines
+    cleaned_content = re.sub(r'\n\s*\n+', '\n\n', content.strip())
+    # Replace multiple spaces with a single space
+    cleaned_content = re.sub(r'[ \t]+', ' ', cleaned_content)
+    return cleaned_content
+
+def get_web_search_results_from_oi(query):
+    headers = {
+        'Content-Type': 'application/json',
+        'Authorization': f'Bearer {authorization_token}'
+    }
+
+    # Step 1: Get web search results (list of URLs)
+    search_payload = {
+        "query": query,
+        "collection_name": "your_collection_name"
+    }
+
+    search_response = requests.post(f"{base_url}/search", headers=headers, json=search_payload)
+    if search_response.status_code != 200:
+        raise Exception(f"Search API call failed: {search_response.status_code} - {search_response.text}")
+
+    search_data = search_response.json()
+
+    if not search_data.get("status"):
+        raise Exception(f"Search API response indicates failure: {search_data}")
+
+    filenames = search_data.get("filenames", [])
+    if not filenames:
+        return "No filenames found in the search response."
+
+    combined_response = ""
+
+    # Step 2: Loop through URLs to get page content
+    for filename in filenames:
+        process_payload = {
+            "url": filename,
+            "collection_name": search_data["collection_name"]
+        }
+
+        process_response = requests.post(base_url, headers=headers, json=process_payload)
+        if process_response.status_code != 200:
+            print(f"Failed to process URL {filename}: {process_response.status_code} - {process_response.text}")
+            continue
+
+        process_data = process_response.json()
+        if not process_data.get("status"):
+            print(f"Processing failed for URL {filename}: {process_data}")
+            continue
+
+        content = process_data.get("file", {}).get("data", {}).get("content", "No content available")
+
+        # Append to get combined response
+        cleaned_content = clean_content(content)
+        combined_response += f"Source: {filename}\n\nContent:\n{cleaned_content}\n\n"
+
+    return combined_response
+
+@mcp.tool()
+async def get_web_search_results(query: str) -> str:
+    """Get web search results for a given query.
+
+    Args:
+        query: Web search query
+    """
+    try:
+        return get_web_search_results_from_oi(query)
+    except Exception as e:
+        return f"Error performing web search: {str(e)}"
+
+
+def main():
+    """Main entry point for the script."""
+    # Initialize and run the server
+    mcp.run(transport='stdio')
+
+if __name__ == "__main__":
+    # Initialize and run the server
+    mcp.run(transport='stdio')
+
suse_documentation_mcp_server-0.3.0.dist-info/RECORD
@@ -0,0 +1,6 @@
+server/search_suse_documentation.py,sha256=EneucIS1a3rVNdtG4i0J8RumSivEDmAoJQqcEDEu60k,3127
+suse_documentation_mcp_server-0.3.0.dist-info/METADATA,sha256=gzdKCiakDFcA-xb3GrurB5R9-XIzrvXmPT7qZBA03d0,343
+suse_documentation_mcp_server-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+suse_documentation_mcp_server-0.3.0.dist-info/entry_points.txt,sha256=IEM7kH8YFWMMJCcp4hJGv5tHzHWxiR5lZ7jOyzRCHQA,88
+suse_documentation_mcp_server-0.3.0.dist-info/top_level.txt,sha256=StKOSmRhvWS5IPcvhsDRbtxUTEofJgYFGOu5AAJdSWo,7
+suse_documentation_mcp_server-0.3.0.dist-info/RECORD,,
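Each RECORD entry pairs a path with its SHA-256 digest (urlsafe base64 with the '=' padding stripped, per the wheel RECORD format) and its size in bytes, so the shipped source can be checked against the listing. A small verification sketch, assuming the wheel has been unpacked into the current directory:

import base64
import hashlib

def record_hash(path):
    # Wheel RECORD hashes are the urlsafe base64 of the SHA-256 digest
    # with the trailing '=' padding stripped
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# Expected for 0.3.0: sha256=EneucIS1a3rVNdtG4i0J8RumSivEDmAoJQqcEDEu60k (3127 bytes)
print(record_hash("server/search_suse_documentation.py"))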
suse_documentation_mcp_server-0.3.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+server
suse_documentation_mcp_server-0.2.0.dist-info/RECORD
@@ -1,4 +0,0 @@
-suse_documentation_mcp_server-0.2.0.dist-info/METADATA,sha256=0fy3O3OTkesfR66aUbXsdXjUWZodR92Z3P8tp_luFnY,343
-suse_documentation_mcp_server-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-suse_documentation_mcp_server-0.2.0.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-suse_documentation_mcp_server-0.2.0.dist-info/RECORD,,
suse_documentation_mcp_server-0.2.0.dist-info/top_level.txt
@@ -1 +0,0 @@
-

{suse_documentation_mcp_server-0.2.0.dist-info → suse_documentation_mcp_server-0.3.0.dist-info}/WHEEL
File without changes
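For reference, the clean_content helper added in 0.3.0 normalizes whitespace in fetched page content in two passes: runs of blank lines collapse to a single blank line, then runs of spaces and tabs collapse to a single space. A standalone check of the two substitutions copied from the diff above (the sample string is illustrative):

import re

def clean_content(content):
    # Collapse runs of blank lines into a single blank line
    cleaned_content = re.sub(r'\n\s*\n+', '\n\n', content.strip())
    # Collapse runs of spaces and tabs into a single space
    return re.sub(r'[ \t]+', ' ', cleaned_content)

raw = "SUSE   Linux\t Enterprise\n\n\n\nPatch   guide\n"
print(repr(clean_content(raw)))
# -> 'SUSE Linux Enterprise\n\nPatch guide'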