rootly-mcp-server 1.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rootly_mcp_server/__init__.py +1 -1
- rootly_mcp_server/__main__.py +78 -10
- rootly_mcp_server/client.py +40 -26
- rootly_mcp_server/routemap_server.py +206 -0
- rootly_mcp_server/server.py +439 -365
- rootly_mcp_server/test_client.py +128 -47
- rootly_mcp_server-2.0.1.dist-info/METADATA +225 -0
- rootly_mcp_server-2.0.1.dist-info/RECORD +12 -0
- rootly_mcp_server/data/swagger.json +0 -1
- rootly_mcp_server-1.0.0.dist-info/METADATA +0 -128
- rootly_mcp_server-1.0.0.dist-info/RECORD +0 -12
- {rootly_mcp_server-1.0.0.dist-info → rootly_mcp_server-2.0.1.dist-info}/WHEEL +0 -0
- {rootly_mcp_server-1.0.0.dist-info → rootly_mcp_server-2.0.1.dist-info}/entry_points.txt +0 -0
- {rootly_mcp_server-1.0.0.dist-info → rootly_mcp_server-2.0.1.dist-info}/licenses/LICENSE +0 -0
rootly_mcp_server/server.py
CHANGED
@@ -2,7 +2,7 @@
 Rootly MCP Server - A Model Context Protocol server for Rootly API integration.

 This module implements a server that dynamically generates MCP tools based on
-the Rootly API's OpenAPI (Swagger) specification.
+the Rootly API's OpenAPI (Swagger) specification using FastMCP's OpenAPI integration.
 """

 import json
@@ -11,11 +11,14 @@ import re
 import logging
 from pathlib import Path
 import requests
-import
-from typing import Any, Dict, List, Optional, Tuple, Union, Callable
+import httpx
+from typing import Any, Dict, List, Optional, Tuple, Union, Callable, Annotated, Literal
+from enum import Enum

-import
-
+from fastmcp import FastMCP
+
+from fastmcp.server.dependencies import get_http_request
+from starlette.requests import Request
 from pydantic import BaseModel, Field

 from .client import RootlyClient
@@ -27,33 +30,70 @@ logger = logging.getLogger(__name__)
 SWAGGER_URL = "https://rootly-heroku.s3.amazonaws.com/swagger/v1/swagger.json"


-class
+class AuthenticatedHTTPXClient:
+    """An HTTPX client wrapper that handles Rootly API authentication."""
+
+    def __init__(self, base_url: str = "https://api.rootly.com", hosted: bool = False):
+        self.base_url = base_url
+        self.hosted = hosted
+        self._api_token = None
+
+        if not self.hosted:
+            self._api_token = self._get_api_token()
+
+        # Create the HTTPX client
+        headers = {"Content-Type": "application/json", "Accept": "application/json"}
+        if self._api_token:
+            headers["Authorization"] = f"Bearer {self._api_token}"
+
+        self.client = httpx.AsyncClient(
+            base_url=base_url,
+            headers=headers,
+            timeout=30.0
+        )
+
+    def _get_api_token(self) -> Optional[str]:
+        """Get the API token from environment variables."""
+        api_token = os.getenv("ROOTLY_API_TOKEN")
+        if not api_token:
+            logger.warning("ROOTLY_API_TOKEN environment variable is not set")
+            return None
+        return api_token
+
+    async def __aenter__(self):
+        return self.client
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        await self.client.aclose()
+
+    def __getattr__(self, name):
+        # Delegate all other attributes to the underlying client
+        return getattr(self.client, name)
+
+
+def create_rootly_mcp_server(
+    swagger_path: Optional[str] = None,
+    name: str = "Rootly",
+    allowed_paths: Optional[List[str]] = None,
+    hosted: bool = False,
+    base_url: Optional[str] = None,
+) -> FastMCP:
     """
-
+    Create a Rootly MCP Server using FastMCP's OpenAPI integration.

-
-
-
+    Args:
+        swagger_path: Path to the Swagger JSON file. If None, will fetch from URL.
+        name: Name of the MCP server.
+        allowed_paths: List of API paths to include. If None, includes default paths.
+        hosted: Whether the server is hosted (affects authentication).
+        base_url: Base URL for Rootly API. If None, uses ROOTLY_BASE_URL env var or default.

-
-
-
-
-
-
-        Initialize the Rootly MCP Server.
-
-        Args:
-            swagger_path: Path to the Swagger JSON file. If None, will look for
-                swagger.json in the current directory and parent directories.
-            name: Name of the MCP server.
-            default_page_size: Default number of items to return per page for paginated endpoints.
-            allowed_paths: List of API paths to load. If None, all paths will be loaded.
-                Paths should be specified without the /v1 prefix.
-                Example: ["/incidents", "/incidents/{incident_id}/alerts"]
-        """
-        # Set default allowed paths if none provided
-        self.allowed_paths = allowed_paths or [
+    Returns:
+        A FastMCP server instance.
+    """
+    # Set default allowed paths if none provided
+    if allowed_paths is None:
+        allowed_paths = [
             "/incidents",
             "/incidents/{incident_id}/alerts",
             "/alerts",
@@ -88,364 +128,398 @@ class RootlyMCPServer(FastMCP):
             "/users/me",
             # Status pages
             "/status_pages",
-            "/status_pages/{status_page_id}"
+            "/status_pages/{status_page_id}",
         ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+    # Add /v1 prefix to paths if not present
+    allowed_paths_v1 = [
+        f"/v1{path}" if not path.startswith("/v1") else path
+        for path in allowed_paths
+    ]
+
+    logger.info(f"Creating Rootly MCP Server with allowed paths: {allowed_paths_v1}")
+
+    # Load the Swagger specification
+    swagger_spec = _load_swagger_spec(swagger_path)
+    logger.info(f"Loaded Swagger spec with {len(swagger_spec.get('paths', {}))} total paths")
+
+    # Filter the OpenAPI spec to only include allowed paths
+    filtered_spec = _filter_openapi_spec(swagger_spec, allowed_paths_v1)
+    logger.info(f"Filtered spec to {len(filtered_spec.get('paths', {}))} allowed paths")
+
+    # Determine the base URL
+    if base_url is None:
+        base_url = os.getenv("ROOTLY_BASE_URL", "https://api.rootly.com")
+
+    logger.info(f"Using Rootly API base URL: {base_url}")
+
+    # Create the authenticated HTTP client
+    try:
+        http_client = AuthenticatedHTTPXClient(
+            base_url=base_url,
+            hosted=hosted
+        )
+    except Exception as e:
+        logger.warning(f"Failed to create authenticated client: {e}")
+        # Create a mock client for testing
+        http_client = httpx.AsyncClient(base_url=base_url)
+
+    # Create the MCP server using OpenAPI integration
+    # By default, all routes become tools which is what we want
+    mcp = FastMCP.from_openapi(
+        openapi_spec=filtered_spec,
+        client=http_client,
+        name=name,
+        timeout=30.0,
+        tags={"rootly", "incident-management"},
+    )
+
+    # Add some custom tools for enhanced functionality
+    @mcp.tool()
+    def list_endpoints() -> str:
+        """List all available Rootly API endpoints with their descriptions."""
+        endpoints = []
+        for path, path_item in filtered_spec.get("paths", {}).items():
+            for method, operation in path_item.items():
+                if method.lower() not in ["get", "post", "put", "delete", "patch"]:
+                    continue

-
-
-
-
+                summary = operation.get("summary", "")
+                description = operation.get("description", "")
+
+                endpoints.append({
+                    "path": path,
+                    "method": method.upper(),
+                    "summary": summary,
+                    "description": description,
+                })
+
+        return json.dumps(endpoints, indent=2)
+
+    @mcp.tool()
+    async def search_incidents_paginated(
+        query: Annotated[str, Field(description="Search query to filter incidents by title/summary")] = "",
+        page_size: Annotated[int, Field(description="Number of results per page (max: 100)", ge=1, le=100)] = 100,
+        page_number: Annotated[int, Field(description="Page number to retrieve", ge=1)] = 1,
+    ) -> str:
+        """
+        Search incidents with enhanced pagination control.
+
+        This tool provides better pagination handling than the standard API endpoint.
+        """
+        params = {
+            "page[size]": min(page_size, 100),
+            "page[number]": page_number,
+        }
+        if query:
+            params["filter[search]"] = query
+
+        try:
+            async with http_client as client:
+                response = await client.get("/v1/incidents", params=params)
+                response.raise_for_status()
+                result = response.json()
+        except Exception as e:
+            result = {"error": str(e)}
+
+        return json.dumps(result, indent=2)
+
+    @mcp.tool()
+    async def get_all_incidents_matching(
+        query: Annotated[str, Field(description="Search query to filter incidents by title/summary")] = "",
+        max_results: Annotated[int, Field(description="Maximum number of results to return", ge=1, le=1000)] = 500,
+    ) -> str:
+        """
+        Get all incidents matching a query by automatically fetching multiple pages.
+
+        This tool automatically handles pagination to fetch multiple pages of results.
+        """
+        all_incidents = []
+        page_number = 1
+        page_size = 100
+
+        try:
+            async with http_client as client:
+                while len(all_incidents) < max_results:
+                    params = {
+                        "page[size]": page_size,
+                        "page[number]": page_number,
+                    }
+                    if query:
+                        params["filter[search]"] = query
+
+                    try:
+                        response = await client.get("/v1/incidents", params=params)
+                        response.raise_for_status()
+                        response_data = response.json()
+
+                        if "data" in response_data:
+                            incidents = response_data["data"]
+                            if not incidents:  # No more results
+                                break
+                            all_incidents.extend(incidents)
+
+                            # Check if we have more pages
+                            meta = response_data.get("meta", {})
+                            current_page = meta.get("current_page", page_number)
+                            total_pages = meta.get("total_pages", 1)
+
+                            if current_page >= total_pages:
+                                break  # No more pages
+
+                            page_number += 1
+                        else:
+                            break  # Unexpected response format
+
+                    except Exception as e:
+                        logger.error(f"Error fetching incidents page {page_number}: {e}")
+                        break
+
+            # Limit to max_results
+            if len(all_incidents) > max_results:
+                all_incidents = all_incidents[:max_results]
+
+            result = {
+                "data": all_incidents,
+                "meta": {
+                    "total_fetched": len(all_incidents),
+                    "max_results": max_results,
+                    "query": query
+                }
+            }
+        except Exception as e:
+            result = {"error": str(e)}
+
+        return json.dumps(result, indent=2)

-
-
-
+    # Log server creation (tool count will be shown when tools are accessed)
+    logger.info(f"Created Rootly MCP Server successfully")
+    return mcp

-    def _fetch_swagger_from_url(self, url: str = SWAGGER_URL) -> Dict[str, Any]:
-        """
-        Fetch the Swagger specification from the specified URL.

-
-
+def _load_swagger_spec(swagger_path: Optional[str] = None) -> Dict[str, Any]:
+    """
+    Load the Swagger specification from a file or URL.

-
-
+    Args:
+        swagger_path: Path to the Swagger JSON file. If None, will fetch from URL.

-
-
-
-
+    Returns:
+        The Swagger specification as a dictionary.
+    """
+    if swagger_path:
+        # Use the provided path
+        logger.info(f"Using provided Swagger path: {swagger_path}")
+        if not os.path.isfile(swagger_path):
+            raise FileNotFoundError(f"Swagger file not found at {swagger_path}")
+        with open(swagger_path, "r") as f:
+            return json.load(f)
+    else:
+        # First, check in the package data directory
         try:
-
-
-
-
-
-
-
-            logger.error(f"Failed to parse Swagger spec: {e}")
-            raise Exception(f"Failed to parse Swagger specification: {e}")
-
-    def _load_swagger_spec(self, swagger_path: Optional[str] = None) -> Dict[str, Any]:
-        """
-        Load the Swagger specification from a file.
+            package_data_path = Path(__file__).parent / "data" / "swagger.json"
+            if package_data_path.is_file():
+                logger.info(f"Found Swagger file in package data: {package_data_path}")
+                with open(package_data_path, "r") as f:
+                    return json.load(f)
+        except Exception as e:
+            logger.debug(f"Could not load Swagger file from package data: {e}")

-
-
-
-        1. package data directory
-        2. current directory and parent directories
-        3. download from the URL
+        # Then, look for swagger.json in the current directory and parent directories
+        logger.info("Looking for swagger.json in current directory and parent directories")
+        current_dir = Path.cwd()

-
-
-
-
-            # Use the provided path
-            logger.info(f"Using provided Swagger path: {swagger_path}")
-            if not os.path.isfile(swagger_path):
-                raise FileNotFoundError(f"Swagger file not found at {swagger_path}")
+        # Check current directory first
+        swagger_path = current_dir / "swagger.json"
+        if swagger_path.is_file():
+            logger.info(f"Found Swagger file at {swagger_path}")
             with open(swagger_path, "r") as f:
                 return json.load(f)
-
-
-
-
-            if package_data_path.is_file():
-                logger.info(f"Found Swagger file in package data: {package_data_path}")
-                with open(package_data_path, "r") as f:
-                    return json.load(f)
-        except Exception as e:
-            logger.debug(f"Could not load Swagger file from package data: {e}")
-
-        # Then, look for swagger.json in the current directory and parent directories
-        logger.info("Looking for swagger.json in current directory and parent directories")
-        current_dir = Path.cwd()
-
-        # Check current directory first
-        swagger_path = current_dir / "swagger.json"
+
+        # Check parent directories
+        for parent in current_dir.parents:
+            swagger_path = parent / "swagger.json"
             if swagger_path.is_file():
                 logger.info(f"Found Swagger file at {swagger_path}")
                 with open(swagger_path, "r") as f:
                     return json.load(f)

-
-
-
-        if swagger_path.is_file():
-            logger.info(f"Found Swagger file at {swagger_path}")
-            with open(swagger_path, "r") as f:
-                return json.load(f)
-
-        # If the file wasn't found, fetch it from the URL and save it
-        logger.info("Swagger file not found locally, fetching from URL")
-        swagger_spec = self._fetch_swagger_from_url()
-
-        # Save the fetched spec to the current directory
-        swagger_path = current_dir / "swagger.json"
-        logger.info(f"Saving Swagger file to {swagger_path}")
-        try:
-            with open(swagger_path, "w") as f:
-                json.dump(swagger_spec, f)
-            logger.info(f"Saved Swagger file to {swagger_path}")
-        except Exception as e:
-            logger.warning(f"Failed to save Swagger file: {e}")
-
-        return swagger_spec
-
-    def _register_tools(self) -> None:
-        """
-        Register MCP tools based on the Swagger specification.
-        Only registers tools for paths specified in allowed_paths.
-        """
-        paths = self.swagger_spec.get("paths", {})
-
-        # Filter paths based on allowed_paths
-        filtered_paths = {
-            path: path_item
-            for path, path_item in paths.items()
-            if path in self.allowed_paths
-        }
-
-        logger.info(f"Registering {len(filtered_paths)} paths out of {len(paths)} total paths")
-
-        # Register the list_endpoints tool
-        @self.tool()
-        def list_endpoints() -> str:
-            """List all available Rootly API endpoints."""
-            endpoints = []
-            for path, path_item in filtered_paths.items():
-                for method, operation in path_item.items():
-                    if method.lower() not in ["get", "post", "put", "delete", "patch"]:
-                        continue
-
-                    summary = operation.get("summary", "")
-                    description = operation.get("description", "")
-
-                    endpoints.append({
-                        "path": path,
-                        "method": method.upper(),
-                        "summary": summary,
-                        "description": description,
-                        "tool_name": self._create_tool_name(path, method)
-                    })
-
-            return json.dumps(endpoints, indent=2)
-
-        # Register a tool for each endpoint
-        tool_count = 0
-
-        for path, path_item in filtered_paths.items():
-            # Skip path parameters
-            if "parameters" in path_item:
-                path_item = {k: v for k, v in path_item.items() if k != "parameters"}
+        # If the file wasn't found, fetch it from the URL and save it
+        logger.info("Swagger file not found locally, fetching from URL")
+        swagger_spec = _fetch_swagger_from_url()

-
-
-
-
-
-
-
-
-
-                if not description:
-                    description = f"{method.upper()} {path}"
-
-                # Register the tool using the direct method
-                try:
-                    # Define the tool function
-                    def create_tool_fn(p=path, m=method, op=operation):
-                        def tool_fn(**kwargs):
-                            return self._handle_api_request(p, m, op, **kwargs)
-
-                        # Set the function name and docstring
-                        tool_fn.__name__ = tool_name
-                        tool_fn.__doc__ = description
-                        return tool_fn
-
-                    # Create the tool function
-                    tool_fn = create_tool_fn()
-
-                    # Register the tool with FastMCP
-                    self.add_tool(
-                        name=tool_name,
-                        description=description,
-                        fn=tool_fn
-                    )
-
-                    tool_count += 1
-                    logger.info(f"Registered tool: {tool_name}")
-                except Exception as e:
-                    logger.error(f"Error registering tool {tool_name}: {e}")
-
-        logger.info(f"Registered {tool_count} tools in total. Modify allowed_paths to register more paths from the Rootly API.")
-
-    def _create_tool_name(self, path: str, method: str) -> str:
-        """
-        Create a tool name based on the path and method.
-
-        Args:
-            path: The API path.
-            method: The HTTP method.
-
-        Returns:
-            A tool name string.
-        """
-        # Remove the /v1 prefix if present
-        if path.startswith("/v1"):
-            path = path[3:]
-
-        # Replace path parameters with "by_id"
-        path = re.sub(r"\{([^}]+)\}", r"by_\1", path)
-
-        # Replace slashes with underscores and remove leading/trailing underscores
-        path = path.replace("/", "_").strip("_")
-
-        return f"{path}_{method.lower()}"
-
-    def _create_input_schema(self, path: str, operation: Dict[str, Any]) -> Dict[str, Any]:
-        """
-        Create an input schema for the tool.
-
-        Args:
-            path: The API path.
-            operation: The Swagger operation object.
-
-        Returns:
-            An input schema dictionary.
-        """
-        # Create a basic schema
-        schema = {
-            "type": "object",
-            "properties": {},
-            "required": [],
-            "additionalProperties": False
-        }
-
-        # Extract path parameters
-        path_params = re.findall(r"\{([^}]+)\}", path)
-        for param in path_params:
-            schema["properties"][param] = {
-                "type": "string",
-                "description": f"Path parameter: {param}"
-            }
-            schema["required"].append(param)
-
-        # Add operation parameters
-        for param in operation.get("parameters", []):
-            param_name = param.get("name")
-            param_in = param.get("in")
-
-            if param_in in ["query", "header"]:
-                param_schema = param.get("schema", {})
-                param_type = param_schema.get("type", "string")
-
-                schema["properties"][param_name] = {
-                    "type": param_type,
-                    "description": param.get("description", f"{param_in} parameter: {param_name}")
-                }
-
-                if param.get("required", False):
-                    schema["required"].append(param_name)
-
-        # Add request body for POST, PUT, PATCH methods
-        if "requestBody" in operation:
-            content = operation["requestBody"].get("content", {})
-            if "application/json" in content:
-                body_schema = content["application/json"].get("schema", {})
+        # Save the fetched spec to the current directory
+        swagger_path = current_dir / "swagger.json"
+        logger.info(f"Saving Swagger file to {swagger_path}")
+        try:
+            with open(swagger_path, "w") as f:
+                json.dump(swagger_spec, f)
+            logger.info(f"Saved Swagger file to {swagger_path}")
+        except Exception as e:
+            logger.warning(f"Failed to save Swagger file: {e}")

-
-                for prop_name, prop_schema in body_schema["properties"].items():
-                    schema["properties"][prop_name] = {
-                        "type": prop_schema.get("type", "string"),
-                        "description": prop_schema.get("description", f"Body parameter: {prop_name}")
-                    }
+        return swagger_spec

-                if "required" in body_schema:
-                    schema["required"].extend(body_schema["required"])

-
+def _fetch_swagger_from_url(url: str = SWAGGER_URL) -> Dict[str, Any]:
+    """
+    Fetch the Swagger specification from the specified URL.

-
-
-        Handle an API request to the Rootly API.
+    Args:
+        url: URL of the Swagger JSON file.

-
-
-
-
-
+    Returns:
+        The Swagger specification as a dictionary.
+    """
+    logger.info(f"Fetching Swagger specification from {url}")
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+        return response.json()
+    except requests.RequestException as e:
+        logger.error(f"Failed to fetch Swagger spec: {e}")
+        raise Exception(f"Failed to fetch Swagger specification: {e}")
+    except json.JSONDecodeError as e:
+        logger.error(f"Failed to parse Swagger spec: {e}")
+        raise Exception(f"Failed to parse Swagger specification: {e}")
+
+
+def _filter_openapi_spec(spec: Dict[str, Any], allowed_paths: List[str]) -> Dict[str, Any]:
+    """
+    Filter an OpenAPI specification to only include specified paths and clean up schema references.

-
-
-
-        logger.debug(f"Handling API request: {method} {path}")
-        logger.debug(f"Request parameters: {kwargs}")
-
-        # Extract path parameters
-        path_params = re.findall(r"\{([^}]+)\}", path)
-        actual_path = path
-
-        # Replace path parameters in the URL
-        for param in path_params:
-            if param in kwargs:
-                actual_path = actual_path.replace(f"{{{param}}}", str(kwargs.pop(param)))
-
-        # Separate query parameters and body parameters
-        query_params = {}
-        body_params = {}
-
-        if method.lower() == "get":
-            query_params = kwargs
-            if "incidents" in path and method.lower() == "get":
-                has_pagination = any(param.startswith("page[") for param in query_params.keys())
-                if not has_pagination:
-                    query_params["page[size]"] = self.default_page_size
-                    logger.debug(f"Added default pagination (page[size]={self.default_page_size}) for incidents endpoint: {path}")
-        else:
-            for param in operation.get("parameters", []):
-                param_name = param.get("name")
-                param_in = param.get("in")
-                if param_in == "query" and param_name in kwargs:
-                    query_params[param_name] = kwargs.pop(param_name)
-            body_params = kwargs
+    Args:
+        spec: The original OpenAPI specification.
+        allowed_paths: List of paths to include.

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    Returns:
+        A filtered OpenAPI specification with cleaned schema references.
+    """
+    filtered_spec = spec.copy()
+
+    # Filter paths
+    original_paths = spec.get("paths", {})
+    filtered_paths = {
+        path: path_item
+        for path, path_item in original_paths.items()
+        if path in allowed_paths
+    }
+
+    filtered_spec["paths"] = filtered_paths
+
+    # Clean up schema references that might be broken
+    # Remove problematic schema references from request bodies and parameters
+    for path, path_item in filtered_paths.items():
+        for method, operation in path_item.items():
+            if method.lower() not in ["get", "post", "put", "delete", "patch"]:
+                continue
+
+            # Clean request body schemas
+            if "requestBody" in operation:
+                request_body = operation["requestBody"]
+                if "content" in request_body:
+                    for content_type, content_info in request_body["content"].items():
+                        if "schema" in content_info:
+                            schema = content_info["schema"]
+                            # Remove problematic $ref references
+                            if "$ref" in schema and "incident_trigger_params" in schema["$ref"]:
+                                # Replace with a generic object schema
+                                content_info["schema"] = {
+                                    "type": "object",
+                                    "description": "Request parameters for this endpoint",
+                                    "additionalProperties": True
+                                }
+
+            # Clean parameter schemas
+            if "parameters" in operation:
+                for param in operation["parameters"]:
+                    if "schema" in param and "$ref" in param["schema"]:
+                        ref_path = param["schema"]["$ref"]
+                        if "incident_trigger_params" in ref_path:
+                            # Replace with a simple string schema
+                            param["schema"] = {
+                                "type": "string",
+                                "description": param.get("description", "Parameter value")
+                            }
+
+    # Also clean up any remaining broken references in components
+    if "components" in filtered_spec and "schemas" in filtered_spec["components"]:
+        schemas = filtered_spec["components"]["schemas"]
+        # Remove or fix any schemas that reference missing components
+        schemas_to_remove = []
+        for schema_name, schema_def in schemas.items():
+            if isinstance(schema_def, dict) and _has_broken_references(schema_def):
+                schemas_to_remove.append(schema_name)
+
+        for schema_name in schemas_to_remove:
+            logger.warning(f"Removing schema with broken references: {schema_name}")
+            del schemas[schema_name]
+
+    return filtered_spec
+
+
+def _has_broken_references(schema_def: Dict[str, Any]) -> bool:
+    """Check if a schema definition has broken references."""
+    if "$ref" in schema_def:
+        ref_path = schema_def["$ref"]
+        # List of known broken references in the Rootly API spec
+        broken_refs = [
+            "incident_trigger_params",
+            "new_workflow",
+            "update_workflow",
+            "workflow"
+        ]
+        if any(broken_ref in ref_path for broken_ref in broken_refs):
+            return True
+
+    # Recursively check nested schemas
+    for key, value in schema_def.items():
+        if isinstance(value, dict):
+            if _has_broken_references(value):
+                return True
+        elif isinstance(value, list):
+            for item in value:
+                if isinstance(item, dict) and _has_broken_references(item):
+                    return True
+
+    return False
+
+
+# Legacy class for backward compatibility
+class RootlyMCPServer(FastMCP):
+    """
+    Legacy Rootly MCP Server class for backward compatibility.
+
+    This class is deprecated. Use create_rootly_mcp_server() instead.
+    """
+
+    def __init__(
+        self,
+        swagger_path: Optional[str] = None,
+        name: str = "Rootly",
+        default_page_size: int = 10,
+        allowed_paths: Optional[List[str]] = None,
+        hosted: bool = False,
+        *args,
+        **kwargs,
+    ):
+        logger.warning(
+            "RootlyMCPServer class is deprecated. Use create_rootly_mcp_server() function instead."
+        )
+
+        # Create the server using the new function
+        server = create_rootly_mcp_server(
+            swagger_path=swagger_path,
+            name=name,
+            allowed_paths=allowed_paths,
+            hosted=hosted
+        )
+
+        # Copy the server's state to this instance
+        super().__init__(name, *args, **kwargs)
+        # For compatibility, store reference to the new server
+        # Tools will be accessed via async methods when needed
+        self._server = server
+        self._tools = {}  # Placeholder - tools should be accessed via async methods
+        self._resources = getattr(server, '_resources', {})
+        self._prompts = getattr(server, '_prompts', {})