iflow-mcp_haroldfinchift-vuln-nist-mcp-server 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/METADATA +221 -0
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/RECORD +7 -0
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/WHEEL +5 -0
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/licenses/LICENSE +21 -0
- iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/top_level.txt +1 -0
- vuln_nist_mcp_server.py +573 -0
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: iflow-mcp_haroldfinchift-vuln-nist-mcp-server
|
|
3
|
+
Version: 1.1.0
|
|
4
|
+
Summary: A Model Context Protocol (MCP) server for querying NIST National Vulnerability Database (NVD) API endpoints
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
License-File: LICENSE
|
|
8
|
+
Requires-Dist: mcp[cli]>=1.2.0
|
|
9
|
+
Requires-Dist: httpx>=0.24.0
|
|
10
|
+
Dynamic: license-file
|
|
11
|
+
|
|
12
|
+
# vuln-nist-mcp-server
|
|
13
|
+
|
|
14
|
+
A Model Context Protocol (MCP) server for querying NIST National Vulnerability Database (NVD) API endpoints.
|
|
15
|
+
|
|
16
|
+
## Purpose
|
|
17
|
+
|
|
18
|
+
This MCP server exposes tools to query the NVD/CVE REST API and return formatted text results suitable for LLM consumption via the MCP protocol. It includes automatic query chunking for large date ranges and parallel processing for improved performance.
|
|
19
|
+
|
|
20
|
+
Base API docs: https://nvd.nist.gov/developers/vulnerabilities
|
|
21
|
+
|
|
22
|
+
## Features
|
|
23
|
+
|
|
24
|
+
### Available Tools
|
|
25
|
+
|
|
26
|
+
- **`get_temporal_context`** - Get current date and temporal context for time-relative queries
|
|
27
|
+
- Essential for queries like "this year", "last year", "6 months ago"
|
|
28
|
+
- Provides current date mappings and examples for date parameter construction
|
|
29
|
+
- **USAGE**: Call this tool FIRST when user asks time-relative questions
|
|
30
|
+
|
|
31
|
+
- **`search_cves`** - Search CVE descriptions by keyword with flexible date filtering
|
|
32
|
+
- Parameters: `keyword`, `resultsPerPage` (default: 20), `startIndex` (default: 0), `last_days` (`recent_days` has been deprecated), `start_date`, `end_date`
|
|
33
|
+
- **New in v1.1.0**: Support for absolute date ranges with `start_date` and `end_date` parameters
|
|
34
|
+
- **Date filtering priority**: `start_date`/`end_date` → `last_days` → default 30 days
|
|
35
|
+
- Auto-chunks queries > 120 days into parallel requests
|
|
36
|
+
- Results sorted by publication date (newest first)
|
|
37
|
+
|
|
38
|
+
- **`get_cve_by_id`** - Retrieve detailed information for a specific CVE
|
|
39
|
+
- Parameters: `cve_id`
|
|
40
|
+
- Returns: CVE details, references, tags, and publication dates
|
|
41
|
+
|
|
42
|
+
- **`cves_by_cpe`** - List CVEs associated with a Common Platform Enumeration (CPE)
|
|
43
|
+
- Parameters: `cpe_name` (full CPE 2.3 format required), `is_vulnerable` (optional)
|
|
44
|
+
- Validates CPE format before querying
|
|
45
|
+
|
|
46
|
+
- **`kevs_between`** - Find CVEs added to CISA KEV catalog within a date range
|
|
47
|
+
- Parameters: `kevStartDate`, `kevEndDate`, `resultsPerPage` (default: 20), `startIndex` (default: 0)
|
|
48
|
+
- Auto-chunks queries > 90 days into parallel requests
|
|
49
|
+
- Results sorted by publication date (newest first)
|
|
50
|
+
|
|
51
|
+
- **`cve_change_history`** - Retrieve change history for CVEs
|
|
52
|
+
- Parameters: `cve_id` OR (`changeStartDate` + `changeEndDate`), `resultsPerPage` (default: 20), `startIndex` (default: 0)
|
|
53
|
+
- Auto-chunks date range queries > 120 days into parallel requests
|
|
54
|
+
- Results sorted by change creation date (newest first)
|
|
55
|
+
|
|
56
|
+
### Key Features
|
|
57
|
+
|
|
58
|
+
- **Temporal Awareness**: New `get_temporal_context` tool for accurate time-relative queries
|
|
59
|
+
- **Flexible Date Filtering**: Support for both relative (`last_days`) and absolute (`start_date`/`end_date`) date ranges
|
|
60
|
+
- **Improved Result Ordering**: All results sorted chronologically (newest first) for better relevance
|
|
61
|
+
- **Parallel Processing**: Large date ranges are automatically split into chunks and processed concurrently
|
|
62
|
+
- **Input Validation**: CPE format validation, date parsing, parameter sanitization
|
|
63
|
+
- **Emoji Indicators**: Clear visual feedback (✅ success, ❌ error, ⚠️ warning, 🔍 search, 🔥 KEV, 🌐 CPE, 🕘 history, 📅 temporal)
|
|
64
|
+
- **Comprehensive Logging**: Detailed stderr logging for debugging
|
|
65
|
+
- **Error Handling**: Graceful handling of API errors, timeouts, and malformed responses
|
|
66
|
+
|
|
67
|
+
## Prerequisites
|
|
68
|
+
|
|
69
|
+
- Docker (recommended) or Python 3.11+
|
|
70
|
+
- Network access to NVD endpoints (`services.nvd.nist.gov`)
|
|
71
|
+
- MCP-compatible client (e.g., Claude Desktop)
|
|
72
|
+
|
|
73
|
+
## Quick Start
|
|
74
|
+
|
|
75
|
+
### Using Docker (Recommended)
|
|
76
|
+
|
|
77
|
+
```bash
|
|
78
|
+
# Clone and build
|
|
79
|
+
git clone https://github.com/HaroldFinchIFT/vuln-nist-mcp-server
|
|
80
|
+
cd vuln-nist-mcp-server
|
|
81
|
+
docker build -t vuln-nist-mcp-server .
|
|
82
|
+
|
|
83
|
+
# Run
|
|
84
|
+
docker run --rm -it vuln-nist-mcp-server
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
## Configuration
|
|
88
|
+
|
|
89
|
+
Environment variables:
|
|
90
|
+
|
|
91
|
+
- `NVD_BASE_URL`: Base URL for NVD API (default: `https://services.nvd.nist.gov/rest/json`)
|
|
92
|
+
- `NVD_VERSION`: API version (default: `/2.0`)
|
|
93
|
+
- `NVD_API_TIMEOUT`: Request timeout in seconds (default: `10`)
|
|
94
|
+
|
|
95
|
+
## Usage Examples
|
|
96
|
+
|
|
97
|
+
### With Claude Desktop or MCP Client
|
|
98
|
+
|
|
99
|
+
**Get temporal context for time-relative queries:**
|
|
100
|
+
```
|
|
101
|
+
Tool: get_temporal_context
|
|
102
|
+
Params: {}
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
**Search recent CVEs (relative time):**
|
|
106
|
+
```
|
|
107
|
+
Tool: search_cves
|
|
108
|
+
Params: {
|
|
109
|
+
"keyword": "Microsoft Exchange",
|
|
110
|
+
"resultsPerPage": 10,
|
|
111
|
+
"last_days": 7
|
|
112
|
+
}
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
**Search CVEs with absolute date range:**
|
|
116
|
+
```
|
|
117
|
+
Tool: search_cves
|
|
118
|
+
Params: {
|
|
119
|
+
"keyword": "buffer overflow",
|
|
120
|
+
"start_date": "2024-01-01T00:00:00",
|
|
121
|
+
"end_date": "2024-03-31T23:59:59"
|
|
122
|
+
}
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
**Search CVEs for "this year" (use get_temporal_context first):**
|
|
126
|
+
```
|
|
127
|
+
# First, get temporal context
|
|
128
|
+
Tool: get_temporal_context
|
|
129
|
+
|
|
130
|
+
# Then use the provided date mappings
|
|
131
|
+
Tool: search_cves
|
|
132
|
+
Params: {
|
|
133
|
+
"keyword": "remote code execution",
|
|
134
|
+
"start_date": "2025-01-01T00:00:00",
|
|
135
|
+
"end_date": "2025-09-17T12:00:00"
|
|
136
|
+
}
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
**Get CVE details:**
|
|
140
|
+
```
|
|
141
|
+
Tool: get_cve_by_id
|
|
142
|
+
Params: {"cve_id": "CVE-2024-21413"}
|
|
143
|
+
```
|
|
144
|
+
|
|
145
|
+
**Check CPE vulnerabilities:**
|
|
146
|
+
```
|
|
147
|
+
Tool: cves_by_cpe
|
|
148
|
+
Params: {
|
|
149
|
+
"cpe_name": "cpe:2.3:a:microsoft:exchange_server:2019:*:*:*:*:*:*:*",
|
|
150
|
+
"is_vulnerable": "true"
|
|
151
|
+
}
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
**Find recent KEV additions:**
|
|
155
|
+
```
|
|
156
|
+
Tool: kevs_between
|
|
157
|
+
Params: {
|
|
158
|
+
"kevStartDate": "2024-01-01T00:00:00.000Z",
|
|
159
|
+
"kevEndDate": "2024-03-31T23:59:59.000Z"
|
|
160
|
+
}
|
|
161
|
+
```
|
|
162
|
+
|
|
163
|
+
## Performance Notes
|
|
164
|
+
|
|
165
|
+
- Queries with date ranges > 90-120 days are automatically chunked for better performance
|
|
166
|
+
- Parallel processing reduces total query time for large date ranges
|
|
167
|
+
- Results are automatically sorted by publication date (newest first) across all chunks
|
|
168
|
+
|
|
169
|
+
## Development
|
|
170
|
+
|
|
171
|
+
### File Structure
|
|
172
|
+
|
|
173
|
+
```
|
|
174
|
+
vuln-nist-mcp-server/
|
|
175
|
+
├── Dockerfile
|
|
176
|
+
├── glama.json
|
|
177
|
+
├── LICENSE
|
|
178
|
+
├── nvd_logo.png
|
|
179
|
+
├── README.md
|
|
180
|
+
├── requirements.txt
|
|
181
|
+
├── SECURITY.md
|
|
182
|
+
└── vuln_nist_mcp_server.py
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
## Security Considerations
|
|
186
|
+
|
|
187
|
+
- No API key required (public NVD endpoints)
|
|
188
|
+
- Container runs as non-root user (`mcpuser`)
|
|
189
|
+
- Input validation prevents injection attacks
|
|
190
|
+
- No persistent storage of sensitive data
|
|
191
|
+
- Network capabilities added only when required via Docker flags
|
|
192
|
+
|
|
193
|
+
## Contributing
|
|
194
|
+
|
|
195
|
+
1. Fork the repository
|
|
196
|
+
2. Create a feature branch
|
|
197
|
+
3. Make your changes
|
|
198
|
+
4. Test locally
|
|
199
|
+
5. Submit a pull request
|
|
200
|
+
|
|
201
|
+
## License
|
|
202
|
+
|
|
203
|
+
MIT - see LICENSE file for details
|
|
204
|
+
|
|
205
|
+
## Changelog
|
|
206
|
+
|
|
207
|
+
### v1.1.0
|
|
208
|
+
- **NEW**: Added `get_temporal_context` tool for temporal awareness and time-relative queries
|
|
209
|
+
- **ENHANCED**: `search_cves` now supports absolute date ranges with `start_date` and `end_date` parameters
|
|
210
|
+
- **ENHANCED**: Improved date filtering logic with priority: absolute dates → relative days → default 30 days
|
|
211
|
+
- **ENHANCED**: All tools now return results sorted chronologically (newest first) for better relevance
|
|
212
|
+
- **IMPROVED**: Better error handling for ISO-8601 date parsing
|
|
213
|
+
- **DEPRECATED**: `recent_days` parameter in `search_cves` (use `last_days` instead)
|
|
214
|
+
- **UPDATED**: Logo and visual improvements
|
|
215
|
+
|
|
216
|
+
### v1.0.0
|
|
217
|
+
- Initial release
|
|
218
|
+
- Support for all major NVD API endpoints
|
|
219
|
+
- Automatic query chunking and parallel processing
|
|
220
|
+
- CPE format validation
|
|
221
|
+
- Comprehensive error handling
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
vuln_nist_mcp_server.py,sha256=D1sYCDReat88GN83Uf8QHv_adPt_R9mOHMvzOZx7pis,21497
|
|
2
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/licenses/LICENSE,sha256=6ZzG5LeGiA5YwdZjNbAhTbQqL4xUNIZaY-dWlhHIqrQ,1066
|
|
3
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/METADATA,sha256=ugsBAmFJm5GZP9Dcqy5Vrm1BdPhTR3052BQzuKJXob4,7256
|
|
4
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
5
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/entry_points.txt,sha256=m-JsAcfutWM4aUWeOlgnEQjaOw6dm3MROdkedUzGJHE,67
|
|
6
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/top_level.txt,sha256=86AVq6DXmtpI5_84wR-watmQTlHxsBfeu10slaYBWJE,21
|
|
7
|
+
iflow_mcp_haroldfinchift_vuln_nist_mcp_server-1.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Nick Clyde
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
vuln_nist_mcp_server
|
vuln_nist_mcp_server.py
ADDED
|
@@ -0,0 +1,573 @@
|
|
|
1
|
+
"""vuln-nist-mcp-server MCP server"""
|
|
2
|
+
import asyncio
import logging
import os
import re
import sys
import traceback
from datetime import datetime, timedelta, timezone
from typing import cast
from urllib.parse import quote_plus

import httpx

from mcp.server.fastmcp import FastMCP
|
|
14
|
+
|
|
15
|
+
# BUGFIX: was "1.0.0" while the published package metadata declares 1.1.0;
# keep this in sync with the dist-info version.
__version__ = "1.1.0"

# Log to stderr so stdout stays free for the MCP stdio transport.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    stream=sys.stderr,
)
logger = logging.getLogger("vuln-nist-mcp-server")

mcp = FastMCP("vuln-nist-mcp-server")

# Configuration (overridable via environment variables)
NVD_BASE = os.environ.get("NVD_BASE_URL", "https://services.nvd.nist.gov/rest/json")
NVD_VERSION = os.environ.get("NVD_VERSION", "/2.0")
API_TIMEOUT = int(os.environ.get("NVD_API_TIMEOUT", "10"))
|
|
30
|
+
|
|
31
|
+
# Utility helpers
|
|
32
|
+
def _safe_str(s):
|
|
33
|
+
"""Return safe stripped string"""
|
|
34
|
+
try:
|
|
35
|
+
return "" if s is None else str(s).strip()
|
|
36
|
+
except Exception:
|
|
37
|
+
return ""
|
|
38
|
+
|
|
39
|
+
def _int_or_default(s, default=20):
|
|
40
|
+
"""Convert string to int or default"""
|
|
41
|
+
try:
|
|
42
|
+
return int(str(s).strip()) if str(s).strip() != "" else default
|
|
43
|
+
except Exception:
|
|
44
|
+
return default
|
|
45
|
+
|
|
46
|
+
def _short_desc_from_vuln(v):
|
|
47
|
+
"""Return a short description for a vuln object"""
|
|
48
|
+
try:
|
|
49
|
+
cve = v.get("cve", {})
|
|
50
|
+
descs = cve.get("descriptions", []) or []
|
|
51
|
+
for d in descs:
|
|
52
|
+
if d.get("lang") == "en" and d.get("value"):
|
|
53
|
+
txt = d.get("value")
|
|
54
|
+
return txt if len(txt) <= 240 else txt[:237] + "..."
|
|
55
|
+
if descs:
|
|
56
|
+
txt = descs[0].get("value", "")
|
|
57
|
+
return txt if len(txt) <= 240 else txt[:237] + "..."
|
|
58
|
+
except Exception:
|
|
59
|
+
pass
|
|
60
|
+
return "(no description)"
|
|
61
|
+
|
|
62
|
+
def _format_vuln_entry(v):
    """Render one vulnerability as a single bullet line (id | published | desc)."""
    try:
        cve = v.get("cve", {})
        ident = cve.get("id", "UNKNOWN")
        pub = _safe_str(cve.get("published"))
        summary = _short_desc_from_vuln(v)
        return f"- {ident} | published: {pub} | {summary}"
    except Exception:
        # Never let one broken entry abort the whole listing.
        return "- (malformed entry)"
|
|
72
|
+
|
|
73
|
+
# === MCP TOOLS ===
|
|
74
|
+
|
|
75
|
+
@mcp.tool()
async def get_temporal_context() -> str:
    """
    Get the current date and temporal context when it is needed.

    **USAGE**: Call this tool FIRST when the user asks a time-relative question like "this year", "last year", "6 months ago", etc.

    Returns current date context and examples for constructing specific date parameters.
    """
    # All timestamps are UTC, matching the format used by the other tools.
    now = datetime.now(timezone.utc)
    current_year = now.year
    current_date = now.strftime("%Y-%m-%dT%H:%M:%S")

    # Precomputed ISO-8601 boundaries for common relative periods.
    this_year_start = f"{current_year}-01-01T00:00:00"
    last_year_start = f"{current_year - 1}-01-01T00:00:00"
    last_year_end = f"{current_year - 1}-12-31T23:59:59"
    # "Months" are approximated as fixed-size day blocks (180 / 90 days).
    six_months_ago = (now - timedelta(days=180)).strftime("%Y-%m-%dT00:00:00")
    three_months_ago = (now - timedelta(days=90)).strftime("%Y-%m-%dT00:00:00")

    return f"""📅 **CURRENT TEMPORAL CONTEXT**

🗓️ **Current date**: {now.strftime('%Y-%m-%d %H:%M:%S UTC')}
📊 **Current year**: {current_year}

🎯 **COMMON TIME PERIOD MAPPINGS**:

▪️ **"This year"**:
  start_date="{this_year_start}", end_date="{current_date}"

▪️ **"Last year"**:
  start_date="{last_year_start}", end_date="{last_year_end}"

▪️ **"Last 6 months"**:
  start_date="{six_months_ago}", end_date="{current_date}"

▪️ **"Last 3 months"**:
  start_date="{three_months_ago}", end_date="{current_date}"

▪️ **"2 years ago ({current_year - 2})"**:
  start_date="{current_year - 2}-01-01T00:00:00", end_date="{current_year - 2}-12-31T23:59:59"

▪️ **"Q1 this year"**:
  start_date="{current_year}-01-01T00:00:00", end_date="{current_year}-03-31T23:59:59"

▪️ **"Q1 last year"**:
  start_date="{current_year - 1}-01-01T00:00:00", end_date="{current_year - 1}-03-31T23:59:59"

💡 **Usage**: Copy the appropriate value above and use them directly in the other tools call when it is needed.
"""
|
|
124
|
+
|
|
125
|
+
@mcp.tool()
async def search_cves(
    keyword: str = "",
    resultsPerPage: int = 20,
    startIndex: int = 0,
    recent_days: int | None = None,  # deprecated alias for last_days
    last_days: int | None = None,
    start_date: str = "",
    end_date: str = "",
) -> str:
    """
    Search CVEs by keyword in description, with flexible time filtering.

    **IMPORTANT**: For time-relative queries (this year, last year, etc.), call get_temporal_context() FIRST to get current date information.

    **Date filtering logic (in priority order):**
    - If start_date and end_date are provided → use them directly
    - Else if last_days is provided → calculate start_date = now - last_days
    - Else fallback to last 30 days

    **Technical notes:**
    - If the time period > 120 days, queries are split into 120-day chunks
    - start_date, end_date: Use ISO 8601 format: "YYYY-MM-DDTHH:MM:SS"
    - recent_days parameter is deprecated, use last_days instead.
    """

    # Honor the deprecated alias only when the new parameter is absent.
    last_days = last_days if last_days is not None else recent_days

    keyword = _safe_str(keyword)
    now = datetime.now(timezone.utc)

    # Resolve the effective [start_dt, end_dt] window; keep the parsed
    # datetimes in new names instead of rebinding the str parameters.
    if end_date and end_date.strip():
        try:
            end_dt = datetime.fromisoformat(end_date.replace("Z", "+00:00"))
        except ValueError:
            return "❌ Error: end_date must be valid ISO-8601 format"
    else:
        end_dt = now

    if start_date and start_date.strip():
        try:
            start_dt = datetime.fromisoformat(start_date.replace("Z", "+00:00"))
        except ValueError:
            return "❌ Error: start_date must be valid ISO-8601 format"
    else:
        if last_days is not None:
            start_dt = end_dt - timedelta(days=last_days)
        else:
            start_dt = end_dt - timedelta(days=30)

    url = f"{NVD_BASE}/cves{NVD_VERSION}"

    async def fetch_chunk(chunk_start: datetime, chunk_end: datetime):
        """Fetch one <=120-day window from the NVD CVE endpoint."""
        pubStartDate = chunk_start.strftime("%Y-%m-%dT%H:%M:%S.000")
        pubEndDate = chunk_end.strftime("%Y-%m-%dT%H:%M:%S.000")

        # BUGFIX: URL-encode the keyword. The raw value was interpolated
        # straight into the query string, so spaces, '&', '#' or non-ASCII
        # characters would corrupt or truncate the request.
        query = (
            f"?keywordSearch={quote_plus(keyword)}"
            f"&resultsPerPage={resultsPerPage}"
            f"&startIndex={startIndex}"
            f"&pubStartDate={pubStartDate}"
            f"&pubEndDate={pubEndDate}"
        )
        full_url = url + query
        logger.info(f"search_cves chunk: full_url={full_url}")

        async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:
            resp = await client.get(full_url)
            resp.raise_for_status()
            return resp.json()

    try:
        # Split the window into chunks of at most 120 days each.
        chunk_size = timedelta(days=120)
        chunks = []
        chunk_start = start_dt
        while chunk_start < end_dt:
            chunk_end = min(chunk_start + chunk_size, end_dt)
            chunks.append((chunk_start, chunk_end))
            chunk_start = chunk_end

        # Query all chunks concurrently; failed chunks are logged and skipped
        # so one bad window does not lose the whole result set.
        tasks = [fetch_chunk(cs, ce) for cs, ce in chunks]
        responses = await asyncio.gather(*tasks, return_exceptions=True)

        total = 0
        results: list[dict] = []

        for resp in responses:
            if isinstance(resp, Exception):
                logger.error(f"Chunk failed: {resp}")
                continue

            data = cast(dict, resp)
            total += data.get("totalResults", 0)
            results.extend(data.get("vulnerabilities", []) or [])

        # Newest first across all chunks.
        results.sort(
            key=lambda v: v.get("cve", {}).get("published", ""),
            reverse=True
        )

        lines = [f"🔍 Search results for \"{keyword}\" - total matches: {total}"]
        if not results:
            lines.append("⚠️ No vulnerabilities returned for the given params.")
        else:
            for v in results:
                lines.append(_format_vuln_entry(v))

        lines.append(
            f"📄 Aggregated across {len(chunks)} chunk(s) "
            f"(parallelized), period={start_dt.date()} → {end_dt.date()}, "
            f"resultsPerPage={resultsPerPage}"
        )
        return "\n".join(lines)

    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error in search_cves: {e.response.status_code} - {e.response.text}")
        return f"❌ API Error: {e.response.status_code}"
    except Exception as e:
        logger.error("Exception in search_cves: " + str(e))
        logger.debug(traceback.format_exc())
        return f"❌ Error: {str(e)}"
|
|
247
|
+
|
|
248
|
+
@mcp.tool()
async def get_cve_by_id(cve_id: str = "") -> str:
    """Retrieve a CVE by its CVE-ID.

    Returns a formatted summary (publication dates, tags, description and up
    to five references) or an ❌/⚠️ error string on failure.
    """
    cve_id = _safe_str(cve_id)
    if not cve_id:
        return "❌ Error: cve_id parameter is required"
    url = f"{NVD_BASE}/cves{NVD_VERSION}"
    params = {"cveId": cve_id}
    logger.info(f"get_cve_by_id: cveId={cve_id}")
    try:
        async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:
            resp = await client.get(url, params=params)
            resp.raise_for_status()
            data = resp.json()
            vulns = data.get("vulnerabilities", []) or []
            if not vulns:
                return f"⚠️ No CVE found for {cve_id}"
            v = vulns[0]
            cve = v.get("cve", {})
            desc = _short_desc_from_vuln(v)
            published = _safe_str(cve.get("published"))
            lastmod = _safe_str(cve.get("lastModified"))
            tags = cve.get("cveTags", []) or []
            tag_names = ", ".join([t.get("tag", "") for t in tags]) if tags else "none"
            out = [
                f"✅ CVE: {cve_id}",
                f"- Published: {published}",
                f"- Last Modified: {lastmod}",
                f"- Tags: {tag_names}",
                f"- Description: {desc}",
            ]
            # BUGFIX: in the NVD 2.0 schema references live under the "cve"
            # object (like published/descriptions/cveTags above), not on the
            # wrapper; the old v.get("references") lookup was always empty.
            refs = cve.get("references", []) or []
            if refs:
                out.append(f"- References ({len(refs)}):")
                for r in refs[:5]:
                    # NVD reference objects carry "source"/"url"; there is no
                    # "type" field, so label with the source when present.
                    rtype = r.get("source", "ref")
                    urlr = r.get("url", "")
                    out.append(f" - [{rtype}] {urlr}")
                if len(refs) > 5:
                    out.append(f" - ... and {len(refs)-5} more")
            return "\n".join(out)
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error in get_cve_by_id: {e.response.status_code}")
        return f"❌ API Error: {e.response.status_code}"
    except Exception as e:
        logger.error("Exception in get_cve_by_id: " + str(e))
        logger.debug(traceback.format_exc())
        return f"❌ Error: {str(e)}"
|
|
296
|
+
|
|
297
|
+
# CPE 2.3 formatted-string pattern: "cpe:2.3:" followed by exactly 11
# colon-separated components. "part" must be a/h/o (application, hardware,
# operating system); every other component is any run of non-colon
# characters, which also admits the "*" and "-" wildcard values.
CPE_REGEX = re.compile(
    r"^cpe:(?P<version>2\.3):"
    r"(?P<part>[aho]):"
    r"(?P<vendor>[^:]*):"
    r"(?P<product>[^:]*):"
    r"(?P<version_field>[^:]*):"
    r"(?P<update>[^:]*):"
    r"(?P<edition>[^:]*):"
    r"(?P<language>[^:]*):"
    r"(?P<sw_edition>[^:]*):"
    r"(?P<target_sw>[^:]*):"
    r"(?P<target_hw>[^:]*):"
    r"(?P<other>[^:]*)$"
)
|
|
311
|
+
|
|
312
|
+
@mcp.tool()
async def cves_by_cpe(cpe_name: str = "", is_vulnerable: str = "") -> str:
    """List CVEs associated with a specific CPE (full CPE 2.3 name required)."""
    cpe_name = _safe_str(cpe_name)
    if not cpe_name:
        return "❌ Error: cpe_name parameter is required"

    # Reject anything that is not a complete CPE 2.3 formatted string.
    if not CPE_REGEX.match(cpe_name):
        return ("❌ Error: cpe_name must be provided in full CPE 2.3 format, e.g. "
                "cpe:2.3:a:vendor:product:version:update:edition:language:"
                "sw_edition:target_sw:target_hw:other - eventually use the wildcard *, e.g.: cpe:2.3:a:ntp:ntp:4.2.8:p3:*:*:*:*:*:*")

    endpoint = f"{NVD_BASE}/cves{NVD_VERSION}"
    query_params = {"cpeName": cpe_name}
    # isVulnerable is sent as a valueless flag when any truthy spelling is given.
    if _safe_str(is_vulnerable).lower() in ("1", "true", "yes"):
        query_params["isVulnerable"] = ""
    logger.info(f"cves_by_cpe: cpeName={cpe_name} isVulnerable={_safe_str(is_vulnerable)}")
    try:
        async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:
            response = await client.get(endpoint, params=query_params)
            response.raise_for_status()
            payload = response.json()

        total = payload.get("totalResults", 0)
        vulns = payload.get("vulnerabilities", []) or []
        lines = [f'🌐 CVEs for CPE "{cpe_name}" - total matches: {total}']
        if vulns:
            # Cap the listing at 50 entries to keep the reply readable.
            lines.extend(_format_vuln_entry(entry) for entry in vulns[:50])
            if total > len(vulns):
                lines.append(f"📄 Partial list: returned {len(vulns)} of {total}")
        else:
            lines.append("⚠️ No vulnerabilities returned for the given CPE.")
        return "\n".join(lines)
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error in cves_by_cpe: {e.response.status_code}")
        return f"❌ API Error: {e.response.status_code}"
    except Exception as e:
        logger.error("Exception in cves_by_cpe: " + str(e))
        logger.debug(traceback.format_exc())
        return f"❌ Error: {str(e)}"
|
|
352
|
+
|
|
353
|
+
@mcp.tool()
async def kevs_between(
    kevStartDate: str = "",
    kevEndDate: str = "",
    resultsPerPage: str = "20",
    startIndex: str = "0"
) -> str:
    """
    List CVEs added to CISA KEV catalog in a date window.
    If the requested window exceeds 90 days, the query is automatically
    split into multiple chunks (max 90 days each) and results are aggregated.
    """
    kevStartDate = _safe_str(kevStartDate)
    kevEndDate = _safe_str(kevEndDate)
    if not kevStartDate or not kevEndDate:
        return "❌ Error: kevStartDate and kevEndDate parameters are required and must be ISO-8601"

    # Pagination params arrive as strings; fall back to sane defaults.
    rpp = _int_or_default(resultsPerPage, 20)
    sidx = _int_or_default(startIndex, 0)

    try:
        # "Z" suffix is normalized so datetime.fromisoformat accepts it.
        dt_start = datetime.fromisoformat(kevStartDate.replace("Z", "+00:00"))
        dt_end = datetime.fromisoformat(kevEndDate.replace("Z", "+00:00"))

        if dt_end <= dt_start:
            return "❌ Error: kevEndDate must be after kevStartDate"

        # Split the window into consecutive chunks of at most 90 days.
        chunk_size = timedelta(days=90)
        chunks = []
        chunk_start = dt_start
        while chunk_start < dt_end:
            chunk_end = min(chunk_start + chunk_size, dt_end)
            chunks.append((chunk_start, chunk_end))
            chunk_start = chunk_end

        async def fetch_chunk(cs, ce):
            # Fetch one chunk. "hasKev" is sent as a valueless flag.
            # NOTE(review): rpp/sidx are applied per chunk, so pagination and
            # the aggregated total are per-chunk semantics — confirm intended.
            params = {
                "hasKev": "",
                "kevStartDate": cs.strftime("%Y-%m-%dT%H:%M:%S.000"),
                "kevEndDate": ce.strftime("%Y-%m-%dT%H:%M:%S.000"),
                "resultsPerPage": str(rpp),
                "startIndex": str(sidx),
            }
            url = f"{NVD_BASE}/cves{NVD_VERSION}"
            logger.info(f"kevs_between chunk: {params['kevStartDate']} -> {params['kevEndDate']}")
            async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:
                resp = await client.get(url, params=params)
                resp.raise_for_status()
                return resp.json()

        # Run all chunk queries concurrently; failures are logged and skipped.
        tasks = [fetch_chunk(cs, ce) for cs, ce in chunks]
        responses = await asyncio.gather(*tasks, return_exceptions=True)

        total = 0
        vulns: list[dict] = []

        for resp in responses:
            if isinstance(resp, Exception):
                logger.error(f"KEV chunk failed: {resp}")
                continue

            data = cast(dict, resp)
            total += data.get("totalResults", 0)
            vulns.extend(data.get("vulnerabilities", []) or [])

        # Newest first across all chunks (sorted by CVE publication date).
        vulns.sort(
            key=lambda v: v.get("cve", {}).get("published", ""),
            reverse=True
        )

        lines = [
            f"🔥 KEV CVEs added between {kevStartDate} and {kevEndDate} - total matches (aggregated): {total}"
        ]
        if not vulns:
            lines.append("⚠️ No KEV CVEs returned for the given window.")
        else:
            for v in vulns:
                lines.append(_format_vuln_entry(v))
        lines.append(f"📄 Aggregated across {len(chunks)} chunk(s), resultsPerPage={rpp} startIndex={sidx}")

        return "\n".join(lines)

    except ValueError:
        # Raised by datetime.fromisoformat on malformed input.
        return "❌ Error: kevStartDate and kevEndDate must be valid ISO-8601 timestamps"
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error in kevs_between: {e.response.status_code}")
        return f"❌ API Error: {e.response.status_code}"
    except Exception as e:
        logger.error("Exception in kevs_between: " + str(e))
        logger.debug(traceback.format_exc())
        return f"❌ Error: {str(e)}"
|
|
444
|
+
|
|
445
|
+
@mcp.tool()
async def cve_change_history(
    cve_id: str = "",
    changeStartDate: str = "",
    changeEndDate: str = "",
    resultsPerPage: str = "20",
    startIndex: str = "0"
) -> str:
    """
    Retrieve change history for a CVE or a time window.

    If no cve_id is provided and the date range exceeds 120 days,
    the query is split into multiple chunks (max 120 days each),
    fetched in parallel, and results aggregated.

    Args:
        cve_id: Specific CVE identifier; when set, the date parameters are ignored.
        changeStartDate: ISO-8601 start of the change window (required if cve_id is empty).
        changeEndDate: ISO-8601 end of the change window (required if cve_id is empty).
        resultsPerPage: Page size forwarded to the NVD API (default 20).
        startIndex: Offset forwarded to the NVD API (default 0).

    Returns:
        A newline-joined, human-readable summary, or an "❌ ..."-prefixed
        error string on invalid input or API failure.
    """
    cve_id = _safe_str(cve_id)
    rpp = _int_or_default(resultsPerPage, 20)
    sidx = _int_or_default(startIndex, 0)
    url = f"{NVD_BASE}/cvehistory{NVD_VERSION}"

    try:
        # Stays empty on the cve_id path; the aggregation footer at the
        # bottom is only emitted for multi-chunk date-range queries.
        chunks: list[tuple[datetime, datetime]] = []

        if cve_id:
            params = {"cveId": cve_id, "resultsPerPage": str(rpp), "startIndex": str(sidx)}
            logger.info(f"cve_change_history: cveId={cve_id}")
            async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:
                resp = await client.get(url, params=params)
                resp.raise_for_status()
                data = resp.json()
            changes = data.get("cveChanges", []) or []
            total = data.get("totalResults", 0)
        else:
            changeStartDate = _safe_str(changeStartDate)
            changeEndDate = _safe_str(changeEndDate)
            if not changeStartDate or not changeEndDate:
                return "❌ Error: either cve_id or both changeStartDate and changeEndDate are required"

            # May raise ValueError -> mapped to a friendly message below.
            dt_start = datetime.fromisoformat(changeStartDate.replace("Z", "+00:00"))
            dt_end = datetime.fromisoformat(changeEndDate.replace("Z", "+00:00"))

            if dt_end <= dt_start:
                return "❌ Error: changeEndDate must be after changeStartDate"

            # The NVD change-history endpoint caps date ranges at 120 days,
            # so split the requested window into compliant chunks.
            chunk_size = timedelta(days=120)
            chunk_start = dt_start
            while chunk_start < dt_end:
                chunk_end = min(chunk_start + chunk_size, dt_end)
                chunks.append((chunk_start, chunk_end))
                chunk_start = chunk_end

            # Share ONE client across all chunk requests so the underlying
            # connection pool is reused instead of re-established per chunk.
            async with httpx.AsyncClient(timeout=API_TIMEOUT) as client:

                async def fetch_chunk(cs, ce):
                    # One GET per 120-day sub-window; same paging params for each.
                    params = {
                        "changeStartDate": cs.strftime("%Y-%m-%dT%H:%M:%S.000"),
                        "changeEndDate": ce.strftime("%Y-%m-%dT%H:%M:%S.000"),
                        "resultsPerPage": str(rpp),
                        "startIndex": str(sidx)
                    }
                    logger.info(f"cve_change_history chunk: {params['changeStartDate']} -> {params['changeEndDate']}")
                    resp = await client.get(url, params=params)
                    resp.raise_for_status()
                    return resp.json()

                tasks = [fetch_chunk(cs, ce) for cs, ce in chunks]
                # return_exceptions=True: a failed chunk degrades the result
                # instead of failing the whole query.
                responses = await asyncio.gather(*tasks, return_exceptions=True)

            total = 0
            changes: list[dict] = []

            for resp in responses:
                if isinstance(resp, Exception):
                    logger.error(f"CVE change chunk failed: {resp}")
                    continue

                data = cast(dict, resp)
                total += data.get("totalResults", 0)
                changes.extend(data.get("cveChanges", []) or [])

        # Newest events first; ISO-8601 timestamps sort lexicographically.
        changes.sort(
            key=lambda v: v.get("change", {}).get("created", ""),
            reverse=True
        )

        lines = [f"🕘 CVE Change History - total events: {total}"]
        if not changes:
            lines.append("⚠️ No change events returned for the given query.")
        else:
            # Cap the listing at 50 events to keep output LLM-friendly.
            for ch in changes[:50]:
                try:
                    change = ch.get("change", {})
                    cid = change.get("cveId", "UNKNOWN")
                    event = change.get("eventName", "EVENT")
                    created = change.get("created", "")
                    lines.append(f"- {cid} | event: {event} | at: {created}")
                except Exception:
                    lines.append("- (malformed change event)")
        if total > len(changes):
            lines.append(f"📄 Partial list: returned {len(changes)} of {total}")
        if not cve_id and len(chunks) > 1:
            lines.append(f"📄 Aggregated across {len(chunks)} chunk(s), resultsPerPage={rpp} startIndex={sidx}")

        return "\n".join(lines)

    except ValueError:
        return "❌ Error: changeStartDate and changeEndDate must be valid ISO-8601 timestamps"
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error in cve_change_history: {e.response.status_code}")
        return f"❌ API Error: {e.response.status_code}"
    except Exception as e:
        logger.error("Exception in cve_change_history: " + str(e))
        logger.debug(traceback.format_exc())
        return f"❌ Error: {str(e)}"
|
|
557
|
+
|
|
558
|
+
# === SERVER STARTUP ===

def main():
    """Run the MCP server over stdio; exit 0 on Ctrl-C, 1 on any other failure."""
    logger.info(f"Starting NIST Vulnerability MCP server v{__version__}...")
    try:
        mcp.run(transport="stdio")
        return
    except KeyboardInterrupt:
        logger.info("Server stopped by user")
        status = 0
    except Exception as exc:
        logger.error(f"Server error: {exc}", exc_info=True)
        status = 1
    sys.exit(status)

if __name__ == "__main__":
    main()
|