security-detections-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,188 @@
+ # Security Detections MCP
+ 
+ An MCP (Model Context Protocol) server that lets LLMs query a unified database of **Sigma** and **Splunk ESCU** security detection rules.
+ 
+ [![Add to Cursor](https://img.shields.io/badge/Add%20to-Cursor-blue?style=for-the-badge&logo=cursor)](cursor://anysphere.cursor-deeplink/mcp/install?name=security-detections&config=eyJjb21tYW5kIjoibnB4IiwiYXJncyI6WyIteSIsInNlY3VyaXR5LWRldGVjdGlvbnMtbWNwIl0sImVudiI6eyJTSUdNQV9QQVRIUyI6Ii9wYXRoL3RvL3NpZ21hL3J1bGVzLC9wYXRoL3RvL3NpZ21hL3J1bGVzLXRocmVhdC1odW50aW5nIiwiU1BMVU5LX1BBVEhTIjoiL3BhdGgvdG8vc2VjdXJpdHlfY29udGVudC9kZXRlY3Rpb25zIn19)
+ 
+ ## Features
+ 
+ - **Unified Search** - Query both Sigma and Splunk ESCU detections from a single interface
+ - **Full-Text Search** - SQLite FTS5-powered search across names, descriptions, queries, and tags
+ - **MITRE ATT&CK Mapping** - Filter detections by technique ID (e.g., T1059.001)
+ - **Auto-Indexing** - Automatically indexes detections on startup from configured paths
+ - **Logsource Filtering** - Filter Sigma rules by category, product, or service
+ - **Severity Filtering** - Filter by criticality level
+ 
+ ## Quick Start
+ 
+ ### Option 1: npx (Recommended)
+ 
+ No installation required - just configure and run:
+ 
+ ```bash
+ npx -y security-detections-mcp
+ ```
+ 
+ ### Option 2: Clone and Build
+ 
+ ```bash
+ git clone https://github.com/MHaggis/Security-Detections-MCP.git
+ cd Security-Detections-MCP
+ npm install
+ npm run build
+ ```
+ 
+ ## Configuration
+ 
+ ### Cursor IDE
+ 
+ Add to your MCP config (`~/.cursor/mcp.json` or `.cursor/mcp.json` in your project):
+ 
+ ```json
+ {
+   "mcpServers": {
+     "security-detections": {
+       "command": "npx",
+       "args": ["-y", "security-detections-mcp"],
+       "env": {
+         "SIGMA_PATHS": "/path/to/sigma/rules,/path/to/sigma/rules-threat-hunting",
+         "SPLUNK_PATHS": "/path/to/security_content/detections"
+       }
+     }
+   }
+ }
+ ```
+ 
+ ### Claude Desktop
+ 
+ Add to `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS):
+ 
+ ```json
+ {
+   "mcpServers": {
+     "security-detections": {
+       "command": "npx",
+       "args": ["-y", "security-detections-mcp"],
+       "env": {
+         "SIGMA_PATHS": "/Users/you/sigma/rules,/Users/you/sigma/rules-threat-hunting",
+         "SPLUNK_PATHS": "/Users/you/security_content/detections"
+       }
+     }
+   }
+ }
+ ```
+ 
+ ### Environment Variables
+ 
+ | Variable | Description | Example |
+ |----------|-------------|---------|
+ | `SIGMA_PATHS` | Comma-separated paths to Sigma rule directories | `/path/to/sigma/rules,/path/to/sigma/rules-threat-hunting` |
+ | `SPLUNK_PATHS` | Comma-separated paths to Splunk ESCU detection directories | `/path/to/security_content/detections` |
+ 
+ ## Getting Detection Content
+ 
+ ### Sigma Rules
+ 
+ ```bash
+ git clone https://github.com/SigmaHQ/sigma.git
+ # Use rules/ and rules-threat-hunting/ directories
+ ```
+ 
+ ### Splunk ESCU
+ 
+ ```bash
+ git clone https://github.com/splunk/security_content.git
+ # Use detections/ directory
+ ```
+ 
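+ For example, if both repos were cloned under `~/src` (an illustrative layout, not a requirement), pointing the server at them would look like:
+ 
+ ```bash
+ # Hypothetical clone locations - adjust to wherever you cloned the repos
+ export SIGMA_PATHS="$HOME/src/sigma/rules,$HOME/src/sigma/rules-threat-hunting"
+ export SPLUNK_PATHS="$HOME/src/security_content/detections"
+ ```
+ 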
+ ## MCP Tools
+ 
+ | Tool | Description |
+ |------|-------------|
+ | `search(query, limit)` | Full-text search across all detection fields |
+ | `get_by_id(id)` | Get a single detection by its ID |
+ | `list_all(limit, offset)` | Paginated list of all detections |
+ | `list_by_source(source_type)` | Filter by `sigma` or `splunk_escu` |
+ | `list_by_mitre(technique_id)` | Filter by MITRE ATT&CK technique ID |
+ | `list_by_logsource(category, product, service)` | Filter Sigma rules by logsource |
+ | `list_by_severity(level)` | Filter by severity (informational/low/medium/high/critical) |
+ | `get_stats()` | Get index statistics |
+ | `rebuild_index()` | Force re-index from configured paths |
+ | `get_raw_yaml(id)` | Get the original YAML content |
+ 
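+ The `search` tool hands its query string straight to SQLite FTS5, so standard FTS5 operators should work (illustrative calls, assuming default FTS5 query syntax):
+ 
+ ```
+ search(query="powershell base64")             # implicit AND of both terms
+ search(query='"scheduled task" OR schtasks')  # phrase match plus OR
+ search(query="mimikatz NOT test", limit=10)   # exclude a term, cap results
+ ```
+ 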
+ ## Example Workflow
+ 
+ 1. **Ask the LLM**: "Find me PowerShell detections related to base64 encoding"
+ 
+ 2. **LLM calls**: `search(query="powershell base64", limit=5)`
+ 
+ 3. **LLM receives**: Top 5 detections with names, descriptions, and detection logic
+ 
+ 4. **LLM explores**: Uses `get_by_id` to get full details on interesting detections
+ 
+ 5. **LLM filters by MITRE**: `list_by_mitre(technique_id="T1059.001")` to find all PowerShell execution detections
+ 
+ ## Unified Schema
+ 
+ Both Sigma and Splunk ESCU detections are normalized to a common schema:
+ 
+ | Field | Description |
+ |-------|-------------|
+ | `id` | Unique identifier (UUID for Sigma, ID field for Splunk) |
+ | `name` | Detection name/title |
+ | `description` | What the detection looks for |
+ | `query` | Detection logic (Sigma YAML or Splunk SPL) |
+ | `source_type` | `sigma` or `splunk_escu` |
+ | `mitre_ids` | Mapped MITRE ATT&CK technique IDs |
+ | `logsource_category` | Sigma logsource category |
+ | `logsource_product` | Sigma logsource product (windows, linux, etc.) |
+ | `logsource_service` | Sigma logsource service |
+ | `severity` | Detection severity level |
+ | `status` | Rule status (stable, test, experimental, etc.) |
+ | `author` | Rule author |
+ | `date_created` | Creation date |
+ | `date_modified` | Last modification date |
+ | `references` | External references |
+ | `falsepositives` | Known false positive scenarios |
+ | `tags` | All tags (MITRE, analytic stories, etc.) |
+ | `file_path` | Original file path |
+ | `raw_yaml` | Original YAML content |
+ 
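+ As an illustration, a normalized Sigma record might look like this (abridged; all field values here are hypothetical):
+ 
+ ```json
+ {
+   "id": "fb843269-508c-4b76-8b8d-88679db22ce7",
+   "name": "Suspicious PowerShell Encoded Command",
+   "source_type": "sigma",
+   "mitre_ids": ["T1059.001"],
+   "logsource_category": "process_creation",
+   "logsource_product": "windows",
+   "severity": "high",
+   "status": "experimental",
+   "tags": ["attack.execution", "attack.t1059.001"]
+ }
+ ```
+ 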
+ ## Database
+ 
+ The index is stored at `~/.cache/security-detections-mcp/detections.sqlite`.
+ 
+ - Auto-created on first run
+ - Auto-indexed when paths are configured
+ - Use `rebuild_index()` to refresh after updating detection repos
+ 
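+ Since the index is a plain SQLite file, it can also be inspected directly, e.g. with the `sqlite3` CLI (assuming it is installed):
+ 
+ ```bash
+ sqlite3 ~/.cache/security-detections-mcp/detections.sqlite \
+   "SELECT source_type, COUNT(*) FROM detections GROUP BY source_type;"
+ ```
+ 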
+ ## Supported Detection Formats
+ 
+ ### Sigma Rules
+ 
+ Based on the [official Sigma specification](https://github.com/SigmaHQ/sigma-specification):
+ - All required fields: `title`, `logsource`, `detection`
+ - All optional fields: `id`, `status`, `description`, `author`, `date`, `modified`, `references`, `tags`, `level`, `falsepositives`, etc.
+ 
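+ For reference, a minimal rule containing only the required fields might look like this (a hypothetical example, not taken from the Sigma repos):
+ 
+ ```yaml
+ title: Suspicious Encoded PowerShell Command
+ logsource:
+   category: process_creation
+   product: windows
+ detection:
+   selection:
+     CommandLine|contains: ' -EncodedCommand '
+   condition: selection
+ ```
+ 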
+ ### Splunk ESCU
+ 
+ From [Splunk Security Content](https://github.com/splunk/security_content):
+ - Required: `name`, `id`, `search`
+ - Optional: `description`, `author`, `date`, `status`, `references`, `tags` (including `mitre_attack_id`, `analytic_story`)
+ 
+ ## Development
+ 
+ ```bash
+ # Install dependencies
+ npm install
+ 
+ # Build
+ npm run build
+ 
+ # Run with paths
+ SIGMA_PATHS="./detections/sigma/rules" SPLUNK_PATHS="./detections/splunk/detections" npm start
+ ```
+ 
+ ## License
+ 
+ Apache 2.0
package/dist/db.d.ts ADDED
@@ -0,0 +1,17 @@
+ import Database from 'better-sqlite3';
+ import type { Detection, IndexStats } from './types.js';
+ export declare function getDbPath(): string;
+ export declare function initDb(): Database.Database;
+ export declare function clearDb(): void;
+ export declare function insertDetection(detection: Detection): void;
+ export declare function searchDetections(query: string, limit?: number): Detection[];
+ export declare function getDetectionById(id: string): Detection | null;
+ export declare function listDetections(limit?: number, offset?: number): Detection[];
+ export declare function listBySource(sourceType: 'sigma' | 'splunk_escu', limit?: number, offset?: number): Detection[];
+ export declare function listByMitre(techniqueId: string, limit?: number, offset?: number): Detection[];
+ export declare function listByLogsource(category?: string, product?: string, service?: string, limit?: number, offset?: number): Detection[];
+ export declare function listBySeverity(level: string, limit?: number, offset?: number): Detection[];
+ export declare function getStats(): IndexStats;
+ export declare function getRawYaml(id: string): string | null;
+ export declare function dbExists(): boolean;
+ export declare function getDetectionCount(): number;
package/dist/db.js ADDED
@@ -0,0 +1,248 @@
+ import Database from 'better-sqlite3';
+ import { homedir } from 'os';
+ import { join } from 'path';
+ import { mkdirSync, existsSync } from 'fs';
+ const CACHE_DIR = join(homedir(), '.cache', 'security-detections-mcp');
+ const DB_PATH = join(CACHE_DIR, 'detections.sqlite');
+ let db = null;
+ export function getDbPath() {
+     return DB_PATH;
+ }
+ export function initDb() {
+     if (db)
+         return db;
+     // Ensure cache directory exists
+     if (!existsSync(CACHE_DIR)) {
+         mkdirSync(CACHE_DIR, { recursive: true });
+     }
+     db = new Database(DB_PATH);
+     // Create main detections table
+     db.exec(`
+         CREATE TABLE IF NOT EXISTS detections (
+             id TEXT PRIMARY KEY,
+             name TEXT NOT NULL,
+             description TEXT,
+             query TEXT,
+             source_type TEXT NOT NULL,
+             mitre_ids TEXT,
+             logsource_category TEXT,
+             logsource_product TEXT,
+             logsource_service TEXT,
+             severity TEXT,
+             status TEXT,
+             author TEXT,
+             date_created TEXT,
+             date_modified TEXT,
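+             -- "references" is a reserved word in SQL; stored as "refs" and mapped back to "references" in the API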
+             refs TEXT,
+             falsepositives TEXT,
+             tags TEXT,
+             file_path TEXT,
+             raw_yaml TEXT
+         )
+     `);
+     // Create FTS5 virtual table for full-text search
+     db.exec(`
+         CREATE VIRTUAL TABLE IF NOT EXISTS detections_fts USING fts5(
+             id,
+             name,
+             description,
+             query,
+             mitre_ids,
+             tags,
+             content='detections',
+             content_rowid='rowid'
+         )
+     `);
+     // Create triggers to keep FTS in sync
+     db.exec(`
+         CREATE TRIGGER IF NOT EXISTS detections_ai AFTER INSERT ON detections BEGIN
+             INSERT INTO detections_fts(rowid, id, name, description, query, mitre_ids, tags)
+             VALUES (NEW.rowid, NEW.id, NEW.name, NEW.description, NEW.query, NEW.mitre_ids, NEW.tags);
+         END
+     `);
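+     // External-content FTS5 tables can't read the deleted row themselves, so removals
+     // are signaled by inserting the special 'delete' command row with the old values.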
+     db.exec(`
+         CREATE TRIGGER IF NOT EXISTS detections_ad AFTER DELETE ON detections BEGIN
+             INSERT INTO detections_fts(detections_fts, rowid, id, name, description, query, mitre_ids, tags)
+             VALUES ('delete', OLD.rowid, OLD.id, OLD.name, OLD.description, OLD.query, OLD.mitre_ids, OLD.tags);
+         END
+     `);
+     db.exec(`
+         CREATE TRIGGER IF NOT EXISTS detections_au AFTER UPDATE ON detections BEGIN
+             INSERT INTO detections_fts(detections_fts, rowid, id, name, description, query, mitre_ids, tags)
+             VALUES ('delete', OLD.rowid, OLD.id, OLD.name, OLD.description, OLD.query, OLD.mitre_ids, OLD.tags);
+             INSERT INTO detections_fts(rowid, id, name, description, query, mitre_ids, tags)
+             VALUES (NEW.rowid, NEW.id, NEW.name, NEW.description, NEW.query, NEW.mitre_ids, NEW.tags);
+         END
+     `);
+     // Create indexes for common queries
+     db.exec(`CREATE INDEX IF NOT EXISTS idx_source_type ON detections(source_type)`);
+     db.exec(`CREATE INDEX IF NOT EXISTS idx_severity ON detections(severity)`);
+     db.exec(`CREATE INDEX IF NOT EXISTS idx_logsource_product ON detections(logsource_product)`);
+     db.exec(`CREATE INDEX IF NOT EXISTS idx_logsource_category ON detections(logsource_category)`);
+     return db;
+ }
+ export function clearDb() {
+     const database = initDb();
+     database.exec('DELETE FROM detections');
+ }
+ export function insertDetection(detection) {
+     const database = initDb();
+     const stmt = database.prepare(`
+         INSERT OR REPLACE INTO detections
+             (id, name, description, query, source_type, mitre_ids, logsource_category,
+              logsource_product, logsource_service, severity, status, author,
+              date_created, date_modified, refs, falsepositives, tags, file_path, raw_yaml)
+         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+     `);
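+     // Array-valued fields (mitre_ids, references, falsepositives, tags) are serialized to JSON strings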
+     stmt.run(detection.id, detection.name, detection.description, detection.query,
+         detection.source_type, JSON.stringify(detection.mitre_ids), detection.logsource_category,
+         detection.logsource_product, detection.logsource_service, detection.severity,
+         detection.status, detection.author, detection.date_created, detection.date_modified,
+         JSON.stringify(detection.references), JSON.stringify(detection.falsepositives),
+         JSON.stringify(detection.tags), detection.file_path, detection.raw_yaml);
+ }
+ function rowToDetection(row) {
+     return {
+         id: row.id,
+         name: row.name,
+         description: row.description || '',
+         query: row.query || '',
+         source_type: row.source_type,
+         mitre_ids: JSON.parse(row.mitre_ids || '[]'),
+         logsource_category: row.logsource_category,
+         logsource_product: row.logsource_product,
+         logsource_service: row.logsource_service,
+         severity: row.severity,
+         status: row.status,
+         author: row.author,
+         date_created: row.date_created,
+         date_modified: row.date_modified,
+         references: JSON.parse(row.refs || '[]'),
+         falsepositives: JSON.parse(row.falsepositives || '[]'),
+         tags: JSON.parse(row.tags || '[]'),
+         file_path: row.file_path,
+         raw_yaml: row.raw_yaml,
+     };
+ }
+ export function searchDetections(query, limit = 50) {
+     const database = initDb();
+     // Use FTS5 for search
+     const stmt = database.prepare(`
+         SELECT d.* FROM detections d
+         JOIN detections_fts fts ON d.rowid = fts.rowid
+         WHERE detections_fts MATCH ?
+         ORDER BY rank
+         LIMIT ?
+     `);
+     const rows = stmt.all(query, limit);
+     return rows.map(rowToDetection);
+ }
+ export function getDetectionById(id) {
+     const database = initDb();
+     const stmt = database.prepare('SELECT * FROM detections WHERE id = ?');
+     const row = stmt.get(id);
+     return row ? rowToDetection(row) : null;
+ }
+ export function listDetections(limit = 100, offset = 0) {
+     const database = initDb();
+     const stmt = database.prepare('SELECT * FROM detections ORDER BY name LIMIT ? OFFSET ?');
+     const rows = stmt.all(limit, offset);
+     return rows.map(rowToDetection);
+ }
+ export function listBySource(sourceType, limit = 100, offset = 0) {
+     const database = initDb();
+     const stmt = database.prepare('SELECT * FROM detections WHERE source_type = ? ORDER BY name LIMIT ? OFFSET ?');
+     const rows = stmt.all(sourceType, limit, offset);
+     return rows.map(rowToDetection);
+ }
+ export function listByMitre(techniqueId, limit = 100, offset = 0) {
+     const database = initDb();
+     // Search in JSON array
+     const stmt = database.prepare(`
+         SELECT * FROM detections
+         WHERE mitre_ids LIKE ?
+         ORDER BY name
+         LIMIT ? OFFSET ?
+     `);
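+     // mitre_ids is a JSON string, so LIKE '%"T1059.001"%' matches the quoted ID inside the serialized array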
+     const rows = stmt.all(`%"${techniqueId}"%`, limit, offset);
+     return rows.map(rowToDetection);
+ }
+ export function listByLogsource(category, product, service, limit = 100, offset = 0) {
+     const database = initDb();
+     let sql = 'SELECT * FROM detections WHERE 1=1';
+     const params = [];
+     if (category) {
+         sql += ' AND logsource_category = ?';
+         params.push(category);
+     }
+     if (product) {
+         sql += ' AND logsource_product = ?';
+         params.push(product);
+     }
+     if (service) {
+         sql += ' AND logsource_service = ?';
+         params.push(service);
+     }
+     sql += ' ORDER BY name LIMIT ? OFFSET ?';
+     params.push(limit, offset);
+     const stmt = database.prepare(sql);
+     const rows = stmt.all(...params);
+     return rows.map(rowToDetection);
+ }
+ export function listBySeverity(level, limit = 100, offset = 0) {
+     const database = initDb();
+     const stmt = database.prepare('SELECT * FROM detections WHERE severity = ? ORDER BY name LIMIT ? OFFSET ?');
+     const rows = stmt.all(level, limit, offset);
+     return rows.map(rowToDetection);
+ }
+ export function getStats() {
+     const database = initDb();
+     const total = database.prepare('SELECT COUNT(*) as count FROM detections').get().count;
+     const sigma = database.prepare("SELECT COUNT(*) as count FROM detections WHERE source_type = 'sigma'").get().count;
+     const splunk = database.prepare("SELECT COUNT(*) as count FROM detections WHERE source_type = 'splunk_escu'").get().count;
+     // Count by severity
+     const severityRows = database.prepare(`
+         SELECT severity, COUNT(*) as count FROM detections
+         WHERE severity IS NOT NULL
+         GROUP BY severity
+     `).all();
+     const by_severity = {};
+     for (const row of severityRows) {
+         by_severity[row.severity] = row.count;
+     }
+     // Count by logsource product
+     const productRows = database.prepare(`
+         SELECT logsource_product, COUNT(*) as count FROM detections
+         WHERE logsource_product IS NOT NULL
+         GROUP BY logsource_product
+         ORDER BY count DESC
+         LIMIT 20
+     `).all();
+     const by_logsource_product = {};
+     for (const row of productRows) {
+         by_logsource_product[row.logsource_product] = row.count;
+     }
+     // Count detections with MITRE mappings
+     const mitre_coverage = database.prepare(`
+         SELECT COUNT(*) as count FROM detections
+         WHERE mitre_ids != '[]' AND mitre_ids IS NOT NULL
+     `).get().count;
+     return {
+         total,
+         sigma,
+         splunk_escu: splunk,
+         by_severity,
+         by_logsource_product,
+         mitre_coverage,
+     };
+ }
+ export function getRawYaml(id) {
+     const database = initDb();
+     const stmt = database.prepare('SELECT raw_yaml FROM detections WHERE id = ?');
+     const row = stmt.get(id);
+     return row?.raw_yaml || null;
+ }
+ export function dbExists() {
+     return existsSync(DB_PATH);
+ }
+ export function getDetectionCount() {
+     if (!dbExists())
+         return 0;
+     const database = initDb();
+     return database.prepare('SELECT COUNT(*) as count FROM detections').get().count;
+ }
package/dist/index.d.ts ADDED
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
package/dist/index.js ADDED
@@ -0,0 +1,402 @@
+ #!/usr/bin/env node
+ import { Server } from '@modelcontextprotocol/sdk/server/index.js';
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
+ import { CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextprotocol/sdk/types.js';
+ import { searchDetections, getDetectionById, listDetections, listBySource, listByMitre, listByLogsource, listBySeverity, getStats, getRawYaml, getDbPath, initDb, } from './db.js';
+ import { indexDetections, needsIndexing } from './indexer.js';
+ // Parse comma-separated paths from env var
+ function parsePaths(envVar) {
+     if (!envVar)
+         return [];
+     return envVar.split(',').map(p => p.trim()).filter(p => p.length > 0);
+ }
+ // Get configured paths from environment
+ const SIGMA_PATHS = parsePaths(process.env.SIGMA_PATHS);
+ const SPLUNK_PATHS = parsePaths(process.env.SPLUNK_PATHS);
+ // Auto-index on startup if paths are configured and DB is empty
+ function autoIndex() {
+     if (SIGMA_PATHS.length === 0 && SPLUNK_PATHS.length === 0) {
+         return;
+     }
+     initDb();
+     if (needsIndexing()) {
+         console.error('[security-detections-mcp] Auto-indexing detections...');
+         const result = indexDetections(SIGMA_PATHS, SPLUNK_PATHS);
+         console.error(`[security-detections-mcp] Indexed ${result.total} detections (${result.sigma_indexed} Sigma, ${result.splunk_indexed} Splunk ESCU)`);
+     }
+ }
+ // Create MCP server
+ const server = new Server({
+     name: 'security-detections-mcp',
+     version: '1.0.0',
+ }, {
+     capabilities: {
+         tools: {},
+     },
+ });
+ // Define available tools
+ server.setRequestHandler(ListToolsRequestSchema, async () => {
+     return {
+         tools: [
+             {
+                 name: 'search',
+                 description: 'Full-text search across all detection fields (name, description, query, MITRE IDs, tags)',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         query: {
+                             type: 'string',
+                             description: 'Search query (FTS5 syntax supported)',
+                         },
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 50)',
+                         },
+                     },
+                     required: ['query'],
+                 },
+             },
+             {
+                 name: 'get_by_id',
+                 description: 'Get a single detection by its ID',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         id: {
+                             type: 'string',
+                             description: 'Detection ID (UUID for Sigma, or Splunk detection ID)',
+                         },
+                     },
+                     required: ['id'],
+                 },
+             },
+             {
+                 name: 'list_all',
+                 description: 'List all detections with pagination',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 100)',
+                         },
+                         offset: {
+                             type: 'number',
+                             description: 'Offset for pagination (default 0)',
+                         },
+                     },
+                 },
+             },
+             {
+                 name: 'list_by_source',
+                 description: 'List detections filtered by source type',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         source_type: {
+                             type: 'string',
+                             enum: ['sigma', 'splunk_escu'],
+                             description: 'Source type to filter by',
+                         },
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 100)',
+                         },
+                         offset: {
+                             type: 'number',
+                             description: 'Offset for pagination (default 0)',
+                         },
+                     },
+                     required: ['source_type'],
+                 },
+             },
+             {
+                 name: 'list_by_mitre',
+                 description: 'List detections that map to a specific MITRE ATT&CK technique',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         technique_id: {
+                             type: 'string',
+                             description: 'MITRE ATT&CK technique ID (e.g., T1059.001)',
+                         },
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 100)',
+                         },
+                         offset: {
+                             type: 'number',
+                             description: 'Offset for pagination (default 0)',
+                         },
+                     },
+                     required: ['technique_id'],
+                 },
+             },
+             {
+                 name: 'list_by_logsource',
+                 description: 'List Sigma detections filtered by logsource (category, product, or service)',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         category: {
+                             type: 'string',
+                             description: 'Logsource category (e.g., process_creation, network_connection)',
+                         },
+                         product: {
+                             type: 'string',
+                             description: 'Logsource product (e.g., windows, linux, aws)',
+                         },
+                         service: {
+                             type: 'string',
+                             description: 'Logsource service (e.g., sysmon, security, powershell)',
+                         },
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 100)',
+                         },
+                         offset: {
+                             type: 'number',
+                             description: 'Offset for pagination (default 0)',
+                         },
+                     },
+                 },
+             },
+             {
+                 name: 'list_by_severity',
+                 description: 'List detections filtered by severity level',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         level: {
+                             type: 'string',
+                             enum: ['informational', 'low', 'medium', 'high', 'critical'],
+                             description: 'Severity level to filter by',
+                         },
+                         limit: {
+                             type: 'number',
+                             description: 'Max results to return (default 100)',
+                         },
+                         offset: {
+                             type: 'number',
+                             description: 'Offset for pagination (default 0)',
+                         },
+                     },
+                     required: ['level'],
+                 },
+             },
+             {
+                 name: 'get_stats',
+                 description: 'Get statistics about the indexed detections',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {},
+                 },
+             },
+             {
+                 name: 'rebuild_index',
+                 description: 'Force re-index all detections from configured paths',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {},
+                 },
+             },
+             {
+                 name: 'get_raw_yaml',
+                 description: 'Get the original YAML content for a detection',
+                 inputSchema: {
+                     type: 'object',
+                     properties: {
+                         id: {
+                             type: 'string',
+                             description: 'Detection ID',
+                         },
+                     },
+                     required: ['id'],
+                 },
+             },
+         ],
+     };
+ });
+ // Handle tool calls
+ server.setRequestHandler(CallToolRequestSchema, async (request) => {
+     const { name, arguments: args } = request.params;
+     try {
+         switch (name) {
+             case 'search': {
+                 const query = args?.query;
+                 const limit = args?.limit || 50;
+                 if (!query) {
+                     return { content: [{ type: 'text', text: 'Error: query is required' }] };
+                 }
+                 const results = searchDetections(query, limit);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'get_by_id': {
+                 const id = args?.id;
+                 if (!id) {
+                     return { content: [{ type: 'text', text: 'Error: id is required' }] };
+                 }
+                 const detection = getDetectionById(id);
+                 if (!detection) {
+                     return { content: [{ type: 'text', text: `Detection not found: ${id}` }] };
+                 }
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(detection, null, 2),
+                     }],
+                 };
+             }
+             case 'list_all': {
+                 const limit = args?.limit || 100;
+                 const offset = args?.offset || 0;
+                 const results = listDetections(limit, offset);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'list_by_source': {
+                 const sourceType = args?.source_type;
+                 const limit = args?.limit || 100;
+                 const offset = args?.offset || 0;
+                 if (!sourceType) {
+                     return { content: [{ type: 'text', text: 'Error: source_type is required' }] };
+                 }
+                 const results = listBySource(sourceType, limit, offset);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'list_by_mitre': {
+                 const techniqueId = args?.technique_id;
+                 const limit = args?.limit || 100;
+                 const offset = args?.offset || 0;
+                 if (!techniqueId) {
+                     return { content: [{ type: 'text', text: 'Error: technique_id is required' }] };
+                 }
+                 const results = listByMitre(techniqueId, limit, offset);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'list_by_logsource': {
+                 const category = args?.category;
+                 const product = args?.product;
+                 const service = args?.service;
+                 const limit = args?.limit || 100;
+                 const offset = args?.offset || 0;
+                 const results = listByLogsource(category, product, service, limit, offset);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'list_by_severity': {
+                 const level = args?.level;
+                 const limit = args?.limit || 100;
+                 const offset = args?.offset || 0;
+                 if (!level) {
+                     return { content: [{ type: 'text', text: 'Error: level is required' }] };
+                 }
+                 const results = listBySeverity(level, limit, offset);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(results, null, 2),
+                     }],
+                 };
+             }
+             case 'get_stats': {
+                 const stats = getStats();
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify(stats, null, 2),
+                     }],
+                 };
+             }
+             case 'rebuild_index': {
+                 if (SIGMA_PATHS.length === 0 && SPLUNK_PATHS.length === 0) {
+                     return {
+                         content: [{
+                             type: 'text',
+                             text: 'Error: No paths configured. Set SIGMA_PATHS and/or SPLUNK_PATHS environment variables.',
+                         }],
+                     };
+                 }
+                 const result = indexDetections(SIGMA_PATHS, SPLUNK_PATHS);
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: JSON.stringify({
+                             message: 'Index rebuilt successfully',
+                             ...result,
+                             db_path: getDbPath(),
+                         }, null, 2),
+                     }],
+                 };
+             }
+             case 'get_raw_yaml': {
+                 const id = args?.id;
+                 if (!id) {
+                     return { content: [{ type: 'text', text: 'Error: id is required' }] };
+                 }
+                 const yaml = getRawYaml(id);
+                 if (!yaml) {
+                     return { content: [{ type: 'text', text: `Detection not found: ${id}` }] };
+                 }
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: yaml,
+                     }],
+                 };
+             }
+             default:
+                 return {
+                     content: [{
+                         type: 'text',
+                         text: `Unknown tool: ${name}`,
+                     }],
+                 };
+         }
+     }
+     catch (error) {
+         const message = error instanceof Error ? error.message : String(error);
+         return {
+             content: [{
+                 type: 'text',
+                 text: `Error: ${message}`,
+             }],
+         };
+     }
+ });
+ // Main entry point
+ async function main() {
+     // Auto-index on startup
+     autoIndex();
+     // Start MCP server
+     const transport = new StdioServerTransport();
+     await server.connect(transport);
+     console.error('[security-detections-mcp] Server started');
+ }
+ main().catch((error) => {
+     console.error('[security-detections-mcp] Fatal error:', error);
+     process.exit(1);
+ });
package/dist/indexer.d.ts ADDED
@@ -0,0 +1,9 @@
+ export interface IndexResult {
+     sigma_indexed: number;
+     sigma_failed: number;
+     splunk_indexed: number;
+     splunk_failed: number;
+     total: number;
+ }
+ export declare function indexDetections(sigmaPaths: string[], splunkPaths: string[]): IndexResult;
+ export declare function needsIndexing(): boolean;
package/dist/indexer.js ADDED
@@ -0,0 +1,80 @@
+ import { readdirSync, statSync } from 'fs';
+ import { join, extname } from 'path';
+ import { parseSigmaFile } from './parsers/sigma.js';
+ import { parseSplunkFile } from './parsers/splunk.js';
+ import { clearDb, insertDetection, getDetectionCount } from './db.js';
+ // Recursively find all YAML files in a directory
+ function findYamlFiles(dir) {
+     const files = [];
+     try {
+         const entries = readdirSync(dir);
+         for (const entry of entries) {
+             const fullPath = join(dir, entry);
+             try {
+                 const stat = statSync(fullPath);
+                 if (stat.isDirectory()) {
+                     files.push(...findYamlFiles(fullPath));
+                 }
+                 else if (stat.isFile()) {
+                     const ext = extname(entry).toLowerCase();
+                     if (ext === '.yml' || ext === '.yaml') {
+                         files.push(fullPath);
+                     }
+                 }
+             }
+             catch {
+                 // Skip files we can't stat
+             }
+         }
+     }
+     catch {
+         // Skip directories we can't read
+     }
+     return files;
+ }
+ export function indexDetections(sigmaPaths, splunkPaths) {
+     // Clear existing data
+     clearDb();
+     let sigma_indexed = 0;
+     let sigma_failed = 0;
+     let splunk_indexed = 0;
+     let splunk_failed = 0;
+     // Index Sigma rules
+     for (const basePath of sigmaPaths) {
+         const files = findYamlFiles(basePath);
+         for (const file of files) {
+             const detection = parseSigmaFile(file);
+             if (detection) {
+                 insertDetection(detection);
+                 sigma_indexed++;
+             }
+             else {
+                 sigma_failed++;
+             }
+         }
+     }
+     // Index Splunk ESCU detections
+     for (const basePath of splunkPaths) {
+         const files = findYamlFiles(basePath);
+         for (const file of files) {
+             const detection = parseSplunkFile(file);
+             if (detection) {
+                 insertDetection(detection);
+                 splunk_indexed++;
+             }
+             else {
+                 splunk_failed++;
+             }
+         }
+     }
+     return {
+         sigma_indexed,
+         sigma_failed,
+         splunk_indexed,
+         splunk_failed,
+         total: sigma_indexed + splunk_indexed,
+     };
+ }
+ export function needsIndexing() {
+     return getDetectionCount() === 0;
+ }
package/dist/parsers/sigma.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type { Detection } from '../types.js';
+ export declare function parseSigmaFile(filePath: string): Detection | null;
package/dist/parsers/sigma.js ADDED
@@ -0,0 +1,88 @@
+ import { readFileSync } from 'fs';
+ import { parse as parseYaml, stringify as stringifyYaml } from 'yaml';
+ import { createHash } from 'crypto';
+ // Extract MITRE ATT&CK IDs from Sigma tags
+ // Tags like: attack.t1059.001, attack.execution, attack.s0154, attack.g0032
+ function extractMitreIds(tags) {
+     if (!tags)
+         return [];
+     const mitreIds = [];
+     for (const tag of tags) {
+         const lower = tag.toLowerCase();
+         // Match technique IDs: attack.t1234 or attack.t1234.001
+         const techMatch = lower.match(/^attack\.t(\d{4}(?:\.\d{3})?)$/);
+         if (techMatch) {
+             mitreIds.push(`T${techMatch[1].toUpperCase()}`);
+             continue;
+         }
+         // Match software IDs: attack.s1234
+         const softMatch = lower.match(/^attack\.s(\d{4})$/);
+         if (softMatch) {
+             mitreIds.push(`S${softMatch[1]}`);
+             continue;
+         }
+         // Match group IDs: attack.g1234
+         const groupMatch = lower.match(/^attack\.g(\d{4})$/);
+         if (groupMatch) {
+             mitreIds.push(`G${groupMatch[1]}`);
+         }
+     }
+     return mitreIds;
+ }
+ // Generate a stable ID from file path and title if no UUID present
+ function generateId(filePath, title) {
+     const hash = createHash('sha256')
+         .update(`${filePath}:${title}`)
+         .digest('hex')
+         .substring(0, 32);
+     return `sigma-${hash}`;
+ }
+ // Normalize falsepositives field (can be string or array)
+ function normalizeFalsePositives(fp) {
+     if (!fp)
+         return [];
+     if (typeof fp === 'string')
+         return [fp];
+     return fp;
+ }
+ export function parseSigmaFile(filePath) {
+     try {
+         const content = readFileSync(filePath, 'utf-8');
+         const rule = parseYaml(content);
+         // title is required
+         if (!rule.title) {
+             return null;
+         }
+         // logsource and detection are required
+         if (!rule.logsource || !rule.detection) {
+             return null;
+         }
+         const id = rule.id || generateId(filePath, rule.title);
+         const detection = {
+             id,
+             name: rule.title,
+             description: rule.description || '',
+             query: stringifyYaml(rule.detection),
+             source_type: 'sigma',
+             mitre_ids: extractMitreIds(rule.tags),
+             logsource_category: rule.logsource.category || null,
+             logsource_product: rule.logsource.product || null,
+             logsource_service: rule.logsource.service || null,
+             severity: rule.level || null,
+             status: rule.status || null,
+             author: rule.author || null,
+             date_created: rule.date || null,
+             date_modified: rule.modified || null,
+             references: rule.references || [],
+             falsepositives: normalizeFalsePositives(rule.falsepositives),
+             tags: rule.tags || [],
+             file_path: filePath,
+             raw_yaml: content,
+         };
+         return detection;
+     }
+     catch (err) {
+         // Skip files that can't be parsed
+         return null;
+     }
+ }
package/dist/parsers/splunk.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type { Detection } from '../types.js';
+ export declare function parseSplunkFile(filePath: string): Detection | null;
package/dist/parsers/splunk.js ADDED
@@ -0,0 +1,42 @@
+ import { readFileSync } from 'fs';
+ import { parse as parseYaml } from 'yaml';
+ export function parseSplunkFile(filePath) {
+     try {
+         const content = readFileSync(filePath, 'utf-8');
+         const rule = parseYaml(content);
+         // name and id are required
+         if (!rule.name || !rule.id) {
+             return null;
+         }
+         // search is required
+         if (!rule.search) {
+             return null;
+         }
+         const detection = {
+             id: rule.id,
+             name: rule.name,
+             description: rule.description || '',
+             query: rule.search,
+             source_type: 'splunk_escu',
+             mitre_ids: rule.tags?.mitre_attack_id || [],
+             logsource_category: null,
+             logsource_product: null,
+             logsource_service: null,
+             severity: null, // Splunk ESCU doesn't express severity the same way
+             status: rule.status || null,
+             author: rule.author || null,
+             date_created: rule.date || null,
+             date_modified: null,
+             references: rule.references || [],
+             falsepositives: rule.known_false_positives ? [rule.known_false_positives] : [],
+             tags: rule.tags?.analytic_story || [],
+             file_path: filePath,
+             raw_yaml: content,
+         };
+         return detection;
+     }
+     catch (err) {
+         // Skip files that can't be parsed
+         return null;
+     }
+ }
package/dist/types.d.ts ADDED
@@ -0,0 +1,81 @@
+ export interface Detection {
+     id: string;
+     name: string;
+     description: string;
+     query: string;
+     source_type: 'sigma' | 'splunk_escu';
+     mitre_ids: string[];
+     logsource_category: string | null;
+     logsource_product: string | null;
+     logsource_service: string | null;
+     severity: string | null;
+     status: string | null;
+     author: string | null;
+     date_created: string | null;
+     date_modified: string | null;
+     references: string[];
+     falsepositives: string[];
+     tags: string[];
+     file_path: string;
+     raw_yaml: string;
+ }
+ export interface SigmaRule {
+     title: string;
+     id?: string;
+     name?: string;
+     status?: 'stable' | 'test' | 'experimental' | 'deprecated' | 'unsupported';
+     description?: string;
+     license?: string;
+     author?: string;
+     references?: string[];
+     date?: string;
+     modified?: string;
+     logsource: {
+         category?: string;
+         product?: string;
+         service?: string;
+         definition?: string;
+     };
+     detection: Record<string, unknown>;
+     fields?: string[];
+     falsepositives?: string | string[];
+     level?: 'informational' | 'low' | 'medium' | 'high' | 'critical';
+     tags?: string[];
+     related?: Array<{
+         id: string;
+         type: string;
+     }>;
+     scope?: string[];
+     taxonomy?: string;
+ }
+ export interface SplunkDetection {
+     name: string;
+     id: string;
+     version?: number;
+     date?: string;
+     author?: string;
+     status?: string;
+     type?: string;
+     description?: string;
+     data_source?: string[];
+     search: string;
+     how_to_implement?: string;
+     known_false_positives?: string;
+     references?: string[];
+     tags?: {
+         analytic_story?: string[];
+         asset_type?: string;
+         mitre_attack_id?: string[];
+         product?: string[];
+         security_domain?: string;
+         [key: string]: unknown;
+     };
+ }
+ export interface IndexStats {
+     total: number;
+     sigma: number;
+     splunk_escu: number;
+     by_severity: Record<string, number>;
+     by_logsource_product: Record<string, number>;
+     mitre_coverage: number;
+ }
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
+ // Unified detection schema - normalized from both Sigma and Splunk ESCU
+ export {};
package/package.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "name": "security-detections-mcp",
+   "version": "1.0.0",
+   "description": "MCP server for querying Sigma and Splunk ESCU security detection rules",
+   "type": "module",
+   "main": "dist/index.js",
+   "bin": {
+     "security-detections-mcp": "dist/index.js"
+   },
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/MHaggis/Security-Detections-MCP.git"
+   },
+   "homepage": "https://github.com/MHaggis/Security-Detections-MCP#readme",
+   "bugs": {
+     "url": "https://github.com/MHaggis/Security-Detections-MCP/issues"
+   },
+   "scripts": {
+     "build": "tsc",
+     "start": "node dist/index.js",
+     "dev": "tsc --watch",
+     "prepublishOnly": "npm run build"
+   },
+   "keywords": [
+     "mcp",
+     "security",
+     "detections",
+     "sigma",
+     "splunk",
+     "escu",
+     "siem",
+     "mitre",
+     "attack"
+   ],
+   "author": "Michael Haag",
+   "license": "Apache-2.0",
+   "dependencies": {
+     "@modelcontextprotocol/sdk": "^1.0.0",
+     "better-sqlite3": "^11.0.0",
+     "yaml": "^2.4.0"
+   },
+   "devDependencies": {
+     "@types/better-sqlite3": "^7.6.11",
+     "@types/node": "^20.0.0",
+     "typescript": "^5.4.0"
+   },
+   "engines": {
+     "node": ">=18.0.0"
+   },
+   "files": [
+     "dist"
+   ]
+ }