iflow-mcp_splunk_splunk-mcp-server 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
server.py ADDED
@@ -0,0 +1,637 @@
+ from mcp.server.fastmcp import FastMCP, Context
+ from contextlib import asynccontextmanager
+ from collections.abc import AsyncIterator
+ from dataclasses import dataclass
+ from dotenv import load_dotenv
+ import asyncio
+ import os
+ import sys
+ import signal
+ from typing import Optional, List, Dict, Any
+ from helpers import format_events_as_markdown, format_events_as_csv, format_events_as_summary
+ from splunk_client import SplunkClient, SplunkAPIError
+ from guardrails import validate_spl_query, sanitize_output
+
+ load_dotenv()
+
+ @dataclass
+ class AppContext:
+     """Application context for the server."""
+     config: dict
+     splunk_client: Optional[SplunkClient] = None
+     test_mode: bool = False
+
+ @asynccontextmanager
+ async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:
+     """Manages the application lifecycle."""
+     # Check if running inside Docker and use appropriate Splunk host
+     if os.getenv("RUNNING_INSIDE_DOCKER") == "1":
+         splunk_host = os.getenv("SPLUNK_HOST_FOR_DOCKER", "host.docker.internal")
+     else:
+         splunk_host = os.getenv("SPLUNK_HOST", "localhost")
+
+     config = {
+         "name": os.getenv("SERVER_NAME"),
+         "description": os.getenv("SERVER_DESCRIPTION"),
+         "host": os.getenv("HOST"),
+         "port": os.getenv("PORT"),
+         "transport": os.getenv("TRANSPORT"),
+         "log_level": os.getenv("LOG_LEVEL"),
+         "splunk_host": splunk_host,
+         "splunk_port": int(os.getenv("SPLUNK_PORT", "8089")),
+         "splunk_username": os.getenv("SPLUNK_USERNAME"),
+         "splunk_password": os.getenv("SPLUNK_PASSWORD"),
+         "splunk_token": os.getenv("SPLUNK_TOKEN"),
+         "verify_ssl": os.getenv("VERIFY_SSL", "false").lower() == "true",
+         "spl_max_events_count": int(os.getenv("SPL_MAX_EVENTS_COUNT", "100000")),
+         "spl_risk_tolerance": int(os.getenv("SPL_RISK_TOLERANCE", "75")),
+         "spl_safe_timerange": os.getenv("SPL_SAFE_TIMERANGE", "24h"),
+         "spl_sanitize_output": os.getenv("SPL_SANITIZE_OUTPUT", "false").lower() == "true"
+     }
+
+     # Check if running in test mode (no Splunk connection required)
+     test_mode = os.getenv("TEST_MODE", "false").lower() == "true"
+
+     # Create Splunk client
+     splunk_client = SplunkClient(config)
+     try:
+         if test_mode:
+             # In test mode, don't connect to Splunk
+             yield AppContext(config=config, splunk_client=None, test_mode=True)
+         else:
+             await splunk_client.connect()
+             yield AppContext(config=config, splunk_client=splunk_client, test_mode=False)
+     except Exception as e:
+         # If connection fails and we're not in test mode, raise the error
+         if not test_mode:
+             raise
+         # In test mode, continue without connection
+         yield AppContext(config=config, splunk_client=None, test_mode=True)
+     finally:
+         if not test_mode and splunk_client:
+             try:
+                 await splunk_client.disconnect()
+             except Exception:
+                 pass
+
+ # Initialize FastMCP server
+ transport = os.getenv("TRANSPORT", "sse")
+ if transport == "stdio":
+     # For stdio mode, don't set host and port
+     mcp = FastMCP(
+         os.getenv("SERVER_NAME", "Splunk MCP"),
+         instructions=os.getenv("SERVER_DESCRIPTION", "MCP server for retrieving data from Splunk"),
+         lifespan=app_lifespan
+     )
+ else:
+     # For SSE mode, set host and port
+     mcp = FastMCP(
+         os.getenv("SERVER_NAME", "Splunk MCP"),
+         instructions=os.getenv("SERVER_DESCRIPTION", "MCP server for retrieving data from Splunk"),
+         lifespan=app_lifespan,
+         host=os.getenv("HOST", "0.0.0.0"),
+         port=int(os.getenv("PORT", "18050"))
+     )
+
+
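The lifespan handler and the FastMCP constructor above take all of their settings from environment variables, loaded from a .env file by load_dotenv(). The sketch below is illustrative only: the keys are the variables server.py actually reads, the values are placeholders for a local setup, and the defaults noted in comments are the ones hard-coded in the code above, not something verified elsewhere in the package.

# Illustrative configuration sketch; values are placeholders, not package defaults
# except where a default is noted in the comment.
import os

os.environ.update({
    "SERVER_NAME": "Splunk MCP",                 # falls back to "Splunk MCP"
    "SERVER_DESCRIPTION": "MCP server for retrieving data from Splunk",
    "TRANSPORT": "sse",                          # "sse" (default) or "stdio"
    "HOST": "0.0.0.0",                           # SSE bind address, default 0.0.0.0
    "PORT": "18050",                             # SSE port, default 18050
    "LOG_LEVEL": "INFO",
    "SPLUNK_HOST": "localhost",                  # or SPLUNK_HOST_FOR_DOCKER when RUNNING_INSIDE_DOCKER=1
    "SPLUNK_PORT": "8089",                       # Splunk management port, default 8089
    "SPLUNK_USERNAME": "admin",                  # placeholder credentials
    "SPLUNK_PASSWORD": "changeme",               # or set SPLUNK_TOKEN for token auth
    "VERIFY_SSL": "false",
    "SPL_MAX_EVENTS_COUNT": "100000",            # default 100000
    "SPL_RISK_TOLERANCE": "75",                  # default 75
    "SPL_SAFE_TIMERANGE": "24h",                 # default 24h
    "SPL_SANITIZE_OUTPUT": "false",
    "TEST_MODE": "false",                        # "true" runs the tools on mock data, no Splunk needed
})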
+ @mcp.tool()
+ async def validate_spl(ctx: Context, query: str) -> Dict[str, Any]:
+     """
+     Validate an SPL query for potential risks and inefficiencies.
+
+     Args:
+         query: The SPL query to validate
+
+     Returns:
+         Dictionary containing:
+         - risk_score: Risk score from 0-100
+         - risk_message: Explanation of risks found with suggestions
+         - risk_tolerance: Current risk tolerance setting
+         - would_execute: Whether this query would execute or be blocked
+     """
+     config = ctx.request_context.lifespan_context.config
+     safe_timerange = config.get("spl_safe_timerange", "24h")
+     risk_tolerance = config.get("spl_risk_tolerance", 75)
+
+     risk_score, risk_message = validate_spl_query(query, safe_timerange)
+
+     return {
+         "risk_score": risk_score,
+         "risk_message": risk_message,
+         "risk_tolerance": risk_tolerance,
+         "would_execute": risk_score <= risk_tolerance,
+         "execution_note": f"Query would be {'executed' if risk_score <= risk_tolerance else 'BLOCKED - no search would be executed and no data would be returned'}"
+     }
+
+
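For a sense of how these tools are reached at runtime, here is a minimal client sketch using the MCP Python SDK. The URL assumes the default HOST/PORT above and FastMCP's standard /sse endpoint, and the SPL query string is just an example, not anything shipped with the package.

# Minimal client sketch: dry-run the guardrails, then run a oneshot search.
# Assumes the server is running with TRANSPORT=sse on the default port 18050.
import asyncio

from mcp import ClientSession
from mcp.client.sse import sse_client


async def demo() -> None:
    async with sse_client("http://localhost:18050/sse") as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Check the query without executing it.
            check = await session.call_tool(
                "validate_spl", {"query": "index=main error | head 10"}
            )
            print(check)
            # Run the same query as a oneshot search, summarised.
            result = await session.call_tool(
                "search_oneshot",
                {"query": "index=main error | head 10",
                 "earliest_time": "-1h",
                 "output_format": "summary"},
            )
            print(result)


if __name__ == "__main__":
    asyncio.run(demo())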
+ @mcp.tool()
+ async def search_oneshot(ctx: Context, query: str, earliest_time: str = "-24h", latest_time: str = "now", max_count: int = 100, output_format: str = "json", risk_tolerance: Optional[int] = None, sanitize_output: Optional[bool] = None) -> Dict[str, Any]:
+     """
+     Run a oneshot search query in Splunk and return results.
+
+     Args:
+         query: The Splunk search query (e.g., "index=main | head 10")
+         earliest_time: Start time for search (default: -24h)
+         latest_time: End time for search (default: now)
+         max_count: Maximum number of results to return (default: 100, or SPL_MAX_EVENTS_COUNT from .env, 0 = unlimited)
+         output_format: Format for results - json, markdown/md, csv, or summary (default: json)
+         risk_tolerance: Override risk tolerance level (default: SPL_RISK_TOLERANCE from .env)
+         sanitize_output: Override output sanitization (default: SPL_SANITIZE_OUTPUT from .env)
+
+     Returns:
+         Dictionary containing search results in the specified format
+     """
+     test_mode = ctx.request_context.lifespan_context.test_mode
+
+     if test_mode:
+         # Return mock data in test mode
+         events = [{"_time": "2024-01-01T12:00:00.000", "_raw": "test log data", "host": "test-host", "index": "main"}]
+         return {
+             "query": query,
+             "event_count": len(events),
+             "events": events,
+             "search_params": {
+                 "earliest_time": earliest_time,
+                 "latest_time": latest_time,
+                 "max_count": max_count
+             },
+             "_test_mode": True
+         }
+
+     if not ctx.request_context.lifespan_context.splunk_client:
+         return {"error": "Splunk client not initialized"}
+
+     try:
+         client = ctx.request_context.lifespan_context.splunk_client
+         config = ctx.request_context.lifespan_context.config
+
+         # Get risk tolerance and sanitization settings
+         if risk_tolerance is None:
+             risk_tolerance = config.get("spl_risk_tolerance", 75)
+         if sanitize_output is None:
+             sanitize_output = config.get("spl_sanitize_output", False)
+
+         # Validate query if risk_tolerance < 100
+         if risk_tolerance < 100:
+             safe_timerange = config.get("spl_safe_timerange", "24h")
+             risk_score, risk_message = validate_spl_query(query, safe_timerange)
+
+             if risk_score > risk_tolerance:
+                 return {
+                     "error": f"Query exceeds risk tolerance ({risk_score} > {risk_tolerance}). No search was executed and no data was returned.",
+                     "risk_score": risk_score,
+                     "risk_tolerance": risk_tolerance,
+                     "risk_message": risk_message,
+                     "search_executed": False,
+                     "data_returned": None
+                 }
+
+         # Use configured spl_max_events_count if max_count is default (100)
+         if max_count == 100:
+             max_count = config.get("spl_max_events_count", 100000)
+
+         # Execute search using client
+         events = await client.search_oneshot(query, earliest_time, latest_time, max_count)
+
+         # Sanitize output if requested
+         if sanitize_output:
+             from guardrails import sanitize_output as sanitize_fn
+             events = sanitize_fn(events)
+
+         # Format results based on output_format
+         # Handle synonyms
+         if output_format == "md":
+             output_format = "markdown"
+
+         if output_format == "json":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "events": events,
+                 "search_params": {
+                     "earliest_time": earliest_time,
+                     "latest_time": latest_time,
+                     "max_count": max_count
+                 }
+             }
+         elif output_format == "markdown":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "markdown",
+                 "content": format_events_as_markdown(events, query),
+                 "search_params": {
+                     "earliest_time": earliest_time,
+                     "latest_time": latest_time,
+                     "max_count": max_count
+                 }
+             }
+         elif output_format == "csv":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "csv",
+                 "content": format_events_as_csv(events, query),
+                 "search_params": {
+                     "earliest_time": earliest_time,
+                     "latest_time": latest_time,
+                     "max_count": max_count
+                 }
+             }
+         elif output_format == "summary":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "summary",
+                 "content": format_events_as_summary(events, query, len(events)),
+                 "search_params": {
+                     "earliest_time": earliest_time,
+                     "latest_time": latest_time,
+                     "max_count": max_count
+                 }
+             }
+         else:
+             return {"error": f"Invalid output_format: {output_format}. Must be one of: json, markdown (or md), csv, summary"}
+
+     except SplunkAPIError as e:
+         return {"error": str(e), "details": e.details}
+     except Exception as e:
+         return {"error": f"Search failed: {str(e)}"}
+
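guardrails.py is not included in this diff, so the actual scoring rules are not visible here. All that search_oneshot above (and search_export below) relies on is that validate_spl_query(query, safe_timerange) returns a (risk_score, risk_message) pair on a 0-100 scale and that the search is refused when the score exceeds the tolerance. A toy stand-in that honours that same contract, purely for illustration:

# Toy illustration of the (risk_score, risk_message) contract expected from
# guardrails.validate_spl_query; the real rules live in guardrails.py and may
# differ entirely from this sketch.
def toy_validate_spl_query(query: str, safe_timerange: str = "24h") -> tuple[int, str]:
    score = 0
    notes = []
    lowered = query.lower()
    if "| delete" in lowered:
        score += 100
        notes.append("destructive command: | delete")
    if "index=*" in lowered:
        score += 40
        notes.append("wildcard index scan")
    if "earliest=" not in lowered:
        score += 10
        notes.append(f"no explicit time range; safe window is {safe_timerange}")
    return min(score, 100), "; ".join(notes) or "no obvious risks"


# The gate used by the search tools then reduces to a single comparison:
score, message = toy_validate_spl_query("index=* | delete")
risk_tolerance = 75
would_execute = score <= risk_tolerance  # False here: the query would be blocked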
+ @mcp.tool()
+ async def search_export(ctx: Context, query: str, earliest_time: str = "-24h", latest_time: str = "now", max_count: int = 100, output_format: str = "json", risk_tolerance: Optional[int] = None, sanitize_output: Optional[bool] = None) -> Dict[str, Any]:
+     """
+     Run an export search query in Splunk that streams results immediately.
+
+     Args:
+         query: The Splunk search query
+         earliest_time: Start time for search (default: -24h)
+         latest_time: End time for search (default: now)
+         max_count: Maximum number of results to return (default: 100, or SPL_MAX_EVENTS_COUNT from .env, 0 = unlimited)
+         output_format: Format for results - json, markdown/md, csv, or summary (default: json)
+         risk_tolerance: Override risk tolerance level (default: SPL_RISK_TOLERANCE from .env)
+         sanitize_output: Override output sanitization (default: SPL_SANITIZE_OUTPUT from .env)
+
+     Returns:
+         Dictionary containing search results in the specified format
+     """
+     test_mode = ctx.request_context.lifespan_context.test_mode
+
+     if test_mode:
+         # Return mock data in test mode
+         events = [{"_time": "2024-01-01T12:00:00.000", "_raw": "test log data", "host": "test-host", "index": "main"}]
+         return {
+             "query": query,
+             "event_count": len(events),
+             "events": events,
+             "is_preview": False,
+             "_test_mode": True
+         }
+
+     if not ctx.request_context.lifespan_context.splunk_client:
+         return {"error": "Splunk client not initialized"}
+
+     try:
+         client = ctx.request_context.lifespan_context.splunk_client
+         config = ctx.request_context.lifespan_context.config
+
+         # Get risk tolerance and sanitization settings
+         if risk_tolerance is None:
+             risk_tolerance = config.get("spl_risk_tolerance", 75)
+         if sanitize_output is None:
+             sanitize_output = config.get("spl_sanitize_output", False)
+
+         # Validate query if risk_tolerance < 100
+         if risk_tolerance < 100:
+             safe_timerange = config.get("spl_safe_timerange", "24h")
+             risk_score, risk_message = validate_spl_query(query, safe_timerange)
+
+             if risk_score > risk_tolerance:
+                 return {
+                     "error": f"Query exceeds risk tolerance ({risk_score} > {risk_tolerance}). No search was executed and no data was returned.",
+                     "risk_score": risk_score,
+                     "risk_tolerance": risk_tolerance,
+                     "risk_message": risk_message,
+                     "search_executed": False,
+                     "data_returned": None
+                 }
+
+         # Use configured spl_max_events_count if max_count is default (100)
+         if max_count == 100:
+             max_count = config.get("spl_max_events_count", 100000)
+
+         # Execute export search using client
+         events = await client.search_export(query, earliest_time, latest_time, max_count)
+
+         # Sanitize output if requested
+         if sanitize_output:
+             from guardrails import sanitize_output as sanitize_fn
+             events = sanitize_fn(events)
+
+         # Format results based on output_format
+         # Handle synonyms
+         if output_format == "md":
+             output_format = "markdown"
+
+         if output_format == "json":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "events": events,
+                 "is_preview": False
+             }
+         elif output_format == "markdown":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "markdown",
+                 "content": format_events_as_markdown(events, query),
+                 "is_preview": False
+             }
+         elif output_format == "csv":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "csv",
+                 "content": format_events_as_csv(events, query),
+                 "is_preview": False
+             }
+         elif output_format == "summary":
+             return {
+                 "query": query,
+                 "event_count": len(events),
+                 "format": "summary",
+                 "content": format_events_as_summary(events, query, len(events)),
+                 "is_preview": False
+             }
+         else:
+             return {"error": f"Invalid output_format: {output_format}. Must be one of: json, markdown (or md), csv, summary"}
+
+     except SplunkAPIError as e:
+         return {"error": str(e), "details": e.details}
+     except Exception as e:
+         return {"error": f"Export search failed: {str(e)}"}
+
+ @mcp.tool()
+ async def get_indexes(ctx: Context) -> Dict[str, Any]:
+     """
+     Get list of available Splunk indexes with detailed information.
+
+     Returns:
+         Dictionary containing list of indexes with their properties including:
+         - name, datatype, event count, size, time range, and more
+     """
+     test_mode = ctx.request_context.lifespan_context.test_mode
+
+     if test_mode:
+         # Return mock data in test mode
+         indexes = [
+             {"name": "main", "datatype": "event", "totalEventCount": 1000, "currentDBSizeMB": 100.5, "maxDataSize": "auto", "disabled": False},
+             {"name": "summary", "datatype": "event", "totalEventCount": 500, "currentDBSizeMB": 50.2, "maxDataSize": "auto", "disabled": False}
+         ]
+         return {"indexes": indexes, "count": len(indexes), "_test_mode": True}
+
+     if not ctx.request_context.lifespan_context.splunk_client:
+         return {"error": "Splunk client not initialized"}
+
+     try:
+         client = ctx.request_context.lifespan_context.splunk_client
+         indexes = await client.get_indexes()
+
+         return {"indexes": indexes, "count": len(indexes)}
+
+     except SplunkAPIError as e:
+         return {"error": str(e), "details": e.details}
+     except Exception as e:
+         return {"error": f"Failed to get indexes: {str(e)}"}
+
+ @mcp.tool()
+ async def get_saved_searches(ctx: Context) -> Dict[str, Any]:
+     """
+     Get list of saved searches available in Splunk.
+
+     Returns:
+         Dictionary containing list of saved searches with their names, queries,
+         descriptions, schedules, and other metadata
+     """
+     test_mode = ctx.request_context.lifespan_context.test_mode
+
+     if test_mode:
+         # Return mock data in test mode
+         saved_searches = [
+             {"name": "Error Search", "search": "index=main error", "description": "Search for errors", "is_scheduled": False},
+             {"name": "Daily Summary", "search": "index=summary | stats count", "description": "Daily summary", "is_scheduled": True}
+         ]
+         return {"saved_searches": saved_searches, "count": len(saved_searches), "_test_mode": True}
+
+     if not ctx.request_context.lifespan_context.splunk_client:
+         return {"error": "Splunk client not initialized"}
+
+     try:
+         client = ctx.request_context.lifespan_context.splunk_client
+         saved_searches = await client.get_saved_searches()
+
+         return {"saved_searches": saved_searches, "count": len(saved_searches)}
+
+     except SplunkAPIError as e:
+         return {"error": str(e), "details": e.details}
+     except Exception as e:
+         return {"error": f"Failed to get saved searches: {str(e)}"}
+
+ @mcp.tool()
+ async def run_saved_search(ctx: Context, search_name: str, trigger_actions: bool = False) -> Dict[str, Any]:
+     """
+     Run a saved search by name.
+
+     Args:
+         search_name: Name of the saved search to run
+         trigger_actions: Whether to trigger the search's actions (default: False)
+
+     Returns:
+         Dictionary containing search job information and results
+     """
+     test_mode = ctx.request_context.lifespan_context.test_mode
+
+     if test_mode:
+         # Return mock data in test mode
+         return {
+             "search_name": search_name,
+             "job_id": "test-job-id",
+             "event_count": 1,
+             "events": [{"_time": "2024-01-01T12:00:00.000", "_raw": "test result"}],
+             "_test_mode": True
+         }
+
+     if not ctx.request_context.lifespan_context.splunk_client:
+         return {"error": "Splunk client not initialized"}
+
+     try:
+         client = ctx.request_context.lifespan_context.splunk_client
+         result = await client.run_saved_search(search_name, trigger_actions)
+
+         return result
+
+     except SplunkAPIError as e:
+         return {"error": str(e), "details": e.details}
+     except Exception as e:
+         return {"error": f"Failed to run saved search: {str(e)}"}
+
+ @mcp.tool()
+ async def get_config(ctx: Context) -> dict:
+     """Get current server configuration."""
+     config = ctx.request_context.lifespan_context.config.copy()
+     # Remove sensitive information
+     config.pop("splunk_password", None)
+     config.pop("splunk_token", None)
+     config["splunk_connected"] = ctx.request_context.lifespan_context.splunk_client is not None
+     config["test_mode"] = ctx.request_context.lifespan_context.test_mode
+     return config
+
+ @mcp.resource("splunk://saved-searches")
+ async def get_saved_searches_resource() -> str:
+     """Provide saved searches information as a resource."""
+     # Check if in test mode
+     test_mode = os.getenv("TEST_MODE", "false").lower() == "true"
+
+     if test_mode:
+         # Return mock data in test mode
+         content = "# Splunk Saved Searches\n\n"
+         content += "## Error Search\n\n"
+         content += "**Description:** Search for errors\n"
+         content += "**Query:** `index=main error`\n\n"
+         content += "## Daily Summary\n\n"
+         content += "**Description:** Daily summary\n"
+         content += "**Query:** `index=summary | stats count`\n"
+         content += "**Schedule:** 0 0 * * *\n"
+         return content
+
+     # Create a temporary client for resource access
+     config = {
+         "splunk_host": os.getenv("SPLUNK_HOST"),
+         "splunk_port": int(os.getenv("SPLUNK_PORT", "8089")),
+         "splunk_username": os.getenv("SPLUNK_USERNAME"),
+         "splunk_password": os.getenv("SPLUNK_PASSWORD"),
+         "splunk_token": os.getenv("SPLUNK_TOKEN"),
+         "verify_ssl": os.getenv("VERIFY_SSL", "false").lower() == "true"
+     }
+
+     try:
+         async with SplunkClient(config) as client:
+             saved_searches = await client.get_saved_searches()
+
+             content = "# Splunk Saved Searches\n\n"
+
+             for search in saved_searches:
+                 content += f"## {search['name']}\n\n"
+                 if search.get('description'):
+                     content += f"**Description:** {search['description']}\n"
+                 content += f"**Query:** `{search['search']}`\n"
+                 if search.get('is_scheduled'):
+                     content += f"**Schedule:** {search.get('cron_schedule', 'N/A')}\n"
+                 if search.get('next_scheduled_time'):
+                     content += f"**Next Run:** {search['next_scheduled_time']}\n"
+                 if search.get('actions'):
+                     content += f"**Actions:** {search['actions']}\n"
+                 content += "\n"
+
+             return content
+
+     except Exception as e:
+         return f"Error retrieving saved searches: {str(e)}"
+
+ @mcp.resource("splunk://indexes")
+ async def get_indexes_resource() -> str:
+     """Provide index information as a resource with detailed metadata."""
+     # Check if in test mode
+     test_mode = os.getenv("TEST_MODE", "false").lower() == "true"
+
+     if test_mode:
+         # Return mock data in test mode
+         content = "# Splunk Indexes\n\n"
+         content += "| Index | Type | Events | Size (MB) | Max Size | Time Range | Status |\n"
+         content += "|-------|------|--------|-----------|----------|------------|--------|\n"
+         content += "| main | event | 1,000 | 100.50 | auto | N/A | ✓ Enabled |\n"
+         content += "| summary | event | 500 | 50.20 | auto | N/A | ✓ Enabled |\n"
+         content += "\n## Index Details\n\n"
+         content += "### main\n"
+         content += "- **Total Events:** 1,000\n"
+         content += "- **Current Size:** 100.50 MB\n"
+         content += "- **Max Size:** auto\n\n"
+         content += "### summary\n"
+         content += "- **Total Events:** 500\n"
+         content += "- **Current Size:** 50.20 MB\n"
+         content += "- **Max Size:** auto\n"
+         return content
+
+     # Create a temporary client for resource access
+     config = {
+         "splunk_host": os.getenv("SPLUNK_HOST"),
+         "splunk_port": int(os.getenv("SPLUNK_PORT", "8089")),
+         "splunk_username": os.getenv("SPLUNK_USERNAME"),
+         "splunk_password": os.getenv("SPLUNK_PASSWORD"),
+         "splunk_token": os.getenv("SPLUNK_TOKEN"),
+         "verify_ssl": os.getenv("VERIFY_SSL", "false").lower() == "true"
+     }
+
+     try:
+         async with SplunkClient(config) as client:
+             indexes = await client.get_indexes()
+
+             content = "# Splunk Indexes\n\n"
+             content += "| Index | Type | Events | Size (MB) | Max Size | Time Range | Status |\n"
+             content += "|-------|------|--------|-----------|----------|------------|--------|\n"
+
+             for idx in indexes:
+                 time_range = "N/A"
+                 if idx.get('minTime') and idx.get('maxTime'):
+                     time_range = f"{idx['minTime']} to {idx['maxTime']}"
+
+                 status = "✓ Enabled" if not idx.get('disabled', False) else "✗ Disabled"
+                 max_size = idx.get('maxDataSize', 'auto')
+
+                 content += f"| {idx['name']} | {idx.get('datatype', 'event')} | "
+                 content += f"{idx.get('totalEventCount', 0):,} | "
+                 content += f"{idx.get('currentDBSizeMB', 0):,.2f} | "
+                 content += f"{max_size} | {time_range} | {status} |\n"
+
+             content += "\n## Index Details\n\n"
+
+             for idx in indexes:
+                 if idx.get('totalEventCount', 0) > 0:  # Only show non-empty indexes
+                     content += f"### {idx['name']}\n"
+                     content += f"- **Total Events:** {idx.get('totalEventCount', 0):,}\n"
+                     content += f"- **Current Size:** {idx.get('currentDBSizeMB', 0):,.2f} MB\n"
+                     content += f"- **Max Size:** {idx.get('maxDataSize', 'auto')}\n"
+                     if idx.get('frozenTimePeriodInSecs'):
+                         frozen_days = int(idx['frozenTimePeriodInSecs']) / 86400
+                         content += f"- **Retention:** {frozen_days:.0f} days\n"
+                     content += "\n"
+
+             return content
+
+     except Exception as e:
+         return f"Error retrieving indexes: {str(e)}"
+
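The two @mcp.resource handlers above expose Markdown documents at fixed URIs. From an already-initialized ClientSession (see the SSE client sketch earlier), they can be fetched as shown below; dump_splunk_resources is a hypothetical helper name, not part of the package.

# Sketch of reading the resources registered above from an initialized session.
from mcp import ClientSession
from pydantic import AnyUrl


async def dump_splunk_resources(session: ClientSession) -> None:
    # List the URIs the server advertises, then fetch the two known documents.
    listing = await session.list_resources()
    print("Available resources:", [str(r.uri) for r in listing.resources])
    indexes_doc = await session.read_resource(AnyUrl("splunk://indexes"))
    searches_doc = await session.read_resource(AnyUrl("splunk://saved-searches"))
    print(indexes_doc)
    print(searches_doc)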
+ def signal_handler(signum, frame):
+     """Handle shutdown signals gracefully."""
+     print("\n\n✨ Server shutdown ...")
+     sys.exit(0)
+
+ async def main():
+     # Set up signal handlers
+     signal.signal(signal.SIGINT, signal_handler)
+     signal.signal(signal.SIGTERM, signal_handler)
+
+     transport = os.getenv("TRANSPORT", "sse")
+     if transport == "sse":
+         await mcp.run_sse_async()
+     else:
+         await mcp.run_stdio_async()
+
+ if __name__ == "__main__":
+     try:
+         asyncio.run(main())
+     except KeyboardInterrupt:
+         # Handle Ctrl+C gracefully without printing stack trace
+         print("\n\n✨ Server shutdown ...")
+         sys.exit(0)
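Because main() picks the transport from TRANSPORT, the same file can also be driven as a stdio subprocess. The launcher sketch below uses the MCP Python SDK's stdio client and assumes server.py is in the current directory with TRANSPORT=stdio (and, if no Splunk instance is available, TEST_MODE=true) set in its .env; the command and paths are placeholders for a local setup.

# Sketch: launch server.py over stdio and list the tools it registers.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def demo() -> None:
    # Placeholder launch command; adjust the interpreter and path to your setup.
    params = StdioServerParameters(command="python", args=["server.py"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print("Tools:", [tool.name for tool in tools.tools])


if __name__ == "__main__":
    asyncio.run(demo())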