iflow-mcp_splunk_splunk-mcp-server 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
splunk_client.py ADDED
@@ -0,0 +1,336 @@
1
"""Splunk REST API Client for async operations."""

import asyncio
import json
import xml.etree.ElementTree as ET
from typing import Any, Dict, List, Optional
from urllib.parse import quote

import httpx
7
+
8
+
9
class SplunkAPIError(Exception):
    """Raised for any Splunk REST API failure.

    Attributes:
        message: Human-readable error description.
        status_code: HTTP status code when the error came from a response,
            otherwise ``None``.
        details: Extra context (e.g. the raw error body); always a dict.
    """

    def __init__(self, message: str, status_code: Optional[int] = None, details: Optional[dict] = None):
        super().__init__(message)
        self.message = message
        self.status_code = status_code
        # Normalize a missing/empty details argument to a fresh dict so
        # callers can always subscript it safely.
        self.details = {} if not details else details
16
+
17
+
18
class SplunkClient:
    """Async client for Splunk REST API operations.

    Talks to the Splunk management port over HTTPS using httpx. Supports
    token auth (``Authorization: Splunk <token>``) or HTTP basic auth.
    Intended for use as an async context manager::

        async with SplunkClient(config) as client:
            events = await client.search_oneshot("index=_internal")
    """

    # Upper bound on how long run_saved_search() polls a dispatched job.
    _JOB_POLL_TIMEOUT_SECS = 300.0
    # Delay between job-status polls (matches the original 0.5s cadence).
    _JOB_POLL_INTERVAL_SECS = 0.5

    def __init__(self, config: dict):
        """Initialize Splunk client with configuration.

        Args:
            config: Dictionary containing:
                - splunk_host: Splunk server hostname
                - splunk_port: Splunk management port (default: 8089)
                - splunk_username: Username for basic auth (optional)
                - splunk_password: Password for basic auth (optional)
                - splunk_token: Token for token auth (optional)
                - verify_ssl: Whether to verify SSL certificates
        """
        self.config = config
        self.base_url = f"https://{config['splunk_host']}:{config['splunk_port']}"
        # Created by connect(); holds an httpx.AsyncClient while connected.
        self._client: Optional["httpx.AsyncClient"] = None

    async def __aenter__(self):
        """Async context manager entry: open the HTTP client."""
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit: close the HTTP client."""
        await self.disconnect()

    async def connect(self):
        """Create and configure the HTTP client.

        Token auth is preferred over basic auth when both are configured.

        Raises:
            SplunkAPIError: If no authentication method is configured.
        """
        auth = None
        headers = {}

        if self.config.get("splunk_token"):
            # Splunk uses a non-standard "Splunk <token>" authorization scheme.
            headers["Authorization"] = f"Splunk {self.config['splunk_token']}"
        elif self.config.get("splunk_username") and self.config.get("splunk_password"):
            auth = httpx.BasicAuth(self.config["splunk_username"], self.config["splunk_password"])
        else:
            raise SplunkAPIError("No valid authentication configured. Set either SPLUNK_TOKEN or SPLUNK_USERNAME/SPLUNK_PASSWORD.")

        # NOTE(review): verify_ssl defaults to False (common for self-signed
        # Splunk management certs); set verify_ssl=True for production hosts.
        self._client = httpx.AsyncClient(
            base_url=self.base_url,
            auth=auth,
            headers=headers,
            verify=self.config.get("verify_ssl", False),
            timeout=30.0,
        )

    async def disconnect(self):
        """Close the HTTP client and drop the reference."""
        if self._client:
            await self._client.aclose()
            self._client = None

    def _ensure_connected(self):
        """Raise SplunkAPIError if connect() has not been called yet."""
        if not self._client:
            raise SplunkAPIError("Client not connected. Call connect() first or use async context manager.")

    @staticmethod
    def _normalize_query(query: str) -> str:
        """Prefix raw SPL with "search" unless it starts with a pipe.

        Generating commands (e.g. ``| tstats ...``) must not be prefixed.
        """
        return query if query.strip().startswith("|") else f"search {query}"

    def _parse_response(self, response_text: str, output_mode: str = "json") -> List[Dict[str, Any]]:
        """Parse a Splunk search response body into a list of event dicts.

        Args:
            response_text: Raw response body.
            output_mode: "json" selects JSON parsing (single-document oneshot
                format, falling back to newline-delimited export format);
                any other value selects XML parsing.

        Returns:
            List of event dictionaries; empty list when nothing parses.
        """
        if output_mode == "json":
            try:
                # Oneshot/results endpoints return one JSON document.
                data = json.loads(response_text)
            except json.JSONDecodeError:
                # Export endpoint streams one JSON object per line.
                return self._parse_json_lines(response_text)
            if "results" in data:
                return data["results"]
            if "result" in data:
                return [data["result"]]
            # BUG FIX: previously fell through and implicitly returned None
            # here; a parseable document without results is an empty set.
            return []
        return self._parse_xml_results(response_text)

    def _parse_json_lines(self, response_text: str) -> List[Dict[str, Any]]:
        """Parse newline-delimited JSON (export format), skipping bad lines."""
        events: List[Dict[str, Any]] = []
        for line in response_text.strip().split('\n'):
            if not line.strip():
                continue
            try:
                data = json.loads(line)
            except json.JSONDecodeError:
                continue
            if "result" in data:
                events.append(data["result"])
            elif "results" in data:
                events.extend(data["results"])
        return events

    def _parse_xml_results(self, response_text: str) -> List[Dict[str, Any]]:
        """Parse Splunk's XML results format; returns [] on parse errors."""
        events: List[Dict[str, Any]] = []
        try:
            root = ET.fromstring(response_text)
        except ET.ParseError:
            return events
        for result in root.findall(".//result"):
            event = {}
            for field in result.findall("field"):
                key = field.get("k")
                # Hoisted the find() so it runs once per field, not twice.
                text_elem = field.find("value/text")
                event[key] = text_elem.text if text_elem is not None else ""
            events.append(event)
        return events

    async def search_oneshot(self, query: str, earliest_time: str = "-24h",
                             latest_time: str = "now", max_count: int = 100) -> List[Dict[str, Any]]:
        """Execute a oneshot search and return results immediately.

        Args:
            query: SPL search query
            earliest_time: Start time for search
            latest_time: End time for search
            max_count: Maximum number of results

        Returns:
            List of event dictionaries

        Raises:
            SplunkAPIError: If not connected or the request fails.
        """
        self._ensure_connected()

        params = {
            "search": self._normalize_query(query),
            "earliest_time": earliest_time,
            "latest_time": latest_time,
            "count": max_count,
            "output_mode": "json",
        }

        try:
            response = await self._client.post("/services/search/jobs/oneshot", data=params)
            response.raise_for_status()
            return self._parse_response(response.text, "json")
        except httpx.HTTPStatusError as e:
            raise SplunkAPIError("Search failed", status_code=e.response.status_code,
                                 details={"error": e.response.text})
        except Exception as e:
            raise SplunkAPIError(f"Search failed: {str(e)}")

    async def search_export(self, query: str, earliest_time: str = "-24h",
                            latest_time: str = "now", max_count: int = 100) -> List[Dict[str, Any]]:
        """Execute an export search and return its results.

        The export endpoint emits newline-delimited JSON; the full body is
        read here (not streamed) and parsed line by line.

        Args:
            query: SPL search query
            earliest_time: Start time for search
            latest_time: End time for search
            max_count: Maximum number of results (<= 0 means unlimited)

        Returns:
            List of event dictionaries

        Raises:
            SplunkAPIError: If not connected or the request fails.
        """
        self._ensure_connected()

        params = {
            "search": self._normalize_query(query),
            "earliest_time": earliest_time,
            "latest_time": latest_time,
            "count": max_count,
            "output_mode": "json",
            "search_mode": "normal",
        }

        try:
            response = await self._client.post("/services/search/jobs/export", data=params)
            response.raise_for_status()
            events = self._parse_response(response.text, "json")
            # The export endpoint ignores "count" for limiting, so trim here.
            return events[:max_count] if max_count > 0 else events
        except httpx.HTTPStatusError as e:
            raise SplunkAPIError("Export search failed", status_code=e.response.status_code,
                                 details={"error": e.response.text})
        except Exception as e:
            raise SplunkAPIError(f"Export search failed: {str(e)}")

    async def get_indexes(self) -> List[Dict[str, Any]]:
        """Get list of all indexes with detailed information.

        Returns:
            List of index dictionaries with properties.

        Raises:
            SplunkAPIError: If not connected or the request fails.
        """
        self._ensure_connected()

        try:
            response = await self._client.get("/services/data/indexes", params={"output_mode": "json"})
            response.raise_for_status()

            data = response.json()
            indexes = []

            for entry in data.get("entry", []):
                content = entry.get("content", {})
                indexes.append({
                    "name": entry.get("name", ""),
                    "datatype": content.get("datatype", "event"),
                    "totalEventCount": int(content.get("totalEventCount", "0")),
                    "currentDBSizeMB": float(content.get("currentDBSizeMB", "0")),
                    "maxDataSize": content.get("maxDataSize", "auto"),
                    "maxTotalDataSizeMB": content.get("maxTotalDataSizeMB", "unknown"),
                    "minTime": content.get("minTime", ""),
                    "maxTime": content.get("maxTime", ""),
                    "disabled": content.get("disabled", False),
                    "frozenTimePeriodInSecs": content.get("frozenTimePeriodInSecs", ""),
                })

            return indexes
        except httpx.HTTPStatusError as e:
            raise SplunkAPIError("Failed to get indexes", status_code=e.response.status_code,
                                 details={"error": e.response.text})
        except Exception as e:
            raise SplunkAPIError(f"Failed to get indexes: {str(e)}")

    async def get_saved_searches(self) -> List[Dict[str, Any]]:
        """Get list of all saved searches.

        Returns:
            List of saved search dictionaries.

        Raises:
            SplunkAPIError: If not connected or the request fails.
        """
        self._ensure_connected()

        try:
            response = await self._client.get("/services/saved/searches", params={"output_mode": "json"})
            response.raise_for_status()

            data = response.json()
            saved_searches = []

            for entry in data.get("entry", []):
                content = entry.get("content", {})
                saved_searches.append({
                    "name": entry.get("name", ""),
                    "search": content.get("search", ""),
                    "description": content.get("description", ""),
                    "is_scheduled": content.get("is_scheduled", False),
                    "cron_schedule": content.get("cron_schedule", ""),
                    "next_scheduled_time": content.get("next_scheduled_time", ""),
                    "actions": content.get("actions", ""),
                })

            return saved_searches
        except httpx.HTTPStatusError as e:
            raise SplunkAPIError("Failed to get saved searches", status_code=e.response.status_code,
                                 details={"error": e.response.text})
        except Exception as e:
            raise SplunkAPIError(f"Failed to get saved searches: {str(e)}")

    async def _wait_for_job(self, job_id: str) -> Dict[str, Any]:
        """Poll a search job until it reaches DONE.

        Args:
            job_id: The dispatched job's search ID (sid).

        Returns:
            The job's final "content" dict.

        Raises:
            SplunkAPIError: If the job reaches FAILED or the poll times out.
        """
        job_url = f"/services/search/jobs/{job_id}"
        deadline = asyncio.get_running_loop().time() + self._JOB_POLL_TIMEOUT_SECS
        while True:
            job_response = await self._client.get(job_url, params={"output_mode": "json"})
            job_response.raise_for_status()

            entry = job_response.json().get("entry", [{}])[0]
            content = entry.get("content", {})
            state = content.get("dispatchState")

            if state == "DONE":
                return content
            # BUG FIX: the original looped forever on failed jobs; surface
            # the failure instead of hanging.
            if state == "FAILED":
                raise SplunkAPIError(f"Search job {job_id} failed",
                                     details={"messages": content.get("messages", [])})
            # BUG FIX: bound the polling so a stuck job cannot hang callers.
            if asyncio.get_running_loop().time() > deadline:
                raise SplunkAPIError(f"Timed out waiting for search job {job_id}")
            await asyncio.sleep(self._JOB_POLL_INTERVAL_SECS)

    async def run_saved_search(self, search_name: str, trigger_actions: bool = False) -> Dict[str, Any]:
        """Run a saved search by name and get results.

        Dispatches the saved search, polls the resulting job to completion,
        then fetches up to 100 results.

        Args:
            search_name: Name of the saved search
            trigger_actions: Whether to trigger configured actions

        Returns:
            Dictionary with job info and results.

        Raises:
            SplunkAPIError: On dispatch failure, job failure, or timeout.
        """
        self._ensure_connected()

        try:
            # BUG FIX: saved-search names routinely contain spaces/slashes
            # and must be percent-encoded in the URL path.
            dispatch_url = f"/services/saved/searches/{quote(search_name, safe='')}/dispatch"
            params = {
                "trigger_actions": "1" if trigger_actions else "0",
                "output_mode": "json",
            }

            response = await self._client.post(dispatch_url, data=params)
            response.raise_for_status()

            # The dispatch response carries the new job's search ID.
            job_id = response.json().get("sid")
            if not job_id:
                raise SplunkAPIError("No job ID returned from saved search dispatch")

            await self._wait_for_job(job_id)

            # Fetch up to 100 finished results for the job.
            results_url = f"/services/search/jobs/{job_id}/results"
            results_response = await self._client.get(results_url, params={"output_mode": "json", "count": 100})
            results_response.raise_for_status()

            events = self._parse_response(results_response.text, "json")

            return {
                "search_name": search_name,
                "job_id": job_id,
                "event_count": len(events),
                "events": events,
            }
        except SplunkAPIError:
            # BUG FIX: don't re-wrap our own errors (e.g. "No job ID") with
            # a second "Failed to run saved search:" layer.
            raise
        except httpx.HTTPStatusError as e:
            raise SplunkAPIError("Failed to run saved search", status_code=e.response.status_code,
                                 details={"error": e.response.text})
        except Exception as e:
            raise SplunkAPIError(f"Failed to run saved search: {str(e)}")