holmesgpt 0.16.2a0__py3-none-any.whl → 0.18.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- holmes/__init__.py +3 -5
- holmes/clients/robusta_client.py +4 -3
- holmes/common/env_vars.py +18 -2
- holmes/common/openshift.py +1 -1
- holmes/config.py +11 -6
- holmes/core/conversations.py +30 -13
- holmes/core/investigation.py +21 -25
- holmes/core/investigation_structured_output.py +3 -3
- holmes/core/issue.py +1 -1
- holmes/core/llm.py +50 -31
- holmes/core/models.py +19 -17
- holmes/core/openai_formatting.py +1 -1
- holmes/core/prompt.py +47 -2
- holmes/core/runbooks.py +1 -0
- holmes/core/safeguards.py +4 -2
- holmes/core/supabase_dal.py +4 -2
- holmes/core/tool_calling_llm.py +102 -141
- holmes/core/tools.py +19 -28
- holmes/core/tools_utils/token_counting.py +9 -2
- holmes/core/tools_utils/tool_context_window_limiter.py +13 -30
- holmes/core/tools_utils/tool_executor.py +0 -18
- holmes/core/tools_utils/toolset_utils.py +1 -0
- holmes/core/toolset_manager.py +37 -2
- holmes/core/tracing.py +13 -2
- holmes/core/transformers/__init__.py +1 -1
- holmes/core/transformers/base.py +1 -0
- holmes/core/transformers/llm_summarize.py +3 -2
- holmes/core/transformers/registry.py +2 -1
- holmes/core/transformers/transformer.py +1 -0
- holmes/core/truncation/compaction.py +37 -2
- holmes/core/truncation/input_context_window_limiter.py +3 -2
- holmes/interactive.py +52 -8
- holmes/main.py +17 -37
- holmes/plugins/interfaces.py +2 -1
- holmes/plugins/prompts/__init__.py +2 -1
- holmes/plugins/prompts/_fetch_logs.jinja2 +5 -5
- holmes/plugins/prompts/_runbook_instructions.jinja2 +2 -1
- holmes/plugins/prompts/base_user_prompt.jinja2 +7 -0
- holmes/plugins/prompts/conversation_history_compaction.jinja2 +2 -1
- holmes/plugins/prompts/generic_ask.jinja2 +0 -2
- holmes/plugins/prompts/generic_ask_conversation.jinja2 +0 -2
- holmes/plugins/prompts/generic_ask_for_issue_conversation.jinja2 +0 -2
- holmes/plugins/prompts/generic_investigation.jinja2 +0 -2
- holmes/plugins/prompts/investigation_procedure.jinja2 +2 -1
- holmes/plugins/prompts/kubernetes_workload_ask.jinja2 +0 -2
- holmes/plugins/prompts/kubernetes_workload_chat.jinja2 +0 -2
- holmes/plugins/runbooks/__init__.py +32 -3
- holmes/plugins/sources/github/__init__.py +4 -2
- holmes/plugins/sources/prometheus/models.py +1 -0
- holmes/plugins/toolsets/__init__.py +30 -26
- holmes/plugins/toolsets/atlas_mongodb/mongodb_atlas.py +13 -12
- holmes/plugins/toolsets/azure_sql/apis/alert_monitoring_api.py +3 -2
- holmes/plugins/toolsets/azure_sql/apis/azure_sql_api.py +2 -1
- holmes/plugins/toolsets/azure_sql/apis/connection_failure_api.py +3 -2
- holmes/plugins/toolsets/azure_sql/apis/connection_monitoring_api.py +3 -1
- holmes/plugins/toolsets/azure_sql/apis/storage_analysis_api.py +3 -1
- holmes/plugins/toolsets/azure_sql/azure_sql_toolset.py +12 -12
- holmes/plugins/toolsets/azure_sql/tools/analyze_connection_failures.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_connections.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_health_status.py +3 -5
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_performance.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_storage.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/get_active_alerts.py +6 -8
- holmes/plugins/toolsets/azure_sql/tools/get_slow_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_cpu_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_data_io_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_log_io_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/utils.py +0 -32
- holmes/plugins/toolsets/bash/argocd/__init__.py +3 -3
- holmes/plugins/toolsets/bash/aws/__init__.py +4 -4
- holmes/plugins/toolsets/bash/azure/__init__.py +4 -4
- holmes/plugins/toolsets/bash/bash_toolset.py +2 -3
- holmes/plugins/toolsets/bash/common/bash.py +19 -9
- holmes/plugins/toolsets/bash/common/bash_command.py +1 -1
- holmes/plugins/toolsets/bash/common/stringify.py +1 -1
- holmes/plugins/toolsets/bash/kubectl/__init__.py +2 -1
- holmes/plugins/toolsets/bash/kubectl/constants.py +0 -1
- holmes/plugins/toolsets/bash/kubectl/kubectl_get.py +3 -4
- holmes/plugins/toolsets/bash/parse_command.py +12 -13
- holmes/plugins/toolsets/connectivity_check.py +124 -0
- holmes/plugins/toolsets/coralogix/api.py +132 -119
- holmes/plugins/toolsets/coralogix/coralogix.jinja2 +14 -0
- holmes/plugins/toolsets/coralogix/toolset_coralogix.py +219 -0
- holmes/plugins/toolsets/coralogix/utils.py +15 -79
- holmes/plugins/toolsets/datadog/datadog_api.py +36 -3
- holmes/plugins/toolsets/datadog/datadog_logs_instructions.jinja2 +34 -1
- holmes/plugins/toolsets/datadog/datadog_metrics_instructions.jinja2 +3 -3
- holmes/plugins/toolsets/datadog/datadog_models.py +59 -0
- holmes/plugins/toolsets/datadog/datadog_url_utils.py +213 -0
- holmes/plugins/toolsets/datadog/instructions_datadog_traces.jinja2 +165 -28
- holmes/plugins/toolsets/datadog/toolset_datadog_general.py +71 -28
- holmes/plugins/toolsets/datadog/toolset_datadog_logs.py +224 -375
- holmes/plugins/toolsets/datadog/toolset_datadog_metrics.py +67 -36
- holmes/plugins/toolsets/datadog/toolset_datadog_traces.py +360 -343
- holmes/plugins/toolsets/elasticsearch/__init__.py +6 -0
- holmes/plugins/toolsets/elasticsearch/elasticsearch.py +834 -0
- holmes/plugins/toolsets/git.py +7 -8
- holmes/plugins/toolsets/grafana/base_grafana_toolset.py +16 -4
- holmes/plugins/toolsets/grafana/common.py +2 -30
- holmes/plugins/toolsets/grafana/grafana_tempo_api.py +2 -1
- holmes/plugins/toolsets/grafana/loki/instructions.jinja2 +18 -2
- holmes/plugins/toolsets/grafana/loki/toolset_grafana_loki.py +92 -18
- holmes/plugins/toolsets/grafana/loki_api.py +4 -0
- holmes/plugins/toolsets/grafana/toolset_grafana.py +109 -25
- holmes/plugins/toolsets/grafana/toolset_grafana_dashboard.jinja2 +22 -0
- holmes/plugins/toolsets/grafana/toolset_grafana_tempo.py +201 -33
- holmes/plugins/toolsets/grafana/trace_parser.py +3 -2
- holmes/plugins/toolsets/internet/internet.py +10 -10
- holmes/plugins/toolsets/internet/notion.py +5 -6
- holmes/plugins/toolsets/investigator/core_investigation.py +3 -3
- holmes/plugins/toolsets/investigator/model.py +3 -1
- holmes/plugins/toolsets/json_filter_mixin.py +134 -0
- holmes/plugins/toolsets/kafka.py +12 -7
- holmes/plugins/toolsets/kubernetes.yaml +260 -30
- holmes/plugins/toolsets/kubernetes_logs.py +3 -3
- holmes/plugins/toolsets/logging_utils/logging_api.py +16 -6
- holmes/plugins/toolsets/mcp/toolset_mcp.py +88 -60
- holmes/plugins/toolsets/newrelic/new_relic_api.py +41 -1
- holmes/plugins/toolsets/newrelic/newrelic.jinja2 +24 -0
- holmes/plugins/toolsets/newrelic/newrelic.py +212 -55
- holmes/plugins/toolsets/prometheus/prometheus.py +358 -102
- holmes/plugins/toolsets/prometheus/prometheus_instructions.jinja2 +11 -3
- holmes/plugins/toolsets/rabbitmq/api.py +23 -4
- holmes/plugins/toolsets/rabbitmq/toolset_rabbitmq.py +5 -5
- holmes/plugins/toolsets/robusta/robusta.py +5 -5
- holmes/plugins/toolsets/runbook/runbook_fetcher.py +25 -6
- holmes/plugins/toolsets/servicenow_tables/servicenow_tables.py +1 -1
- holmes/plugins/toolsets/utils.py +1 -1
- holmes/utils/config_utils.py +1 -1
- holmes/utils/connection_utils.py +31 -0
- holmes/utils/console/result.py +10 -0
- holmes/utils/file_utils.py +2 -1
- holmes/utils/global_instructions.py +10 -26
- holmes/utils/holmes_status.py +4 -3
- holmes/utils/log.py +15 -0
- holmes/utils/markdown_utils.py +2 -3
- holmes/utils/memory_limit.py +58 -0
- holmes/utils/sentry_helper.py +23 -0
- holmes/utils/stream.py +12 -5
- holmes/utils/tags.py +4 -3
- holmes/version.py +3 -1
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/METADATA +12 -10
- holmesgpt-0.18.4.dist-info/RECORD +258 -0
- holmes/plugins/toolsets/aws.yaml +0 -80
- holmes/plugins/toolsets/coralogix/toolset_coralogix_logs.py +0 -114
- holmes/plugins/toolsets/datadog/datadog_traces_formatter.py +0 -310
- holmes/plugins/toolsets/datadog/toolset_datadog_rds.py +0 -736
- holmes/plugins/toolsets/grafana/grafana_api.py +0 -64
- holmes/plugins/toolsets/opensearch/__init__.py +0 -0
- holmes/plugins/toolsets/opensearch/opensearch.py +0 -250
- holmes/plugins/toolsets/opensearch/opensearch_logs.py +0 -161
- holmes/plugins/toolsets/opensearch/opensearch_traces.py +0 -215
- holmes/plugins/toolsets/opensearch/opensearch_traces_instructions.jinja2 +0 -12
- holmes/plugins/toolsets/opensearch/opensearch_utils.py +0 -166
- holmes/utils/keygen_utils.py +0 -6
- holmesgpt-0.16.2a0.dist-info/RECORD +0 -258
- holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_ppl_query_docs.jinja2 +0 -0
- holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_query_assist.py +2 -2
- /holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_query_assist_instructions.jinja2 +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/LICENSE +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/WHEEL +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,834 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from abc import ABC
|
|
3
|
+
from typing import Any, ClassVar, Dict, Optional, Tuple, Type
|
|
4
|
+
|
|
5
|
+
import requests # type: ignore[import-untyped]
|
|
6
|
+
from pydantic import BaseModel, ConfigDict
|
|
7
|
+
|
|
8
|
+
from holmes.core.tools import (
|
|
9
|
+
CallablePrerequisite,
|
|
10
|
+
StructuredToolResult,
|
|
11
|
+
StructuredToolResultStatus,
|
|
12
|
+
Tool,
|
|
13
|
+
ToolInvokeContext,
|
|
14
|
+
ToolParameter,
|
|
15
|
+
Toolset,
|
|
16
|
+
ToolsetTag,
|
|
17
|
+
)
|
|
18
|
+
from holmes.plugins.toolsets.json_filter_mixin import JsonFilterMixin
|
|
19
|
+
from holmes.plugins.toolsets.utils import toolset_name_for_one_liner
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ElasticsearchConfig(BaseModel):
    """Configuration for Elasticsearch/OpenSearch API access.

    Example configuration:
    ```yaml
    url: "https://your-cluster.es.cloud.io"
    api_key: "base64_encoded_api_key"
    ```

    Or with basic auth:
    ```yaml
    url: "https://your-cluster.es.cloud.io"
    username: "elastic"
    password: "your_password"
    ```
    """

    # Base URL of the cluster, e.g. "https://your-cluster.es.cloud.io".
    url: str
    # Base64-encoded API key; when set it is sent as an "Authorization: ApiKey ..." header.
    api_key: Optional[str] = None
    # Basic-auth credentials; BOTH must be set for basic auth to be applied.
    username: Optional[str] = None
    password: Optional[str] = None
    # Whether to verify TLS certificates when connecting over HTTPS.
    verify_ssl: bool = True
    timeout: int = 10  # Default timeout in seconds
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class ElasticsearchBaseToolset(Toolset):
    """Base class for Elasticsearch toolsets with shared configuration and HTTP logic.

    Subclasses provide a name, description and tool list; this class handles
    configuration validation (via the `prerequisites_callable` prerequisite),
    authentication headers/basic auth, and the low-level HTTP request plumbing
    used by all tools.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)
    # Pydantic model used to validate the user-supplied toolset configuration.
    config_class: ClassVar[Type[ElasticsearchConfig]] = ElasticsearchConfig

    def __init__(self, name: str, description: str, tools: list, **kwargs):
        super().__init__(
            name=name,
            enabled=False,  # stays disabled until a valid config passes the health check
            description=description,
            docs_url="https://holmesgpt.dev/data-sources/builtin-toolsets/elasticsearch/",
            icon_url="https://www.elastic.co/favicon.ico",
            prerequisites=[CallablePrerequisite(callable=self.prerequisites_callable)],
            tools=tools,
            tags=[ToolsetTag.CORE],
            **kwargs,
        )

    def prerequisites_callable(self, config: Dict[str, Any]) -> Tuple[bool, str]:
        """Check if the Elasticsearch configuration is valid and the cluster is reachable.

        Args:
            config: Raw configuration dict provided by the user.

        Returns:
            Tuple of (success, human-readable status message).
        """
        try:
            self.config = ElasticsearchConfig(**config)
            return self._perform_health_check()
        except Exception as e:
            return False, f"Failed to validate Elasticsearch configuration: {str(e)}"

    def _perform_health_check(self) -> Tuple[bool, str]:
        """Perform a health check by querying cluster health."""
        try:
            # Use the configured timeout instead of a hard-coded 10s: previously
            # the health check ignored the user-provided `timeout` setting.
            # (Behavior is unchanged for the default config, which is 10s.)
            response = self._make_request(
                "GET", "_cluster/health", timeout=self.elasticsearch_config.timeout
            )
            cluster_name = response.get("cluster_name", "unknown")
            status = response.get("status", "unknown")
            return (
                True,
                f"Connected to Elasticsearch cluster '{cluster_name}' (status: {status})",
            )
        except requests.exceptions.HTTPError as e:
            # Map the common auth failures to actionable messages.
            if e.response.status_code == 401:
                return (
                    False,
                    "Elasticsearch authentication failed. Check your API key or credentials.",
                )
            elif e.response.status_code == 403:
                return (
                    False,
                    "Elasticsearch access denied. Ensure your credentials have cluster access.",
                )
            else:
                return (
                    False,
                    f"Elasticsearch API error: {e.response.status_code} - {e.response.text}",
                )
        except requests.exceptions.ConnectionError:
            return (
                False,
                f"Failed to connect to Elasticsearch at {self.elasticsearch_config.url}",
            )
        except requests.exceptions.Timeout:
            return False, "Elasticsearch health check timed out"
        except Exception as e:
            return False, f"Elasticsearch health check failed: {str(e)}"

    @property
    def elasticsearch_config(self) -> ElasticsearchConfig:
        # `self.config` is assigned in prerequisites_callable; this is a typed accessor.
        return self.config  # type: ignore

    def get_example_config(self) -> Dict[str, Any]:
        """Return an example configuration for this toolset."""
        return {
            "url": "https://your-cluster.es.cloud.io",
            "api_key": "{{ env.ELASTICSEARCH_API_KEY }}",
            "verify_ssl": True,
            "timeout": 10,
        }

    def _get_headers(self) -> Dict[str, str]:
        """Build request headers with authentication."""
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }
        if self.elasticsearch_config.api_key:
            headers["Authorization"] = f"ApiKey {self.elasticsearch_config.api_key}"
        return headers

    def _get_auth(self) -> Optional[Tuple[str, str]]:
        """Return basic auth tuple if username/password configured."""
        if self.elasticsearch_config.username and self.elasticsearch_config.password:
            return (
                self.elasticsearch_config.username,
                self.elasticsearch_config.password,
            )
        return None

    def _make_request(
        self,
        method: str,
        endpoint: str,
        params: Optional[Dict[str, Any]] = None,
        body: Optional[Dict[str, Any]] = None,
        timeout: Optional[int] = None,
    ) -> Dict[str, Any]:
        """Make HTTP request to Elasticsearch.

        Args:
            method: HTTP method (GET, POST, etc.)
            endpoint: API endpoint (e.g., "_cluster/health")
            params: Query parameters
            body: Request body (JSON)
            timeout: Request timeout in seconds (falls back to the configured timeout)

        Returns:
            Parsed JSON response

        Raises:
            requests.exceptions.HTTPError: For HTTP error responses
            requests.exceptions.ConnectionError: For connection problems
            requests.exceptions.Timeout: For timeout errors
        """
        # Normalize slashes so config url and endpoint join cleanly.
        url = f"{self.elasticsearch_config.url.rstrip('/')}/{endpoint.lstrip('/')}"
        timeout = timeout or self.elasticsearch_config.timeout

        response = requests.request(
            method=method,
            url=url,
            headers=self._get_headers(),
            auth=self._get_auth(),
            params=params,
            json=body,
            timeout=timeout,
            verify=self.elasticsearch_config.verify_ssl,
        )
        response.raise_for_status()
        return response.json()
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
class BaseElasticsearchTool(Tool, ABC):
    """Common base for Elasticsearch tools.

    Holds a reference to the owning toolset and converts HTTP outcomes
    (success payloads and the various request failures) into
    StructuredToolResult objects.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    def __init__(self, toolset: ElasticsearchBaseToolset, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._toolset = toolset

    @property
    def toolset(self) -> ElasticsearchBaseToolset:
        return self._toolset

    def _make_request(
        self,
        method: str,
        endpoint: str,
        params: dict,
        query_params: Optional[Dict[str, Any]] = None,
        body: Optional[Dict[str, Any]] = None,
        timeout: Optional[int] = None,
    ) -> StructuredToolResult:
        """Issue a request through the toolset and wrap the outcome as a structured result."""

        def failure(message: str) -> StructuredToolResult:
            # Every error path shares the same result shape; only the message differs.
            return StructuredToolResult(
                status=StructuredToolResultStatus.ERROR,
                error=message,
                params=params,
            )

        try:
            payload = self._toolset._make_request(
                method=method,
                endpoint=endpoint,
                params=query_params,
                body=body,
                timeout=timeout,
            )
            return StructuredToolResult(
                status=StructuredToolResultStatus.SUCCESS,
                data=payload,
                params=params,
            )
        except requests.exceptions.HTTPError as e:
            detail = f"HTTP {e.response.status_code}"
            try:
                # Prefer the structured "error" object from the ES response body.
                parsed = e.response.json()
                if "error" in parsed:
                    detail = f"{detail}: {json.dumps(parsed['error'])}"
            except Exception:
                # Non-JSON error body: fall back to a truncated raw excerpt.
                detail = f"{detail}: {e.response.text[:500]}"

            return failure(
                f"Elasticsearch request failed for endpoint '{endpoint}': {detail}"
            )
        except requests.exceptions.Timeout:
            return failure(
                f"Elasticsearch request timed out for endpoint '{endpoint}'"
            )
        except requests.exceptions.ConnectionError as e:
            return failure(f"Failed to connect to Elasticsearch: {str(e)}")
        except Exception as e:
            return failure(f"Unexpected error querying Elasticsearch: {str(e)}")
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
class ElasticsearchCat(BaseElasticsearchTool):
    """Thin wrapper over the _cat family of APIs.

    Server-side filtering (index pattern, column selection, sorting, health)
    is mapped onto the corresponding _cat query-string options.
    """

    # _cat endpoints that accept an index pattern as a path segment.
    _INDEX_SCOPED_ENDPOINTS = frozenset(
        {"shards", "indices", "segments", "recovery", "aliases"}
    )

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_cat",
            description=(
                "Query Elasticsearch _cat APIs for cluster information. "
                "Supports: indices, shards, nodes, health, allocation, recovery, segments, aliases. "
                "IMPORTANT: Always use the 'index' parameter when querying shards to filter by specific index."
            ),
            parameters={
                "endpoint": ToolParameter(
                    description=(
                        "The _cat endpoint to query. Valid values: "
                        "indices, shards, nodes, health, allocation, recovery, segments, aliases, "
                        "pending_tasks, thread_pool, plugins, nodeattrs, repositories, snapshots, tasks"
                    ),
                    type="string",
                    required=True,
                ),
                "index": ToolParameter(
                    description=(
                        "Filter by index name or pattern. Supports wildcards (e.g., 'logs-*'). "
                        "REQUIRED for shards, segments, recovery endpoints to avoid returning data for all indices. "
                        "Recommended for indices endpoint when looking for specific indices."
                    ),
                    type="string",
                    required=False,
                ),
                "columns": ToolParameter(
                    description=(
                        "Comma-separated list of columns to return (e.g., 'index,shard,prirep,state,docs'). "
                        "Use this to reduce response size. Run without columns first to see available columns."
                    ),
                    type="string",
                    required=False,
                ),
                "sort": ToolParameter(
                    description="Comma-separated list of columns to sort by (e.g., 'docs:desc,index')",
                    type="string",
                    required=False,
                ),
                "health": ToolParameter(
                    description="Filter by index health (green, yellow, red). Only for indices endpoint.",
                    type="string",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        cat_endpoint = params["endpoint"]
        index_pattern = params.get("index")

        # The index pattern becomes a path segment only on endpoints that accept one.
        scoped = bool(index_pattern) and cat_endpoint in self._INDEX_SCOPED_ENDPOINTS
        path = (
            f"_cat/{cat_endpoint}/{index_pattern}" if scoped else f"_cat/{cat_endpoint}"
        )

        # JSON output is always requested; remaining options map onto _cat's
        # short query-string names (h = columns, s = sort).
        query_params: Dict[str, Any] = {"format": "json"}
        if params.get("columns"):
            query_params["h"] = params["columns"]
        if params.get("sort"):
            query_params["s"] = params["sort"]
        if cat_endpoint == "indices" and params.get("health"):
            query_params["health"] = params["health"]

        return self._make_request("GET", path, params, query_params=query_params)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        label = params.get("endpoint", "")
        target = params.get("index", "")
        detail = f" ({target})" if target else ""
        prefix = toolset_name_for_one_liner(self._toolset.name)
        return f"{prefix}: Cat {label}{detail}"
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
class ElasticsearchSearch(BaseElasticsearchTool):
    """Run Query DSL searches against one or more indices.

    Translates the tool parameters into a _search request body; omitted
    parameters are simply left out of the body (Elasticsearch defaults apply).
    """

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_search",
            description=(
                "Execute an Elasticsearch search query using Query DSL. "
                "Supports full Query DSL including bool queries, aggregations, and filters. "
                "Returns up to 100 documents by default (configurable via size parameter)."
            ),
            parameters={
                "index": ToolParameter(
                    description=(
                        "Index name or pattern to search. Supports wildcards (e.g., 'logs-*'). "
                        "Can be comma-separated for multiple indices."
                    ),
                    type="string",
                    required=True,
                ),
                "query": ToolParameter(
                    description=(
                        "Elasticsearch Query DSL query object. Example: "
                        '{"bool": {"must": [{"match": {"level": "ERROR"}}]}}. '
                        "Use match_all for all documents: {}. "
                        "For full-text search use 'match', for exact matches use 'term'."
                    ),
                    type="object",
                    required=False,
                ),
                "size": ToolParameter(
                    description="Maximum number of documents to return (default: 100, max recommended: 500)",
                    type="integer",
                    required=False,
                ),
                "from_offset": ToolParameter(
                    description="Starting offset for pagination (default: 0)",
                    type="integer",
                    required=False,
                ),
                "sort": ToolParameter(
                    description=(
                        "Sort specification. Example: "
                        '[{"@timestamp": "desc"}, {"_score": "asc"}] or just "timestamp:desc"'
                    ),
                    type="array",
                    required=False,
                ),
                "source": ToolParameter(
                    description=(
                        "Fields to include in response. Can be boolean (true/false), "
                        "string (single field), or array of field names"
                    ),
                    type="string",
                    required=False,
                ),
                "aggregations": ToolParameter(
                    description=(
                        "Aggregations to compute. Example: "
                        '{"by_service": {"terms": {"field": "service.keyword", "size": 10}}}. '
                        "Common aggregations: terms (group by), date_histogram, avg, sum, min, max, cardinality."
                    ),
                    type="object",
                    required=False,
                ),
                "profile": ToolParameter(
                    description=(
                        "Enable query profiling to get detailed performance breakdown. "
                        "Shows time spent in each query component. Useful for diagnosing slow queries."
                    ),
                    type="boolean",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        search_body: Dict[str, Any] = {}

        # An empty/missing query means "no query clause" — Elasticsearch then
        # behaves as match_all.
        dsl_query = params.get("query")
        if dsl_query:
            search_body["query"] = dsl_query

        search_body["size"] = params.get("size", 100)

        # Offset 0 is the default, so a falsy from_offset can be omitted.
        if params.get("from_offset"):
            search_body["from"] = params["from_offset"]

        if params.get("sort"):
            search_body["sort"] = params["sort"]

        # `source` may legitimately be false (exclude _source), so test for None.
        if params.get("source") is not None:
            search_body["_source"] = params["source"]

        if params.get("aggregations"):
            search_body["aggs"] = params["aggregations"]

        if params.get("profile"):
            search_body["profile"] = True

        return self._make_request(
            "POST", f"{params['index']}/_search", params, body=search_body
        )

    def get_parameterized_one_liner(self, params: Dict) -> str:
        target = params.get("index", "")
        return f"{toolset_name_for_one_liner(self._toolset.name)}: Search {target}"
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
class ElasticsearchClusterHealth(BaseElasticsearchTool):
    """Expose the _cluster/health API, optionally scoped to an index pattern."""

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_cluster_health",
            description=(
                "Get cluster health information including status (green/yellow/red), "
                "node count, shard counts, and pending tasks."
            ),
            parameters={
                "index": ToolParameter(
                    description="Optional: Get health for specific index or pattern",
                    type="string",
                    required=False,
                ),
                "level": ToolParameter(
                    description=(
                        "Level of detail: 'cluster' (default), 'indices', or 'shards'. "
                        "Higher levels return more detail but more data."
                    ),
                    type="string",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        target = params.get("index")
        # An index pattern narrows the health report to matching indices.
        health_path = f"_cluster/health/{target}" if target else "_cluster/health"

        extra: Dict[str, Any] = {}
        detail_level = params.get("level")
        if detail_level:
            extra["level"] = detail_level

        return self._make_request("GET", health_path, params, query_params=extra)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        target = params.get("index", "")
        detail = f" ({target})" if target else ""
        prefix = toolset_name_for_one_liner(self._toolset.name)
        return f"{prefix}: Cluster health{detail}"
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
class ElasticsearchMappings(BaseElasticsearchTool, JsonFilterMixin):
    """Fetch index mappings (field definitions and types).

    Mixes in JsonFilterMixin so large mapping responses can be trimmed
    with a jq expression before being returned.
    """

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_mappings",
            description=(
                "Get the field mappings (schema) for an index. "
                "Shows field names, data types, and analyzers. "
                "Useful for understanding index structure before writing queries. "
                "For large mappings, use the jq parameter to filter results "
                "(e.g., jq='.*.mappings.properties | keys' to list field names)."
            ),
            parameters=JsonFilterMixin.extend_parameters(
                {
                    "index": ToolParameter(
                        description="Index name or pattern to get mappings for",
                        type="string",
                        required=True,
                    ),
                }
            ),
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        # Fetch the raw mapping, then apply any jq filter supplied via the mixin params.
        raw_result = self._make_request("GET", f"{params['index']}/_mapping", params)
        return self.filter_result(raw_result, params)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        target = params.get("index", "")
        prefix = toolset_name_for_one_liner(self._toolset.name)
        return f"{prefix}: Get mappings for {target}"
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
class ElasticsearchIndexStats(BaseElasticsearchTool):
    """Expose the index _stats API (document counts, storage, operation rates)."""

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_index_stats",
            description=(
                "Get detailed statistics for indices including document count, "
                "store size, indexing rate, and search rate."
            ),
            parameters={
                "index": ToolParameter(
                    description="Index name or pattern. Use '_all' for all indices.",
                    type="string",
                    required=True,
                ),
                "metrics": ToolParameter(
                    description=(
                        "Comma-separated list of metrics to return. Options: "
                        "_all, docs, store, indexing, search, get, merge, refresh, flush, warmer, "
                        "query_cache, fielddata, completion, segments, translog, recovery"
                    ),
                    type="string",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        target = params["index"]
        metric_filter = params.get("metrics")

        # Metrics, when given, narrow the stats via an extra path segment.
        stats_path = (
            f"{target}/_stats/{metric_filter}" if metric_filter else f"{target}/_stats"
        )

        return self._make_request("GET", stats_path, params)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        target = params.get("index", "")
        prefix = toolset_name_for_one_liner(self._toolset.name)
        return f"{prefix}: Stats for {target}"
|
|
581
|
+
|
|
582
|
+
|
|
583
|
+
class ElasticsearchAllocationExplain(BaseElasticsearchTool):
    """Explain shard allocation decisions and issues.

    Wraps the `_cluster/allocation/explain` API. Called with no parameters,
    Elasticsearch explains the first unassigned shard it finds; when both
    index and shard are supplied, that specific shard is explained instead.
    """

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_allocation_explain",
            description=(
                "Explain why a shard is unassigned or how allocation decisions are made. "
                "Call without parameters to explain the first unassigned shard, "
                "or specify index/shard to explain a specific shard."
            ),
            parameters={
                "index": ToolParameter(
                    description="Index name for specific shard explanation",
                    type="string",
                    required=False,
                ),
                "shard": ToolParameter(
                    description="Shard number (0-based) for specific shard explanation",
                    type="integer",
                    required=False,
                ),
                "primary": ToolParameter(
                    description="True for primary shard, false for replica (default: true)",
                    type="boolean",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        """Send a request body only when a specific shard is being targeted.

        Both `index` and `shard` are required to target a shard; otherwise the
        request is sent with no body and the API picks an unassigned shard.
        """
        body: Optional[Dict[str, Any]] = None

        if params.get("index") is not None and params.get("shard") is not None:
            body = {
                "index": params["index"],
                "shard": params["shard"],
                # Primary shard is the Elasticsearch API's own default too.
                "primary": params.get("primary", True),
            }

        return self._make_request(
            "GET", "_cluster/allocation/explain", params, body=body
        )

    def get_parameterized_one_liner(self, params: Dict) -> str:
        """Render a short, human-readable summary of this explain call."""
        index = params.get("index", "")
        # BUGFIX: default must be None, not "" — an empty-string default passed
        # the `is not None` check below, so a request with an index but no
        # shard rendered "... shard " with a dangling empty value. Using None
        # keeps shard 0 rendering correctly while excluding the missing case.
        shard = params.get("shard")
        if index and shard is not None:
            return f"{toolset_name_for_one_liner(self._toolset.name)}: Explain allocation for {index} shard {shard}"
        return f"{toolset_name_for_one_liner(self._toolset.name)}: Explain unassigned shard"
|
|
634
|
+
|
|
635
|
+
|
|
636
|
+
class ElasticsearchNodesStats(BaseElasticsearchTool):
    """Surface the `_nodes/<id>/stats` API (JVM, OS, filesystem, thread pools, ...)."""

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_nodes_stats",
            description=(
                "Get statistics for cluster nodes including JVM, OS, process, "
                "thread pool, filesystem, transport, and HTTP metrics."
            ),
            parameters={
                "node_id": ToolParameter(
                    description="Specific node ID or name. Use '_local' for current node, '_all' for all nodes.",
                    type="string",
                    required=False,
                ),
                "metrics": ToolParameter(
                    description=(
                        "Comma-separated list of metrics. Options: "
                        "_all, breaker, fs, http, indices, jvm, os, process, thread_pool, transport, discovery"
                    ),
                    type="string",
                    required=False,
                ),
            },
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        """Issue GET _nodes/<node_id>/stats, optionally scoped to metric groups."""
        target_node = params.get("node_id", "_all")
        requested = params.get("metrics")

        # Start from the base endpoint and only append a metric-group
        # segment when one was explicitly requested.
        endpoint = f"_nodes/{target_node}/stats"
        if requested:
            endpoint = f"{endpoint}/{requested}"

        return self._make_request("GET", endpoint, params)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        """Render a short, human-readable summary of this node-stats call."""
        target_node = params.get("node_id", "_all")
        return f"{toolset_name_for_one_liner(self._toolset.name)}: Node stats ({target_node})"
|
|
680
|
+
|
|
681
|
+
|
|
682
|
+
class ElasticsearchListIndices(BaseElasticsearchTool, JsonFilterMixin):
    """List indices matching a pattern with full server-side filtering support.

    Thin wrapper over `_cat/indices` that forwards column selection, sorting,
    and health/byte-unit filters to the server, then applies the JSON filter
    mixin to the response.
    """

    def __init__(self, toolset: ElasticsearchBaseToolset):
        super().__init__(
            toolset=toolset,
            name="elasticsearch_list_indices",
            description=(
                "List Elasticsearch indices matching a pattern. "
                "Returns index names, document counts, and storage size. "
                "Supports server-side sorting and filtering for efficient queries on large clusters."
            ),
            parameters=JsonFilterMixin.extend_parameters(
                {
                    "pattern": ToolParameter(
                        description=(
                            "Index name pattern to match. Supports wildcards (e.g., 'logs-*', 'app-*'). "
                            "Use '*' to list all indices."
                        ),
                        type="string",
                        required=False,
                    ),
                    "sort": ToolParameter(
                        description=(
                            "Sort by column. Format: 'column' or 'column:desc'. "
                            "Examples: 'store.size:desc' (largest first), 'docs.count:desc', 'index'. "
                            "Default: 'index' (alphabetical)."
                        ),
                        type="string",
                        required=False,
                    ),
                    "columns": ToolParameter(
                        description=(
                            "Comma-separated columns to return. Available: index, health, status, pri, rep, "
                            "docs.count, docs.deleted, store.size, pri.store.size, creation.date, creation.date.string. "
                            "Default: 'index,health,status,docs.count,store.size'"
                        ),
                        type="string",
                        required=False,
                    ),
                    "health": ToolParameter(
                        description="Filter by index health: green, yellow, or red",
                        type="string",
                        required=False,
                    ),
                    "bytes": ToolParameter(
                        description="Unit for byte sizes: b, kb, mb, gb, tb, pb. Default: human-readable.",
                        type="string",
                        required=False,
                    ),
                    "pri": ToolParameter(
                        description="If true, return only primary shard statistics",
                        type="boolean",
                        required=False,
                    ),
                    "expand_wildcards": ToolParameter(
                        description="Which indices to expand wildcards to: open, closed, hidden, none, all. Default: open",
                        type="string",
                        required=False,
                    ),
                }
            ),
        )

    def _invoke(self, params: dict, context: ToolInvokeContext) -> StructuredToolResult:
        """List indices via `_cat/indices`, forwarding display and filter options."""
        index_pattern = params.get("pattern", "*")

        # JSON output plus an explicit column set (`h`) and sort order (`s`)
        # are always requested; the rest are added only when supplied.
        query: Dict[str, Any] = {
            "format": "json",
            "h": params.get("columns", "index,health,status,docs.count,store.size"),
            "s": params.get("sort", "index"),
        }

        for passthrough in ("health", "bytes", "expand_wildcards"):
            if params.get(passthrough):
                query[passthrough] = params[passthrough]

        # `pri` is a boolean flag on our side but a string flag on the API.
        if params.get("pri"):
            query["pri"] = "true"

        raw = self._make_request(
            "GET", f"_cat/indices/{index_pattern}", params, query_params=query
        )
        return self.filter_result(raw, params)

    def get_parameterized_one_liner(self, params: Dict) -> str:
        """Render a short, human-readable summary of this listing call."""
        index_pattern = params.get("pattern", "*")
        return f"{toolset_name_for_one_liner(self._toolset.name)}: List indices ({index_pattern})"
|
|
782
|
+
|
|
783
|
+
|
|
784
|
+
# =============================================================================
|
|
785
|
+
# Toolset Definitions (must be after all tool classes)
|
|
786
|
+
# =============================================================================
|
|
787
|
+
|
|
788
|
+
|
|
789
|
+
class ElasticsearchDataToolset(ElasticsearchBaseToolset):
    """Toolset for querying data stored in Elasticsearch/OpenSearch.

    Bundles the index-level read tools (search, mappings, index listing).
    Requires only index-level read permissions (no cluster-level access needed).
    """

    def __init__(self):
        super().__init__(
            name="elasticsearch/data",
            description="Search and query data in Elasticsearch/OpenSearch indices - logs, metrics, documents",
            tools=[],
        )
        # Each tool needs a back-reference to this toolset, so the list is
        # built after super().__init__() and assigned to the pydantic field.
        self.tools = [
            tool_cls(self)
            for tool_cls in (
                ElasticsearchSearch,
                ElasticsearchMappings,
                ElasticsearchListIndices,
            )
        ]
|
|
808
|
+
|
|
809
|
+
|
|
810
|
+
class ElasticsearchClusterToolset(ElasticsearchBaseToolset):
    """Toolset for troubleshooting Elasticsearch/OpenSearch cluster health.

    Bundles the diagnostics tools (cat APIs, cluster health, index/node stats,
    allocation explain). Requires cluster-level permissions.
    """

    def __init__(self):
        super().__init__(
            name="elasticsearch/cluster",
            description="Troubleshoot Elasticsearch/OpenSearch cluster health - shards, nodes, allocation",
            tools=[],
        )
        # Each tool needs a back-reference to this toolset, so the list is
        # built after super().__init__() and assigned to the pydantic field.
        self.tools = [
            tool_cls(self)
            for tool_cls in (
                ElasticsearchCat,
                ElasticsearchClusterHealth,
                ElasticsearchIndexStats,
                ElasticsearchAllocationExplain,
                ElasticsearchNodesStats,
            )
        ]
|
|
831
|
+
|
|
832
|
+
|
|
833
|
+
# Backwards-compatibility alias: the previously exported single toolset name
# now points to the cluster-focused toolset so existing references keep working.
ElasticsearchToolset = ElasticsearchClusterToolset
|