awslabs.cloudwatch-mcp-server 0.0.10__py3-none-any.whl → 0.0.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- awslabs/cloudwatch_mcp_server/__init__.py +1 -1
- awslabs/cloudwatch_mcp_server/cloudwatch_alarms/models.py +1 -1
- awslabs/cloudwatch_mcp_server/cloudwatch_alarms/tools.py +2 -2
- awslabs/cloudwatch_mcp_server/cloudwatch_logs/tools.py +59 -10
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/cloudformation_template_generator.py +162 -0
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/constants.py +30 -0
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/metric_analyzer.py +192 -0
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/metric_data_decomposer.py +218 -0
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/models.py +129 -3
- awslabs/cloudwatch_mcp_server/cloudwatch_metrics/tools.py +365 -31
- {awslabs_cloudwatch_mcp_server-0.0.10.dist-info → awslabs_cloudwatch_mcp_server-0.0.13.dist-info}/METADATA +6 -2
- awslabs_cloudwatch_mcp_server-0.0.13.dist-info/RECORD +21 -0
- awslabs_cloudwatch_mcp_server-0.0.10.dist-info/RECORD +0 -17
- {awslabs_cloudwatch_mcp_server-0.0.10.dist-info → awslabs_cloudwatch_mcp_server-0.0.13.dist-info}/WHEEL +0 -0
- {awslabs_cloudwatch_mcp_server-0.0.10.dist-info → awslabs_cloudwatch_mcp_server-0.0.13.dist-info}/entry_points.txt +0 -0
- {awslabs_cloudwatch_mcp_server-0.0.10.dist-info → awslabs_cloudwatch_mcp_server-0.0.13.dist-info}/licenses/LICENSE +0 -0
- {awslabs_cloudwatch_mcp_server-0.0.10.dist-info → awslabs_cloudwatch_mcp_server-0.0.13.dist-info}/licenses/NOTICE +0 -0
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import numpy as np
|
|
16
|
+
import pandas as pd
|
|
17
|
+
import statsmodels.api as sm
|
|
18
|
+
from awslabs.cloudwatch_mcp_server.cloudwatch_metrics.constants import (
|
|
19
|
+
NUMERICAL_STABILITY_THRESHOLD,
|
|
20
|
+
)
|
|
21
|
+
from awslabs.cloudwatch_mcp_server.cloudwatch_metrics.models import (
|
|
22
|
+
DecompositionResult,
|
|
23
|
+
Seasonality,
|
|
24
|
+
Trend,
|
|
25
|
+
)
|
|
26
|
+
from loguru import logger
|
|
27
|
+
from statsmodels.regression.linear_model import OLS
|
|
28
|
+
from typing import List, Optional, Tuple
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class MetricDataDecomposer:
    """Decomposes metric time series data into seasonal and trend components."""

    # Minimum fraction of detrended variance explained by the seasonal pattern
    # required to accept a candidate period as real seasonality.
    SEASONALITY_STRENGTH_THRESHOLD = 0.6  # See https://robjhyndman.com/hyndsight/tsoutliers/
    # p-value cutoff below which the OLS slope is treated as a real trend.
    STATISTICAL_SIGNIFICANCE_THRESHOLD = 0.05

    def detect_seasonality_and_trend(
        self,
        timestamps_ms: List[int],
        values: List[float],
        density_ratio: float,
        publishing_period_seconds: int,
    ) -> DecompositionResult:
        """Analyze seasonality and extract trend component.

        Args:
            timestamps_ms: Sample timestamps in epoch milliseconds.
            values: Metric values aligned with timestamps_ms.
            density_ratio: Fraction of expected datapoints actually present;
                analysis is skipped when this is <= 0.5.
            publishing_period_seconds: Expected interval between datapoints.

        Returns:
            DecompositionResult with seasonality and trend
        """
        # Return NONE for empty data or insufficient density
        if not timestamps_ms or not values or density_ratio <= 0.5:
            return DecompositionResult(seasonality=Seasonality.NONE, trend=Trend.NONE)

        # Interpolate if we have sufficient density
        timestamps_ms, values = self._interpolate_to_regular_grid(
            timestamps_ms, values, publishing_period_seconds
        )

        return self._detect_strongest_seasonality(timestamps_ms, values, publishing_period_seconds)

    def _interpolate_to_regular_grid(
        self, timestamps_ms: List[int], values: List[float], period_seconds: float
    ) -> Tuple[List[int], List[float]]:
        """Interpolate data to regular grid using numpy."""
        # Nothing to interpolate with fewer than two samples.
        if len(timestamps_ms) < 2:
            return timestamps_ms, values

        period_ms = int(period_seconds * 1000)
        start_time = timestamps_ms[0]
        end_time = timestamps_ms[-1]

        # Create regular grid covering [start, end] at the publishing period.
        regular_timestamps = list(range(start_time, end_time + period_ms, period_ms))

        # Interpolate using numpy (linear interpolation between known samples)
        interpolated_values = np.interp(regular_timestamps, timestamps_ms, values).tolist()

        return regular_timestamps, interpolated_values

    def _detect_strongest_seasonality(
        self, timestamps_ms: List[int], values: List[float], period_seconds: Optional[float]
    ) -> DecompositionResult:
        """Detect seasonal patterns and compute trend in the data."""
        timestamps_ms = sorted(timestamps_ms)

        # Calculate period for analysis; infer it from the spacing of the
        # first two samples when not supplied.
        if period_seconds is None and len(timestamps_ms) > 1:
            period_seconds = (timestamps_ms[1] - timestamps_ms[0]) / 1000

        if period_seconds is None or period_seconds <= 0:
            period_seconds = 300  # 5 minutes default

        # Winsorize values: clip extreme outliers to the 0.1%/99.9% quantiles
        # so single spikes do not dominate the variance-based strength score.
        values_array = np.array(values)
        qtiles = np.quantile(values_array, [0.001, 0.999])
        lo, hi = qtiles
        winsorized_values = np.clip(values_array, lo, hi)

        # Test seasonal periods (candidate cycle lengths, in seconds)
        seasonal_periods_seconds = [
            Seasonality.FIFTEEN_MINUTES.value,
            Seasonality.ONE_HOUR.value,
            Seasonality.SIX_HOURS.value,
            Seasonality.ONE_DAY.value,
            Seasonality.ONE_WEEK.value,
        ]

        best_seasonality = Seasonality.NONE
        best_strength = 0.0
        best_deseasonalized = None

        for seasonal_period_seconds in seasonal_periods_seconds:
            datapoints_per_period = seasonal_period_seconds / period_seconds
            # Require at least two full cycles to evaluate a candidate period.
            min_required_points = datapoints_per_period * 2

            if len(values) < min_required_points or datapoints_per_period <= 0:
                continue

            strength, deseasonalized = self._calculate_seasonal_strength(
                winsorized_values, int(datapoints_per_period)
            )
            if strength > best_strength:
                best_strength = strength
                best_seasonality = Seasonality.from_seconds(seasonal_period_seconds)
                best_deseasonalized = deseasonalized

        # Compute trend from deseasonalized data if seasonality detected
        if best_strength > self.SEASONALITY_STRENGTH_THRESHOLD and best_deseasonalized is not None:
            trend = self._compute_trend(best_deseasonalized)
            return DecompositionResult(seasonality=best_seasonality, trend=trend)
        else:
            # No seasonality, compute trend on raw values
            trend = self._compute_trend(winsorized_values)
            return DecompositionResult(seasonality=Seasonality.NONE, trend=trend)

    def _calculate_seasonal_strength(
        self, values: np.ndarray, seasonal_period: int
    ) -> Tuple[float, Optional[np.ndarray]]:
        """Calculate seasonal strength and extract deseasonalized data for trend.

        Returns:
            Tuple of (strength, deseasonalized_values) where deseasonalized = original - seasonal_pattern
        """
        if len(values) < seasonal_period * 2 or seasonal_period <= 0:
            return (0.0, None)

        # Reshape data into seasonal cycles
        n_cycles = len(values) // seasonal_period
        if n_cycles <= 0:
            return (0.0, None)

        # Drop the trailing partial cycle so the data reshapes evenly.
        truncated_values = values[: n_cycles * seasonal_period]
        reshaped = truncated_values.reshape(n_cycles, seasonal_period)

        # Calculate seasonal pattern (mean across cycles)
        seasonal_pattern = np.mean(reshaped, axis=0)
        tiled_pattern = np.tile(seasonal_pattern, n_cycles)

        # Calculate trend (moving average) for seasonal strength calculation
        trend_series = (
            pd.Series(truncated_values)
            .rolling(window=seasonal_period, center=True, min_periods=1)
            .mean()
        )
        trend = np.asarray(trend_series)

        # Calculate components
        detrended = truncated_values - trend
        remainder = detrended - tiled_pattern

        # Seasonal strength = 1 - Var(remainder) / Var(detrended)
        var_remainder = np.var(remainder)
        var_detrended = np.var(detrended)

        # Guard against division by (near-)zero variance.
        if var_detrended <= NUMERICAL_STABILITY_THRESHOLD:
            return (0.0, None)

        strength = max(0.0, float(1 - var_remainder / var_detrended))

        # Return deseasonalized data (original - seasonal pattern) for trend calculation
        deseasonalized = truncated_values - tiled_pattern
        return (strength, deseasonalized)

    def _compute_trend(self, values: np.ndarray) -> Trend:
        """Compute trend using OLS on trend component values."""
        if len(values) <= 2:
            return Trend.NONE

        try:
            # Keep only finite values, preserving their original index positions.
            valid_data = [
                (i, v) for i, v in enumerate(values) if not np.isnan(v) and not np.isinf(v)
            ]
            if len(valid_data) <= 2:
                return Trend.NONE

            x_vals = np.array([x for x, _ in valid_data])
            y_vals = np.array([y for _, y in valid_data])

            # Check if all values are the same (flat line)
            if np.std(y_vals) < NUMERICAL_STABILITY_THRESHOLD:
                return Trend.NONE

            # Normalize x to roughly [0, 1] for numerical stability of the fit;
            # the epsilon keeps the denominator nonzero.
            x_vals = (x_vals - x_vals.min()) / (
                x_vals.max() - x_vals.min() + NUMERICAL_STABILITY_THRESHOLD
            )

            X = sm.add_constant(x_vals)
            model = OLS(y_vals, X).fit()

            slope = model.params[1]
            p_value = model.pvalues[1]

            # Slope not statistically significant => report no trend.
            if p_value >= self.STATISTICAL_SIGNIFICANCE_THRESHOLD:
                return Trend.NONE

            return Trend.POSITIVE if slope > 0 else Trend.NEGATIVE
        except Exception as e:
            logger.warning(f'Error computing trend: {e}')
            return Trend.NONE
|
|
@@ -14,10 +14,57 @@
|
|
|
14
14
|
|
|
15
15
|
"""Data models for CloudWatch Metrics MCP tools."""
|
|
16
16
|
|
|
17
|
+
from awslabs.cloudwatch_mcp_server.cloudwatch_metrics.constants import (
|
|
18
|
+
DAYS_PER_WEEK,
|
|
19
|
+
HOURS_PER_DAY,
|
|
20
|
+
MINUTES_PER_HOUR,
|
|
21
|
+
SECONDS_PER_MINUTE,
|
|
22
|
+
)
|
|
17
23
|
from datetime import datetime
|
|
18
24
|
from enum import Enum
|
|
19
|
-
from pydantic import BaseModel, Field
|
|
20
|
-
from typing import Any, Dict, List
|
|
25
|
+
from pydantic import BaseModel, Field, field_validator, model_serializer, model_validator
|
|
26
|
+
from typing import Any, ClassVar, Dict, List, Optional, Union
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Trend(str, Enum):
    """Trend direction based on statistical significance."""

    # Statistically significant upward slope.
    POSITIVE = 'positive'
    # Statistically significant downward slope.
    NEGATIVE = 'negative'
    # No significant trend detected (or insufficient data).
    NONE = 'none'
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# Tolerance, as a fraction of a candidate period, used when snapping a raw
# number of seconds onto one of the known seasonality buckets.
SEASONALITY_ROUNDING_THRESHOLD = 0.1


class Seasonality(Enum):
    """Seasonality detection results with period in seconds."""

    NONE = 0
    FIFTEEN_MINUTES = 15 * SECONDS_PER_MINUTE
    ONE_HOUR = MINUTES_PER_HOUR * SECONDS_PER_MINUTE
    SIX_HOURS = 6 * ONE_HOUR
    ONE_DAY = HOURS_PER_DAY * ONE_HOUR
    ONE_WEEK = DAYS_PER_WEEK * ONE_DAY

    @classmethod
    def from_seconds(cls, seconds: Union[float, int]) -> 'Seasonality':
        """Convert seconds to closest seasonality enum."""
        target = int(seconds)
        nearest = min(cls, key=lambda member: abs(member.value - target))
        # Accept the nearest bucket only when it lies within the rounding
        # tolerance of that bucket's own period; otherwise report NONE.
        if abs(nearest.value - target) < nearest.value * SEASONALITY_ROUNDING_THRESHOLD:
            return nearest
        return cls.NONE
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class DecompositionResult(BaseModel):
    """Result of metric data decomposition into seasonal and trend components."""

    # Detected seasonal period (Seasonality.NONE when no candidate qualified).
    seasonality: Seasonality
    # Trend direction computed on the (de)seasonalized series.
    trend: Trend
|
|
21
68
|
|
|
22
69
|
|
|
23
70
|
class SortOrder(str, Enum):
|
|
@@ -107,8 +154,32 @@ class MetricMetadata(BaseModel):
|
|
|
107
154
|
class AlarmRecommendationThreshold(BaseModel):
    """Represents an alarm threshold configuration."""

    # Human-readable rationale explaining why this threshold was chosen.
    justification: str = Field(default='', description='Justification for the threshold value')
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
class StaticAlarmThreshold(AlarmRecommendationThreshold):
    """Represents an alarm static threshold configuration."""

    # Fixed numeric threshold the alarm compares the metric against.
    staticValue: float = Field(..., description='The static threshold value')
|
|
111
|
-
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class AnomalyDetectionAlarmThreshold(AlarmRecommendationThreshold):
    """Represents an anomaly detection alarm threshold configuration."""

    # Default width multiplier for the anomaly detection band.
    DEFAULT_SENSITIVITY: ClassVar[float] = 2.0

    sensitivity: float = Field(
        default=DEFAULT_SENSITIVITY, description='The sensitivity of the Anomaly Detection bands.'
    )

    @field_validator('sensitivity')
    @classmethod
    def validate_sensitivity(cls, v):
        """Validate sensitivity is within acceptable range."""
        # Extreme sensitivity values result in reduced Anomaly Detection performance
        if v <= 0 or v > 100:
            raise ValueError('Sensitivity must be above 0 and less than or equal to 100')
        return v
|
|
112
183
|
|
|
113
184
|
|
|
114
185
|
class AlarmRecommendationDimension(BaseModel):
|
|
@@ -148,3 +219,58 @@ class AlarmRecommendation(BaseModel):
|
|
|
148
219
|
default_factory=list, description='List of dimensions for the alarm'
|
|
149
220
|
)
|
|
150
221
|
intent: str = Field(..., description='The intent or purpose of the alarm')
|
|
222
|
+
cloudformation_template: Optional[Dict[str, Any]] = Field(
|
|
223
|
+
default=None,
|
|
224
|
+
description='CloudFormation template (only for anomaly detection alarms)',
|
|
225
|
+
)
|
|
226
|
+
|
|
227
|
+
@model_serializer
|
|
228
|
+
def serialize_model(self):
|
|
229
|
+
"""Serialize alarm recommendation to dict format."""
|
|
230
|
+
data = {
|
|
231
|
+
'alarmDescription': self.alarmDescription,
|
|
232
|
+
'threshold': self.threshold,
|
|
233
|
+
'period': self.period,
|
|
234
|
+
'comparisonOperator': self.comparisonOperator,
|
|
235
|
+
'statistic': self.statistic,
|
|
236
|
+
'evaluationPeriods': self.evaluationPeriods,
|
|
237
|
+
'datapointsToAlarm': self.datapointsToAlarm,
|
|
238
|
+
'treatMissingData': self.treatMissingData,
|
|
239
|
+
'dimensions': self.dimensions,
|
|
240
|
+
'intent': self.intent,
|
|
241
|
+
}
|
|
242
|
+
if self.cloudformation_template is not None:
|
|
243
|
+
data['cloudformation_template'] = self.cloudformation_template
|
|
244
|
+
return data
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
class MetricData(BaseModel):
    """Represents CloudWatch Metric (time series) data."""

    # Aggregation period; validated to be strictly positive.
    period_seconds: int = Field(
        ..., description='The aggregation period in seconds of the requested metric data'
    )
    # Parallel lists: timestamps[i] is the epoch time of values[i].
    timestamps: List[int] = Field(default_factory=list, description='List of metric timestamps')
    values: List[float] = Field(default_factory=list, description='List of metric values')

    @model_validator(mode='after')
    def validate_metric_data(self):
        """Validate MetricData after initialization.

        Raises:
            ValueError: If timestamps and values differ in length, or the
                period is not strictly positive.
        """
        if len(self.timestamps) != len(self.values):
            raise ValueError('Timestamps and values must have the same length')
        if self.period_seconds <= 0:
            # Fixed message: the check requires a strictly positive period,
            # but the message previously claimed '>= 0'.
            raise ValueError('Timeseries must have a period > 0')
        return self
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
class AlarmRecommendationResult(BaseModel):
    """Result wrapper for alarm recommendations with a success/failure message to guide the calling LLM."""

    # Matching recommendations; empty when none were found or generation failed.
    recommendations: List[AlarmRecommendation] = Field(
        default_factory=list,
        description='A list of alarm recommendations that match the provided dimensions.',
    )
    # Human/LLM-readable status text describing the outcome.
    message: str = Field(
        ...,
        description='Message describing the success/failure of generating alarm recommendation.',
    )
|