fixdoc-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fixdoc/__init__.py +8 -0
- fixdoc/cli.py +26 -0
- fixdoc/commands/__init__.py +11 -0
- fixdoc/commands/analyze.py +313 -0
- fixdoc/commands/capture.py +109 -0
- fixdoc/commands/capture_handlers.py +298 -0
- fixdoc/commands/delete.py +72 -0
- fixdoc/commands/edit.py +118 -0
- fixdoc/commands/manage.py +67 -0
- fixdoc/commands/search.py +65 -0
- fixdoc/commands/sync.py +268 -0
- fixdoc/config.py +113 -0
- fixdoc/fix.py +19 -0
- fixdoc/formatter.py +62 -0
- fixdoc/git.py +263 -0
- fixdoc/markdown_parser.py +106 -0
- fixdoc/models.py +83 -0
- fixdoc/parsers/__init__.py +24 -0
- fixdoc/parsers/base.py +131 -0
- fixdoc/parsers/kubernetes.py +584 -0
- fixdoc/parsers/router.py +160 -0
- fixdoc/parsers/terraform.py +409 -0
- fixdoc/storage.py +146 -0
- fixdoc/sync_engine.py +330 -0
- fixdoc/terraform_parser.py +135 -0
- fixdoc-0.0.1.dist-info/METADATA +261 -0
- fixdoc-0.0.1.dist-info/RECORD +30 -0
- fixdoc-0.0.1.dist-info/WHEEL +5 -0
- fixdoc-0.0.1.dist-info/entry_points.txt +2 -0
- fixdoc-0.0.1.dist-info/top_level.txt +1 -0
fixdoc/parsers/router.py
ADDED
@@ -0,0 +1,160 @@
+"""
+Error source detection and routing.
+
+This module provides unified error parsing by detecting the source
+of an error and routing to the appropriate parser.
+"""
+
+from enum import Enum
+from typing import Optional
+
+from .base import ParsedError
+from .terraform import TerraformParser, TerraformError
+from .kubernetes import KubernetesParser, KubernetesError
+
+
+class ErrorSource(Enum):
+    """Detected error source."""
+    TERRAFORM = "terraform"
+    KUBERNETES = "kubernetes"
+    HELM = "helm"
+    ANSIBLE = "ansible"  # Future support
+    UNKNOWN = "unknown"
+
+
+# Singleton parser instances
+_terraform_parser = TerraformParser()
+_kubernetes_parser = KubernetesParser()
+
+
+def detect_error_source(text: str) -> ErrorSource:
+    """
+    Detect the source of an error from the text.
+
+    This uses heuristics to determine whether the error comes from
+    Terraform, Kubernetes (kubectl/Helm), or another source.
+
+    Args:
+        text: The error output text to analyze
+
+    Returns:
+        ErrorSource enum indicating the detected source
+    """
+    # Check for Helm first (subset of Kubernetes)
+    helm_indicators = [
+        'helm install', 'helm upgrade', 'helm rollback',
+        'INSTALLATION FAILED', 'UPGRADE FAILED', 'ROLLBACK FAILED',
+        'helm template', 'release "',
+    ]
+    if any(ind.lower() in text.lower() for ind in helm_indicators):
+        return ErrorSource.HELM
+
+    # Check for kubectl/Kubernetes
+    if _kubernetes_parser.can_parse(text):
+        return ErrorSource.KUBERNETES
+
+    # Check for Terraform
+    if _terraform_parser.can_parse(text):
+        return ErrorSource.TERRAFORM
+
+    return ErrorSource.UNKNOWN
+
+
+def detect_and_parse(text: str) -> list[ParsedError]:
+    """
+    Automatically detect the error source and parse the text.
+
+    This is the main entry point for unified error parsing. It detects
+    whether the input is from Terraform, Kubernetes, or another source
+    and routes to the appropriate parser.
+
+    Args:
+        text: The error output text to parse
+
+    Returns:
+        List of ParsedError objects (may be TerraformError or KubernetesError)
+    """
+    source = detect_error_source(text)
+
+    if source == ErrorSource.TERRAFORM:
+        return _terraform_parser.parse(text)
+
+    if source in (ErrorSource.KUBERNETES, ErrorSource.HELM):
+        return _kubernetes_parser.parse(text)
+
+    # Unknown source - try all parsers
+    errors = []
+
+    # Try Terraform parser
+    tf_errors = _terraform_parser.parse(text)
+    if tf_errors:
+        errors.extend(tf_errors)
+
+    # Try Kubernetes parser
+    k8s_errors = _kubernetes_parser.parse(text)
+    if k8s_errors:
+        errors.extend(k8s_errors)
+
+    return errors
+
+
+def parse_single_error(text: str) -> Optional[ParsedError]:
+    """
+    Parse a single error from the text.
+
+    Useful when you expect only one error or want the most relevant one.
+
+    Args:
+        text: The error output text to parse
+
+    Returns:
+        A single ParsedError or None if no error found
+    """
+    errors = detect_and_parse(text)
+    return errors[0] if errors else None
+
+
+def get_parser_for_source(source: ErrorSource):
+    """
+    Get the parser instance for a given error source.
+
+    Args:
+        source: The ErrorSource to get a parser for
+
+    Returns:
+        The appropriate ErrorParser instance or None
+    """
+    if source == ErrorSource.TERRAFORM:
+        return _terraform_parser
+    if source in (ErrorSource.KUBERNETES, ErrorSource.HELM):
+        return _kubernetes_parser
+    return None
+
+
+def summarize_errors(errors: list[ParsedError]) -> str:
+    """
+    Generate a summary of multiple errors.
+
+    Args:
+        errors: List of ParsedError objects
+
+    Returns:
+        A human-readable summary string
+    """
+    if not errors:
+        return "No errors found"
+
+    if len(errors) == 1:
+        e = errors[0]
+        return f"1 {e.error_type} error: {e.short_error()}"
+
+    # Group by error type
+    by_type = {}
+    for e in errors:
+        by_type.setdefault(e.error_type, []).append(e)
+
+    parts = []
+    for error_type, type_errors in by_type.items():
+        parts.append(f"{len(type_errors)} {error_type} error(s)")
+
+    return f"{len(errors)} errors found: " + ", ".join(parts)
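For orientation, here is a minimal usage sketch of the routing entry points in the file above. It is not part of the package diff: it assumes the wheel is installed, the sample Terraform text and variable names are invented, and since `KubernetesParser` (in fixdoc/parsers/kubernetes.py, not shown in this section) is consulted before Terraform, classifying this input as Terraform assumes that parser does not also claim it.

```python
# Illustrative sketch only - not part of the package diff above.
from fixdoc.parsers.router import (
    ErrorSource,
    detect_error_source,
    detect_and_parse,
    summarize_errors,
)

# Hand-written imitation of Terraform's box-drawing error output.
terraform_text = """\
│ Error: creating S3 Bucket (my-bucket): BucketAlreadyExists
│
│   with aws_s3_bucket.data,
│   on main.tf line 12, in resource "aws_s3_bucket" "data":
"""

source = detect_error_source(terraform_text)   # expected: ErrorSource.TERRAFORM
errors = detect_and_parse(terraform_text)      # routed to TerraformParser.parse()
print(source.value)
print(summarize_errors(errors))                # e.g. "1 terraform error: ..."
```

`parse_single_error()` would return just the first of those errors, and `get_parser_for_source()` maps an `ErrorSource` back to the module-level singleton parser; the exact summary text depends on `ParsedError.short_error()` defined in fixdoc/parsers/base.py.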
fixdoc/parsers/terraform.py
ADDED
@@ -0,0 +1,409 @@
+"""
+Terraform error parser with multi-cloud support.
+
+Supports parsing errors from:
+- AWS (aws_*)
+- Azure (azurerm_*)
+- GCP (google_*)
+"""
+
+import re
+from dataclasses import dataclass
+from typing import Optional
+
+from .base import ParsedError, ErrorParser, CloudProvider, ErrorSeverity
+
+
+# Cloud provider detection patterns
+AWS_RESOURCE_PATTERN = re.compile(r'\baws_[a-z_]+\b', re.IGNORECASE)
+AZURE_RESOURCE_PATTERN = re.compile(r'\bazurerm_[a-z_]+\b', re.IGNORECASE)
+GCP_RESOURCE_PATTERN = re.compile(r'\bgoogle_[a-z_]+\b', re.IGNORECASE)
+
+# Common AWS error codes
+AWS_ERROR_CODES = {
+    'AccessDenied', 'AccessDeniedException', 'UnauthorizedAccess',
+    'BucketAlreadyExists', 'BucketAlreadyOwnedByYou',
+    'InvalidParameterValue', 'InvalidParameterCombination',
+    'ResourceNotFoundException', 'ResourceInUseException',
+    'LimitExceeded', 'QuotaExceeded', 'ServiceQuotaExceededException',
+    'ValidationException', 'ValidationError',
+    'InvalidAMIID', 'InvalidSubnet', 'InvalidVpcID',
+    'InsufficientInstanceCapacity', 'InstanceLimitExceeded',
+    'DBInstanceNotFound', 'DBSubnetGroupDoesNotCoverEnoughAZs',
+    'StorageQuotaExceeded', 'InvalidDBInstanceState',
+}
+
+# Common Azure error codes
+AZURE_ERROR_CODES = {
+    'AuthorizationFailed', 'AuthenticationFailed',
+    'StorageAccountAlreadyTaken', 'StorageAccountNotFound',
+    'SkuNotAvailable', 'QuotaExceeded',
+    'ResourceNotFound', 'ResourceGroupNotFound',
+    'ConflictError', 'Conflict',
+    'InvalidParameter', 'BadRequest',
+    'PrincipalNotFound', 'RoleAssignmentExists',
+}
+
+
+@dataclass
+class TerraformError(ParsedError):
+    """Terraform-specific error with additional context."""
+
+    terraform_action: Optional[str] = None  # create, update, delete
+    module_path: Optional[str] = None
+
+    def __post_init__(self):
+        self.error_type = "terraform"
+
+
+class TerraformParser(ErrorParser):
+    """Parser for Terraform apply/plan errors."""
+
+    @property
+    def name(self) -> str:
+        return "terraform"
+
+    def can_parse(self, text: str) -> bool:
+        """Check if text looks like Terraform output."""
+        indicators = [
+            r'Error:',
+            r'│\s*Error:',
+            r'aws_\w+\.',
+            r'azurerm_\w+\.',
+            r'google_\w+\.',
+            r'\.tf\s+line\s+\d+',
+            r'with\s+\w+\.\w+',
+            r'Plan:.*to add.*to change.*to destroy',
+            r'terraform\s+(init|plan|apply)',
+        ]
+        text_lower = text.lower()
+        return any(re.search(pattern, text, re.IGNORECASE) for pattern in indicators)  # regex to determine if we can parse
+
+    def parse(self, text: str) -> list[TerraformError]:
+        """Parse Terraform output for all errors."""
+        errors = []
+
+        # Split on error boundaries
+        # Handle both box-drawing and plain error formats
+        parts = re.split(r'(?=│?\s*Error:)', text)
+
+        for part in parts:
+            if 'Error:' in part:
+                parsed = self.parse_single(part)
+                if parsed:
+                    errors.append(parsed)
+
+        # Deduplicate by resource address
+        seen = set()
+        unique = []
+        for e in errors:
+            key = e.resource_address or e.error_message[:50]
+            if key not in seen:
+                seen.add(key)
+                unique.append(e)
+
+        return unique
+
+    def parse_single(self, text: str) -> Optional[TerraformError]:
+        """Parse a single Terraform error block."""
+
+        # Extract error message
+        error_match = re.search(
+            r'│?\s*Error:\s*(.+?)(?=\n│?\s*\n|\n\n|$)',
+            text,
+            re.DOTALL
+        )
+        if not error_match:
+            error_match = re.search(r'Error:\s*(.+?)(?=\n\n|$)', text, re.DOTALL)
+        if not error_match:
+            return None
+
+        error_block = error_match.group(0)
+
+        # Extract resource information
+        resource_info = self._extract_resource_info(text)
+
+        # Extract file and line
+        file_match = re.search(r'on\s+([^\s]+\.tf)\s+line\s+(\d+)', text)
+        file = file_match.group(1) if file_match else None
+        line = int(file_match.group(2)) if file_match else None
+
+        # Detect cloud provider (from text and resource type)
+        cloud_provider = self._detect_cloud_provider(text, resource_info.get('type'))
+
+        # Extract error code
+        error_code = self._extract_error_code(text)
+
+        # Extract error message
+        error_message = self._extract_error_message(text, error_block)
+
+        # Detect action (create, update, delete)
+        action = self._detect_action(text)
+
+        # Generate tags
+        tags = self._generate_tags(resource_info, cloud_provider, error_code)
+
+        # Generate suggestions
+        suggestions = self._generate_suggestions(error_code, error_message, cloud_provider)
+
+        return TerraformError(
+            error_type="terraform",
+            error_message=error_message,
+            raw_output=text,
+            resource_type=resource_info.get('type'),
+            resource_name=resource_info.get('name'),
+            resource_address=resource_info.get('address'),
+            file=file,
+            line=line,
+            error_code=error_code,
+            cloud_provider=cloud_provider,
+            severity=ErrorSeverity.ERROR,
+            suggestions=suggestions,
+            tags=tags,
+            terraform_action=action,
+            module_path=resource_info.get('module'),
+        )
+
+    def _extract_resource_info(self, text: str) -> dict:
+        """Extract resource type, name, address from error text."""
+        info = {
+            'type': 'unknown',
+            'name': 'unknown',
+            'address': 'unknown',
+            'module': None,
+        }
+
+        # Clean up box-drawing characters for matching
+        clean_text = re.sub(r'│', '', text)
+
+        # Try to match "with <resource_address>" pattern
+        # Handles: "with aws_s3_bucket.data," or "with module.app.aws_s3_bucket.data,"
+        resource_match = re.search(
+            r'with\s+((?:module\.[a-z0-9_-]+\.)*([a-z][a-z0-9_]*)\.([-a-z0-9_]+))',
+            clean_text,
+            re.IGNORECASE,
+        )
+
+        if resource_match:
+            info['address'] = resource_match.group(1)
+            info['type'] = resource_match.group(2)
+            info['name'] = resource_match.group(3)
+
+            # Extract module path if present
+            if 'module.' in info['address']:
+                module_match = re.match(r'(module\.[^.]+)', info['address'])
+                if module_match:
+                    info['module'] = module_match.group(1)
+        else:
+            # Try alternative: look for resource type patterns directly
+            # Matches aws_*, azurerm_*, google_* resource types
+            direct_match = re.search(
+                r'\b((?:aws|azurerm|google)_[a-z0-9_]+)\.([a-z0-9_-]+)\b',
+                clean_text,
+                re.IGNORECASE
+            )
+            if direct_match:
+                info['type'] = direct_match.group(1)
+                info['name'] = direct_match.group(2)
+                info['address'] = f"{info['type']}.{info['name']}"
+            else:
+                # Fallback: Pattern "creating <ResourceType> (<name>)"
+                alt_match = re.search(
+                    r'(?:creating|updating|deleting)\s+([A-Za-z0-9_\s]+)\s*\(([^)]+)\)',
+                    clean_text,
+                    re.IGNORECASE
+                )
+                if alt_match:
+                    info['type'] = alt_match.group(1).strip().replace(' ', '_').lower()
+                    info['name'] = alt_match.group(2).strip()
+                    info['address'] = f"{info['type']}.{info['name']}"

+        return info
+
+    def _detect_cloud_provider(self, text: str, resource_type: Optional[str] = None) -> CloudProvider:
+        """Detect which cloud provider the error relates to."""
+        # First check resource type if provided
+        if resource_type and resource_type != 'unknown':
+            if resource_type.startswith('aws_'):
+                return CloudProvider.AWS
+            if resource_type.startswith('azurerm_'):
+                return CloudProvider.AZURE
+            if resource_type.startswith('google_'):
+                return CloudProvider.GCP
+
+        # Then check text patterns
+        if AWS_RESOURCE_PATTERN.search(text):
+            return CloudProvider.AWS
+        if AZURE_RESOURCE_PATTERN.search(text):
+            return CloudProvider.AZURE
+        if GCP_RESOURCE_PATTERN.search(text):
+            return CloudProvider.GCP
+
+        # Check for provider-specific error patterns
+        aws_patterns = [
+            r'arn:aws:', r'amazonaws\.com', r'aws-sdk',
+            r'ec2:', r's3:', r'iam:', r'lambda:',
+        ]
+        azure_patterns = [
+            r'azure\.com', r'microsoft\.com', r'\.azure\.',
+            r'subscription.*resource\s*group',
+        ]
+        gcp_patterns = [
+            r'googleapis\.com', r'gcloud', r'projects/[^/]+/',
+        ]
+
+        for pattern in aws_patterns:
+            if re.search(pattern, text, re.IGNORECASE):
+                return CloudProvider.AWS
+        for pattern in azure_patterns:
+            if re.search(pattern, text, re.IGNORECASE):
+                return CloudProvider.AZURE
+        for pattern in gcp_patterns:
+            if re.search(pattern, text, re.IGNORECASE):
+                return CloudProvider.GCP
+
+        return CloudProvider.UNKNOWN
+
+    def _extract_error_code(self, text: str) -> Optional[str]:
+        """Extract error code from Terraform output."""
+        # Try explicit Code: field first (highest priority)
+        code_match = re.search(r'Code:\s*["\']?([A-Za-z][A-Za-z0-9_]+)["\']?', text)
+        if code_match:
+            return code_match.group(1)
+
+        # Try api error pattern: "api error <ErrorCode>: message"
+        api_error_match = re.search(r'api\s+error\s+([A-Za-z][A-Za-z0-9_]+):', text, re.IGNORECASE)
+        if api_error_match:
+            return api_error_match.group(1)
+
+        # Try to find known AWS-style error codes (prioritize over generic status)
+        for code in AWS_ERROR_CODES:
+            if code in text:
+                return code
+
+        # Try to find Azure-style error codes
+        for code in AZURE_ERROR_CODES:
+            if code in text:
+                return code
+
+        # Try Status: field with descriptive name (e.g., "403 Forbidden")
+        status_match = re.search(r'Status:\s*(\d+\s*[A-Za-z]+)', text)
+        if status_match:
+            return status_match.group(1).replace(' ', '')
+
+        # Fallback: try generic status code
+        status_code_match = re.search(r'StatusCode:\s*(\d+)', text)
+        if status_code_match:
+            return status_code_match.group(1)
+
+        return None
+
+    def _extract_error_message(self, text: str, error_block: str) -> str:
+        """Extract the main error message."""
+        # Try Message: field first
+        msg_match = re.search(
+            r'Message:\s*["\']?(.+?)["\']?(?=\n│|\n\n|$)',
+            text,
+            re.DOTALL
+        )
+        if msg_match:
+            message = msg_match.group(1).strip()
+        else:
+            # Use the first line of the error block
+            first_line = error_block.split('\n')[0]
+            message = re.sub(r'^│?\s*Error:\s*', '', first_line).strip()
+
+        # Clean up the message
+        message = re.sub(r'\s+', ' ', message).strip()
+        message = re.sub(r'^│\s*', '', message)
+
+        return message[:500]
+
+    def _detect_action(self, text: str) -> Optional[str]:
+        """Detect the Terraform action being performed."""
+        if re.search(r'creating', text, re.IGNORECASE):
+            return 'create'
+        if re.search(r'updating|modifying', text, re.IGNORECASE):
+            return 'update'
+        if re.search(r'deleting|destroying', text, re.IGNORECASE):
+            return 'delete'
+        return None
+
+    def _generate_tags(
+        self,
+        resource_info: dict,
+        cloud_provider: CloudProvider,
+        error_code: Optional[str]
+    ) -> list[str]:
+        """Generate relevant tags for the error."""
+        tags = ['terraform']
+
+        if cloud_provider != CloudProvider.UNKNOWN:
+            tags.append(cloud_provider.value)
+
+        if resource_info.get('type') and resource_info['type'] != 'unknown':
+            tags.append(resource_info['type'])
+
+        if error_code:
+            tags.append(error_code)
+
+        return tags
+
+    def _generate_suggestions(
+        self,
+        error_code: Optional[str],
+        error_message: str,
+        cloud_provider: CloudProvider
+    ) -> list[str]:
+        """Generate fix suggestions based on error patterns."""
+        suggestions = []
+
+        if not error_code:
+            return suggestions
+
+        # AWS-specific suggestions
+        if cloud_provider == CloudProvider.AWS:
+            if error_code in ('AccessDenied', 'AccessDeniedException'):
+                suggestions.append("Check IAM permissions for the Terraform execution role")
+                suggestions.append("Verify the resource policy allows the action")
+            elif error_code == 'BucketAlreadyExists':
+                suggestions.append("S3 bucket names are globally unique - use a different name")
+                suggestions.append("Add a random suffix to the bucket name")
+            elif 'Quota' in error_code or 'Limit' in error_code:
+                suggestions.append("Request a service quota increase via AWS Support")
+                suggestions.append("Check current usage in AWS Service Quotas console")
+            elif error_code == 'InvalidAMIID':
+                suggestions.append("Verify the AMI exists in the target region")
+                suggestions.append("Check if the AMI is shared with your account")
+            elif 'InsufficientCapacity' in error_code:
+                suggestions.append("Try a different availability zone")
+                suggestions.append("Try a different instance type")
+
+        # Azure-specific suggestions
+        elif cloud_provider == CloudProvider.AZURE:
+            if error_code in ('AuthorizationFailed', 'AuthenticationFailed'):
+                suggestions.append("Check Azure RBAC role assignments")
+                suggestions.append("Verify service principal credentials")
+            elif error_code == 'StorageAccountAlreadyTaken':
+                suggestions.append("Storage account names are globally unique - use a different name")
+            elif error_code == 'SkuNotAvailable':
+                suggestions.append("Check VM size availability in the target region")
+                suggestions.append("Try a different region or VM size")
+            elif error_code == 'ConflictError' and 'soft' in error_message.lower():
+                suggestions.append("Recover or purge the soft-deleted resource")
+                suggestions.append("Use az keyvault purge or az keyvault recover")
+
+        return suggestions
+
+
+# Convenience function for backwards compatibility
+def parse_terraform_output(output: str) -> list[TerraformError]:
+    """Parse Terraform output for all errors."""
+    parser = TerraformParser()
+    return parser.parse(output)
+
+
+def is_terraform_output(text: str) -> bool:
+    """Check if text looks like Terraform output."""
+    parser = TerraformParser()
+    return parser.can_parse(text)