fixdoc 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fixdoc/__init__.py +8 -0
- fixdoc/cli.py +26 -0
- fixdoc/commands/__init__.py +11 -0
- fixdoc/commands/analyze.py +313 -0
- fixdoc/commands/capture.py +109 -0
- fixdoc/commands/capture_handlers.py +298 -0
- fixdoc/commands/delete.py +72 -0
- fixdoc/commands/edit.py +118 -0
- fixdoc/commands/manage.py +67 -0
- fixdoc/commands/search.py +65 -0
- fixdoc/commands/sync.py +268 -0
- fixdoc/config.py +113 -0
- fixdoc/fix.py +19 -0
- fixdoc/formatter.py +62 -0
- fixdoc/git.py +263 -0
- fixdoc/markdown_parser.py +106 -0
- fixdoc/models.py +83 -0
- fixdoc/parsers/__init__.py +24 -0
- fixdoc/parsers/base.py +131 -0
- fixdoc/parsers/kubernetes.py +584 -0
- fixdoc/parsers/router.py +160 -0
- fixdoc/parsers/terraform.py +409 -0
- fixdoc/storage.py +146 -0
- fixdoc/sync_engine.py +330 -0
- fixdoc/terraform_parser.py +135 -0
- fixdoc-0.0.1.dist-info/METADATA +261 -0
- fixdoc-0.0.1.dist-info/RECORD +30 -0
- fixdoc-0.0.1.dist-info/WHEEL +5 -0
- fixdoc-0.0.1.dist-info/entry_points.txt +2 -0
- fixdoc-0.0.1.dist-info/top_level.txt +1 -0
fixdoc/git.py
ADDED
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
"""Git operations wrapper for fixdoc sync."""
|
|
2
|
+
|
|
3
|
+
import subprocess
|
|
4
|
+
from dataclasses import dataclass, field
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional, Tuple
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class GitError(Exception):
    """Error raised whenever an underlying git operation fails.

    Carries the human-readable ``message`` plus the raw ``stderr``
    captured from the git subprocess (empty string when unavailable).
    """

    def __init__(self, message: str, stderr: str = ""):
        self.message = message
        self.stderr = stderr
        if stderr:
            full_text = f"{message}: {stderr}"
        else:
            full_text = message
        super().__init__(full_text)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class SyncStatus(Enum):
    """Relationship between the local fixdoc repo and its remote."""

    # Local and remote point at the same commit.
    UP_TO_DATE = "up_to_date"
    # Local has commits the remote lacks.
    AHEAD = "ahead"
    # Remote has commits the local repo lacks.
    BEHIND = "behind"
    # Both sides have commits the other side lacks.
    DIVERGED = "diverged"
    # The directory is not a git repository yet.
    NOT_INITIALIZED = "not_initialized"
    # The repository has no configured remote.
    NO_REMOTE = "no_remote"
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class GitStatusInfo:
    """Snapshot of the repo's sync state relative to its remote.

    Produced by GitOperations.get_status(). The counts are only
    meaningful when ``status`` is AHEAD, BEHIND, DIVERGED, or
    UP_TO_DATE (they stay 0 for NOT_INITIALIZED / NO_REMOTE).
    """

    # Overall classification of local vs. remote.
    status: SyncStatus
    # Local commits the remote does not have.
    commits_ahead: int = 0
    # Remote commits the local repo does not have.
    commits_behind: int = 0
    # Paths with uncommitted modifications (from `git status --porcelain`).
    local_changes: list[str] = field(default_factory=list)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class GitOperations:
    """Thin wrapper around git subprocess calls for one repository.

    Every command runs with ``cwd=self.repo_path`` via :meth:`_run`,
    and failures surface as :class:`GitError` — except :meth:`clone`,
    which currently propagates ``subprocess.CalledProcessError``
    (see the note there).
    """

    def __init__(self, repo_path: Path):
        # Directory containing (or destined to contain) the git repo.
        self.repo_path = repo_path

    def _run(
        self, *args: str, check: bool = True, capture_output: bool = True
    ) -> subprocess.CompletedProcess:
        """Execute a git command in the repo directory.

        Args:
            args: Arguments passed to git (e.g. "status", "--porcelain").
            check: When True, a non-zero exit code raises GitError.
            capture_output: Capture stdout/stderr as text.

        Raises:
            GitError: On command failure (when ``check``) or when the
                git executable is not installed / not in PATH.
        """
        cmd = ["git"] + list(args)
        try:
            # check=False so we can translate failures into GitError ourselves.
            result = subprocess.run(
                cmd,
                cwd=self.repo_path,
                capture_output=capture_output,
                text=True,
                check=False,
            )
            if check and result.returncode != 0:
                raise GitError(f"Git command failed: {' '.join(cmd)}", result.stderr)
            return result
        except FileNotFoundError:
            raise GitError("Git is not installed or not in PATH")

    def is_git_repo(self) -> bool:
        """Check if the directory is a git repository."""
        result = self._run("rev-parse", "--git-dir", check=False)
        return result.returncode == 0

    def init(self) -> None:
        """Initialize a new git repository."""
        self._run("init")

    def clone(self, url: str, branch: str = "main") -> None:
        """Clone a remote repository into the repo path.

        Runs in the parent directory because repo_path itself does not
        exist until the clone completes.

        NOTE(review): unlike the other methods this uses check=True on
        subprocess.run directly, so a failed clone raises
        subprocess.CalledProcessError rather than GitError — confirm
        whether callers rely on that before unifying.
        """
        parent = self.repo_path.parent
        name = self.repo_path.name
        subprocess.run(
            ["git", "clone", "-b", branch, url, name],
            cwd=parent,
            capture_output=True,
            text=True,
            check=True,
        )

    def add(self, *paths: str) -> None:
        """Stage the given files for commit (no-op when no paths given)."""
        if not paths:
            return
        self._run("add", *paths)

    def add_all(self) -> None:
        """Stage all changes (git add -A)."""
        self._run("add", "-A")

    def commit(self, message: str, author: Optional[str] = None) -> str:
        """Create a commit and return the new commit's full hash.

        Args:
            message: Commit message.
            author: Optional "Name <email>" override passed to --author.
        """
        args = ["commit", "-m", message]
        if author:
            args.extend(["--author", author])

        # Fix: the original bound this result to an unused local variable.
        self._run(*args)
        hash_result = self._run("rev-parse", "HEAD")
        return hash_result.stdout.strip()

    def push(self, remote: str = "origin", branch: str = "main") -> None:
        """Push commits to remote (-u also sets the upstream branch)."""
        self._run("push", "-u", remote, branch)

    def pull(
        self, remote: str = "origin", branch: str = "main"
    ) -> Tuple[bool, list[str]]:
        """
        Pull from remote.

        Returns (had_conflicts, conflicted_files).

        Raises:
            GitError: If the pull fails for a reason other than merge
                conflicts.
        """
        result = self._run("pull", remote, branch, check=False)

        if result.returncode != 0:
            # git reports conflicts on stdout or stderr depending on version.
            if "CONFLICT" in result.stdout or "CONFLICT" in result.stderr:
                conflict_files = self._get_conflict_files()
                return (True, conflict_files)
            raise GitError("Pull failed", result.stderr)

        return (False, [])

    def _get_conflict_files(self) -> list[str]:
        """Get files with merge conflicts (--diff-filter=U = unmerged)."""
        result = self._run("diff", "--name-only", "--diff-filter=U", check=False)
        if result.returncode == 0 and result.stdout.strip():
            return result.stdout.strip().split("\n")
        return []

    def fetch(self, remote: str = "origin") -> None:
        """Fetch from remote without merging."""
        self._run("fetch", remote)

    def remote_add(self, name: str, url: str) -> None:
        """Add a remote."""
        self._run("remote", "add", name, url)

    def remote_get_url(self, name: str = "origin") -> Optional[str]:
        """Get the remote URL, or None when the remote does not exist."""
        result = self._run("remote", "get-url", name, check=False)
        if result.returncode == 0:
            return result.stdout.strip()
        return None

    def remote_set_url(self, name: str, url: str) -> None:
        """Set the remote URL."""
        self._run("remote", "set-url", name, url)

    def has_remote(self, name: str = "origin") -> bool:
        """Check if a remote exists."""
        result = self._run("remote", "get-url", name, check=False)
        return result.returncode == 0

    def get_status(self, remote: str = "origin", branch: str = "main") -> GitStatusInfo:
        """Get sync status relative to remote.

        Performs a fetch first so the ahead/behind counts are current;
        this means the call needs network access to the remote.
        """
        if not self.is_git_repo():
            return GitStatusInfo(status=SyncStatus.NOT_INITIALIZED)

        if not self.has_remote(remote):
            return GitStatusInfo(status=SyncStatus.NO_REMOTE)

        self.fetch(remote)

        ahead = self._count_commits(f"{remote}/{branch}..HEAD")
        behind = self._count_commits(f"HEAD..{remote}/{branch}")
        local_changes = self.get_changed_files()

        if ahead > 0 and behind > 0:
            status = SyncStatus.DIVERGED
        elif ahead > 0:
            status = SyncStatus.AHEAD
        elif behind > 0:
            status = SyncStatus.BEHIND
        else:
            status = SyncStatus.UP_TO_DATE

        return GitStatusInfo(
            status=status,
            commits_ahead=ahead,
            commits_behind=behind,
            local_changes=local_changes,
        )

    def _count_commits(self, range_spec: str) -> int:
        """Count commits in a rev-list range; 0 when the range is invalid."""
        result = self._run("rev-list", "--count", range_spec, check=False)
        if result.returncode == 0:
            try:
                return int(result.stdout.strip())
            except ValueError:
                return 0
        return 0

    def get_changed_files(self) -> list[str]:
        """Get modified/added/untracked file paths from porcelain status."""
        result = self._run("status", "--porcelain", check=False)
        if result.returncode == 0 and result.stdout.strip():
            files = []
            for line in result.stdout.strip().split("\n"):
                if line:
                    # Porcelain lines are "XY path"; drop the 2-char code + space.
                    files.append(line[3:])
            return files
        return []

    def has_uncommitted_changes(self) -> bool:
        """Check if there are uncommitted changes."""
        result = self._run("status", "--porcelain", check=False)
        return bool(result.stdout.strip())

    def get_current_branch(self) -> Optional[str]:
        """Get the current branch name, or None when it cannot be read."""
        result = self._run("branch", "--show-current", check=False)
        if result.returncode == 0:
            return result.stdout.strip()
        return None

    def checkout_branch(self, branch: str, create: bool = False) -> None:
        """Checkout a branch, optionally creating it (git checkout -b)."""
        if create:
            self._run("checkout", "-b", branch)
        else:
            self._run("checkout", branch)

    def stash(self) -> bool:
        """Stash current changes. Returns True if something was stashed.

        NOTE(review): detection keys on the "No local changes" phrase in
        git's stdout, which is version/locale dependent — confirm.
        """
        result = self._run("stash", check=False)
        return "No local changes" not in result.stdout

    def stash_pop(self) -> None:
        """Pop the most recent stash (best-effort; failures are ignored)."""
        self._run("stash", "pop", check=False)

    def reset_hard(self, ref: str = "HEAD") -> None:
        """Hard reset the working tree to a reference."""
        self._run("reset", "--hard", ref)

    def get_file_content_at_ref(self, filepath: str, ref: str) -> Optional[str]:
        """Get file contents at a specific revision, or None if unavailable."""
        result = self._run("show", f"{ref}:{filepath}", check=False)
        if result.returncode == 0:
            return result.stdout
        return None

    def get_last_commit_message(self) -> Optional[str]:
        """Get the last commit message, or None when it cannot be read."""
        result = self._run("log", "-1", "--pretty=%B", check=False)
        if result.returncode == 0:
            return result.stdout.strip()
        return None
|
|
254
|
+
|
|
255
|
+
def is_git_available() -> bool:
    """Return True when the ``git`` executable can be invoked here."""
    probe = ["git", "--version"]
    try:
        subprocess.run(probe, capture_output=True, text=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return True
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"""Markdown parsing for fixes - reverse of formatter.py."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
from .models import Fix
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class MarkdownParseError(Exception):
    """Signals that a fix markdown document could not be parsed."""
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def markdown_to_fix(content: str, fix_id: str) -> Fix:
    """
    Reconstruct a Fix object from its markdown representation.

    The expected layout mirrors what fix_to_markdown() emits:
    - Header: # Fix: {short_id}
    - Metadata lines: **Created:** / **Updated:** / **Author:** ...
    - Sections: ## Issue, ## Resolution, ## Error Excerpt, ## Notes

    Raises:
        MarkdownParseError: When the required Issue or Resolution
            section is absent.
    """
    # Validate the two mandatory sections up front with guard clauses.
    issue = _extract_section(content, "Issue")
    if not issue:
        raise MarkdownParseError("Missing required 'Issue' section")

    resolution = _extract_section(content, "Resolution")
    if not resolution:
        raise MarkdownParseError("Missing required 'Resolution' section")

    return Fix(
        id=fix_id,
        issue=issue,
        resolution=resolution,
        error_excerpt=_extract_code_block(content, "Error Excerpt"),
        tags=_extract_tags(content),
        notes=_extract_section(content, "Notes"),
        created_at=_extract_metadata(content, "Created") or "",
        updated_at=_extract_metadata(content, "Updated") or "",
        author=_extract_metadata(content, "Author"),
        author_email=_extract_metadata(content, "Author Email"),
    )
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _extract_metadata(content: str, field: str) -> Optional[str]:
|
|
55
|
+
"""Extract **Field:** value from content."""
|
|
56
|
+
pattern = rf"\*\*{re.escape(field)}:\*\*\s*(.+?)(?:\n|$)"
|
|
57
|
+
match = re.search(pattern, content)
|
|
58
|
+
if match:
|
|
59
|
+
return match.group(1).strip()
|
|
60
|
+
return None
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _extract_tags(content: str) -> Optional[str]:
|
|
64
|
+
"""Extract **Tags:** `value` from content."""
|
|
65
|
+
pattern = r"\*\*Tags:\*\*\s*`([^`]+)`"
|
|
66
|
+
match = re.search(pattern, content)
|
|
67
|
+
if match:
|
|
68
|
+
return match.group(1).strip()
|
|
69
|
+
return None
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _extract_section(content: str, section_name: str) -> Optional[str]:
|
|
73
|
+
"""Extract content under ## SectionName header."""
|
|
74
|
+
pattern = rf"##\s+{re.escape(section_name)}\s*\n\n?(.*?)(?=\n##\s|\n\*\*|\Z)"
|
|
75
|
+
match = re.search(pattern, content, re.DOTALL)
|
|
76
|
+
if match:
|
|
77
|
+
text = match.group(1).strip()
|
|
78
|
+
if text.startswith("```"):
|
|
79
|
+
return None
|
|
80
|
+
return text if text else None
|
|
81
|
+
return None
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _extract_code_block(content: str, section_name: str) -> Optional[str]:
|
|
85
|
+
"""Extract code block content from a section."""
|
|
86
|
+
pattern = rf"##\s+{re.escape(section_name)}\s*\n\n?```[^\n]*\n(.*?)```"
|
|
87
|
+
match = re.search(pattern, content, re.DOTALL)
|
|
88
|
+
if match:
|
|
89
|
+
return match.group(1).strip()
|
|
90
|
+
return None
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def parse_markdown_file(file_path: str) -> Fix:
    """Read a markdown file from disk and parse it into a Fix.

    The fix id is taken from the filename stem (``<id>.md`` -> ``<id>``).

    Raises:
        MarkdownParseError: If the file does not exist or lacks the
            required sections.
    """
    from pathlib import Path

    path = Path(file_path)
    if not path.exists():
        raise MarkdownParseError(f"File not found: {file_path}")

    fix_id = path.stem

    # Fix markdown is written as UTF-8; be explicit so parsing does not
    # depend on the platform's default locale encoding.
    content = path.read_text(encoding="utf-8")

    return markdown_to_fix(content, fix_id)
|
fixdoc/models.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"""Fix data model for fixdoc."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field, asdict
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Optional
|
|
6
|
+
import uuid
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _now_iso() -> str:
|
|
10
|
+
"""Get current UTC time as ISO string."""
|
|
11
|
+
return datetime.now(timezone.utc).isoformat()
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class Fix:
    """
    A single recorded fix.

    Required fields: issue, resolution
    Optional fields: error_excerpt, tags, notes, author, author_email
    Auto-generated: id, created_at, updated_at
    """

    issue: str
    resolution: str
    error_excerpt: Optional[str] = None
    tags: Optional[str] = None
    notes: Optional[str] = None
    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    created_at: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )
    updated_at: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )
    author: Optional[str] = None
    author_email: Optional[str] = None
    is_private: bool = False

    def to_dict(self) -> dict:
        """Serialize this fix to a plain (JSON-friendly) dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: dict) -> "Fix":
        """Build a Fix from a dict, tolerating missing optional keys."""
        return cls(
            id=data.get("id", str(uuid.uuid4())),
            issue=data["issue"],
            resolution=data["resolution"],
            error_excerpt=data.get("error_excerpt"),
            tags=data.get("tags"),
            notes=data.get("notes"),
            created_at=data.get("created_at", datetime.now(timezone.utc).isoformat()),
            updated_at=data.get("updated_at", datetime.now(timezone.utc).isoformat()),
            author=data.get("author"),
            author_email=data.get("author_email"),
            is_private=data.get("is_private", False),
        )

    def summary(self) -> str:
        """One-line summary for list views: short id, tags, issue preview."""
        preview = self.issue if len(self.issue) <= 40 else self.issue[:40] + "..."
        tag_part = f" [{self.tags}]" if self.tags else ""
        return f"{self.id[:8]}{tag_part} - {preview}"

    def matches(self, query: str) -> bool:
        """Case-insensitive substring search across all text fields."""
        text_fields = (
            self.issue,
            self.resolution,
            self.error_excerpt,
            self.tags,
            self.notes,
        )
        haystack = " ".join(part for part in text_fields if part).lower()
        return query.lower() in haystack

    def matches_resource_type(self, resource_type: str) -> bool:
        """True when the tags mention the given resource type."""
        return bool(self.tags) and resource_type.lower() in self.tags.lower()

    def touch(self) -> None:
        """Refresh updated_at to the current UTC time."""
        self.updated_at = datetime.now(timezone.utc).isoformat()
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Multi-cloud and multi-tool error parsers for FixDoc.
|
|
3
|
+
|
|
4
|
+
This module provides unified parsing for:
|
|
5
|
+
- Terraform errors (AWS, Azure, GCP)
|
|
6
|
+
- Kubernetes errors (kubectl, Helm)
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from .base import ParsedError, ErrorParser
|
|
10
|
+
from .terraform import TerraformParser, TerraformError
|
|
11
|
+
from .kubernetes import KubernetesParser, KubernetesError
|
|
12
|
+
from .router import detect_and_parse, detect_error_source, ErrorSource
|
|
13
|
+
|
|
14
|
+
# Public API of the parsers package; keep this list in sync with the
# imports above so `from fixdoc.parsers import *` stays accurate.
__all__ = [
    "ParsedError",
    "ErrorParser",
    "TerraformParser",
    "TerraformError",
    "KubernetesParser",
    "KubernetesError",
    "detect_and_parse",
    "detect_error_source",
    "ErrorSource",
]
|
fixdoc/parsers/base.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"""Base classes and interfaces for error parsers."""
|
|
2
|
+
|
|
3
|
+
from abc import ABC, abstractmethod
|
|
4
|
+
from dataclasses import dataclass, field
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class CloudProvider(Enum):
    """Cloud providers a parsed error can be attributed to."""

    AWS = "aws"
    AZURE = "azure"
    GCP = "gcp"
    # Fallback when the provider cannot be determined from the error text.
    UNKNOWN = "unknown"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class ErrorSeverity(Enum):
    """Severity levels assigned to parsed errors."""

    CRITICAL = "critical"
    ERROR = "error"
    WARNING = "warning"
    INFO = "info"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class ParsedError:
    """
    Base class for all parsed errors.

    This provides a unified interface for errors from different sources
    (Terraform, Kubernetes, Ansible, etc.)
    """

    # Core error information
    error_type: str  # e.g., "terraform", "kubectl", "helm"
    error_message: str
    raw_output: str  # the original, unmodified tool output

    # Resource identification
    resource_type: Optional[str] = None  # e.g., "aws_s3_bucket", "Deployment"
    resource_name: Optional[str] = None
    resource_address: Optional[str] = None  # full address like "module.app.aws_s3_bucket.main"

    # Location information
    file: Optional[str] = None
    line: Optional[int] = None
    namespace: Optional[str] = None  # For K8s resources

    # Error classification
    error_code: Optional[str] = None  # e.g., "BucketAlreadyExists", "ImagePullBackOff"
    cloud_provider: CloudProvider = CloudProvider.UNKNOWN
    severity: ErrorSeverity = ErrorSeverity.ERROR

    # Additional context
    suggestions: list[str] = field(default_factory=list)
    related_resources: list[str] = field(default_factory=list)
    tags: list[str] = field(default_factory=list)

    def short_error(self, max_length: int = 100) -> str:
        """Return a shortened error description.

        When an error_code is present, the message budget is what remains
        after the "code: " prefix.
        """
        if self.error_code:
            prefix = f"{self.error_code}: "
            # Fix: clamp at 0 — a prefix longer than max_length would
            # otherwise produce a negative slice bound, which truncates
            # the message from the wrong end.
            remaining = max(0, max_length - len(prefix))
            return f"{prefix}{self.error_message[:remaining]}"
        return self.error_message[:max_length]

    def generate_tags(self) -> str:
        """Generate comma-separated tags for this error.

        Combines explicit tags with resource type, provider, error code,
        and error type, deduplicated case-insensitively.
        """
        tags = list(self.tags)

        if self.resource_type:
            tags.append(self.resource_type)

        if self.cloud_provider != CloudProvider.UNKNOWN:
            tags.append(self.cloud_provider.value)

        if self.error_code:
            tags.append(self.error_code)

        if self.error_type:
            tags.append(self.error_type)

        # Deduplicate while preserving order (first spelling wins).
        seen = set()
        unique_tags = []
        for tag in tags:
            if tag.lower() not in seen:
                seen.add(tag.lower())
                unique_tags.append(tag)

        return ",".join(unique_tags)

    def to_issue_string(self) -> str:
        """Generate an issue string for Fix creation.

        Prefers the most specific resource identifier available, then
        appends the shortened error text.
        """
        parts = []

        if self.resource_address:
            parts.append(self.resource_address)
        elif self.resource_type and self.resource_name:
            parts.append(f"{self.resource_type}/{self.resource_name}")
        elif self.resource_type:
            parts.append(self.resource_type)

        parts.append(self.short_error())

        return ": ".join(parts)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class ErrorParser(ABC):
    """Abstract base class for error parsers.

    Concrete parsers (e.g. Terraform, Kubernetes) implement source
    detection plus multi- and single-error parsing over raw tool output.
    """

    @abstractmethod
    def can_parse(self, text: str) -> bool:
        """Check if this parser can handle the given text."""
        pass

    @abstractmethod
    def parse(self, text: str) -> list[ParsedError]:
        """Parse the text and return a list of errors (possibly empty)."""
        pass

    @abstractmethod
    def parse_single(self, text: str) -> Optional[ParsedError]:
        """Parse a single error block; None when nothing is recognized."""
        pass

    @property
    @abstractmethod
    def name(self) -> str:
        """Return the name of this parser."""
        pass
|