fixdoc-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fixdoc/__init__.py +8 -0
- fixdoc/cli.py +26 -0
- fixdoc/commands/__init__.py +11 -0
- fixdoc/commands/analyze.py +313 -0
- fixdoc/commands/capture.py +109 -0
- fixdoc/commands/capture_handlers.py +298 -0
- fixdoc/commands/delete.py +72 -0
- fixdoc/commands/edit.py +118 -0
- fixdoc/commands/manage.py +67 -0
- fixdoc/commands/search.py +65 -0
- fixdoc/commands/sync.py +268 -0
- fixdoc/config.py +113 -0
- fixdoc/fix.py +19 -0
- fixdoc/formatter.py +62 -0
- fixdoc/git.py +263 -0
- fixdoc/markdown_parser.py +106 -0
- fixdoc/models.py +83 -0
- fixdoc/parsers/__init__.py +24 -0
- fixdoc/parsers/base.py +131 -0
- fixdoc/parsers/kubernetes.py +584 -0
- fixdoc/parsers/router.py +160 -0
- fixdoc/parsers/terraform.py +409 -0
- fixdoc/storage.py +146 -0
- fixdoc/sync_engine.py +330 -0
- fixdoc/terraform_parser.py +135 -0
- fixdoc-0.0.1.dist-info/METADATA +261 -0
- fixdoc-0.0.1.dist-info/RECORD +30 -0
- fixdoc-0.0.1.dist-info/WHEEL +5 -0
- fixdoc-0.0.1.dist-info/entry_points.txt +2 -0
- fixdoc-0.0.1.dist-info/top_level.txt +1 -0
fixdoc/storage.py
ADDED
@@ -0,0 +1,146 @@
"""Storage management for fixdoc."""

import json
from pathlib import Path
from typing import Optional

from .models import Fix
from .formatter import fix_to_markdown


class FixRepository:
    """
    Manages the local fix database and markdown files.

    Storage structure:
    ~/.fixdoc/
        fixes.json    # JSON database
        docs/         # Generated markdown files
    """

    DEFAULT_PATH = Path.home() / ".fixdoc"

    def __init__(self, base_path: Optional[Path] = None):
        self.base_path = base_path or self.DEFAULT_PATH
        self.db_path = self.base_path / "fixes.json"
        self.docs_path = self.base_path / "docs"
        self._ensure_paths()

    def _ensure_paths(self) -> None:
        """Create necessary directories if they don't exist."""
        self.base_path.mkdir(parents=True, exist_ok=True)
        self.docs_path.mkdir(parents=True, exist_ok=True)
        if not self.db_path.exists():
            self._write_db([])

    def _read_db(self) -> list[dict]:
        """Read the JSON database."""
        try:
            with open(self.db_path, "r") as f:
                return json.load(f)
        except (json.JSONDecodeError, FileNotFoundError):
            return []

    def _write_db(self, data: list[dict]) -> None:
        """Write to the JSON database."""
        with open(self.db_path, "w") as f:
            json.dump(data, f, indent=2)

    def _write_markdown(self, fix: Fix) -> Path:
        """Generate markdown file for a fix."""
        md_path = self.docs_path / f"{fix.id}.md"
        with open(md_path, "w") as f:
            f.write(fix_to_markdown(fix))
        return md_path

    def save(self, fix: Fix) -> Fix:
        """Save a fix to the database and generate markdown."""
        fixes = self._read_db()

        existing_idx = next(
            (i for i, f in enumerate(fixes) if f.get("id") == fix.id), None
        )

        if existing_idx is not None:
            fixes[existing_idx] = fix.to_dict()
        else:
            fixes.append(fix.to_dict())

        self._write_db(fixes)
        self._write_markdown(fix)
        return fix

    def get(self, fix_id: str) -> Optional[Fix]:
        """Retrieve a fix by ID prefix (case-insensitive)."""
        fixes = self._read_db()
        fix_id_lower = fix_id.lower()

        for f in fixes:
            if f["id"].lower().startswith(fix_id_lower):
                return Fix.from_dict(f)
        return None

    def list_all(self) -> list[Fix]:
        """Return all fixes in the database."""
        return [Fix.from_dict(f) for f in self._read_db()]

    def search(self, query: str) -> list[Fix]:
        """Search fixes by query string (case-insensitive)."""
        return [f for f in self.list_all() if f.matches(query)]

    def find_by_resource_type(self, resource_type: str) -> list[Fix]:
        """Find all fixes tagged with a specific resource type."""
        return [f for f in self.list_all() if f.matches_resource_type(resource_type)]

    def delete(self, fix_id: str) -> bool:
        """Delete a fix by ID. Returns True if deleted."""
        fixes = self._read_db()
        fix_id_lower = fix_id.lower()

        for i, f in enumerate(fixes):
            if f["id"].lower().startswith(fix_id_lower):
                deleted_fix = fixes.pop(i)
                self._write_db(fixes)

                md_path = self.docs_path / f"{deleted_fix['id']}.md"
                if md_path.exists():
                    md_path.unlink()
                return True
        return False

    def count(self) -> int:
        """Return the number of fixes in the database."""
        return len(self._read_db())

    def purge(self) -> bool:
        """Delete all fixes and their markdown files. Returns True if anything was deleted."""
        fixes = self._read_db()
        if not fixes:
            return False

        for f in fixes:
            md_path = self.docs_path / f"{f['id']}.md"
            if md_path.exists():
                md_path.unlink()

        self._write_db([])
        return True

    def get_by_full_id(self, fix_id: str) -> Optional[Fix]:
        """Get fix by exact ID match (for sync operations)."""
        fixes = self._read_db()
        for f in fixes:
            if f["id"] == fix_id:
                return Fix.from_dict(f)
        return None

    def list_markdown_files(self) -> list[Path]:
        """List all markdown files in docs directory."""
        if not self.docs_path.exists():
            return []
        return list(self.docs_path.glob("*.md"))

    def get_fix_ids(self) -> set[str]:
        """Get set of all fix IDs."""
        return {f["id"] for f in self._read_db()}
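A minimal usage sketch of the repository API above. The base path and query strings are illustrative only; matching behavior is delegated to Fix.matches() and Fix.matches_resource_type() in models.py:

from pathlib import Path
from fixdoc.storage import FixRepository

repo = FixRepository(base_path=Path("/tmp/fixdoc-demo"))  # omit base_path to use ~/.fixdoc
print(repo.count())                 # 0 on a fresh database
fix = repo.get("a1b2")              # case-insensitive ID-prefix lookup; None if absent
hits = repo.search("timeout")       # matching delegated to Fix.matches()
repo.purge()                        # clears fixes.json and removes generated markdown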
fixdoc/sync_engine.py
ADDED
@@ -0,0 +1,330 @@
"""Core synchronization logic for fixdoc sync."""

from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import List, Optional

from .config import ConfigManager
from .formatter import fix_to_markdown
from .git import GitOperations, GitError, SyncStatus
from .markdown_parser import markdown_to_fix, MarkdownParseError
from .models import Fix
from .storage import FixRepository


class ConflictType(Enum):
    """Types of sync conflicts."""

    BOTH_MODIFIED = "both_modified"
    LOCAL_DELETED_REMOTE_MODIFIED = "local_deleted"
    REMOTE_DELETED_LOCAL_MODIFIED = "remote_deleted"


@dataclass
class SyncConflict:
    """Represents a conflict during sync."""

    fix_id: str
    conflict_type: ConflictType
    local_fix: Optional[Fix] = None
    remote_fix: Optional[Fix] = None


@dataclass
class SyncResult:
    """Result of a sync operation."""

    success: bool
    pushed_fixes: List[str] = field(default_factory=list)
    pulled_fixes: List[str] = field(default_factory=list)
    conflicts: List[SyncConflict] = field(default_factory=list)
    error_message: Optional[str] = None


class SyncEngine:
    """Handles the core sync logic."""

    def __init__(
        self,
        repo: FixRepository,
        git: GitOperations,
        config_manager: ConfigManager,
    ):
        self.repo = repo
        self.git = git
        self.config_manager = config_manager

    def prepare_push(self, push_all: bool = False) -> List[Fix]:
        """
        Identify fixes that need to be pushed.

        By default, only returns fixes that are new or have been modified
        since the last push. Use push_all=True to push all non-private fixes.
        """
        config = self.config_manager.load()
        all_fixes = self.repo.list_all()

        pushable = []
        for fix in all_fixes:
            if fix.is_private:
                continue
            if fix.id in config.private_fixes:
                continue
            pushable.append(fix)

        if push_all:
            return pushable

        # Filter to only new or changed fixes by comparing generated
        # markdown against what's already committed in HEAD.
        changed = []
        for fix in pushable:
            current_md = fix_to_markdown(fix)
            committed_md = self.git.get_file_content_at_ref(
                f"docs/{fix.id}.md", "HEAD"
            )
            if committed_md is None or committed_md != current_md:
                changed.append(fix)

        return changed

    def execute_push(self, fixes: List[Fix], commit_message: Optional[str] = None) -> SyncResult:
        """
        Push fixes to remote:
        1. Regenerate markdown files for fixes
        2. Git add changed files
        3. Git commit with message and author info
        4. Git push to remote
        """
        config = self.config_manager.load()

        if not config.sync.remote_url:
            return SyncResult(
                success=False,
                error_message="Sync not configured. Run 'fixdoc sync init' first.",
            )

        if not fixes:
            return SyncResult(
                success=True,
                pushed_fixes=[],
                error_message="No fixes to push.",
            )

        try:
            docs_path = self.repo.docs_path
            pushed_ids = []

            for fix in fixes:
                if config.user.name and not fix.author:
                    fix.author = config.user.name
                    fix.author_email = config.user.email
                    self.repo.save(fix)

                md_path = docs_path / f"{fix.id}.md"
                with open(md_path, "w") as f:
                    f.write(fix_to_markdown(fix))
                pushed_ids.append(fix.id)

            self.git.add("docs/")

            if not self.git.has_uncommitted_changes():
                return SyncResult(
                    success=True,
                    pushed_fixes=[],
                    error_message="No changes to push.",
                )

            if not commit_message:
                if len(fixes) == 1:
                    commit_message = f"[fixdoc] Add fix: {fixes[0].id[:8]}"
                else:
                    commit_message = f"[fixdoc] Add/update {len(fixes)} fixes"

            author = None
            if config.user.name and config.user.email:
                author = f"{config.user.name} <{config.user.email}>"

            self.git.commit(commit_message, author=author)
            self.git.push(branch=config.sync.branch)

            return SyncResult(success=True, pushed_fixes=pushed_ids)

        except GitError as e:
            return SyncResult(success=False, error_message=str(e))

    def execute_pull(self, force: bool = False) -> SyncResult:
        """
        Pull fixes from remote:
        1. Git fetch
        2. Detect conflicts
        3. If no conflicts or force=True: git pull and update local DB
        4. If conflicts: return conflicts for user resolution
        """
        config = self.config_manager.load()

        if not config.sync.remote_url:
            return SyncResult(
                success=False,
                error_message="Sync not configured. Run 'fixdoc sync init' first.",
            )

        try:
            # Track whether we actually stashed, so stash_pop() is only
            # called when there is a stash entry to restore.
            stashed = False
            if force and self.git.has_uncommitted_changes():
                self.git.stash()
                stashed = True

            had_conflicts, conflict_files = self.git.pull(branch=config.sync.branch)

            if had_conflicts and not force:
                conflicts = self._build_conflicts_from_files(conflict_files)
                self.git.reset_hard("HEAD")
                return SyncResult(success=False, conflicts=conflicts)

            pulled_fixes = self.rebuild_json_from_markdown()

            if stashed:
                self.git.stash_pop()

            return SyncResult(success=True, pulled_fixes=pulled_fixes)

        except GitError as e:
            return SyncResult(success=False, error_message=str(e))

    def _build_conflicts_from_files(self, conflict_files: List[str]) -> List[SyncConflict]:
        """Build conflict objects from conflicted file paths."""
        conflicts = []
        for filepath in conflict_files:
            if filepath.startswith("docs/") and filepath.endswith(".md"):
                fix_id = Path(filepath).stem
                local_fix = self.repo.get(fix_id)
                conflicts.append(
                    SyncConflict(
                        fix_id=fix_id,
                        conflict_type=ConflictType.BOTH_MODIFIED,
                        local_fix=local_fix,
                        remote_fix=None,
                    )
                )
        return conflicts

    def rebuild_json_from_markdown(self) -> List[str]:
        """
        Rebuild fixes.json from markdown files.
        Markdown is source of truth for sync.
        Returns list of fix IDs that were updated/added.
        """
        docs_path = self.repo.docs_path
        updated_ids = []

        if not docs_path.exists():
            return updated_ids

        existing_fixes = {fix.id: fix for fix in self.repo.list_all()}

        for md_file in docs_path.glob("*.md"):
            fix_id = md_file.stem
            try:
                with open(md_file, "r") as f:
                    content = f.read()
                parsed_fix = markdown_to_fix(content, fix_id)

                if fix_id in existing_fixes:
                    existing = existing_fixes[fix_id]
                    if parsed_fix.is_private or existing.is_private:
                        parsed_fix.is_private = existing.is_private

                self.repo.save(parsed_fix)
                updated_ids.append(fix_id)
            except MarkdownParseError:
                continue

        return updated_ids

    def resolve_conflict(
        self, conflict: SyncConflict, resolution: str
    ) -> Optional[Fix]:
        """
        Resolve a conflict based on user choice:
        - 'local': Keep local version
        - 'remote': Accept remote version
        - 'merge': Combine both (add both to notes)
        """
        if resolution == "local":
            return conflict.local_fix

        elif resolution == "remote":
            return conflict.remote_fix

        elif resolution == "merge" and conflict.local_fix and conflict.remote_fix:
            merged = Fix(
                id=conflict.fix_id,
                issue=conflict.remote_fix.issue,
                resolution=conflict.remote_fix.resolution,
                error_excerpt=conflict.remote_fix.error_excerpt or conflict.local_fix.error_excerpt,
                tags=self._merge_tags(conflict.local_fix.tags, conflict.remote_fix.tags),
                notes=self._merge_notes(conflict.local_fix, conflict.remote_fix),
                created_at=conflict.local_fix.created_at,
                updated_at=conflict.remote_fix.updated_at,
                author=conflict.remote_fix.author or conflict.local_fix.author,
                author_email=conflict.remote_fix.author_email or conflict.local_fix.author_email,
            )
            return merged

        return None

    def _merge_tags(self, local_tags: Optional[str], remote_tags: Optional[str]) -> Optional[str]:
        """Merge two tag strings, removing duplicates."""
        if not local_tags and not remote_tags:
            return None

        local_set = set(t.strip() for t in (local_tags or "").split(",") if t.strip())
        remote_set = set(t.strip() for t in (remote_tags or "").split(",") if t.strip())
        merged = local_set | remote_set

        return ",".join(sorted(merged)) if merged else None

    def _merge_notes(self, local_fix: Fix, remote_fix: Fix) -> Optional[str]:
        """Merge notes from both versions."""
        parts = []

        if local_fix.notes:
            parts.append(f"### Local Notes:\n{local_fix.notes}")

        if remote_fix.notes:
            parts.append(f"### Remote Notes:\n{remote_fix.notes}")

        if local_fix.resolution != remote_fix.resolution:
            parts.append(f"### Local Resolution (merged):\n{local_fix.resolution}")

        if parts:
            return "\n\n".join(parts)
        return None

    def get_sync_status(self) -> dict:
        """Get current sync status information."""
        config = self.config_manager.load()

        if not config.sync.remote_url:
            return {
                "configured": False,
                "message": "Sync not configured.",
            }

        git_status = self.git.get_status(branch=config.sync.branch)
        all_fixes = self.repo.list_all()
        pushable = self.prepare_push(push_all=False)
        private_count = len([f for f in all_fixes if f.is_private or f.id in config.private_fixes])

        return {
            "configured": True,
            "remote_url": config.sync.remote_url,
            "branch": config.sync.branch,
            "status": git_status.status.value,
            "commits_ahead": git_status.commits_ahead,
            "commits_behind": git_status.commits_behind,
            "local_changes": git_status.local_changes,
            "pushable_fixes": len(pushable),
            "private_fixes": private_count,
            "total_fixes": len(all_fixes),
        }
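A rough wiring sketch for the engine. Only the SyncEngine parameter names and method calls are taken from this file; the GitOperations and ConfigManager constructor calls are assumptions, since their signatures live in git.py and config.py:

from fixdoc.config import ConfigManager
from fixdoc.git import GitOperations
from fixdoc.storage import FixRepository
from fixdoc.sync_engine import SyncEngine

repo = FixRepository()
engine = SyncEngine(repo=repo, git=GitOperations(), config_manager=ConfigManager())  # constructor args assumed

result = engine.execute_push(engine.prepare_push())  # push only new/changed, non-private fixes
if not result.success:
    print(result.error_message)

pull = engine.execute_pull()
for conflict in pull.conflicts:                      # conflicts come back for user resolution
    merged = engine.resolve_conflict(conflict, "merge")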
fixdoc/terraform_parser.py
ADDED
@@ -0,0 +1,135 @@
"""Parse terraform plan/apply output and extract the relevant errors using regular-expression matching."""

import re
from dataclasses import dataclass
from typing import Optional


@dataclass
class TerraformError:
    """Parsed error object created from terraform output."""

    resource_type: str
    resource_name: str
    resource_address: str
    error_code: Optional[str]
    error_message: str
    file: Optional[str]
    line: Optional[int]
    raw_output: str

    def short_error(self) -> str:
        """Short error description for display."""
        if self.error_code:
            return f"{self.error_code}: {self.error_message[:100]}"
        return self.error_message[:100]


def parse_terraform_error(output: str) -> Optional[TerraformError]:
    """Parse a terraform error block."""

    # Find error block
    error_match = re.search(
        r'│?\s*Error:\s*(.+?)(?=\n│?\s*\n|\n\n|$)', output, re.DOTALL
    )
    if not error_match:
        error_match = re.search(r'Error:\s*(.+?)(?=\n\n|$)', output, re.DOTALL)
    if not error_match:
        return None

    error_block = error_match.group(0)

    # Extract resource address with regex
    resource_match = re.search(
        r'with\s+((?:module\.[^,\s]+\.)?([a-z_]+)\.([a-z0-9_-]+))',
        output,
        re.IGNORECASE,
    )

    if resource_match:
        resource_address = resource_match.group(1)
        resource_type = resource_match.group(2)
        resource_name = resource_match.group(3)
    else:
        resource_type, resource_name, resource_address = "unknown", "unknown", "unknown"

    # Extract file and line
    file_match = re.search(r'on\s+([^\s]+\.tf)\s+line\s+(\d+)', output)
    file = file_match.group(1) if file_match else None
    line = int(file_match.group(2)) if file_match else None

    # Extract error code
    error_code = _extract_error_code(output)

    # Extract error message
    error_message = _extract_error_message(output, error_block)

    return TerraformError(
        resource_type=resource_type,
        resource_name=resource_name,
        resource_address=resource_address,
        error_code=error_code,
        error_message=error_message,
        file=file,
        line=line,
        raw_output=output,
    )


def _extract_error_code(output: str) -> Optional[str]:
    """Extract an error code from terraform output."""
    code_match = re.search(r'Code:\s*["\']?([A-Za-z0-9_]+)["\']?', output)
    if code_match:
        return code_match.group(1)

    status_match = re.search(r'Status:\s*(\d+\s*[A-Za-z]+)', output)
    if status_match:
        return status_match.group(1)

    return None


def _extract_error_message(output: str, error_block: str) -> str:
    """Extract the error message from terraform output."""
    msg_match = re.search(
        r'Message:\s*["\']?(.+?)["\']?(?=\n│|\n\n|$)', output, re.DOTALL
    )
    if msg_match:
        message = msg_match.group(1).strip()
    else:
        first_line = error_block.split('\n')[0]
        message = re.sub(r'^│?\s*Error:\s*', '', first_line).strip()

    message = re.sub(r'\s+', ' ', message).strip()
    return message[:500]


def parse_terraform_output(output: str) -> list[TerraformError]:
    """Parse terraform output for all errors."""
    errors = []
    parts = re.split(r'(?=│?\s*Error:)', output)

    for part in parts:
        if 'Error:' in part:
            parsed = parse_terraform_error(part)
            if parsed:
                errors.append(parsed)

    # resource addresses should be unique
    seen = set()
    unique = []
    for e in errors:
        if e.resource_address not in seen:
            seen.add(e.resource_address)
            unique.append(e)

    return unique


def is_terraform_output(text: str) -> bool:
    """Heuristic check for whether a block of text looks like terraform output."""
    indicators = [
        'Error:', 'azurerm_', 'aws_', 'google_',
        '.tf line', 'with module.', 'Plan:', 'Apply',
    ]
    text_lower = text.lower()
    return any(ind.lower() in text_lower for ind in indicators)
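A quick demonstration of the parser on a fabricated error block (the terraform output below is invented for illustration):

from fixdoc.terraform_parser import is_terraform_output, parse_terraform_output

sample = """\
│ Error: creating storage account: name already taken
│   with azurerm_storage_account.main,
│   on main.tf line 12
"""

if is_terraform_output(sample):
    for err in parse_terraform_output(sample):
        # Prints roughly: azurerm_storage_account.main (main.tf:12) creating storage account: name already taken
        print(err.resource_address, f"({err.file}:{err.line})", err.short_error())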