security_use-0.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,196 @@
+ """Dependency vulnerability fixer.
+
+ Updates vulnerable dependencies to safe versions in requirements files.
+ """
+
+ import re
+ import json
+ import urllib.request
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import Optional
+
+
+ @dataclass
+ class FixResult:
+     """Result of applying a fix."""
+     success: bool
+     file_modified: str = ""
+     old_version: str = ""
+     new_version: str = ""
+     diff: str = ""
+     before: str = ""
+     after: str = ""
+     explanation: str = ""
+     error: Optional[str] = None
+
+
+ class DependencyFixer:
+     """Fixer for dependency vulnerabilities."""
+
+     def fix(
+         self,
+         path: str,
+         package_name: str,
+         target_version: Optional[str] = None
+     ) -> FixResult:
+         """Fix a vulnerable dependency by updating its version.
+
+         Args:
+             path: Path to the project directory.
+             package_name: Name of the package to update.
+             target_version: Version to update to. If omitted, the latest version published on PyPI is used.
+
+         Returns:
+             FixResult with the outcome.
+         """
+         path_obj = Path(path)
+
+         if not path_obj.exists():
+             return FixResult(
+                 success=False,
+                 error=f"Path does not exist: {path}"
+             )
+
+         # Find the requirements file containing the package
+         req_file = self._find_package_file(path_obj, package_name)
+         if not req_file:
+             return FixResult(
+                 success=False,
+                 error=f"Package '{package_name}' not found in any dependency file"
+             )
+
+         try:
+             original_content = req_file.read_text()
+             old_version = self._get_package_version(original_content, package_name)
+
+             if not old_version:
+                 return FixResult(
+                     success=False,
+                     error=f"Could not find version for '{package_name}'"
+                 )
+
+             # Determine the target version (fall back to the latest release on PyPI)
+             new_version = target_version or self._get_latest_version(package_name)
+
+             if not new_version:
+                 return FixResult(
+                     success=False,
+                     error=f"Could not determine a target version for '{package_name}'"
+                 )
+
+             if old_version == new_version:
+                 return FixResult(
+                     success=False,
+                     error=f"Package is already at version {new_version}"
+                 )
+
+             # Update the file
+             new_content = self._update_package_version(
+                 original_content, package_name, old_version, new_version
+             )
+
+             # Write the file
+             req_file.write_text(new_content)
+
+             # Generate diff
+             diff = self._generate_diff(original_content, new_content, package_name, old_version, new_version)
+
+             return FixResult(
+                 success=True,
+                 file_modified=str(req_file.relative_to(path_obj) if path_obj.is_dir() else req_file.name),
+                 old_version=old_version,
+                 new_version=new_version,
+                 diff=diff,
+                 explanation=f"Updated {package_name} from {old_version} to {new_version}",
+             )
+
+         except Exception as e:
+             return FixResult(
+                 success=False,
+                 error=str(e)
+             )
+
+
+     def _find_package_file(self, path: Path, package_name: str) -> Optional[Path]:
+         """Find the dependency file containing the package."""
+         patterns = ["requirements*.txt", "pyproject.toml", "Pipfile"]
+
+         # `path` may be a single dependency file rather than a project directory
+         # (fix() handles both cases when reporting the modified file).
+         if path.is_file():
+             candidates = [path]
+         else:
+             candidates = [f for pattern in patterns for f in path.glob(pattern)]
+
+         for f in candidates:
+             content = f.read_text()
+             if re.search(rf'\b{re.escape(package_name)}\b', content, re.IGNORECASE):
+                 return f
+
+         return None
+
+     def _get_package_version(self, content: str, package_name: str) -> Optional[str]:
+         """Extract the current version of a package from file content."""
+         # Try requirements.txt format
+         match = re.search(
+             rf'^{re.escape(package_name)}\s*[=<>~!]=?\s*([\d.]+)',
+             content,
+             re.MULTILINE | re.IGNORECASE
+         )
+         if match:
+             return match.group(1)
+
+         # Try pyproject.toml format
+         match = re.search(
+             rf'"{re.escape(package_name)}\s*[=<>~!]=?\s*([\d.]+)"',
+             content,
+             re.IGNORECASE
+         )
+         if match:
+             return match.group(1)
+
+         return None
+
+     def _update_package_version(
+         self,
+         content: str,
+         package_name: str,
+         old_version: str,
+         new_version: str
+     ) -> str:
+         """Update the package version in the file content."""
+         # Replace in requirements.txt format
+         pattern = rf'^({re.escape(package_name)}\s*[=<>~!]=?\s*){re.escape(old_version)}'
+         new_content = re.sub(pattern, rf'\g<1>{new_version}', content, flags=re.MULTILINE | re.IGNORECASE)
+
+         # If no change, try pyproject.toml format
+         if new_content == content:
+             pattern = rf'("{re.escape(package_name)}\s*[=<>~!]=?\s*){re.escape(old_version)}'
+             new_content = re.sub(pattern, rf'\g<1>{new_version}', content, flags=re.IGNORECASE)
+
+         return new_content
+
+     def _generate_diff(
+         self,
+         old_content: str,
+         new_content: str,
+         package_name: str,
+         old_version: str,
+         new_version: str
+     ) -> str:
+         """Generate a simple diff of the changes."""
+         old_lines = old_content.split("\n")
+         new_lines = new_content.split("\n")
+
+         diff_lines = []
+         for old_line, new_line in zip(old_lines, new_lines):
+             if old_line != new_line:
+                 diff_lines.append(f"-{old_line}")
+                 diff_lines.append(f"+{new_line}")
+
+         if not diff_lines:
+             diff_lines = [
+                 f"-{package_name}=={old_version}",
+                 f"+{package_name}=={new_version}",
+             ]
+
+         return "\n".join(diff_lines)
+
+     def _get_latest_version(self, package_name: str) -> Optional[str]:
+         """Get the latest version of a package from PyPI."""
+         try:
+             url = f"https://pypi.org/pypi/{package_name}/json"
+             request = urllib.request.Request(url, headers={"Accept": "application/json"})
+             with urllib.request.urlopen(request, timeout=10) as response:
+                 data = json.loads(response.read().decode())
+                 return data.get("info", {}).get("version")
+         except Exception:
+             return None
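
For orientation, a minimal usage sketch of the fixer above. The import path is hypothetical; this diff does not show where the module lives inside the package:

    from security_use.fix.dependency import DependencyFixer  # hypothetical module path

    fixer = DependencyFixer()
    result = fixer.fix(path=".", package_name="requests", target_version="2.31.0")
    print(result.diff if result.success else result.error)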
@@ -0,0 +1,191 @@
+ """Infrastructure as Code fixer.
+
+ Generates and optionally applies fixes for IaC security issues.
+ """
+
+ import re
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import Optional
+
+
+ @dataclass
+ class FixResult:
+     """Result of applying or suggesting a fix."""
+     success: bool
+     file_modified: str = ""
+     old_version: str = ""
+     new_version: str = ""
+     diff: str = ""
+     before: str = ""
+     after: str = ""
+     explanation: str = ""
+     error: Optional[str] = None
+
+
+ # Fix mappings for known rules
+ IAC_FIXES = {
+     "S3_PUBLIC_ACCESS": {
+         "pattern": r'acl\s*=\s*"public-read"',
+         "replacement": 'acl = "private"',
+         "explanation": "Changed S3 bucket ACL from public-read to private to prevent unauthorized public access.",
+     },
+     "S3_VERSIONING_DISABLED": {
+         "pattern": r'versioning\s*=\s*false',
+         "replacement": 'versioning = true',
+         "explanation": "Enabled S3 bucket versioning to allow recovery of accidentally deleted objects.",
+     },
+     "SG_OPEN_INGRESS": {
+         "pattern": r'cidr_blocks\s*=\s*\[\s*"0\.0\.0\.0/0"\s*\]',
+         "replacement": 'cidr_blocks = ["10.0.0.0/8"]',
+         "explanation": "Restricted security group ingress to private IP range. Update this to your specific IP range.",
+     },
+     "RDS_PUBLIC": {
+         "pattern": r'publicly_accessible\s*=\s*true',
+         "replacement": 'publicly_accessible = false',
+         "explanation": "Disabled public accessibility for RDS instance. Access via VPC or bastion host instead.",
+     },
+     "EBS_UNENCRYPTED": {
+         "pattern": r'encrypted\s*=\s*false',
+         "replacement": 'encrypted = true',
+         "explanation": "Enabled EBS volume encryption to protect data at rest.",
+     },
+     "IAM_WILDCARD_ACTION": {
+         "pattern": r'"Action"\s*:\s*"\*"',
+         "replacement": '"Action": ["s3:GetObject", "s3:PutObject"]',
+         "explanation": "Replaced wildcard action with specific actions. Update to your required actions.",
+     },
+     "CLOUDTRAIL_DISABLED": {
+         "pattern": r'enable_logging\s*=\s*false',
+         "replacement": 'enable_logging = true',
+         "explanation": "Enabled CloudTrail logging to maintain audit trail.",
+     },
+     "KMS_KEY_ROTATION": {
+         "pattern": r'enable_key_rotation\s*=\s*false',
+         "replacement": 'enable_key_rotation = true',
+         "explanation": "Enabled KMS key rotation for improved security compliance.",
+     },
+ }
+
+
+ class IaCFixer:
+     """Fixer for Infrastructure as Code security issues."""
+
+     def __init__(self):
+         self.fixes = IAC_FIXES
+
+     def fix(
+         self,
+         file_path: str,
+         rule_id: str,
+         line_number: Optional[int] = None,
+         auto_apply: bool = False
+     ) -> FixResult:
+         """Fix an IaC security issue.
+
+         Args:
+             file_path: Path to the IaC file.
+             rule_id: ID of the security rule that was violated.
+             line_number: Optional line number where the issue is located.
+             auto_apply: If True, apply the fix. If False, only suggest.
+
+         Returns:
+             FixResult with the fix details.
+         """
+         path_obj = Path(file_path)
+
+         if not path_obj.exists():
+             return FixResult(
+                 success=False,
+                 error=f"File does not exist: {file_path}"
+             )
+
+         if rule_id not in self.fixes:
+             return FixResult(
+                 success=False,
+                 error=f"No fix available for rule: {rule_id}"
+             )
+
+         fix_info = self.fixes[rule_id]
+
+         try:
+             content = path_obj.read_text()
+             lines = content.split("\n")
+
+             # Find the problematic section
+             pattern = re.compile(fix_info["pattern"], re.IGNORECASE | re.MULTILINE)
+             match = pattern.search(content)
+
+             if not match:
+                 return FixResult(
+                     success=False,
+                     error=f"Could not find the issue pattern for rule {rule_id}"
+                 )
+
+             # Get the before snippet
+             match_line = content[:match.start()].count("\n")
+             start_line = max(0, match_line - 2)
+             end_line = min(len(lines), match_line + 5)
+             before_snippet = "\n".join(lines[start_line:end_line])
+
+             # Apply the fix
+             new_content = pattern.sub(fix_info["replacement"], content, count=1)
+             new_lines = new_content.split("\n")
+
+             # Get the after snippet
+             after_snippet = "\n".join(new_lines[start_line:end_line])
+
+             # Generate diff
+             diff = self._generate_diff(content, new_content)
+
+             if auto_apply:
+                 # Write the fixed content
+                 path_obj.write_text(new_content)
+                 return FixResult(
+                     success=True,
+                     file_modified=file_path,
+                     diff=diff,
+                     before=before_snippet,
+                     after=after_snippet,
+                     explanation=fix_info["explanation"],
+                 )
+             else:
+                 # Return suggested fix without applying
+                 return FixResult(
+                     success=True,
+                     before=before_snippet,
+                     after=after_snippet,
+                     diff=diff,
+                     explanation=fix_info["explanation"],
+                 )
+
+         except Exception as e:
+             return FixResult(
+                 success=False,
+                 error=str(e)
+             )
+
+     def _generate_diff(self, old_content: str, new_content: str) -> str:
+         """Generate a simple line-by-line diff of the changes."""
+         old_lines = old_content.split("\n")
+         new_lines = new_content.split("\n")
+
+         diff_lines = []
+         for old_line, new_line in zip(old_lines, new_lines):
+             if old_line != new_line:
+                 diff_lines.append(f"-{old_line}")
+                 diff_lines.append(f"+{new_line}")
+
+         # Handle length differences
+         if len(old_lines) > len(new_lines):
+             for line in old_lines[len(new_lines):]:
+                 diff_lines.append(f"-{line}")
+         elif len(new_lines) > len(old_lines):
+             for line in new_lines[len(old_lines):]:
+                 diff_lines.append(f"+{line}")
+
+         return "\n".join(diff_lines) if diff_lines else "No changes"
+
+     def get_available_fixes(self) -> list[str]:
+         """Get list of rule IDs that have available fixes."""
+         return list(self.fixes.keys())
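
A short usage sketch of IaCFixer in suggest-only mode (the file name is a placeholder; the import path is hypothetical):

    fixer = IaCFixer()
    result = fixer.fix("main.tf", rule_id="S3_PUBLIC_ACCESS", auto_apply=False)
    if result.success:
        print(result.explanation)
        print(result.diff)  # proposed change only; nothing is written with auto_apply=False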
@@ -0,0 +1,9 @@
+ """Infrastructure as Code parsers and scanners."""
+
+ from security_use.iac.terraform import TerraformParser
+ from security_use.iac.cloudformation import CloudFormationParser
+
+ __all__ = [
+     "TerraformParser",
+     "CloudFormationParser",
+ ]
@@ -0,0 +1,69 @@
+ """Base classes for IaC parsers."""
+
+ from abc import ABC, abstractmethod
+ from dataclasses import dataclass, field
+ from typing import Any, Optional
+
+
+ @dataclass
+ class IaCResource:
+     """Represents a parsed IaC resource."""
+
+     resource_type: str
+     name: str
+     config: dict[str, Any]
+     file_path: str
+     line_number: int
+     end_line: Optional[int] = None
+     provider: str = "unknown"
+
+     def get_config(self, *keys: str, default: Any = None) -> Any:
+         """Get nested config value by key path.
+
+         Args:
+             *keys: Path of keys to traverse.
+             default: Default value if not found.
+
+         Returns:
+             Config value or default.
+         """
+         current = self.config
+         for key in keys:
+             if isinstance(current, dict) and key in current:
+                 current = current[key]
+             else:
+                 return default
+         return current
+
+
+ @dataclass
+ class ParseResult:
+     """Result of parsing an IaC file."""
+
+     resources: list[IaCResource] = field(default_factory=list)
+     variables: dict[str, Any] = field(default_factory=dict)
+     outputs: dict[str, Any] = field(default_factory=dict)
+     errors: list[str] = field(default_factory=list)
+
+
+ class IaCParser(ABC):
+     """Abstract base class for IaC file parsers."""
+
+     @abstractmethod
+     def parse(self, content: str, file_path: str = "<string>") -> ParseResult:
+         """Parse IaC file content.
+
+         Args:
+             content: File content to parse.
+             file_path: Path to the file (for error reporting).
+
+         Returns:
+             ParseResult containing resources and any errors.
+         """
+         pass
+
+     @classmethod
+     @abstractmethod
+     def supported_extensions(cls) -> list[str]:
+         """Return list of supported file extensions."""
+         pass
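
To illustrate the key-path traversal in get_config, a small example with made-up values:

    res = IaCResource(
        resource_type="aws_s3_bucket",
        name="logs",
        config={"versioning": {"enabled": False}},
        file_path="main.tf",
        line_number=1,
    )
    res.get_config("versioning", "enabled", default=True)  # -> False
    res.get_config("lifecycle", "rule")                    # -> None: a missing path yields the default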
@@ -0,0 +1,207 @@
+ """CloudFormation template parser."""
+
+ import json
+ import re
+ from typing import Any, Optional
+
+ import yaml
+
+ from security_use.iac.base import IaCParser, IaCResource, ParseResult
+
+
+ # Custom YAML loader that handles CloudFormation intrinsic functions
+ class CloudFormationLoader(yaml.SafeLoader):
+     """YAML loader that handles CloudFormation intrinsic function tags."""
+
+     pass
+
+
+ def _construct_cfn_tag(loader: yaml.SafeLoader, tag_suffix: str, node: yaml.Node) -> dict:
+     """Construct a dict representing a CloudFormation intrinsic function."""
+     if isinstance(node, yaml.ScalarNode):
+         value = loader.construct_scalar(node)
+     elif isinstance(node, yaml.SequenceNode):
+         value = loader.construct_sequence(node)
+     elif isinstance(node, yaml.MappingNode):
+         value = loader.construct_mapping(node)
+     else:
+         value = None
+     return {f"Fn::{tag_suffix}": value}
+
+
+ def _construct_ref(loader: yaml.SafeLoader, node: yaml.Node) -> dict:
+     """Construct a Ref intrinsic function."""
+     return {"Ref": loader.construct_scalar(node)}
+
+
+ # Register CloudFormation intrinsic function tags
+ CloudFormationLoader.add_constructor("!Ref", _construct_ref)
+ for _tag in (
+     "GetAtt", "Sub", "Join", "If", "Equals", "And", "Or", "Not", "Condition",
+     "FindInMap", "Base64", "Cidr", "GetAZs", "ImportValue", "Select", "Split",
+     "Transform",
+ ):
+     # Bind the tag name via a default argument to avoid late-binding issues.
+     CloudFormationLoader.add_constructor(
+         f"!{_tag}", lambda loader, node, tag=_tag: _construct_cfn_tag(loader, tag, node)
+     )
+
+
+ class CloudFormationParser(IaCParser):
+     """Parser for AWS CloudFormation templates (YAML/JSON)."""
+
+     def parse(self, content: str, file_path: str = "<string>") -> ParseResult:
+         """Parse CloudFormation template.
+
+         Args:
+             content: Template content (YAML or JSON).
+             file_path: Path to the file.
+
+         Returns:
+             ParseResult with resources and any errors.
+         """
+         result = ParseResult()
+
+         # Determine format and parse
+         template = self._parse_template(content, file_path)
+         if template is None:
+             result.errors.append(f"Failed to parse {file_path}: Invalid YAML/JSON")
+             return result
+
+         # Validate it's a CloudFormation template
+         if not self._is_cloudformation(template):
+             result.errors.append(f"{file_path} is not a valid CloudFormation template")
+             return result
+
+         # Extract resources
+         resources = template.get("Resources", {})
+         for resource_name, resource_def in resources.items():
+             resource_type = resource_def.get("Type", "Unknown")
+             properties = resource_def.get("Properties", {})
+
+             line_number = self._find_resource_line(content, resource_name)
+
+             resource = IaCResource(
+                 resource_type=resource_type,
+                 name=resource_name,
+                 config=properties,
+                 file_path=file_path,
+                 line_number=line_number,
+                 provider="aws",
+             )
+             result.resources.append(resource)
+
+         # Extract parameters as variables
+         for param_name, param_def in template.get("Parameters", {}).items():
+             result.variables[param_name] = param_def
+
+         # Extract outputs
+         for output_name, output_def in template.get("Outputs", {}).items():
+             result.outputs[output_name] = output_def
+
+         return result
+
+     def _parse_template(self, content: str, file_path: str) -> Optional[dict[str, Any]]:
+         """Parse template content as YAML or JSON."""
+         # Try YAML first with CloudFormation-aware loader
+         try:
+             template = yaml.load(content, Loader=CloudFormationLoader)
+             if isinstance(template, dict):
+                 return template
+         except yaml.YAMLError:
+             pass
+
+         # Try JSON explicitly
+         try:
+             template = json.loads(content)
+             if isinstance(template, dict):
+                 return template
+         except json.JSONDecodeError:
+             pass
+
+         return None
+
+     def _is_cloudformation(self, template: dict[str, Any]) -> bool:
+         """Check if the template is a valid CloudFormation template."""
+         # Must have either Resources or AWSTemplateFormatVersion
+         if "Resources" in template:
+             return True
+         if "AWSTemplateFormatVersion" in template:
+             return True
+         return False
+
+     def _find_resource_line(self, content: str, resource_name: str) -> int:
+         """Find the line number where a resource is defined."""
+         lines = content.split("\n")
+         for i, line in enumerate(lines, start=1):
+             # Match YAML style: "ResourceName:" at start of line
+             if re.match(rf'^\s*{re.escape(resource_name)}\s*:', line):
+                 return i
+             # Match JSON style: "ResourceName": {
+             if re.search(rf'"{re.escape(resource_name)}"\s*:\s*\{{', line):
+                 return i
+         return 1
+
+     @classmethod
+     def supported_extensions(cls) -> list[str]:
+         """Return supported file extensions."""
+         return [".yaml", ".yml", ".json", ".template"]
+
+
+ class SAMParser(CloudFormationParser):
+     """Parser for AWS SAM (Serverless Application Model) templates.
+
+     SAM is a superset of CloudFormation with additional resource types.
+     """
+
+     def _is_cloudformation(self, template: dict[str, Any]) -> bool:
+         """Check if the template is a valid SAM or CloudFormation template."""
+         # SAM templates have Transform: AWS::Serverless
+         if "Transform" in template:
+             transform = template["Transform"]
+             if isinstance(transform, str) and "AWS::Serverless" in transform:
+                 return True
+             if isinstance(transform, list) and any(
+                 "AWS::Serverless" in t for t in transform
+             ):
+                 return True
+
+         # Fall back to CloudFormation check
+         return super()._is_cloudformation(template)
+
+
+ class CDKOutputParser(IaCParser):
+     """Parser for AWS CDK synthesized CloudFormation templates."""
+
+     def parse(self, content: str, file_path: str = "<string>") -> ParseResult:
+         """Parse CDK output (CloudFormation JSON).
+
+         Args:
+             content: Synthesized CloudFormation template.
+             file_path: Path to the file.
+
+         Returns:
+             ParseResult with resources.
+         """
+         # CDK outputs CloudFormation JSON, so delegate to CloudFormation parser
+         cf_parser = CloudFormationParser()
+         result = cf_parser.parse(content, file_path)
+
+         # Mark resources as CDK-generated
+         for resource in result.resources:
+             resource.config["__cdk_generated"] = True
+
+         return result
+
+     @classmethod
+     def supported_extensions(cls) -> list[str]:
+         """Return supported file extensions."""
+         return [".template.json"]
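
A brief sketch of the parser on an inline template (template content is illustrative):

    template = '''
    AWSTemplateFormatVersion: "2010-09-09"
    Resources:
      Bucket:
        Type: AWS::S3::Bucket
        Properties:
          BucketName: !Sub "${AWS::StackName}-logs"
    '''
    result = CloudFormationParser().parse(template, "template.yaml")
    for res in result.resources:
        # The !Sub shorthand is loaded as {"Fn::Sub": "${AWS::StackName}-logs"}
        print(res.resource_type, res.name, res.line_number)  # AWS::S3::Bucket Bucket 4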