cfn-check 0.6.1-py3-none-any.whl → 0.7.0-py3-none-any.whl
Potentially problematic release.
This version of cfn-check might be problematic.
- cfn_check/cli/render.py +26 -22
- cfn_check/cli/utils/files.py +0 -1
- cfn_check/cli/utils/stdout.py +18 -0
- cfn_check/cli/validate.py +12 -22
- cfn_check/evaluation/evaluator.py +12 -3
- cfn_check/evaluation/validate.py +6 -1
- cfn_check/rendering/cidr_solver.py +66 -0
- cfn_check/rendering/renderer.py +500 -177
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/METADATA +58 -4
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/RECORD +14 -12
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/WHEEL +0 -0
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/entry_points.txt +0 -0
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/top_level.txt +0 -0
cfn_check/cli/render.py
CHANGED
@@ -3,6 +3,7 @@ from async_logging import LogLevelName, Logger, LoggingConfig
 from cocoa.cli import CLI
 
 from cfn_check.cli.utils.files import load_templates, write_to_file
+from cfn_check.cli.utils.stdout import write_to_stdout
 from cfn_check.rendering import Renderer
 from cfn_check.logging.models import InfoLog
 
@@ -10,36 +11,22 @@ from cfn_check.logging.models import InfoLog
 @CLI.command()
 async def render(
     path: str,
-    output_file: str
+    output_file: str | None = None,
+    attributes: list[str] | None = None,
+    mappings: list[str] | None = None,
     parameters: list[str] | None = None,
     references: list[str] | None = None,
-    tags: list[str] = [
-        'Ref',
-        'Sub',
-        'Join',
-        'Select',
-        'Split',
-        'GetAtt',
-        'GetAZs',
-        'ImportValue',
-        'Equals',
-        'If',
-        'Not',
-        'And',
-        'Or',
-        'Condition',
-        'FindInMap',
-    ],
     log_level: LogLevelName = 'info',
 ):
     """
     Render a Cloud Formation template
 
     @param output_file Path to output the rendered CloudFormation template to
+    @param attributes A list of <key>=<value> input !GetAtt attributes to use
+    @param mappings A list of <key>=<value> input Mappings to use
     @param parameters A list of <key>=<value> input Parameters to use
     @param references A list of <key>=<value> input !Ref values to use
-    @param
-    @param log_level The log level to use
+    @param log-level The log level to use
     """
     logging_config = LoggingConfig()
     logging_config.update(
@@ -47,6 +34,18 @@ async def render(
         log_output='stderr',
     )
 
+    parsed_attributes: dict[str, str] | None = None
+    if attributes:
+        parsed_attributes = dict([
+            attribute.split('=', maxsplit=1) for attribute in attributes if len(attribute.split('=', maxsplit=1)) > 0
+        ])
+
+    parsed_mappings: dict[str, str] | None = None
+    if mappings:
+        parsed_mappings = dict([
+            mapping.split('=', maxsplit=1) for mapping in mappings if len(mapping.split('=', maxsplit=1)) > 0
+        ])
+
     parsed_parameters: dict[str, str] | None = None
     if parameters:
         parsed_parameters = dict([
@@ -72,10 +71,15 @@ async def render(
     renderer = Renderer()
     rendered = renderer.render(
         template,
+        attributes=parsed_attributes,
+        mappings=parsed_mappings,
        parameters=parsed_parameters,
        references=parsed_references,
     )
 
-
+    if output_file is False:
+        await write_to_file(output_file, rendered)
+        await logger.log(InfoLog(message=f'✅ {path} template rendered'))
 
-
+    else:
+        await write_to_stdout(rendered)
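For reference, the new `--attributes`, `--mappings`, and `--parameters` options all rely on the same `key=value` parsing shown in the diff above. A minimal standalone sketch of that behavior (the `parse_pairs` helper name is illustrative; the CLI inlines this logic):

```python
# Illustrative sketch of the key=value parsing used by the new render options.
# `parse_pairs` is a hypothetical helper name, not part of the package.
def parse_pairs(values: list[str] | None) -> dict[str, str] | None:
    if not values:
        return None
    # split on the first '=' only, so values may themselves contain '='
    return dict(value.split('=', maxsplit=1) for value in values)


print(parse_pairs(['Environment=prod', 'VpcCidr=10.0.0.0/16']))
# {'Environment': 'prod', 'VpcCidr': '10.0.0.0/16'}
```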
cfn_check/cli/utils/stdout.py
ADDED
@@ -0,0 +1,18 @@
+import asyncio
+import sys
+from ruamel.yaml import YAML
+from ruamel.yaml.comments import CommentedBase
+
+async def write_to_stdout(data: CommentedBase):
+    loop = asyncio.get_event_loop()
+
+    yaml = YAML(typ=['rt'])
+    yaml.preserve_quotes = True
+    yaml.width = 4096
+    yaml.indent(mapping=2, sequence=4, offset=2)
+    await loop.run_in_executor(
+        None,
+        yaml.dump,
+        data,
+        sys.stdout,
+    )
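A minimal usage sketch of the new `write_to_stdout` helper, assuming a template already loaded with ruamel.yaml in round-trip mode (the `template.yml` path is illustrative):

```python
import asyncio

from ruamel.yaml import YAML

from cfn_check.cli.utils.stdout import write_to_stdout


async def main():
    # Load a round-trip YAML document, then dump it to stdout without
    # blocking the event loop; the dump runs in the default executor.
    yaml = YAML(typ=['rt'])
    with open('template.yml') as template_file:  # illustrative path
        data = yaml.load(template_file)

    await write_to_stdout(data)


asyncio.run(main())
```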
cfn_check/cli/validate.py
CHANGED
@@ -11,36 +11,24 @@ from cfn_check.collection.collection import Collection
 from cfn_check.validation.validator import Validator
 
 
-@CLI.command(
+@CLI.command(
+    shortnames={
+        'flags': 'F'
+    }
+)
 async def validate(
     path: str,
     file_pattern: str | None = None,
     rules: ImportType[Collection] = None,
-    tags: list[str] = [
-        'Ref',
-        'Sub',
-        'Join',
-        'Select',
-        'Split',
-        'GetAtt',
-        'GetAZs',
-        'ImportValue',
-        'Equals',
-        'If',
-        'Not',
-        'And',
-        'Or',
-        'Condition',
-        'FindInMap',
-    ],
+    flags: list[str] | None = None,
     log_level: LogLevelName = 'info',
 ):
     '''
     Validate Cloud Foundation
 
-    @param
+    @param disabled A list of string features to disable during checks
     @param file_pattern A string pattern used to find template files
-    @param
+    @param rules Path to a file containing Collections
     @param log_level The log level to use
     '''
 
@@ -52,9 +40,11 @@ async def validate(
 
     logger = Logger()
 
+    if flags is None:
+        flags = []
+
     templates = await load_templates(
         path,
-        tags,
         file_pattern=file_pattern,
     )
 
@@ -71,7 +61,7 @@ async def validate(
         for rule in rules.data.values()
        for _, validation in inspect.getmembers(rule)
        if isinstance(validation, Validator)
-    ])
+    ], flags=flags)
 
     if validation_error := validation_set.validate([
         template_data for _, template_data in templates
cfn_check/evaluation/evaluator.py
CHANGED
@@ -12,7 +12,14 @@ from .parsing import QueryParser
 
 class Evaluator:
 
-    def __init__(
+    def __init__(
+        self,
+        flags: list[str] | None = None
+    ):
+        if flags is None:
+            flags = []
+
+        self.flags = flags
         self._query_parser = QueryParser()
         self._renderer = Renderer()
 
@@ -22,9 +29,11 @@ class Evaluator:
         path: str,
     ):
         items: Items = deque()
+
+        if 'no-render' not in self.flags:
+            resources = self._renderer.render(resources)
 
-
-        items.append(rendered)
+        items.append(resources)
 
         segments = path.split("::")[::-1]
         # Queries can be multi-segment,
cfn_check/evaluation/validate.py
CHANGED
@@ -9,8 +9,13 @@ class ValidationSet:
     def __init__(
         self,
         validators: list[Validator],
+        flags: list[str] | None = None
     ):
-
+
+        if flags is None:
+            flags = []
+
+        self._evaluator = Evaluator(flags=flags)
         self._validators = validators
 
     @property
cfn_check/rendering/cidr_solver.py
ADDED
@@ -0,0 +1,66 @@
+class IPv4CIDRSolver:
+
+    def __init__(
+        self,
+        host: str,
+        desired: int,
+        bits: int,
+    ):
+        self.host = host
+        self.subnets_desired = desired
+        self.subnet_bits = bits
+
+        host_ip, mask = self.host.split('/', maxsplit=1)
+
+        self.host_ip = host_ip
+        self._host_mask_string = f'/{mask}'
+        self.host_mask = int(mask)
+
+        self.subnet_mask = 32 - bits
+
+        self._host_octets = [
+            int(octet) for octet in self.host.strip(self._host_mask_string).split('.')
+        ]
+
+    def provision_subnets(self):
+        subnet_requested_ips = 2**self.subnet_bits
+        host_available_ips = 2**(32 - self.host_mask)
+
+        total_ips_requested = subnet_requested_ips * self.subnets_desired
+        if host_available_ips < total_ips_requested:
+            return []
+
+        return [
+            self._provision_subnet(
+                subnet_requested_ips,
+                idx
+            ) for idx in range(self.subnets_desired)
+        ]
+
+
+    def _provision_subnet(
+        self,
+        requested_ips: int,
+        idx: int,
+    ):
+        increment = requested_ips
+        octet_idx = -1
+        if requested_ips > 255:
+            increment /= 256
+            octet_idx -= 1
+
+        increment *= idx
+
+        subnet = list(self._host_octets)
+
+        subnet[octet_idx] += increment
+
+        subnet_base_ip = '.'.join([
+            str(octet) for octet in subnet
+        ])
+
+        return f'{subnet_base_ip}/{self.subnet_mask}'
+
+
+
+
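A quick usage sketch of the new solver, roughly the call the renderer makes when it resolves `!Cidr ["10.0.0.0/24", 4, 6]` (the CIDR values here are illustrative):

```python
from cfn_check.rendering.cidr_solver import IPv4CIDRSolver

# Carve four subnets with 6 subnet bits (64 addresses each, i.e. /26 blocks)
# out of a /24 host block.
solver = IPv4CIDRSolver('10.0.0.0/24', desired=4, bits=6)
print(solver.provision_subnets())
# ['10.0.0.0/26', '10.0.0.64/26', '10.0.0.128/26', '10.0.0.192/26']
```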
cfn_check/rendering/renderer.py
CHANGED
@@ -2,10 +2,12 @@ from __future__ import annotations
 import base64
 import json
 import re
+import copy
 from typing import Callable, Any
 from collections import deque
 from ruamel.yaml.tag import Tag
 from ruamel.yaml.comments import TaggedScalar, CommentedMap, CommentedSeq
+from .cidr_solver import IPv4CIDRSolver
 from .utils import assign
 
 from cfn_check.shared.types import (
@@ -14,6 +16,14 @@ from cfn_check.shared.types import (
     YamlObject,
 )
 
+Resolver = Callable[
+    [
+        CommentedMap,
+        CommentedMap | CommentedSeq | TaggedScalar | YamlObject
+    ],
+    CommentedMap | CommentedSeq | TaggedScalar | YamlObject,
+]
+
 class Renderer:
 
     def __init__(self):
@@ -31,6 +41,44 @@ class Renderer:
         self._resources: dict[str, YamlObject] = CommentedMap()
         self._attributes: dict[str, str] = {}
 
+        self._inline_functions = {
+            'Fn::ForEach': re.compile(r'Fn::ForEach::\w+'),
+            'Fn::If': re.compile(r'Fn::If'),
+            'Fn::And': re.compile(r'Fn::And'),
+            'Fn::Equals': re.compile(r'Fn::Equals'),
+            'Fn::Not': re.compile(r'Fn::Not'),
+            'Fn::Or': re.compile(r'Fn::Or'),
+            'Fn:GetAtt': re.compile(r'Fn::GetAtt'),
+            'Fn::Join': re.compile(r'Fn::Join'),
+            'Fn::Sub': re.compile(r'Fn::Sub'),
+            'Fn::Base64': re.compile(r'Fn::Base64'),
+            'Fn::Split': re.compile(r'Fn::Split'),
+            'Fn::Select': re.compile(r'Fn::Select'),
+            'Fn::ToJsonString': re.compile(r'Fn::ToJsonString'),
+            'Fn::Condition': re.compile(r'Fn::Condition'),
+            'Fn::Cidr': re.compile(r'Fn::Cidr'),
+            'Fn::Length': re.compile(r'Fn::Length')
+        }
+
+        self._inline_resolvers = {
+            'Fn::ForEach': self._resolve_foreach,
+            'Fn::If': self._resolve_if,
+            'Fn::And': self._resolve_and,
+            'Fn::Equals': self._resolve_equals,
+            'Fn::Not': self._resolve_not,
+            'Fn::Or': self._resolve_or,
+            'Fn:GetAtt': self._resolve_getatt,
+            'Fn::Join': self._resolve_join,
+            'Fn::Sub': self._resolve_sub,
+            'Fn::Base64': self._resolve_base64,
+            'Fn::Split': self._resolve_split,
+            'Fn::Select': self._resolve_select,
+            'Fn::ToJsonString': self._resolve_tree_to_json,
+            'Fn::Condition': self._resolve_condition,
+            'Fn::Cidr': self._resolve_cidr,
+            'Fn::Length': self._resolve_length
+        }
+
         self._resolvers: dict[str, Callable[[CommentedMap, str], YamlObject]] = {
             '!Ref': self._resolve_ref,
             '!FindInMap': self._resolve_by_subset_query,
@@ -46,7 +94,8 @@ class Renderer:
             '!Condition': self._resolve_condition,
             '!And': self._resolve_and,
             '!Not': self._resolve_not,
-            '!Or': self._resolve_or
+            '!Or': self._resolve_or,
+            '!Cidr': self._resolve_cidr,
         }
 
     def render(
@@ -62,14 +111,6 @@ class Renderer:
 
         self._assemble_parameters(template)
 
-        attributes = {
-            'LambdaExecutionRole.Arn': 'This is a test',
-            'AllSecurityGroups.Value': [
-                '123456',
-                '112211'
-            ]
-
-        }
         if attributes:
             self._attributes = self._process_attributes(attributes)
 
@@ -96,7 +137,14 @@ class Renderer:
 
         while self.items:
             parent, accessor, node = self.items.pop()
-
+            if match := self._match_and_resolve_accessor_fn(
+                root,
+                parent,
+                accessor,
+                node,
+            ):
+                root.update(match)
+
             if isinstance(node, TaggedScalar):
                 # Replace in parent
                 if parent is not None and (
@@ -142,31 +190,34 @@ class Renderer:
 
         return root
 
-    def
+    def _match_and_resolve_accessor_fn(
         self,
-        root: CommentedMap,
-
+        root: CommentedMap,
+        parent: CommentedMap | CommentedSeq | TaggedScalar | YamlObject | None,
+        accessor: str | int | None,
+        node: CommentedMap | CommentedSeq | TaggedScalar | YamlObject,
     ):
-
-
-
-
-
-
-
-
-
-
-
-
+        if not isinstance(accessor, str):
+            return None
+
+        resolver: Resolver | None = None
+        matcher_pattern: re.Pattern | None = None
+        for key, pattern in self._inline_functions.items():
+            if pattern.match(accessor):
+                matcher_pattern = pattern
+                resolver = self._inline_resolvers[key]
+
+        if resolver is None:
+            return None
+
+        result = resolver(root, node)
 
-        return
-
-
-
-
-
-        self._parameters_with_defaults[param_name] = default
+        return self._replace_target(
+            root,
+            parent,
+            result,
+            matcher_pattern,
+        )
 
     def _resolve_tagged(self, root: CommentedMap, node: TaggedScalar | CommentedMap | CommentedSeq):
         resolver: Callable[[CommentedMap, str], YamlObject] | None = None
@@ -195,75 +246,10 @@ class Renderer:
             return ref
 
         else:
-            return self.
-
-
-        self,
-        root: CommentedMap,
-        subset: CommentedMap | CommentedSeq,
-    ) -> YamlObject | None:
-        """
-        Traverse `subset` iteratively. For every leaf (scalar or TaggedScalar) encountered in `subset`,
-        use its value as the next key/index into `root`. Return (path, value) where:
-        - path: list of keys/indices used to reach into `root`
-        - value: the value at the end of traversal, or None if a step was missing (early return)
-        TaggedScalar is treated as a leaf and its .value is used as the key component.
-        """
-        current = self._mappings
-        path = []
-
-        stack = [(subset, [])]
-        while stack:
-            node, _ = stack.pop()
-
-            if isinstance(node, CommentedMap):
-
-                if isinstance(node.tag, Tag) and node.tag.value is not None and (
-                    node != subset
-                ):
-                    resolved_node = self._resolve_tagged(root, node)
-                    stack.append((resolved_node, []))
-
-                else:
-                    for k in reversed(list(node.keys())):
-                        stack.append((node[k], []))
-
-            elif isinstance(node, CommentedSeq):
-
-                if isinstance(node.tag, Tag) and node.tag.value is not None and (
-                    node != subset
-                ):
-                    resolved_node = self._resolve_tagged(root, node)
-                    stack.append((resolved_node, []))
-
-                else:
-                    for val in reversed(node):
-                        stack.append((val, []))
-            else:
-                # Leaf: scalar or TaggedScalar
-                key = self._resolve_tagged(
-                    self._selected_mappings,
-                    node,
-                ) if isinstance(node, TaggedScalar) else node
-                path.append(key)
-
-                if isinstance(current, CommentedMap):
-                    if key in current:
-                        current = current[key]
-                    else:
-                        return None
-                elif isinstance(current, CommentedSeq) and isinstance(key, int) and 0 <= key < len(current):
-                    current = current[key]
-                else:
-                    return None
-
-                if isinstance(current, TaggedScalar):
-                    return path, self._resolve_tagged(
-                        self._selected_mappings,
-                        current,
+            return self._resolve_subtree(
+                root,
+                self._find_matching_key(root, scalar.value),
             )
-
-        return current
 
     def _resolve_getatt(
         self,
@@ -271,16 +257,16 @@ class Renderer:
         query: TaggedScalar | CommentedMap | CommentedSeq,
     ) -> YamlObject | None:
         steps: list[str] = []
-
+
         if isinstance(query, TaggedScalar):
             steps_string: str = query.value
             steps = steps_string.split('.')
 
         elif (
-            resolved := self.
+            resolved := self._resolve_subtree(root, query)
         ) and isinstance(
             resolved,
-
+            CommentedSeq,
         ):
             steps = resolved
 
@@ -289,8 +275,12 @@ class Renderer:
         ):
             return value
 
-        current = self._resources
-
+        current = self._resources.get(steps[0], CommentedMap()).get(
+            'Properties',
+            CommentedMap(),
+        )
+
+        for step in steps[1:]:
             if step == 'Value':
                 return current
             # Mapping
@@ -404,6 +394,60 @@ class Renderer:
 
         return source
 
+    def _resolve_foreach(
+        self,
+        root: CommentedMap,
+        source: CommentedSeq | CommentedMap | TaggedScalar,
+    ):
+        if not isinstance(source, CommentedSeq) or len(source) < 3:
+            return source
+
+        identifier = source[0]
+        if not isinstance(identifier, str):
+            identifier = self._resolve_subtree(root, identifier)
+
+        collection = source[1]
+        if not isinstance(collection, list):
+            return source
+
+        collection: list[str] = self._resolve_subtree(root, collection)
+
+        output = source[2]
+        if not isinstance(output, CommentedMap):
+            return source
+
+        resolved_items = CommentedMap()
+        for item in collection:
+            self._references[identifier] = item
+            resolved_items.update(
+                self._resolve_foreach_item(
+                    root,
+                    self._copy_subtree(output),
+                )
+            )
+
+        return resolved_items
+
+    def _resolve_foreach_item(
+        self,
+        root: CommentedMap,
+        output_item: CommentedMap,
+    ):
+        output_map: dict[str, CommentedMap] = {}
+        for output_key, output_value in output_item.items():
+            variables = self._resolve_template_string(output_key)
+            resolved_key = self._resolve_sub_ref_queries(
+                variables,
+                output_key,
+            )
+
+            output_map[resolved_key] = self._resolve_subtree(
+                root,
+                output_value,
+            )
+
+        return output_map
+
     def _resolve_split(
         self,
         root: CommentedMap,
@@ -504,6 +548,8 @@ class Renderer:
         ):
             condition_key = self._resolve_subtree(root, condition_key)
 
+        result = self._resolve_subtree(root, self._conditions.get(condition_key))
+
         true_result = source[1]
         if isinstance(
             true_result,
@@ -512,35 +558,8 @@ class Renderer:
             true_result = self._resolve_subtree(root, true_result)
 
         false_result = source[2]
-        if isinstance(
-            true_result,
-            (CommentedMap, CommentedSeq, TaggedScalar),
-        ):
-            false_result = self._resolve_subtree(root, false_result)
-
-        if (
-            condition := self._conditions.get(condition_key)
-        ) and isinstance(
-            condition,
-            (CommentedMap, CommentedSeq, TaggedScalar)
-        ) and (
-            result := self._resolve_subtree(root, condition)
-        ) and isinstance(
-            result,
-            bool,
-        ):
-
-            return true_result if result else False
-
-        elif (
-            condition := self._conditions.get(condition_key)
-        ) and isinstance(
-            condition,
-            bool,
-        ):
-            return true_result if condition else False
 
-        return
+        return true_result if isinstance(result, bool) and result else false_result
 
     def _resolve_condition(
         self,
@@ -646,6 +665,32 @@ class Renderer:
             return source
 
        return any(resolved)
+
+    def _resolve_cidr(
+        self,
+        root: CommentedMap,
+        source: CommentedSeq | CommentedMap | TaggedScalar,
+    ):
+        if not isinstance(
+            source,
+            CommentedSeq,
+        ) or len(source) < 3:
+            return source
+
+        cidr = self._resolve_subtree(root, source[0])
+        if not isinstance(cidr, str):
+            return source
+
+        subnets_requested = source[1]
+        subnet_cidr_bits = source[2]
+
+        ipv4_solver = IPv4CIDRSolver(
+            cidr,
+            subnets_requested,
+            subnet_cidr_bits,
+        )
+
+        return CommentedSeq(ipv4_solver.provision_subnets())
 
     def _resolve_tree_to_json(
         self,
@@ -706,6 +751,201 @@ class Renderer:
                 stack.append((node, idx, val))
 
         return json.dumps(source)
+
+    def _resolve_length(
+        self,
+        root: CommentedMap,
+        source: CommentedMap | CommentedSeq | TaggedScalar | YamlObject,
+    ):
+        items = CommentedSeq()
+        if isinstance(source, TaggedScalar):
+            items = self._resolve_tagged(root, source)
+
+        elif isinstance(source, (CommentedMap, CommentedSeq)):
+            items = self._resolve_subtree(root, source)
+
+        elif isinstance(source, (str, list, dict)):
+            items = source
+
+        else:
+            return source
+
+        return len(items)
+
+    def _copy_subtree(
+        self,
+        root: CommentedMap | CommentedSeq | TaggedScalar | YamlObject,
+    ) -> Any:
+        """
+        Depth-first clone of a ruamel.yaml tree.
+        - Rebuilds CommentedMap/CommentedSeq
+        - Copies TaggedScalar (preserves tag and value)
+        - Scalars are copied as-is
+        Note: does not preserve comments/anchors.
+        """
+        if isinstance(root, CommentedMap):
+            root_clone: Any = CommentedMap()
+        elif isinstance(root, CommentedSeq):
+            root_clone = CommentedSeq()
+        elif isinstance(root, TaggedScalar):
+            return TaggedScalar(
+                value=root.value,
+                tag=root.tag,
+            )
+        else:
+            return root
+
+        stack: list[
+            tuple[
+                Any,
+                CommentedMap | CommentedSeq | None,
+                Any | None,
+            ]
+        ] = [(root, None, None)]
+
+        built: dict[
+            int,
+            CommentedMap | CommentedSeq,
+        ] = {id(root): root_clone}
+
+        while stack:
+            in_node, out_parent, out_key = stack.pop()
+
+            if isinstance(in_node, CommentedMap):
+                out_container = built.get(id(in_node))
+                if out_container is None:
+                    out_container = CommentedMap()
+                    built[id(in_node)] = out_container
+                    assign(out_parent, out_key, out_container)
+
+                for k in reversed(list(in_node.keys())):
+                    v = in_node[k]
+                    if isinstance(v, CommentedMap):
+                        child = CommentedMap()
+                        built[id(v)] = child
+
+                        stack.append((v, out_container, k))
+                    elif isinstance(v, CommentedSeq):
+                        child = CommentedSeq()
+                        built[id(v)] = child
+
+                        stack.append((v, out_container, k))
+                    elif isinstance(v, TaggedScalar):
+                        ts = TaggedScalar(
+                            value=v.value,
+                            tag=v.tag,
+                        )
+
+                        out_container[k] = ts
+                    else:
+                        out_container[k] = v
+
+            elif isinstance(in_node, CommentedSeq):
+                out_container = built.get(id(in_node))
+                if out_container is None:
+                    out_container = CommentedSeq()
+                    built[id(in_node)] = out_container
+                    assign(out_parent, out_key, out_container)
+
+                for idx in reversed(range(len(in_node))):
+                    v = in_node[idx]
+
+                    if isinstance(v, CommentedMap):
+                        child = CommentedMap()
+                        built[id(v)] = child
+
+                        stack.append((v, out_container, idx))
+                    elif isinstance(v, CommentedSeq):
+                        child = CommentedSeq()
+                        built[id(v)] = child
+
+                        stack.append((v, out_container, idx))
+                    elif isinstance(v, TaggedScalar):
+                        ts = TaggedScalar(
+                            value=v.value,
+                            tag=v.tag,
+                        )
+
+                        out_container.append(ts)
+                    else:
+                        out_container.append(v)
+
+            elif isinstance(in_node, TaggedScalar):
+                ts = TaggedScalar(
+                    value=in_node.value,
+                    tag=in_node.tag,
+                )
+
+                assign(out_parent, out_key, ts)
+
+            else:
+                assign(out_parent, out_key, in_node)
+
+        return root_clone
+
+    def _replace_target(
+        self,
+        root: CommentedMap,
+        target: CommentedMap,
+        replacement: Any,
+        matcher_pattern: re.Pattern
+    ) -> CommentedMap:
+        if not isinstance(target, CommentedMap):
+            return root
+
+        if root is target:
+            return replacement
+
+        stack: list[tuple[Any, Any | None, Any | None]] = [(root, None, None)]
+
+        while stack:
+            node, parent, accessor = stack.pop()
+
+            if isinstance(node, CommentedMap):
+                for k in reversed(list(node.keys())):
+                    child = node[k]
+                    if child is target and isinstance(child, CommentedMap):
+                        for key in list(target.keys()):
+                            if matcher_pattern.match(key):
+                                del child[key]
+
+                        if isinstance(replacement, CommentedMap):
+                            child.update(replacement)
+                            node[k] = child
+
+                        else:
+                            node[k] = replacement
+
+                        if parent:
+                            parent[accessor] = node
+
+                        return root
+
+                    stack.append((child, node, k))
+
+            elif isinstance(node, CommentedSeq):
+                for idx in reversed(range(len(node))):
+                    child = node[idx]
+                    if child is target and isinstance(child, CommentedMap):
+                        for key in list(target.keys()):
+                            if matcher_pattern.match(key):
+                                del child[key]
+
+                        if isinstance(replacement, CommentedMap):
+                            child.update(replacement)
+                            node[idx] = child
+
+                        else:
+                            node[idx] = replacement
+
+                        if parent:
+                            parent[accessor] = node
+
+                        return root
+
+                    stack.append((child, node, idx))
+
+        return root
 
     def _resolve_subtree(
         self,
@@ -718,8 +958,24 @@ class Renderer:
         """
         stack: list[tuple[CommentedMap | CommentedSeq | None, Any | None, Any]] = [(None, None, source)]
 
+        source_parent, source_index = self._find_parent(root, source)
+
         while stack:
             parent, accessor, node = stack.pop()
+            if match := self._match_and_resolve_accessor_fn(
+                root,
+                parent,
+                accessor,
+                node,
+            ):
+                root.update(match)
+                # At this point we've likely (and completely)
+                # successfully nuked the source from orbit
+                # so we need to fetch it from the source parent
+                # to get it back (i.e. the ref is no longer
+                # correct).
+                source = source_parent[source_index]
+
             if isinstance(node, TaggedScalar):
                 # Replace in parent
                 if parent is not None and (
@@ -768,62 +1024,130 @@ class Renderer:
 
         return source
 
-    def
-        self,
-        root: CommentedMap,
-
-    ):
+    def _resolve_by_subset_query(
+        self,
+        root: CommentedMap,
+        subset: CommentedMap | CommentedSeq,
+    ) -> YamlObject | None:
         """
-
-
-        -
-        -
+        Traverse `subset` iteratively. For every leaf (scalar or TaggedScalar) encountered in `subset`,
+        use its value as the next key/index into `root`. Return (path, value) where:
+        - path: list of keys/indices used to reach into `root`
+        - value: the value at the end of traversal, or None if a step was missing (early return)
+        TaggedScalar is treated as a leaf and its .value is used as the key component.
         """
-
-
+        current = self._mappings
+        path = []
 
+        stack = [(subset, [])]
         while stack:
-
+            node, _ = stack.pop()
+
+            if isinstance(node, CommentedMap):
 
-
-
-
-
+                if isinstance(node.tag, Tag) and node.tag.value is not None and (
+                    node != subset
+                ):
+                    resolved_node = self._resolve_tagged(root, node)
+                    stack.append((resolved_node, []))
+
                 else:
+                    for k in reversed(list(node.keys())):
+                        stack.append((node[k], []))
 
-
-
-
-
-
+            elif isinstance(node, CommentedSeq):
+
+                if isinstance(node.tag, Tag) and node.tag.value is not None and (
+                    node != subset
+                ):
+                    resolved_node = self._resolve_tagged(root, node)
+                    stack.append((resolved_node, []))
 
-            else:
-                # Iterate in normal order; push in reverse to keep DFS intuitive
-                keys = list(current.keys())
-                for k in reversed(keys):
-                    stack.append((current[k], path + [str(k)]))
-
-            elif isinstance(current, CommentedSeq):
-                if not current:
-                    if len(path) > len(longest):
-                        longest = path
                 else:
-
-
-
-
-
+                    for val in reversed(node):
+                        stack.append((val, []))
+            else:
+                # Leaf: scalar or TaggedScalar
+                key = self._resolve_tagged(
+                    self._selected_mappings,
+                    node,
+                ) if isinstance(node, TaggedScalar) else node
+                path.append(key)
 
+                if isinstance(current, CommentedMap):
+                    if key in current:
+                        current = current[key]
                    else:
-
-
+                        return None
+                elif isinstance(current, CommentedSeq) and isinstance(key, int) and 0 <= key < len(current):
+                    current = current[key]
+                else:
+                    return None
+
+                if isinstance(current, TaggedScalar):
+                    return path, self._resolve_tagged(
+                        self._selected_mappings,
+                        current,
+                    )
 
-
-
-
-
+        return current
+
+    def _find_matching_key(
+        self,
+        root: CommentedMap,
+        search_key: str,
+    ):
+        """Returns the first path (list of keys/indices) to a mapping with key == search_key, and the value at that path."""
+        stack = [(root, [])]
+        while stack:
+            node, path = stack.pop()
+            if isinstance(node, CommentedMap):
+                for k in reversed(list(node.keys())):
+                    if k == search_key:
+                        return node[k]
+                    stack.append((node[k], path + [k]))
+            elif isinstance(node, CommentedSeq):
+                for idx, item in reversed(list(enumerate(node))):
+                    stack.append((item, path + [idx]))
 
-        return
+        return None  # No match found
+
+    def _find_parent(
+        self,
+        root: CommentedMap,
+        target: CommentedMap,
+    ) -> CommentedMap:
+
+        stack: list[tuple[Any, Any | None, Any | None]] = [(root, None, None)]
+
+        while stack:
+            node, parent, accessor = stack.pop()
+
+            if isinstance(node, CommentedMap):
+                for k in reversed(list(node.keys())):
+                    child = node[k]
+                    if child is target and isinstance(child, CommentedMap):
+                        return node, k
+
+                    stack.append((child, node, k))
+
+            elif isinstance(node, CommentedSeq):
+                for idx in reversed(range(len(node))):
+                    child = node[idx]
+                    if child is target and isinstance(child, CommentedMap):
+                        return node, node.index(child)
+
+                    stack.append((child, node, idx))
+
+        return None, None
+
+    def _assemble_parameters(self, resources: YamlObject):
+        params: dict[str, Data] = resources.get("Parameters", {})
+        for param_name, param in params.items():
+            if isinstance(param, CommentedMap) and (
+                default := param.get("Default")
+            ):
+                self._parameters_with_defaults[param_name] = default
 
     def _assemble_mappings(self, mappings: dict[str, str]):
         for mapping, value in mappings.items():
@@ -920,7 +1244,6 @@ class Renderer:
         return root_out
 
     def _resolve_template_string(self, template: str):
-
         variables: list[tuple[str, str]] = []
         for match in self._sub_pattern.finditer(template):
             variables.append((
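As context for the renderer changes above, this is roughly how the CLI drives the class (mirroring the call in `cfn_check/cli/render.py`); the template path and the parameter/reference values below are illustrative:

```python
from ruamel.yaml import YAML

from cfn_check.rendering import Renderer

# Load the template the same round-trip way the CLI utilities do,
# then render it with user-supplied values so intrinsic functions resolve.
yaml = YAML(typ=['rt'])
with open('template.yml') as template_file:  # illustrative path
    template = yaml.load(template_file)

rendered = Renderer().render(
    template,
    attributes=None,
    mappings=None,
    parameters={'Environment': 'prod'},          # <key>=<value> Parameters (illustrative)
    references={'ExternalBucketName': 'my-bucket'},  # <key>=<value> !Ref values (illustrative)
)
```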
{cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cfn-check
-Version: 0.6.1
+Version: 0.7.0
 Summary: Validate Cloud Formation
 Author-email: Ada Lundhe <adalundhe@lundhe.audio>
 License: MIT License
@@ -34,7 +34,7 @@ Requires-Python: >=3.12
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: pydantic
-Requires-Dist:
+Requires-Dist: ruamel.yaml
 Requires-Dist: hyperlight-cocoa
 Requires-Dist: async-logging
 Dynamic: license-file
@@ -50,7 +50,7 @@ Dynamic: license-file
 
 | Package | cfn-check |
 | ----------- | ----------- |
-| Version | 0.6.1 |
+| Version | 0.7.0 |
 | Download | https://pypi.org/project/cfn-check/ |
 | Source | https://github.com/adalundhe/cfn-check |
 | Keywords | cloud-formation, testing, aws, cli |
@@ -70,15 +70,20 @@ problems inherint to `cfn-lint` more than `cfn-guard`, primarily:
 - Inability to parse non-resource wildcards
 - Inability to validate non-resource template data
 - Inabillity to use structured models to validate input
+- Poor ability to parse and render CloudFormation Refs/Functions
 
 In comparison to `cfn-guard`, `cfn-check` is pure Python, thus
 avoiding YADSL (Yet Another DSL) headaches. It also proves
 significantly more configurable/modular/hackable as a result.
+`cfn-check` can resolve _some_ (not all) CloudFormation Intrinsic
+Functions and Refs.
 
 CFN-Check uses a combination of simple depth-first-search tree
 parsing, friendly `cfn-lint` like query syntax, `Pydantic` models,
 and `pytest`-like assert-driven checks to make validating your
 Cloud Formation easy while offering both CLI and Python API interfaces.
+CFN-Check also uses a lightning-fast AST-parser to render your templates,
+allowing you to validate policy, not just a YAML document.
 
 <br/>
 
@@ -447,7 +452,7 @@ Resources::*::Type
 Selects all `Resource` objects. If we convert the Wildcard Token in the query to a Wildcard Range Token:
 
 ```
-Resources
+Resources::[*]::Type
 ```
 
 The Rule will fail as below:
@@ -585,3 +590,52 @@ class ValidateResourceType(Collection):
 ```
 
 By deferring type and existence assertions to `Pydantic` models, you can focus your actual assertion logic on business/security policy checks.
+
+<br/>
+
+# The Rendering Engine
+
+### Overview
+
+In Version 0.6.X, CFN-Check introduced a rendering engine, which allows it
+to parse and execute Refs and all CloudFormation intrinsic functions via
+either the CloudFormation document or user-supplied values. This additional
+also resulted in the:
+
+```bash
+cfn-check render <TEMPLATE_PATH >
+```
+
+command being added, allowing you to effectively "dry run" render your
+CloudFormation templates akin to the `helm template` command for Helm.
+
+By default, `cfn-check render` outputs to stdout, however you can easily
+save rendered output to a file via the `-o/--output-file` flag. For example:
+
+```bash
+cfn-check render template.yml -o rendered.yml
+```
+
+The `cfn-check render` command also offers the following options:
+
+- `-a/--attributes`: A list of <key>=<value> input `!GetAtt` attributes to use
+- `-m/--mappings`: A list of <key>=<value> input `Mappings` to use
+- `-p/--parameters`: A list of <key>=<value> input `Parameters` to use
+- `-l/--log-level`: The log level to use
+
+### The Rendering Engine during Checks
+
+By default rendering is enabled when running `cfn-check` validation. You can
+disable it by supplying `no-render` to the `-F/--flags` option as below:
+
+```bash
+cfn-check validate -F no-render -r rules.py template.yaml
+```
+
+Disabling rendering means CFN-Check will validate your template as-is, with
+no additional pre-processing and no application of user input values.
+
+> [!WARNING]
+> CloudFormation documents are <b>not</b> "plain yaml" and disabling
+> rendering means any dynamically determined values will likely fail
+> to pass validation, resulting in false positives for failures!
{cfn_check-0.6.1.dist-info → cfn_check-0.7.0.dist-info}/RECORD
CHANGED
@@ -1,17 +1,18 @@
 cfn_check/__init__.py,sha256=ccUo2YxBmuEmak1M5o-8J0ECLXNkDDUsLJ4mkm31GvU,96
 cfn_check/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cfn_check/cli/render.py,sha256=
+cfn_check/cli/render.py,sha256=FWOrHAk5ZdGVP_Hch3YtC1gOTBoaOiwaTy_oJ3tQjXk,2735
 cfn_check/cli/root.py,sha256=Fi-G3nP-HQMY4iPenF2xnkQF798x5cNWDqJZs9TH66A,1727
-cfn_check/cli/validate.py,sha256=
+cfn_check/cli/validate.py,sha256=QxGMRf-uoe8MRGd9SwiJOrGPw7Ui6-R8QUHjf8B2EWE,2019
 cfn_check/cli/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/cli/utils/attributes.py,sha256=hEMWJfNcTOKqWrleS8idWlZP81wAq2J06yV-JQm_WNw,340
-cfn_check/cli/utils/files.py,sha256=
+cfn_check/cli/utils/files.py,sha256=87F72INUuA61k3pQ1NNbg0vUwBYOY7-wn1rPqRWbrao,3357
+cfn_check/cli/utils/stdout.py,sha256=dztgy5cBF03oGHRr5ITvMVVf5qdopPbAQm6Rp0cHZq4,423
 cfn_check/collection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/collection/collection.py,sha256=Fl5ONtvosLrksJklRoxER9j-YN5RUdPN45yS02Yw5jU,1492
 cfn_check/evaluation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/evaluation/errors.py,sha256=yPJdtRYo67le4yMC9sYqcboCnkqKsJ3KPbSPFY2-Pi8,773
-cfn_check/evaluation/evaluator.py,sha256=
-cfn_check/evaluation/validate.py,sha256=
+cfn_check/evaluation/evaluator.py,sha256=GjwljK1fiFeJ_iRfLAAPaPpbZ6fBcDBIN1LhOHlmzMY,2565
+cfn_check/evaluation/validate.py,sha256=b5TpFKOnn9dutowCXGaoU5Jw3_l9HqpGaBpcTXFFzeY,1656
 cfn_check/evaluation/parsing/__init__.py,sha256=s5TxU4mzsbNIpbMynbwibGR8ac0dTcf_2qUfGkAEDvQ,52
 cfn_check/evaluation/parsing/query_parser.py,sha256=4J3CJQKAyb11gugfx6OZT-mfSdNDB5Al8Jiy9DbJZMw,3459
 cfn_check/evaluation/parsing/token.py,sha256=nrg7Tca182WY0VhRqfsZ1UgpxsUX73vdLToSeK50DZE,7055
@@ -19,7 +20,8 @@ cfn_check/evaluation/parsing/token_type.py,sha256=E5AVBerinBszMLjjc7ejwSSWEc0p0J
 cfn_check/logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/logging/models.py,sha256=-tBaK6p8mJ0cO8h2keEJ-VmtFX_VW4XzwAw2PtqbkF0,490
 cfn_check/rendering/__init__.py,sha256=atcbddYun4YHyY7bVGA9CgEYzzXpYzvkx9_Kg-gnD5w,42
-cfn_check/rendering/
+cfn_check/rendering/cidr_solver.py,sha256=aCUH3q9PvQ7-hkJd79VmUc175Ks-HifShPIMVnD8Ws8,1528
+cfn_check/rendering/renderer.py,sha256=hcs-DVuaNLUP80NB0h0OW4oNlskj7Lyc9bpbqEkYsPA,42207
 cfn_check/rendering/utils.py,sha256=MNaKePylbJ9Bs4kjuoV0PpCmPJYttPXXvKQILemCrUI,489
 cfn_check/rules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/rules/rule.py,sha256=_cKNQ5ciJgPj-exmtBUz31cU2lxWYxw2n2NWIlhYc3s,635
@@ -27,13 +29,13 @@ cfn_check/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 cfn_check/shared/types.py,sha256=-om3DyZsjK_tJd-I8SITkoE55W0nB2WA3LOc87Cs7xI,414
 cfn_check/validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cfn_check/validation/validator.py,sha256=Z6S6T_4yQW1IUa5Kv3ohR9U8NDrhTvBadW2FEM8TRL8,1478
-cfn_check-0.
+cfn_check-0.7.0.dist-info/licenses/LICENSE,sha256=EbCpGNzOkyQ53ig7J2Iwgmy4Og0dgHe8COo3WylhIKk,1069
 example/multitag.py,sha256=QQfcRERGEDgTUCGqWRqRbXHrLwSX4jEOFq8ED4NJnz8,636
 example/pydantic_rules.py,sha256=6NFtDiaqmnYWt6oZIWB7AO_v5LJoZVOGXrmEe2_J_rI,4162
 example/renderer_test.py,sha256=XG5PVTSHztYXHrBw4bpwVuuYt1JNZdtLGJ-DZ9wPjFM,741
 example/rules.py,sha256=mWHB0DK283lb0CeSHgnyO5qiVTJJpybuwWXb4Yoa3zQ,3148
-cfn_check-0.
-cfn_check-0.
-cfn_check-0.
-cfn_check-0.
-cfn_check-0.
+cfn_check-0.7.0.dist-info/METADATA,sha256=-H4HezPbY-Z5Ka3bEpRjO8WGcUOKvwsdNdRvL_x_98c,22397
+cfn_check-0.7.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+cfn_check-0.7.0.dist-info/entry_points.txt,sha256=B4lCHoDHmwisABxKgRLShwqqFv7QwwDAFXoAChOnkwg,53
+cfn_check-0.7.0.dist-info/top_level.txt,sha256=hUn9Ya50yY1fpgWxEhG5iMgfMDDVX7qWQnM1xrgZnhM,18
+cfn_check-0.7.0.dist-info/RECORD,,