cfn-check 0.4.0__tar.gz → 0.5.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cfn-check might be problematic. Click here for more details.
- {cfn_check-0.4.0 → cfn_check-0.5.0}/PKG-INFO +1 -1
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/render.py +21 -8
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/utils/files.py +15 -19
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/collection/collection.py +0 -1
- cfn_check-0.5.0/cfn_check/rendering/renderer.py +740 -0
- cfn_check-0.5.0/cfn_check/rendering/utils.py +13 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/PKG-INFO +1 -1
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/SOURCES.txt +2 -2
- cfn_check-0.5.0/example/multitag.py +21 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/example/renderer_test.py +1 -1
- {cfn_check-0.4.0 → cfn_check-0.5.0}/pyproject.toml +1 -1
- cfn_check-0.4.0/cfn_check/loader/loader.py +0 -21
- cfn_check-0.4.0/cfn_check/rendering/renderer.py +0 -124
- cfn_check-0.4.0/cfn_check/validation/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/LICENSE +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/README.md +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/root.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/utils/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/utils/attributes.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/cli/validate.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/collection/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/errors.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/evaluator.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/parsing/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/parsing/query_parser.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/parsing/token.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/parsing/token_type.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/evaluation/validate.py +0 -0
- {cfn_check-0.4.0/cfn_check/loader → cfn_check-0.5.0/cfn_check/logging}/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/logging/models.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/rendering/__init__.py +0 -0
- {cfn_check-0.4.0/cfn_check/logging → cfn_check-0.5.0/cfn_check/rules}/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/rules/rule.py +0 -0
- {cfn_check-0.4.0/cfn_check/rules → cfn_check-0.5.0/cfn_check/shared}/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/shared/types.py +0 -0
- {cfn_check-0.4.0/cfn_check/shared → cfn_check-0.5.0/cfn_check/validation}/__init__.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check/validation/validator.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/dependency_links.txt +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/entry_points.txt +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/requires.txt +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/cfn_check.egg-info/top_level.txt +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/example/pydantic_rules.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/example/rules.py +0 -0
- {cfn_check-0.4.0 → cfn_check-0.5.0}/setup.cfg +0 -0
|
@@ -7,11 +7,14 @@ from cfn_check.rendering import Renderer
|
|
|
7
7
|
from cfn_check.logging.models import InfoLog
|
|
8
8
|
|
|
9
9
|
|
|
10
|
-
@CLI.command(
|
|
10
|
+
@CLI.command(
|
|
11
|
+
display_help_on_error=False
|
|
12
|
+
)
|
|
11
13
|
async def render(
|
|
12
14
|
path: str,
|
|
13
15
|
output_file: str = 'rendered.yml',
|
|
14
|
-
|
|
16
|
+
parameters: list[str] | None = None,
|
|
17
|
+
references: list[str] | None = None,
|
|
15
18
|
tags: list[str] = [
|
|
16
19
|
'Ref',
|
|
17
20
|
'Sub',
|
|
@@ -35,7 +38,8 @@ async def render(
|
|
|
35
38
|
Render a Cloud Formation template
|
|
36
39
|
|
|
37
40
|
@param output_file Path to output the rendered CloudFormation template to
|
|
38
|
-
@param
|
|
41
|
+
@param parameters A list of <key>=<value> input Parameters to use
|
|
42
|
+
@param references A list of <key>=<value> input !Ref values to use
|
|
39
43
|
@param tags List of CloudFormation intrinsic function tags
|
|
40
44
|
@param log_level The log level to use
|
|
41
45
|
"""
|
|
@@ -45,11 +49,16 @@ async def render(
|
|
|
45
49
|
log_output='stderr',
|
|
46
50
|
)
|
|
47
51
|
|
|
48
|
-
|
|
52
|
+
parsed_parameters: dict[str, str] | None = None
|
|
53
|
+
if parameters:
|
|
54
|
+
parsed_parameters = dict([
|
|
55
|
+
parameter.split('=', maxsplit=1) for parameter in parameters if len(parameter.split('=', maxsplit=1)) > 0
|
|
56
|
+
])
|
|
49
57
|
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
58
|
+
parsed_references: dict[str, str] | None = None
|
|
59
|
+
if references:
|
|
60
|
+
parsed_references = dict([
|
|
61
|
+
reference.split('=', maxsplit=1) for reference in references if len(reference.split('=', maxsplit=1)) > 0
|
|
53
62
|
])
|
|
54
63
|
|
|
55
64
|
logger = Logger()
|
|
@@ -63,7 +72,11 @@ async def render(
|
|
|
63
72
|
|
|
64
73
|
_, template = templates[0]
|
|
65
74
|
renderer = Renderer()
|
|
66
|
-
rendered = renderer.render(
|
|
75
|
+
rendered = renderer.render(
|
|
76
|
+
template,
|
|
77
|
+
parameters=parsed_parameters,
|
|
78
|
+
references=parsed_references,
|
|
79
|
+
)
|
|
67
80
|
|
|
68
81
|
await write_to_file(output_file, rendered)
|
|
69
82
|
|
|
@@ -2,22 +2,24 @@
|
|
|
2
2
|
import asyncio
|
|
3
3
|
import os
|
|
4
4
|
import pathlib
|
|
5
|
-
import
|
|
6
|
-
from cfn_check.loader.loader import (
|
|
7
|
-
Loader,
|
|
8
|
-
create_tag,
|
|
9
|
-
find_templates,
|
|
10
|
-
)
|
|
5
|
+
from ruamel.yaml import YAML
|
|
11
6
|
from cfn_check.shared.types import YamlObject, Data
|
|
12
7
|
|
|
8
|
+
|
|
9
|
+
def find_templates(path, file_pattern):
|
|
10
|
+
return list(pathlib.Path(path).rglob(file_pattern))
|
|
11
|
+
|
|
13
12
|
def open_template(path: str) -> tuple[str, YamlObject] | None:
|
|
14
13
|
|
|
15
14
|
if os.path.exists(path) is False:
|
|
16
15
|
return None
|
|
17
16
|
|
|
18
17
|
try:
|
|
19
|
-
with open(path, 'r') as
|
|
20
|
-
|
|
18
|
+
with open(path, 'r') as yml:
|
|
19
|
+
loader = YAML(typ='rt')
|
|
20
|
+
loader.preserve_quotes = True
|
|
21
|
+
loader.indent(mapping=2, sequence=4, offset=2)
|
|
22
|
+
return (path, loader.load(yml))
|
|
21
23
|
except Exception as e:
|
|
22
24
|
raise e
|
|
23
25
|
|
|
@@ -99,16 +101,6 @@ async def load_templates(
|
|
|
99
101
|
|
|
100
102
|
assert len(template_filepaths) > 0 , '❌ No matching files found'
|
|
101
103
|
|
|
102
|
-
for tag in tags:
|
|
103
|
-
new_tag = await loop.run_in_executor(
|
|
104
|
-
None,
|
|
105
|
-
create_tag,
|
|
106
|
-
tag,
|
|
107
|
-
)
|
|
108
|
-
|
|
109
|
-
Loader.add_constructor(f'!{tag}', new_tag)
|
|
110
|
-
|
|
111
|
-
|
|
112
104
|
templates: list[tuple[str, Data]] = await asyncio.gather(*[
|
|
113
105
|
loop.run_in_executor(
|
|
114
106
|
None,
|
|
@@ -142,5 +134,9 @@ async def write_to_file(path: str, data: YamlObject):
|
|
|
142
134
|
)
|
|
143
135
|
|
|
144
136
|
def _write_to_file(path: str, data: YamlObject):
|
|
137
|
+
dumper = YAML(typ='rt')
|
|
138
|
+
dumper.preserve_quotes = True
|
|
139
|
+
dumper.width = 4096
|
|
140
|
+
dumper.indent(mapping=2, sequence=4, offset=2)
|
|
145
141
|
with open(path, 'w') as yml:
|
|
146
|
-
|
|
142
|
+
dumper.dump(data, yml)
|
|
@@ -0,0 +1,740 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import base64
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
from typing import Callable, Any
|
|
6
|
+
from collections import deque
|
|
7
|
+
from ruamel.yaml.tag import Tag
|
|
8
|
+
from ruamel.yaml.comments import TaggedScalar, CommentedMap, CommentedSeq
|
|
9
|
+
from .utils import assign
|
|
10
|
+
|
|
11
|
+
from cfn_check.shared.types import (
|
|
12
|
+
Data,
|
|
13
|
+
Items,
|
|
14
|
+
YamlObject,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
class Renderer:
|
|
18
|
+
|
|
19
|
+
def __init__(self):
|
|
20
|
+
self.items: Items = deque()
|
|
21
|
+
self._sub_pattern = re.compile(r'\$\{([\w+::]+)\}')
|
|
22
|
+
self._sub_inner_text_pattern = re.compile(r'[\$|\{|\}]+')
|
|
23
|
+
self._visited: list[str | int] = []
|
|
24
|
+
self._data: YamlObject = {}
|
|
25
|
+
self._parameters = CommentedMap()
|
|
26
|
+
self._mappings = CommentedMap()
|
|
27
|
+
self._parameters_with_defaults: dict[str, str | int | float | bool | None] = {}
|
|
28
|
+
self._selected_mappings = CommentedMap()
|
|
29
|
+
self._references: dict[str, str] = {}
|
|
30
|
+
self._resources: dict[str, YamlObject] = CommentedMap()
|
|
31
|
+
self._attributes: dict[str, str] = {}
|
|
32
|
+
|
|
33
|
+
self._resolvers: dict[str, Callable[[CommentedMap, str], YamlObject]] = {
|
|
34
|
+
'!Ref': self._resolve_ref,
|
|
35
|
+
'!FindInMap': self._resolve_by_subset_query,
|
|
36
|
+
'!GetAtt': self._resolve_getatt,
|
|
37
|
+
'!Join': self._resolve_join,
|
|
38
|
+
'!Sub': self._resolve_sub,
|
|
39
|
+
'!Base64': self._resolve_base64,
|
|
40
|
+
'!Split': self._resolve_split,
|
|
41
|
+
'!Select': self._resolve_select,
|
|
42
|
+
'!ToJsonString': self._resolve_to_json_string,
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
def render(
|
|
46
|
+
self,
|
|
47
|
+
template: YamlObject,
|
|
48
|
+
attributes: dict[str, Any] | None = None,
|
|
49
|
+
parameters: dict[str, Any] | None = None,
|
|
50
|
+
references: dict[str, str] | None = None,
|
|
51
|
+
mappings: dict[str, str] | None = None,
|
|
52
|
+
):
|
|
53
|
+
|
|
54
|
+
self._sources = list(template.keys())
|
|
55
|
+
|
|
56
|
+
self._assemble_parameters(template)
|
|
57
|
+
|
|
58
|
+
attributes = {
|
|
59
|
+
'LambdaExecutionRole.Arn': 'This is a test',
|
|
60
|
+
'AllSecurityGroups.Value': [
|
|
61
|
+
'123456',
|
|
62
|
+
'112211'
|
|
63
|
+
]
|
|
64
|
+
|
|
65
|
+
}
|
|
66
|
+
if attributes:
|
|
67
|
+
self._attributes = self._process_attributes(attributes)
|
|
68
|
+
|
|
69
|
+
self._parameters = template.get('Parameters', CommentedMap())
|
|
70
|
+
if parameters:
|
|
71
|
+
self._parameters_with_defaults.update(parameters)
|
|
72
|
+
|
|
73
|
+
if references:
|
|
74
|
+
self._references.update(references)
|
|
75
|
+
|
|
76
|
+
self._mappings = template.get('Mappings', CommentedMap())
|
|
77
|
+
|
|
78
|
+
if mappings:
|
|
79
|
+
self._selected_mappings = mappings
|
|
80
|
+
|
|
81
|
+
self._resources = template.get('Resources', CommentedMap())
|
|
82
|
+
|
|
83
|
+
return self._resolve_tree(template)
|
|
84
|
+
|
|
85
|
+
def _resolve_tree(self, root: YamlObject):
|
|
86
|
+
self.items.clear()
|
|
87
|
+
self.items.append((None, None, root))
|
|
88
|
+
self.items.append((None, None, root))
|
|
89
|
+
|
|
90
|
+
while self.items:
|
|
91
|
+
parent, accessor, node = self.items.pop()
|
|
92
|
+
|
|
93
|
+
if isinstance(node, TaggedScalar):
|
|
94
|
+
# Replace in parent
|
|
95
|
+
if parent is not None and (
|
|
96
|
+
resolved := self._resolve_tagged(root, node)
|
|
97
|
+
):
|
|
98
|
+
parent[accessor] = resolved
|
|
99
|
+
|
|
100
|
+
elif isinstance(node, CommentedMap):
|
|
101
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
102
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
103
|
+
):
|
|
104
|
+
parent[accessor] = resolved_node
|
|
105
|
+
|
|
106
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None:
|
|
107
|
+
node = self._resolve_tagged(root, node)
|
|
108
|
+
for k in reversed(list(node.keys())):
|
|
109
|
+
self.items.append((node, k, node[k]))
|
|
110
|
+
|
|
111
|
+
root = node
|
|
112
|
+
|
|
113
|
+
else:
|
|
114
|
+
# Process keys in reverse order for proper DFS
|
|
115
|
+
for k in reversed(list(node.keys())):
|
|
116
|
+
self.items.append((node, k, node[k]))
|
|
117
|
+
|
|
118
|
+
elif isinstance(node, CommentedSeq):
|
|
119
|
+
|
|
120
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
121
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
122
|
+
):
|
|
123
|
+
parent[accessor] = resolved_node
|
|
124
|
+
|
|
125
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None:
|
|
126
|
+
node = self._resolve_tagged(root, node)
|
|
127
|
+
for idx, val in enumerate(reversed(node)):
|
|
128
|
+
self.items.append((node, idx, val))
|
|
129
|
+
|
|
130
|
+
root = node
|
|
131
|
+
|
|
132
|
+
else:
|
|
133
|
+
# Process indices in reverse order for proper DFS
|
|
134
|
+
for idx, val in enumerate(reversed(node)):
|
|
135
|
+
self.items.append((node, idx, val))
|
|
136
|
+
|
|
137
|
+
return root
|
|
138
|
+
|
|
139
|
+
def _find_matching_key(
|
|
140
|
+
self,
|
|
141
|
+
root: CommentedMap,
|
|
142
|
+
search_key: str,
|
|
143
|
+
):
|
|
144
|
+
"""Returns the first path (list of keys/indices) to a mapping with key == search_key, and the value at that path."""
|
|
145
|
+
stack = [(root, [])]
|
|
146
|
+
while stack:
|
|
147
|
+
node, path = stack.pop()
|
|
148
|
+
if isinstance(node, CommentedMap):
|
|
149
|
+
for k in node.keys():
|
|
150
|
+
if k == search_key:
|
|
151
|
+
return node[k]
|
|
152
|
+
stack.append((node[k], path + [k]))
|
|
153
|
+
elif isinstance(node, CommentedSeq):
|
|
154
|
+
for idx, item in reversed(list(enumerate(node))):
|
|
155
|
+
stack.append((item, path + [idx]))
|
|
156
|
+
|
|
157
|
+
return None # No match found
|
|
158
|
+
|
|
159
|
+
def _assemble_parameters(self, resources: YamlObject):
|
|
160
|
+
params: dict[str, Data] = resources.get("Parameters", {})
|
|
161
|
+
for param_name, param in params.items():
|
|
162
|
+
if default := param.get("Default"):
|
|
163
|
+
self._parameters_with_defaults[param_name] = default
|
|
164
|
+
|
|
165
|
+
def _resolve_tagged(self, root: CommentedMap, node: TaggedScalar | CommentedMap | CommentedSeq):
|
|
166
|
+
resolver: Callable[[CommentedMap, str], YamlObject] | None = None
|
|
167
|
+
|
|
168
|
+
if isinstance(node.tag, Tag) and (
|
|
169
|
+
resolver := self._resolvers.get(node.tag.value)
|
|
170
|
+
):
|
|
171
|
+
return resolver(root, node)
|
|
172
|
+
|
|
173
|
+
def _resolve_ref(self, root: YamlObject, scalar: TaggedScalar):
|
|
174
|
+
'''
|
|
175
|
+
Sometimes we can resolve a !Ref if it has an explicit correlation
|
|
176
|
+
to a Resources key or input Parameter. This helps reduce the amount
|
|
177
|
+
of work we have to do when resolving later.
|
|
178
|
+
'''
|
|
179
|
+
if val := self._parameters_with_defaults.get(scalar.value):
|
|
180
|
+
return val
|
|
181
|
+
|
|
182
|
+
elif scalar.value in self._parameters:
|
|
183
|
+
return scalar
|
|
184
|
+
|
|
185
|
+
elif scalar.value in self._resources:
|
|
186
|
+
return scalar.value
|
|
187
|
+
|
|
188
|
+
elif ref := self._references.get(scalar.value):
|
|
189
|
+
return ref
|
|
190
|
+
|
|
191
|
+
else:
|
|
192
|
+
return self._find_matching_key(root, scalar.value)
|
|
193
|
+
|
|
194
|
+
def _resolve_by_subset_query(
|
|
195
|
+
self,
|
|
196
|
+
root: CommentedMap,
|
|
197
|
+
subset: CommentedMap | CommentedSeq,
|
|
198
|
+
) -> YamlObject | None:
|
|
199
|
+
"""
|
|
200
|
+
Traverse `subset` iteratively. For every leaf (scalar or TaggedScalar) encountered in `subset`,
|
|
201
|
+
use its value as the next key/index into `root`. Return (path, value) where:
|
|
202
|
+
- path: list of keys/indices used to reach into `root`
|
|
203
|
+
- value: the value at the end of traversal, or None if a step was missing (early return)
|
|
204
|
+
TaggedScalar is treated as a leaf and its .value is used as the key component.
|
|
205
|
+
"""
|
|
206
|
+
current = self._mappings
|
|
207
|
+
path = []
|
|
208
|
+
|
|
209
|
+
stack = [(subset, [])]
|
|
210
|
+
while stack:
|
|
211
|
+
node, _ = stack.pop()
|
|
212
|
+
|
|
213
|
+
if isinstance(node, CommentedMap):
|
|
214
|
+
|
|
215
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and (
|
|
216
|
+
node != subset
|
|
217
|
+
):
|
|
218
|
+
resolved_node = self._resolve_tagged(root, node)
|
|
219
|
+
stack.append((resolved_node, []))
|
|
220
|
+
|
|
221
|
+
else:
|
|
222
|
+
for k in reversed(list(node.keys())):
|
|
223
|
+
stack.append((node[k], []))
|
|
224
|
+
|
|
225
|
+
elif isinstance(node, CommentedSeq):
|
|
226
|
+
|
|
227
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and (
|
|
228
|
+
node != subset
|
|
229
|
+
):
|
|
230
|
+
resolved_node = self._resolve_tagged(root, node)
|
|
231
|
+
stack.append((resolved_node, []))
|
|
232
|
+
|
|
233
|
+
else:
|
|
234
|
+
for val in reversed(node):
|
|
235
|
+
stack.append((val, []))
|
|
236
|
+
else:
|
|
237
|
+
# Leaf: scalar or TaggedScalar
|
|
238
|
+
key = self._resolve_tagged(
|
|
239
|
+
self._selected_mappings,
|
|
240
|
+
node,
|
|
241
|
+
) if isinstance(node, TaggedScalar) else node
|
|
242
|
+
path.append(key)
|
|
243
|
+
|
|
244
|
+
if isinstance(current, CommentedMap):
|
|
245
|
+
if key in current:
|
|
246
|
+
current = current[key]
|
|
247
|
+
else:
|
|
248
|
+
return None
|
|
249
|
+
elif isinstance(current, CommentedSeq) and isinstance(key, int) and 0 <= key < len(current):
|
|
250
|
+
current = current[key]
|
|
251
|
+
else:
|
|
252
|
+
return None
|
|
253
|
+
|
|
254
|
+
if isinstance(current, TaggedScalar):
|
|
255
|
+
return path, self._resolve_tagged(
|
|
256
|
+
self._selected_mappings,
|
|
257
|
+
current,
|
|
258
|
+
)
|
|
259
|
+
|
|
260
|
+
return current
|
|
261
|
+
|
|
262
|
+
def _resolve_getatt(
|
|
263
|
+
self,
|
|
264
|
+
root: CommentedMap,
|
|
265
|
+
query: TaggedScalar | CommentedMap | CommentedSeq,
|
|
266
|
+
) -> YamlObject | None:
|
|
267
|
+
steps: list[str] = []
|
|
268
|
+
|
|
269
|
+
if isinstance(query, TaggedScalar):
|
|
270
|
+
steps_string: str = query.value
|
|
271
|
+
steps = steps_string.split('.')
|
|
272
|
+
|
|
273
|
+
elif (
|
|
274
|
+
resolved := self._longest_path(root, query)
|
|
275
|
+
) and isinstance(
|
|
276
|
+
resolved,
|
|
277
|
+
list,
|
|
278
|
+
):
|
|
279
|
+
steps = resolved
|
|
280
|
+
|
|
281
|
+
if value := self._attributes.get(
|
|
282
|
+
'.'.join(steps)
|
|
283
|
+
):
|
|
284
|
+
return value
|
|
285
|
+
|
|
286
|
+
current = self._resources
|
|
287
|
+
for step in steps:
|
|
288
|
+
if step == 'Value':
|
|
289
|
+
return current
|
|
290
|
+
# Mapping
|
|
291
|
+
if isinstance(current, (CommentedMap, dict)):
|
|
292
|
+
if step in current:
|
|
293
|
+
current = current[step]
|
|
294
|
+
else:
|
|
295
|
+
return None
|
|
296
|
+
# Sequence
|
|
297
|
+
elif isinstance(current, (CommentedSeq, list)):
|
|
298
|
+
try:
|
|
299
|
+
idx = int(step)
|
|
300
|
+
except ValueError:
|
|
301
|
+
return None
|
|
302
|
+
if 0 <= idx < len(current):
|
|
303
|
+
current = current[idx]
|
|
304
|
+
else:
|
|
305
|
+
return None
|
|
306
|
+
else:
|
|
307
|
+
# Hit a scalar (including TaggedScalar) before consuming all steps
|
|
308
|
+
return None
|
|
309
|
+
|
|
310
|
+
return current
|
|
311
|
+
|
|
312
|
+
def _resolve_join(
|
|
313
|
+
self,
|
|
314
|
+
root: CommentedMap,
|
|
315
|
+
source: CommentedSeq,
|
|
316
|
+
) -> Any:
|
|
317
|
+
if len(source) < 2:
|
|
318
|
+
return ''
|
|
319
|
+
|
|
320
|
+
delimiter = source[0]
|
|
321
|
+
if isinstance(delimiter, (TaggedScalar, CommentedMap, CommentedSeq)):
|
|
322
|
+
delimiter = str(self._resolve_tagged(root, delimiter))
|
|
323
|
+
|
|
324
|
+
else:
|
|
325
|
+
delimiter = str(delimiter)
|
|
326
|
+
|
|
327
|
+
subselction = source[1:]
|
|
328
|
+
resolved = self._resolve_subtree(root, subselction)
|
|
329
|
+
|
|
330
|
+
if not isinstance(resolved, CommentedSeq):
|
|
331
|
+
return resolved
|
|
332
|
+
|
|
333
|
+
return delimiter.join([
|
|
334
|
+
str(self._resolve_tagged(
|
|
335
|
+
root,
|
|
336
|
+
node,
|
|
337
|
+
))
|
|
338
|
+
if isinstance(
|
|
339
|
+
node,
|
|
340
|
+
(TaggedScalar, CommentedMap, CommentedSeq)
|
|
341
|
+
) else node
|
|
342
|
+
for subset in resolved
|
|
343
|
+
for node in subset
|
|
344
|
+
])
|
|
345
|
+
|
|
346
|
+
def _resolve_sub(
|
|
347
|
+
self,
|
|
348
|
+
root: CommentedMap,
|
|
349
|
+
source: CommentedSeq | TaggedScalar,
|
|
350
|
+
):
|
|
351
|
+
if isinstance(source, TaggedScalar) and isinstance(
|
|
352
|
+
source.tag,
|
|
353
|
+
Tag,
|
|
354
|
+
):
|
|
355
|
+
source_string = source.value
|
|
356
|
+
variables = self._resolve_template_string(source_string)
|
|
357
|
+
return self._resolve_sub_ref_queries(
|
|
358
|
+
variables,
|
|
359
|
+
source_string,
|
|
360
|
+
)
|
|
361
|
+
|
|
362
|
+
elif len(source) > 1:
|
|
363
|
+
source_string: str = source[0]
|
|
364
|
+
template_vars = self._resolve_template_string(source_string)
|
|
365
|
+
variables = source[1:]
|
|
366
|
+
resolved: list[dict[str, Any]] = self._resolve_subtree(root, variables)
|
|
367
|
+
|
|
368
|
+
for resolve_var in resolved:
|
|
369
|
+
for template_var, accessor in template_vars:
|
|
370
|
+
if val := resolve_var.get(accessor):
|
|
371
|
+
source_string = source_string.replace(template_var, val)
|
|
372
|
+
|
|
373
|
+
return source_string
|
|
374
|
+
|
|
375
|
+
return source
|
|
376
|
+
|
|
377
|
+
def _resolve_base64(
|
|
378
|
+
self,
|
|
379
|
+
root: CommentedMap,
|
|
380
|
+
source: CommentedMap | CommentedSeq | TaggedScalar,
|
|
381
|
+
):
|
|
382
|
+
if isinstance(source, TaggedScalar) and isinstance(
|
|
383
|
+
source.tag,
|
|
384
|
+
Tag,
|
|
385
|
+
) and isinstance(
|
|
386
|
+
source.tag.value,
|
|
387
|
+
str,
|
|
388
|
+
):
|
|
389
|
+
return base64.b64encode(source.tag.value.encode()).decode('ascii')
|
|
390
|
+
|
|
391
|
+
elif (
|
|
392
|
+
resolved := self._resolve_subtree(root, source)
|
|
393
|
+
) and isinstance(
|
|
394
|
+
resolved,
|
|
395
|
+
str
|
|
396
|
+
):
|
|
397
|
+
return base64.b64encode(resolved.encode()).decode('ascii')
|
|
398
|
+
|
|
399
|
+
return source
|
|
400
|
+
|
|
401
|
+
def _resolve_split(
|
|
402
|
+
self,
|
|
403
|
+
root: CommentedMap,
|
|
404
|
+
source: CommentedSeq | CommentedMap | TaggedScalar,
|
|
405
|
+
):
|
|
406
|
+
if isinstance(
|
|
407
|
+
source,
|
|
408
|
+
(CommentedMap, TaggedScalar),
|
|
409
|
+
) or len(source) != 2:
|
|
410
|
+
return source
|
|
411
|
+
|
|
412
|
+
delimiter = source[0]
|
|
413
|
+
if not isinstance(
|
|
414
|
+
delimiter,
|
|
415
|
+
str,
|
|
416
|
+
):
|
|
417
|
+
delimiter = self._resolve_subtree(root, delimiter)
|
|
418
|
+
|
|
419
|
+
target = source[1]
|
|
420
|
+
if not isinstance(
|
|
421
|
+
target,
|
|
422
|
+
str,
|
|
423
|
+
):
|
|
424
|
+
target = self._resolve_subtree(root, target)
|
|
425
|
+
|
|
426
|
+
if isinstance(delimiter, str) and isinstance(target, str):
|
|
427
|
+
return CommentedSeq(target.split(delimiter))
|
|
428
|
+
|
|
429
|
+
return target
|
|
430
|
+
|
|
431
|
+
def _resolve_select(
|
|
432
|
+
self,
|
|
433
|
+
root: CommentedMap,
|
|
434
|
+
source: CommentedSeq | CommentedMap | TaggedScalar,
|
|
435
|
+
):
|
|
436
|
+
if isinstance(
|
|
437
|
+
source,
|
|
438
|
+
(CommentedMap, TaggedScalar),
|
|
439
|
+
) or len(source) != 2:
|
|
440
|
+
return source
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
index = source[0]
|
|
444
|
+
if not isinstance(
|
|
445
|
+
index,
|
|
446
|
+
int,
|
|
447
|
+
):
|
|
448
|
+
index = self._resolve_subtree(root, index)
|
|
449
|
+
|
|
450
|
+
target = self._resolve_subtree(root, source[1])
|
|
451
|
+
if index > len(target):
|
|
452
|
+
return source
|
|
453
|
+
|
|
454
|
+
return target[index]
|
|
455
|
+
|
|
456
|
+
def _resolve_to_json_string(
|
|
457
|
+
self,
|
|
458
|
+
root: CommentedMap,
|
|
459
|
+
source: CommentedSeq | CommentedMap | TaggedScalar,
|
|
460
|
+
):
|
|
461
|
+
|
|
462
|
+
stack: list[tuple[CommentedMap | CommentedSeq | None, Any | None, Any]] = [(None, None, source)]
|
|
463
|
+
|
|
464
|
+
while stack:
|
|
465
|
+
parent, accessor, node = stack.pop()
|
|
466
|
+
if isinstance(node, TaggedScalar):
|
|
467
|
+
# Replace in parent
|
|
468
|
+
if parent is not None and (
|
|
469
|
+
resolved := self._resolve_tagged(root, node)
|
|
470
|
+
):
|
|
471
|
+
parent[accessor] = resolved
|
|
472
|
+
|
|
473
|
+
elif isinstance(node, CommentedMap):
|
|
474
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
475
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
476
|
+
) and node != source:
|
|
477
|
+
parent[accessor] = resolved_node
|
|
478
|
+
|
|
479
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None and node != source:
|
|
480
|
+
node = self._resolve_tagged(root, node)
|
|
481
|
+
for k in reversed(list(node.keys())):
|
|
482
|
+
stack.append((node, k, node[k]))
|
|
483
|
+
|
|
484
|
+
source = node
|
|
485
|
+
|
|
486
|
+
else:
|
|
487
|
+
# Push children (keys) in reverse for DFS order
|
|
488
|
+
for k in reversed(list(node.keys())):
|
|
489
|
+
stack.append((node, k, node[k]))
|
|
490
|
+
|
|
491
|
+
elif isinstance(node, CommentedSeq):
|
|
492
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
493
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
494
|
+
) and node != source :
|
|
495
|
+
parent[accessor] = resolved_node
|
|
496
|
+
|
|
497
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None and node != source:
|
|
498
|
+
node = self._resolve_tagged(root, node)
|
|
499
|
+
for idx, val in enumerate(reversed(node)):
|
|
500
|
+
stack.append((node, idx, val))
|
|
501
|
+
|
|
502
|
+
source = node
|
|
503
|
+
|
|
504
|
+
else:
|
|
505
|
+
# Process indices in reverse order for proper DFS
|
|
506
|
+
for idx, val in enumerate(reversed(node)):
|
|
507
|
+
stack.append((node, idx, val))
|
|
508
|
+
|
|
509
|
+
return json.dumps(source)
|
|
510
|
+
|
|
511
|
+
def _resolve_subtree(
|
|
512
|
+
self,
|
|
513
|
+
root: CommentedMap,
|
|
514
|
+
source: CommentedSeq
|
|
515
|
+
) -> Any:
|
|
516
|
+
"""
|
|
517
|
+
Iterative DFS over a ruamel.yaml tree.
|
|
518
|
+
- CommentedMap/CommentedSeq are traversed.
|
|
519
|
+
"""
|
|
520
|
+
stack: list[tuple[CommentedMap | CommentedSeq | None, Any | None, Any]] = [(None, None, source)]
|
|
521
|
+
|
|
522
|
+
while stack:
|
|
523
|
+
parent, accessor, node = stack.pop()
|
|
524
|
+
if isinstance(node, TaggedScalar):
|
|
525
|
+
# Replace in parent
|
|
526
|
+
if parent is not None and (
|
|
527
|
+
resolved := self._resolve_tagged(root, node)
|
|
528
|
+
):
|
|
529
|
+
parent[accessor] = resolved
|
|
530
|
+
|
|
531
|
+
elif isinstance(node, CommentedMap):
|
|
532
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
533
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
534
|
+
):
|
|
535
|
+
parent[accessor] = resolved_node
|
|
536
|
+
|
|
537
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None:
|
|
538
|
+
node = self._resolve_tagged(root, node)
|
|
539
|
+
for k in reversed(list(node.keys())):
|
|
540
|
+
stack.append((node, k, node[k]))
|
|
541
|
+
|
|
542
|
+
source = node
|
|
543
|
+
|
|
544
|
+
else:
|
|
545
|
+
# Push children (keys) in reverse for DFS order
|
|
546
|
+
for k in reversed(list(node.keys())):
|
|
547
|
+
stack.append((node, k, node[k]))
|
|
548
|
+
|
|
549
|
+
elif isinstance(node, CommentedSeq):
|
|
550
|
+
if isinstance(node.tag, Tag) and node.tag.value is not None and parent and (
|
|
551
|
+
resolved_node := self._resolve_tagged(root, node)
|
|
552
|
+
):
|
|
553
|
+
parent[accessor] = resolved_node
|
|
554
|
+
|
|
555
|
+
elif isinstance(node.tag, Tag) and node.tag.value is not None:
|
|
556
|
+
node = self._resolve_tagged(root, node)
|
|
557
|
+
for idx, val in enumerate(reversed(node)):
|
|
558
|
+
stack.append((node, idx, val))
|
|
559
|
+
|
|
560
|
+
source = node
|
|
561
|
+
|
|
562
|
+
else:
|
|
563
|
+
# Process indices in reverse order for proper DFS
|
|
564
|
+
for idx, val in enumerate(reversed(node)):
|
|
565
|
+
stack.append((node, idx, val))
|
|
566
|
+
|
|
567
|
+
return source
|
|
568
|
+
|
|
569
|
+
def _longest_path(
|
|
570
|
+
self,
|
|
571
|
+
root: CommentedMap,
|
|
572
|
+
source: TaggedScalar | CommentedMap | CommentedSeq
|
|
573
|
+
):
|
|
574
|
+
"""
|
|
575
|
+
Return the longest path from `node` to any leaf as a list of strings.
|
|
576
|
+
- Map keys are appended as strings.
|
|
577
|
+
- Sequence indices are appended as strings.
|
|
578
|
+
- TaggedScalar and other scalars are leafs.
|
|
579
|
+
"""
|
|
580
|
+
stack = [(source, [])]
|
|
581
|
+
longest: list[str] = []
|
|
582
|
+
|
|
583
|
+
while stack:
|
|
584
|
+
current, path = stack.pop()
|
|
585
|
+
|
|
586
|
+
if isinstance(current, CommentedMap):
|
|
587
|
+
if not current:
|
|
588
|
+
if len(path) > len(longest):
|
|
589
|
+
longest = path
|
|
590
|
+
else:
|
|
591
|
+
|
|
592
|
+
if isinstance(current.tag, Tag) and current.tag.value is not None and (
|
|
593
|
+
current != source
|
|
594
|
+
):
|
|
595
|
+
resolved_node = self._resolve_tagged(root, current)
|
|
596
|
+
stack.append((resolved_node, path))
|
|
597
|
+
|
|
598
|
+
else:
|
|
599
|
+
# Iterate in normal order; push in reverse to keep DFS intuitive
|
|
600
|
+
keys = list(current.keys())
|
|
601
|
+
for k in reversed(keys):
|
|
602
|
+
stack.append((current[k], path + [str(k)]))
|
|
603
|
+
|
|
604
|
+
elif isinstance(current, CommentedSeq):
|
|
605
|
+
if not current:
|
|
606
|
+
if len(path) > len(longest):
|
|
607
|
+
longest = path
|
|
608
|
+
else:
|
|
609
|
+
if isinstance(current.tag, Tag) and current.tag.value is not None and (
|
|
610
|
+
current != source
|
|
611
|
+
):
|
|
612
|
+
resolved_node = self._resolve_tagged(root, current)
|
|
613
|
+
stack.append((resolved_node, path))
|
|
614
|
+
|
|
615
|
+
else:
|
|
616
|
+
for idx in reversed(range(len(current))):
|
|
617
|
+
stack.append((current[idx], path + [str(idx)]))
|
|
618
|
+
|
|
619
|
+
else:
|
|
620
|
+
# Scalar (incl. TaggedScalar) -> leaf
|
|
621
|
+
if len(path) > len(longest):
|
|
622
|
+
longest = path
|
|
623
|
+
|
|
624
|
+
return longest
|
|
625
|
+
|
|
626
|
+
def _assemble_mappings(self, mappings: dict[str, str]):
|
|
627
|
+
for mapping, value in mappings.items():
|
|
628
|
+
if (
|
|
629
|
+
map_data := self._mappings.get(mapping)
|
|
630
|
+
) and (
|
|
631
|
+
selected := map_data.get(value)
|
|
632
|
+
):
|
|
633
|
+
self._selected_mappings[mapping] = selected
|
|
634
|
+
|
|
635
|
+
def _process_attributes(
|
|
636
|
+
self,
|
|
637
|
+
attributes: dict[str, Any],
|
|
638
|
+
):
|
|
639
|
+
return {
|
|
640
|
+
key: self._process_python_structure(value)
|
|
641
|
+
for key, value in attributes.items()
|
|
642
|
+
}
|
|
643
|
+
|
|
644
|
+
def _process_python_structure(
|
|
645
|
+
self,
|
|
646
|
+
obj: Any
|
|
647
|
+
) -> Any:
|
|
648
|
+
"""
|
|
649
|
+
Convert arbitrarily nested Python data (dict/list/scalars) into ruamel.yaml
|
|
650
|
+
CommentedMap/CommentedSeq equivalents using iterative DFS. Scalars are returned as-is.
|
|
651
|
+
"""
|
|
652
|
+
# Fast path for scalars
|
|
653
|
+
if not isinstance(obj, (dict, list)):
|
|
654
|
+
return obj
|
|
655
|
+
|
|
656
|
+
# Create root container
|
|
657
|
+
if isinstance(obj, dict):
|
|
658
|
+
root_out: Any = CommentedMap()
|
|
659
|
+
work: list[tuple[Any, CommentedMap | CommentedSeq | None, Any | None]] = [(obj, None, None)]
|
|
660
|
+
else:
|
|
661
|
+
root_out = CommentedSeq()
|
|
662
|
+
work = [(obj, None, None)]
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
# Map from input container id to output container to avoid recreating
|
|
667
|
+
created: dict[int, CommentedMap | CommentedSeq] = {id(obj): root_out}
|
|
668
|
+
|
|
669
|
+
|
|
670
|
+
while work:
|
|
671
|
+
in_node, out_parent, out_key = work.pop()
|
|
672
|
+
|
|
673
|
+
if isinstance(in_node, dict):
|
|
674
|
+
out_container = created.get(id(in_node))
|
|
675
|
+
if out_container is None:
|
|
676
|
+
out_container = CommentedMap()
|
|
677
|
+
created[id(in_node)] = out_container
|
|
678
|
+
assign(out_parent, out_key, out_container)
|
|
679
|
+
else:
|
|
680
|
+
# Root case: already created and assigned
|
|
681
|
+
assign(out_parent, out_key, out_container)
|
|
682
|
+
|
|
683
|
+
# Push children in reverse to process first child next (DFS)
|
|
684
|
+
items = list(in_node.items())
|
|
685
|
+
for k, v in reversed(items):
|
|
686
|
+
if isinstance(v, (dict, list)):
|
|
687
|
+
# Create child container placeholder now for correct parent linkage
|
|
688
|
+
child_container = CommentedMap() if isinstance(v, dict) else CommentedSeq()
|
|
689
|
+
created[id(v)] = child_container
|
|
690
|
+
work.append((v, out_container, k))
|
|
691
|
+
else:
|
|
692
|
+
# Scalar, assign directly
|
|
693
|
+
out_container[k] = v
|
|
694
|
+
|
|
695
|
+
elif isinstance(in_node, list):
|
|
696
|
+
out_container = created.get(id(in_node))
|
|
697
|
+
if out_container is None:
|
|
698
|
+
out_container = CommentedSeq()
|
|
699
|
+
created[id(in_node)] = out_container
|
|
700
|
+
assign(out_parent, out_key, out_container)
|
|
701
|
+
else:
|
|
702
|
+
assign(out_parent, out_key, out_container)
|
|
703
|
+
|
|
704
|
+
# Push children in reverse order
|
|
705
|
+
for idx in reversed(range(len(in_node))):
|
|
706
|
+
v = in_node[idx]
|
|
707
|
+
if isinstance(v, (dict, list)):
|
|
708
|
+
child_container = CommentedMap() if isinstance(v, dict) else CommentedSeq()
|
|
709
|
+
created[id(v)] = child_container
|
|
710
|
+
work.append((v, out_container, idx))
|
|
711
|
+
else:
|
|
712
|
+
out_container.append(v)
|
|
713
|
+
|
|
714
|
+
else:
|
|
715
|
+
# Scalar node
|
|
716
|
+
assign(out_parent, out_key, in_node)
|
|
717
|
+
|
|
718
|
+
return root_out
|
|
719
|
+
|
|
720
|
+
def _resolve_template_string(self, template: str):
|
|
721
|
+
|
|
722
|
+
variables: list[tuple[str, str]] = []
|
|
723
|
+
for match in self._sub_pattern.finditer(template):
|
|
724
|
+
variables.append((
|
|
725
|
+
match.group(0),
|
|
726
|
+
self._sub_inner_text_pattern.sub('', match.group(0)),
|
|
727
|
+
))
|
|
728
|
+
|
|
729
|
+
return variables
|
|
730
|
+
|
|
731
|
+
def _resolve_sub_ref_queries(
|
|
732
|
+
self,
|
|
733
|
+
variables: list[tuple[str, str]],
|
|
734
|
+
source_string: str,
|
|
735
|
+
):
|
|
736
|
+
for variable, accessor in variables:
|
|
737
|
+
if val := self._references.get(accessor):
|
|
738
|
+
source_string = source_string.replace(variable, val)
|
|
739
|
+
|
|
740
|
+
return source_string
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
from ruamel.yaml.comments import CommentedMap, CommentedSeq
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def assign(parent: CommentedMap | CommentedSeq | None, key_or_index: Any, value: Any):
|
|
6
|
+
if parent is None:
|
|
7
|
+
return # root already set
|
|
8
|
+
if isinstance(parent, CommentedMap):
|
|
9
|
+
parent[key_or_index] = value
|
|
10
|
+
else:
|
|
11
|
+
# key_or_index is an int for sequences
|
|
12
|
+
# Ensure sequence large enough (iterative approach assigns in order, so append is fine)
|
|
13
|
+
parent.append(value)
|
|
@@ -25,18 +25,18 @@ cfn_check/evaluation/parsing/__init__.py
|
|
|
25
25
|
cfn_check/evaluation/parsing/query_parser.py
|
|
26
26
|
cfn_check/evaluation/parsing/token.py
|
|
27
27
|
cfn_check/evaluation/parsing/token_type.py
|
|
28
|
-
cfn_check/loader/__init__.py
|
|
29
|
-
cfn_check/loader/loader.py
|
|
30
28
|
cfn_check/logging/__init__.py
|
|
31
29
|
cfn_check/logging/models.py
|
|
32
30
|
cfn_check/rendering/__init__.py
|
|
33
31
|
cfn_check/rendering/renderer.py
|
|
32
|
+
cfn_check/rendering/utils.py
|
|
34
33
|
cfn_check/rules/__init__.py
|
|
35
34
|
cfn_check/rules/rule.py
|
|
36
35
|
cfn_check/shared/__init__.py
|
|
37
36
|
cfn_check/shared/types.py
|
|
38
37
|
cfn_check/validation/__init__.py
|
|
39
38
|
cfn_check/validation/validator.py
|
|
39
|
+
example/multitag.py
|
|
40
40
|
example/pydantic_rules.py
|
|
41
41
|
example/renderer_test.py
|
|
42
42
|
example/rules.py
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import ruamel.yaml
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
class MultiTaggedObject:
    """Example payload pairing a value with a list of tags for YAML round-trips."""

    def __init__(self, value, tags):
        # Stored verbatim; serialized/deserialized by the helper functions below.
        self.value = value
        self.tags = tags
|
|
8
|
+
|
|
9
|
+
def represent_multi_tagged_object(dumper, data):
    """Represent a MultiTaggedObject as a ``!MultiTagged`` YAML mapping."""
    payload = {'value': data.value, 'tags': data.tags}
    return dumper.represent_mapping('!MultiTagged', payload)
|
|
11
|
+
|
|
12
|
+
def construct_multi_tagged_object(constructor, node):
    """Rebuild a MultiTaggedObject from a parsed ``!MultiTagged`` mapping node."""
    fields = constructor.construct_mapping(node)
    return MultiTaggedObject(fields['value'], fields['tags'])
|
|
15
|
+
|
|
16
|
+
yaml = ruamel.yaml.YAML()
# Wire up the custom handlers defined above. The previous revision called
# yaml.register_class(MultiTaggedObject), which ignores these helpers and
# uses the class's default representation, leaving them as dead code.
yaml.representer.add_representer(MultiTaggedObject, represent_multi_tagged_object)
yaml.constructor.add_constructor('!MultiTagged', construct_multi_tagged_object)

# Example usage: dumps `item: !MultiTagged {value: ..., tags: [...]}` to stdout.
data = MultiTaggedObject("some_value", ["tag1", "tag2"])
yaml.dump({'item': data}, sys.stdout)
|
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
import yaml
|
|
2
|
-
import pathlib
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
class Loader(yaml.SafeLoader):
|
|
6
|
-
pass
|
|
7
|
-
|
|
8
|
-
def create_tag(tag):
|
|
9
|
-
def constructor(loader: Loader, node):
|
|
10
|
-
if isinstance(node, yaml.ScalarNode):
|
|
11
|
-
return node.value
|
|
12
|
-
elif isinstance(node, yaml.SequenceNode):
|
|
13
|
-
return loader.construct_sequence(node)
|
|
14
|
-
elif isinstance(node, yaml.MappingNode):
|
|
15
|
-
return loader.construct_mapping(node)
|
|
16
|
-
return constructor
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
def find_templates(path, file_pattern):
|
|
20
|
-
return list(pathlib.Path(path).rglob(file_pattern))
|
|
21
|
-
|
|
@@ -1,124 +0,0 @@
|
|
|
1
|
-
import re
|
|
2
|
-
from collections import deque
|
|
3
|
-
|
|
4
|
-
from cfn_check.shared.types import (
|
|
5
|
-
Data,
|
|
6
|
-
Items,
|
|
7
|
-
YamlObject,
|
|
8
|
-
)
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class Renderer:
|
|
12
|
-
|
|
13
|
-
def __init__(self):
|
|
14
|
-
self.parameter_defaults: dict[str, str | int | float | bool | None] = {}
|
|
15
|
-
self.items: Items = deque()
|
|
16
|
-
self._ref_pattern = re.compile(r'^!Ref\s+')
|
|
17
|
-
self._visited: list[str | int] = []
|
|
18
|
-
self._data: YamlObject = {}
|
|
19
|
-
self._mappings: dict[str, dict[str, YamlObject]] = {}
|
|
20
|
-
self._selected_mappings: dict[str, YamlObject] = {}
|
|
21
|
-
self._inputs: dict[str, str] = {}
|
|
22
|
-
|
|
23
|
-
def render(
|
|
24
|
-
self,
|
|
25
|
-
resources: YamlObject,
|
|
26
|
-
selected_mappings: dict[str, str] | None = None,
|
|
27
|
-
):
|
|
28
|
-
data = resources.get("Resources", {})
|
|
29
|
-
self.items.clear()
|
|
30
|
-
self.items.append(data)
|
|
31
|
-
|
|
32
|
-
self._assemble_parameters(resources)
|
|
33
|
-
|
|
34
|
-
self._mappings = resources.get('Mappings', {})
|
|
35
|
-
|
|
36
|
-
if selected_mappings:
|
|
37
|
-
self._assemble_mappings(selected_mappings)
|
|
38
|
-
|
|
39
|
-
while len(self.items) > 0:
|
|
40
|
-
item = self.items.pop()
|
|
41
|
-
|
|
42
|
-
if isinstance(item, list):
|
|
43
|
-
self._visited.append((None, item))
|
|
44
|
-
self.items.extend([
|
|
45
|
-
(idx, val) for idx, val in enumerate(item)
|
|
46
|
-
])
|
|
47
|
-
|
|
48
|
-
elif isinstance(item, dict):
|
|
49
|
-
self._visited.append((None, item))
|
|
50
|
-
self.items.extend(list(item.items()))
|
|
51
|
-
|
|
52
|
-
elif isinstance(item, tuple):
|
|
53
|
-
key, value = item
|
|
54
|
-
self._parse_kv_pair(key, value)
|
|
55
|
-
|
|
56
|
-
last_item = data
|
|
57
|
-
validator = dict(resources)
|
|
58
|
-
validator_data = validator.get("Resources", {})
|
|
59
|
-
for key, value in self._visited:
|
|
60
|
-
|
|
61
|
-
if isinstance(value, str) and (
|
|
62
|
-
_ := self._selected_mappings.get(value)
|
|
63
|
-
):
|
|
64
|
-
pass
|
|
65
|
-
|
|
66
|
-
if isinstance(key, str) and isinstance(last_item, dict) and key in validator_data:
|
|
67
|
-
last_item[key] = value
|
|
68
|
-
|
|
69
|
-
elif isinstance(key, int) and isinstance(last_item, list) and (
|
|
70
|
-
value in validator_data or self.parameter_defaults.get(validator_data[key]) is not None
|
|
71
|
-
):
|
|
72
|
-
last_item[key] = value
|
|
73
|
-
|
|
74
|
-
if key and isinstance(value, (dict, list)):
|
|
75
|
-
last_item = value
|
|
76
|
-
validator_data = value
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
return resources
|
|
80
|
-
|
|
81
|
-
def _parse_kv_pair(self, key: str | int, value: Data):
|
|
82
|
-
|
|
83
|
-
if isinstance(value, list):
|
|
84
|
-
self.items.extend([
|
|
85
|
-
(idx, val) for idx, val in enumerate(value)
|
|
86
|
-
])
|
|
87
|
-
|
|
88
|
-
elif isinstance(value, dict):
|
|
89
|
-
self.items.extend(list(value.items()))
|
|
90
|
-
|
|
91
|
-
else:
|
|
92
|
-
key, value = self._parse_value(key, value)
|
|
93
|
-
|
|
94
|
-
self._visited.append((key, value))
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
def _parse_value(self, key: str | int, value: str | int | float | bool):
|
|
98
|
-
|
|
99
|
-
if val := self.parameter_defaults.get(key):
|
|
100
|
-
value = val
|
|
101
|
-
|
|
102
|
-
elif val := self.parameter_defaults.get(value):
|
|
103
|
-
value = val
|
|
104
|
-
|
|
105
|
-
return key, value
|
|
106
|
-
|
|
107
|
-
def _assemble_parameters(self, resources: YamlObject):
|
|
108
|
-
params: dict[str, Data] = resources.get("Parameters", {})
|
|
109
|
-
for param_name, param in params.items():
|
|
110
|
-
if default := param.get("Default"):
|
|
111
|
-
self.parameter_defaults[param_name] = default
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def _assemble_mappings(
|
|
115
|
-
self,
|
|
116
|
-
selected_keys: dict[str, str]
|
|
117
|
-
):
|
|
118
|
-
for key, value in selected_keys.items():
|
|
119
|
-
if (
|
|
120
|
-
mapping := self._mappings.get(key)
|
|
121
|
-
) and (
|
|
122
|
-
selected := mapping.get(value)
|
|
123
|
-
):
|
|
124
|
-
self._selected_mappings[key] = selected
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|