spl_core-4.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spl_core/__init__.py +1 -0
- spl_core/common/__init__.py +0 -0
- spl_core/common/cmake.py +52 -0
- spl_core/common/path.py +17 -0
- spl_core/gcov_maid/__init__.py +0 -0
- spl_core/gcov_maid/gcov_maid.py +48 -0
- spl_core/kconfig/__init__.py +0 -0
- spl_core/kconfig/kconfig.py +271 -0
- spl_core/project_creator/__init__.py +0 -0
- spl_core/project_creator/creator.py +133 -0
- spl_core/project_creator/templates/project/cookiecutter.json +14 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.flake8 +2 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.gitignore +33 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/cmake-kits.json +11 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/cmake-variants.json +18 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/extensions.json +18 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/launch.json +37 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/settings.json +45 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/tasks.json +93 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/CMakeLists.txt +43 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/KConfig +23 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/LICENSE +21 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/Pipfile +33 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/README.md +7 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/build.bat +1 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/build.ps1 +245 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/conf.py +200 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/Doxyfile.in +2774 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/common/index.rst +5 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/components/index.rst +23 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/doxygen-awesome/LICENSE +21 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/doxygen-awesome/doxygen-awesome.css +2530 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/software_architecture/index.rst +2 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/software_requirements/index.rst +7 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/doc/test_report_template.txt +46 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/index.rst +38 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/install-mandatory.bat +1 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/pytest.ini +9 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/scoopfile.json +47 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/test/test_build.py +39 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/test/test_unittests.py +28 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/test/utils.py +26 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/tools/setup/git-config.ps1 +8 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/CMakeLists.txt +3 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/doc/_images/screenshot.png +0 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/doc/design.rst +25 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/doc/index.rst +8 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/src/component.c +31 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/src/component.h +1 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/component/test/test_component.cc +60 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/main/CMakeLists.txt +2 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/main/doc/index.rst +14 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_components -%} components {%- endif %}/main/src/main.c +17 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_tools -%} tools {%- endif %}/toolchains/clang/toolchain.cmake +8 -0
- spl_core/project_creator/templates/project/{{cookiecutter.name}}/{% if cookiecutter.touch_tools -%} tools {%- endif %}/toolchains/gcc/toolchain.cmake +3 -0
- spl_core/project_creator/templates/variant/cookiecutter.json +4 -0
- spl_core/project_creator/templates/variant/{{cookiecutter.flavor}}/{{cookiecutter.subsystem}}/config.cmake +1 -0
- spl_core/project_creator/templates/variant/{{cookiecutter.flavor}}/{{cookiecutter.subsystem}}/config.txt +1 -0
- spl_core/project_creator/templates/variant/{{cookiecutter.flavor}}/{{cookiecutter.subsystem}}/parts.cmake +2 -0
- spl_core/project_creator/variant.py +23 -0
- spl_core/project_creator/workspace_artifacts.py +36 -0
- spl_core-4.0.0.dist-info/LICENSE +22 -0
- spl_core-4.0.0.dist-info/METADATA +62 -0
- spl_core-4.0.0.dist-info/RECORD +65 -0
- spl_core-4.0.0.dist-info/WHEEL +4 -0
spl_core/__init__.py
ADDED
@@ -0,0 +1 @@
+__version__ = "4.0.0"

spl_core/common/__init__.py
File without changes
spl_core/common/cmake.py
ADDED
@@ -0,0 +1,52 @@
+import logging
+import subprocess
+from subprocess import CompletedProcess
+
+from spl_core.project_creator.variant import Variant
+from spl_core.project_creator.workspace_artifacts import WorkspaceArtifacts
+
+
+class CMake:
+    executable = "cmake"
+
+    def __init__(self, workspace_artifacts: WorkspaceArtifacts):
+        self.logger = logging.getLogger(__name__)
+        self.workspace_artifacts = workspace_artifacts
+
+    def run(self, variant: Variant, build_kit: str = "prod", target: str = "all") -> CompletedProcess[bytes]:
+        ret_status = self.configure(variant, build_kit)
+        if ret_status.returncode == 0:
+            ret_status = self.build(variant, build_kit, target)
+        return ret_status
+
+    def configure(self, variant: Variant, build_kit: str = "prod") -> CompletedProcess[bytes]:
+        arguments = (
+            f" --log-level=DEBUG"
+            f" -S{self.workspace_artifacts.root_dir}"
+            f" -B{self.workspace_artifacts.get_build_dir(variant, build_kit)}"
+            f" -G Ninja "
+            f" -DBUILD_KIT:STRING={build_kit}"
+            f" -DVARIANT:STRING={variant.to_string()}"
+            f" -DCMAKE_BUILD_TYPE:STRING={variant.to_string('_')}"
+        )
+        if build_kit == "test":
+            toolchain = self.workspace_artifacts.root_dir.joinpath(
+                "tools\\toolchains\\gcc\\toolchain.cmake"
+            )
+            arguments += f" -DCMAKE_TOOLCHAIN_FILE={toolchain}"
+        return self.run_cmake(arguments)
+
+    def build(
+        self, variant: Variant, build_kit: str = "prod", target: str = "all"
+    ) -> CompletedProcess[bytes]:
+        arguments = (
+            f" --build {self.workspace_artifacts.get_build_dir(variant, build_kit)}"
+            f" --config {variant.to_string('_')}"
+            f" --target {target} -- "
+        )
+        return self.run_cmake(arguments)
+
+    def run_cmake(self, arguments: str) -> CompletedProcess[bytes]:
+        command = self.executable + " " + arguments
+        print(f"Running {command}")
+        return subprocess.run(command.split())
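
A minimal usage sketch (not part of the package contents above), assuming spl-core 4.0.0 is installed; the workspace path and the variant name FLV1/SYS1 are illustrative placeholders:

    # Illustrative only: configure and build one variant via the CMake wrapper.
    from pathlib import Path

    from spl_core.common.cmake import CMake
    from spl_core.project_creator.variant import Variant
    from spl_core.project_creator.workspace_artifacts import WorkspaceArtifacts

    workspace = WorkspaceArtifacts(Path("my_workspace"))  # placeholder workspace root
    variant = Variant.from_string("FLV1/SYS1")  # <flavor>/<subsystem>, as in the CLI help
    result = CMake(workspace).run(variant, build_kit="prod", target="all")
    print(result.returncode)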
spl_core/common/path.py
ADDED
@@ -0,0 +1,17 @@
+from pathlib import Path
+
+
+def to_path(input_path: str, check_if_exists: bool = True) -> Path:
+    return_path = Path(input_path)
+    if not check_if_exists or return_path.exists():
+        return return_path.absolute()
+    else:
+        raise FileNotFoundError(input_path)
+
+
+def existing_path(input_path: str) -> Path:
+    return to_path(input_path, True)
+
+
+def non_existing_path(input_path: str) -> Path:
+    return to_path(input_path, False)
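
A brief sketch (not part of the package) of how these helpers are typically used, for example as argparse type callbacks elsewhere in the package; the file names are placeholders:

    # Illustrative only: resolve paths to absolute Path objects.
    from spl_core.common.path import existing_path, non_existing_path

    model = existing_path("KConfig")  # raises FileNotFoundError if the file is missing
    header = non_existing_path("gen/autoconf.h")  # returned even if it does not exist yet
    print(model, header)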

spl_core/gcov_maid/__init__.py
File without changes

spl_core/gcov_maid/gcov_maid.py
ADDED
@@ -0,0 +1,48 @@
+import argparse
+from pathlib import Path
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(description="Script with command line options")
+    parser.add_argument(
+        "--working-dir", help="Working directory", required=True
+    )  # Make the option mandatory
+    parser.add_argument(
+        "--wipe-all-gcda", action="store_true", help="Wipe all gcda files recursively"
+    )
+    parser.add_argument(
+        "--wipe-orphaned-gcno",
+        action="store_true",
+        help="Wipe orphaned gcno files recursively",
+    )
+
+    args = parser.parse_args()
+
+    # Access the command line options
+    working_dir = Path(args.working_dir)  # Convert the string to a Path object
+    wipe_gcda = bool(args.wipe_all_gcda)  # Convert the switch value to boolean
+    wipe_gcno = bool(args.wipe_orphaned_gcno)  # Convert the switch value to boolean
+
+    if wipe_gcda:
+        wipe_gcda_files(working_dir)
+
+    if wipe_gcno:
+        wipe_gcno_files(working_dir)
+
+
+def wipe_gcda_files(working_dir: Path) -> None:
+    for file in working_dir.glob("**/*.gcda"):
+        print(f"Deleting obsolete coverage data file: {file}")
+        file.unlink()
+
+
+def wipe_gcno_files(working_dir: Path) -> None:
+    for file in working_dir.glob("**/*.gcno"):
+        obj_file = file.with_suffix(".obj")
+        if not obj_file.exists():
+            print(f"Deleting obsolete coverage notes file: {file}")
+            file.unlink()
+
+
+if __name__ == "__main__":
+    main()
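
A minimal sketch (not part of the package) of calling the cleanup helpers directly instead of via the command line; the build directory name is a placeholder:

    # Illustrative only: remove stale gcov artifacts under a build directory.
    from pathlib import Path

    from spl_core.gcov_maid.gcov_maid import wipe_gcda_files, wipe_gcno_files

    build_dir = Path("build")
    wipe_gcda_files(build_dir)  # deletes all *.gcda files recursively
    wipe_gcno_files(build_dir)  # deletes *.gcno files whose matching *.obj is gone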

spl_core/kconfig/__init__.py
File without changes

spl_core/kconfig/kconfig.py
ADDED
@@ -0,0 +1,271 @@
+import argparse
+import json
+import os
+import re
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from dataclasses import dataclass
+from enum import Enum, auto
+from pathlib import Path
+from typing import Any, Generator, List, Optional
+
+import kconfiglib
+
+from spl_core.common.path import existing_path, non_existing_path
+
+
+class GeneratedFile:
+    def __init__(self, path: Path, content: str = "", skip_writing_if_unchanged: bool = False) -> None:
+        self.path = path
+
+        self.content = content
+
+        self.skip_writing_if_unchanged = skip_writing_if_unchanged
+
+    def to_string(self) -> str:
+        return self.content
+
+    def to_file(self) -> None:
+        """
+        Only write to file if the content has changed.
+
+        The directory of the file is created if it does not exist.
+        """
+        content = self.to_string()
+
+        if not self.path.exists() or not self.skip_writing_if_unchanged or self.path.read_text() != content:
+            self.path.parent.mkdir(parents=True, exist_ok=True)
+            self.path.write_text(content)
+
+
+class TriState(Enum):
+    Y = auto()
+    M = auto()
+    N = auto()
+
+
+class ConfigElementType(Enum):
+    UNKNOWN = auto()
+    BOOL = auto()
+    TRISTATE = auto()
+    STRING = auto()
+    INT = auto()
+    HEX = auto()
+
+
+@dataclass
+class ConfigElement:
+    type: ConfigElementType
+    name: str
+    value: Any
+    #: Is determined when the value is calculated. This is a hidden function call due to property magic.
+    _write_to_conf: bool = True
+
+
+@dataclass
+class ConfigurationData:
+    """
+    - holds the variant configuration data
+    - requires no variable substitution (this should have been already done)
+    """
+
+    elements: List[ConfigElement]
+
+
+class FileWriter(ABC):
+    """- writes the ConfigurationData to a file"""
+
+    def __init__(self, output_file: Path):
+        self.output_file = output_file
+
+    def write(self, configuration_data: ConfigurationData) -> None:
+        """
+        - writes the ConfigurationData to a file
+        The file shall not be modified if the content is the same as the existing one
+        """
+        content = self.generate_content(configuration_data)
+        GeneratedFile(self.output_file, content, skip_writing_if_unchanged=True).to_file()
+
+    @abstractmethod
+    def generate_content(self, configuration_data: ConfigurationData) -> str:
+        """- generates the content of the file from the ConfigurationData"""
+
+
+class HeaderWriter(FileWriter):
+    """Writes the ConfigurationData as pre-processor defines in a C Header file"""
+
+    config_prefix = "CONFIG_"  # Prefix for all configuration defines
+
+    def generate_content(self, configuration_data: ConfigurationData) -> str:
+        """
+        This method does exactly what the kconfiglib.write_autoconf() method does.
+        We had to implement it here because we refactored the file writers to use the ConfigurationData
+        instead of the KConfig configuration. ConfigurationData has variable substitution already done.
+        """
+        result: List[str] = [
+            "/** @file */",
+            "#ifndef __autoconf_h__",
+            "#define __autoconf_h__",
+            "",
+        ]
+
+        def add_define(define_decl: str, description: str) -> None:
+            result.append(f"/** {description} */")
+            result.append(define_decl)
+
+        for element in configuration_data.elements:
+            val = element.value
+            if not element._write_to_conf:
+                continue
+
+            if element.type in [ConfigElementType.BOOL, ConfigElementType.TRISTATE]:
+                if val == TriState.Y:
+                    add_define(f"#define {self.config_prefix}{element.name} 1", element.name)
+                elif val == TriState.M:
+                    add_define(
+                        f"#define {self.config_prefix}{element.name}_MODULE 1",
+                        element.name,
+                    )
+
+            elif element.type is ConfigElementType.STRING:
+                add_define(
+                    f'#define {self.config_prefix}{element.name} "{kconfiglib.escape(val)}"',
+                    element.name,
+                )
+
+            else:  # element.type in [INT, HEX]:
+                if element.type is ConfigElementType.HEX:
+                    val = hex(val)
+                add_define(f"#define {self.config_prefix}{element.name} {val}", element.name)
+        result.extend(["", "#endif /* __autoconf_h__ */", ""])
+        return "\n".join(result)
+
+
+class JsonWriter(FileWriter):
+    """Writes the ConfigurationData in json format"""
+
+    def generate_content(self, configuration_data: ConfigurationData) -> str:
+        result = {}
+        for element in configuration_data.elements:
+            if element.type is ConfigElementType.BOOL:
+                result[element.name] = True if element.value == TriState.Y else False
+            else:
+                result[element.name] = element.value
+        return json.dumps({"features": result}, indent=4)
+
+
+class CMakeWriter(FileWriter):
+    """Writes the ConfigurationData as CMake variables"""
+
+    def generate_content(self, configuration_data: ConfigurationData) -> str:
+        result: List[str] = []
+        add = result.append
+        for element in configuration_data.elements:
+            val = element.value
+            if element.type is ConfigElementType.BOOL:
+                val = True if element.value == TriState.Y else False
+            if not element._write_to_conf:
+                continue
+            add(f'set({element.name} "{val}")')
+
+        return "\n".join(result)
+
+
+@contextmanager
+def working_directory(some_directory: Path) -> Generator[Any, Any, Any]:
+    current_directory = Path().absolute()
+    try:
+        os.chdir(some_directory)
+        yield
+    finally:
+        os.chdir(current_directory)
+
+
+class KConfig:
+    def __init__(
+        self,
+        k_config_model_file: Path,
+        k_config_file: Optional[Path] = None,
+        k_config_root_directory: Optional[Path] = None,
+    ):
+        """
+        :param k_config_model_file: Feature model definition (KConfig format)
+        :param k_config_file: User feature selection configuration file
+        :param k_config_root_directory: all paths for the included configuration paths shall be relative to this folder
+        """
+        if not k_config_model_file.exists():
+            raise FileNotFoundError(f"File {k_config_model_file} does not exist.")
+        with working_directory(k_config_root_directory or k_config_model_file.parent):
+            self._config = kconfiglib.Kconfig(k_config_model_file.absolute().as_posix())
+        if k_config_file:
+            if not k_config_file.exists():
+                raise FileNotFoundError(f"File {k_config_file} does not exist.")
+            self._config.load_config(k_config_file, replace=False)
+        self.config = self.create_config_data(self._config)
+
+    def create_config_data(self, config: kconfiglib.Kconfig) -> ConfigurationData:
+        """- creates the ConfigurationData from the KConfig configuration"""
+        elements = []
+        elements_dict = {}
+
+        def process_node(node: Any) -> None:
+            sym = node.item
+            if not isinstance(sym, kconfiglib.Symbol):
+                return
+
+            if sym.config_string:
+                val = sym.str_value
+                type = ConfigElementType.STRING
+                if sym.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
+                    val = getattr(TriState, str(val).upper())
+                    type = ConfigElementType.BOOL if sym.type == kconfiglib.BOOL else ConfigElementType.TRISTATE
+                elif sym.type == kconfiglib.HEX:
+                    val = int(str(val), 16)
+                    type = ConfigElementType.HEX
+                elif sym.type == kconfiglib.INT:
+                    val = int(val)
+                    type = ConfigElementType.INT
+                new_element = ConfigElement(type, sym.name, val, sym._write_to_conf)
+                elements.append(new_element)
+                elements_dict[sym.name] = new_element
+
+        for n in config.node_iter(False):
+            process_node(n)
+
+        # replace text in KConfig with referenced variables (string type only)
+        # KConfig variables get replaced like: ${VARIABLE_NAME}, e.g. ${CONFIG_FOO}
+        for element in elements:
+            if element.type == ConfigElementType.STRING:
+                element.value = re.sub(
+                    r"\$\{([A-Za-z0-9_]+)\}",
+                    lambda m: str(elements_dict[m.group(1)].value),
+                    element.value,
+                )
+                element.value = re.sub(
+                    r"\$\{ENV:([A-Za-z0-9_]+)\}",
+                    lambda m: str(os.environ.get(m.group(1), "")),
+                    element.value,
+                )
+
+        return ConfigurationData(elements)
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(description="KConfig generation")
+    parser.add_argument("--kconfig_model_file", required=True, type=existing_path)
+    parser.add_argument("--kconfig_config_file", required=False, type=existing_path)
+    parser.add_argument("--out_header_file", required=True, type=non_existing_path)
+    parser.add_argument("--out_json_file", required=False, type=non_existing_path)
+    parser.add_argument("--out_cmake_file", required=False, type=non_existing_path)
+    arguments = parser.parse_args()
+    config = KConfig(arguments.kconfig_model_file, arguments.kconfig_config_file).config
+
+    HeaderWriter(arguments.out_header_file).write(config)
+    if arguments.out_json_file:
+        JsonWriter(arguments.out_json_file).write(config)
+    if arguments.out_cmake_file:
+        CMakeWriter(arguments.out_cmake_file).write(config)
+
+
+if __name__ == "__main__":
+    main()
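
A minimal sketch (not part of the package) of the KConfig API that main() above wires to the command line; all file paths are placeholders:

    # Illustrative only: load a feature model plus a user selection and
    # generate the header, JSON and CMake artifacts from it.
    from pathlib import Path

    from spl_core.kconfig.kconfig import CMakeWriter, HeaderWriter, JsonWriter, KConfig

    config = KConfig(Path("KConfig"), Path("variants/FLV1/SYS1/config.txt")).config
    HeaderWriter(Path("gen/autoconf.h")).write(config)
    JsonWriter(Path("gen/features.json")).write(config)
    CMakeWriter(Path("gen/features.cmake")).write(config)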

spl_core/project_creator/__init__.py
File without changes

spl_core/project_creator/creator.py
ADDED
@@ -0,0 +1,133 @@
+import argparse
+import logging
+import shutil
+from pathlib import Path
+from typing import Any, Dict, List
+
+from cookiecutter.main import cookiecutter
+
+from spl_core.common.path import existing_path
+from spl_core.project_creator.variant import Variant
+from spl_core.project_creator.workspace_artifacts import WorkspaceArtifacts
+
+
+class Creator:
+    def __init__(self, project_name: str, out_dir: Path):
+        self.logger = logging.getLogger(__name__)
+        self.project_name = project_name
+        self.out_dir = out_dir.absolute()
+        self.project_root_dir = self.out_dir.joinpath(self.project_name)
+        self.project_artifacts = WorkspaceArtifacts(self.project_root_dir)
+
+    @classmethod
+    def from_folder(cls, project_dir: Path) -> "Creator":
+        return cls(project_dir.name, project_dir.parent)
+
+    @staticmethod
+    def create_project_description(name: str, variants: List[Variant], touch_only_variants_data: bool = False) -> Dict[str, Any]:
+        project_description = {"name": name, "variants": {}, "touch_components": not touch_only_variants_data, "touch_tools": not touch_only_variants_data}
+        variants.sort()
+        for index, variant in enumerate(variants):
+            project_description["variants"] = {str(index): vars(variant) for index, variant in enumerate(variants)}
+        return project_description
+
+    @property
+    def project_template_path(self) -> Path:
+        return Path(__file__).parent.joinpath("templates/project")
+
+    @property
+    def variant_template_path(self) -> Path:
+        return Path(__file__).parent.joinpath("templates/variant")
+
+    def materialize(self, variants: List[Variant]) -> Path:
+        result_path = self.materialize_workspace(variants)
+        self.materialize_variants(variants)
+        self.logger.info(f"Project created under: {result_path}")
+        return Path(result_path)
+
+    def materialize_workspace(self, variants: List[Variant], touch_only_variants_data: bool = False) -> Path:
+        project_description = self.create_project_description(self.project_name, variants, touch_only_variants_data)
+        result_path = cookiecutter(str(self.project_template_path), output_dir=f"{self.out_dir}", no_input=True, extra_context=project_description, overwrite_if_exists=True)
+        return result_path
+
+    def materialize_variants(self, variants: List[Variant]) -> None:
+        for variant in variants:
+            self.materialize_variant(vars(variant), self.project_artifacts.variants_dir)
+
+    def materialize_variant(self, variant_description: Dict[str, Any], out_dir: Path) -> Path:
+        result_path = cookiecutter(str(self.variant_template_path), output_dir=f"{out_dir}", no_input=True, extra_context=variant_description, overwrite_if_exists=True)
+        self.logger.info(f"Variant created under: {result_path}")
+        return Path(result_path)
+
+    def add_variants(self, variants: List[Variant]) -> None:
+        existing_variants = self.collect_project_variants()
+        new_variants = [variant for variant in variants if variant not in existing_variants]
+        if len(new_variants):
+            self.materialize_workspace(new_variants + existing_variants, touch_only_variants_data=True)
+            self.materialize_variants(variants)
+            if len(new_variants) != len(variants):
+                self.logger.warning(f"Skip adding variants" f" ({', '.join([str(v) for v in variants if v not in new_variants])})" f" because they already exist in {self.project_root_dir}.")
+        else:
+            self.logger.warning(f"Nothing to add. All the provided variants" f" ({', '.join([str(v) for v in variants])}) already exist in {self.project_root_dir}.")
+
+    def delete_variants(self, variants: List[Variant]) -> None:
+        existing_variants = self.collect_project_variants()
+        variants_to_be_deleted = [variant for variant in variants if variant in existing_variants]
+        if len(variants_to_be_deleted):
+            remaining_variants = list(set(existing_variants) - set(variants_to_be_deleted))
+            self.materialize_workspace(remaining_variants, touch_only_variants_data=True)
+            for variant in variants_to_be_deleted:
+                self.delete_variant_dir(variant)
+            if len(variants_to_be_deleted) != len(variants):
+                self.logger.warning(f"Skip deleting variants" f" ({', '.join([str(v) for v in variants if v not in variants_to_be_deleted])})" f" because they do not exist in {self.project_root_dir}.")
+        else:
+            self.logger.warning(f"Nothing to delete. None of the provided variants" f" ({', '.join([str(v) for v in variants])}) exists in {self.project_root_dir}.")
+
+    def delete_variant_dir(self, variant: Variant) -> None:
+        variant_dir = self.project_artifacts.variants_dir.joinpath(f"{variant}")
+        if variant_dir.exists():
+            shutil.rmtree(variant_dir)
+
+    def collect_project_variants(self) -> List[Variant]:
+        variants = []
+        variant_dirs = self.project_artifacts.variants_dir.glob("*/*/")
+        for variant_dir in variant_dirs:
+            variants.append(Variant(variant_dir.parent.name, variant_dir.name))
+        return variants
+
+
+def main(command_arguments: Any = None) -> None:
+    arguments = parse_arguments(command_arguments)
+    if arguments.command_scope == "workspace":
+        Creator(arguments.name, arguments.out_dir).materialize(arguments.variant)
+    else:  # scope is variant
+        creator = Creator.from_folder(arguments.workspace_dir)
+        if arguments.add:
+            creator.add_variants(arguments.add)
+        else:
+            creator.delete_variants(arguments.delete)
+
+
+def parse_arguments(command_arguments: Any = None) -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="Workspace creator")
+    subparsers = parser.add_subparsers(dest="command_scope")
+
+    parser_workspace = subparsers.add_parser("workspace", help="Create a workspace")
+    parser_workspace.add_argument("--name", required=True, type=str, help="Workspace name. A directory with this name will be created in the <out_dir>.")
+    parser_workspace.add_argument("--variant", required=True, action="append", type=Variant.from_string, help="Variant name as <flavor>/<subsystem>. E.g. FLV1/SYS1. " "This option can be used multiple times.")
+    parser_workspace.add_argument("--out_dir", required=True, type=existing_path, help="Target directory where the workspace folder will be created.")
+
+    parser_variant = subparsers.add_parser("variant", help="Add/delete variant to existing workspace")
+    command_group = parser_variant.add_mutually_exclusive_group(required=True)
+    command_group.add_argument(
+        "--add", action="append", type=Variant.from_string, metavar="VARIANT", help="Add a variant to a workspace. Variant name as <flavor>/<subsystem>." " E.g. FLV1/SYS1. This option can be used multiple times."
+    )
+    command_group.add_argument(
+        "--delete", action="append", type=Variant.from_string, metavar="VARIANT", help="Delete a variant from a workspace. Variant name as <flavor>/<subsystem>." " E.g. FLV1/SYS1. This option can be used multiple times."
+    )
+    parser_variant.add_argument("--workspace_dir", required=True, type=existing_path, help="Workspace root directory.")
+    return parser.parse_args(command_arguments)
+
+
+if __name__ == "__main__":
+    main()
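
A minimal sketch (not part of the package) of driving the Creator API programmatically instead of via the CLI; project name, output directory and variant names are placeholders:

    # Illustrative only: create a workspace with two variants, then add a third one.
    from pathlib import Path

    from spl_core.project_creator.creator import Creator
    from spl_core.project_creator.variant import Variant

    creator = Creator("MyProject", Path("."))
    creator.materialize([Variant.from_string("FLV1/SYS1"), Variant.from_string("FLV1/SYS2")])
    creator.add_variants([Variant.from_string("FLV2/SYS1")])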

spl_core/project_creator/templates/project/{{cookiecutter.name}}/.gitignore
ADDED
@@ -0,0 +1,33 @@
+# Binary output dir, not recommended to push binary results to Git.
+/build
+
+# Output directory of test results
+/test/output
+
+# What: Python: byte-compiled / optimized / DLL files
+# Why: automatically created by Python during script execution
+__pycache__/
+/.pytest_cache
+*.py[cod]
+*$py.class
+
+# Any log directory
+**/[Ll][Oo][Gg]/*
+
+# Any backup file of text editors
+*.bak
+
+# Any log file
+*.log
+
+# Backup files of KConfig
+*.old
+
+# Generated IDEA project files
+.idea
+
+# bootstrap files are downloaded during installation
+/.bootstrap
+
+# Drawio Backup Files
+*.drawio.bkp

spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/cmake-kits.json
ADDED
@@ -0,0 +1,11 @@
+[
+    {
+        "name": "prod",
+        "environmentSetupScript": "${workspaceFolder}/.venv/Scripts/activate"
+    },
+    {
+        "name": "test",
+        "toolchainFile": "tools/toolchains/gcc/toolchain.cmake",
+        "environmentSetupScript": "${workspaceFolder}/.venv/Scripts/activate"
+    }
+]

spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/cmake-variants.json
ADDED
@@ -0,0 +1,18 @@
+{
+    "variant": {
+        "choices": {
+            {%- for variant in cookiecutter.variants.values() -%}
+            "{{ variant["flavor"] }}/{{ variant["subsystem"] }}": {
+                "buildType": "{{ variant["flavor"] }}_{{ variant["subsystem"] }}",
+                "long": "select to build variant '{{ variant["flavor"] }}/{{ variant["subsystem"] }}'",
+                "settings": {
+                    "FLAVOR": "{{ variant["flavor"] }}",
+                    "SUBSYSTEM": "{{ variant["subsystem"] }}"
+                },
+                "short": "{{ variant["flavor"] }}/{{ variant["subsystem"] }}"
+            }{{ ", " if not loop.last else "" }}
+            {%- endfor -%}
+        },
+        "default": "{{ cookiecutter.variants["0"]["flavor"] }}/{{ cookiecutter.variants["0"]["subsystem"] }}"
+    }
+}

spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/extensions.json
ADDED
@@ -0,0 +1,18 @@
+{
+    "recommendations": [
+        "mhutchie.git-graph",
+        "ms-vscode.cmake-tools",
+        "twxs.cmake",
+        "ms-vscode.cpptools",
+        "ms-vscode.cpptools-themes",
+        "visualstudioexptteam.intellicode-api-usage-examples",
+        "visualstudioexptteam.vscodeintellicode",
+        "haugerbr.project-tasks",
+        "avengineers.show-link-list",
+        "ms-python.vscode-pylance",
+        "ms-python.python",
+        "donjayamanne.python-environment-manager",
+        "felipecaputo.git-project-manager",
+        "ms-vscode.powershell"
+    ]
+}

spl_core/project_creator/templates/project/{{cookiecutter.name}}/.vscode/launch.json
ADDED
@@ -0,0 +1,37 @@
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "(gdb) Launch",
+            "type": "cppdbg",
+            "request": "launch",
+            "program": "${command:cmake.launchTargetPath}",
+            "args": [],
+            "stopAtEntry": true,
+            "cwd": "${command:cmake.launchTargetDirectory}",
+            "environment": [],
+            "externalConsole": false,
+            "MIMode": "gdb",
+            "miDebuggerPath": "gdb",
+            "setupCommands": [
+                {
+                    "description": "Enable pretty-printing for gdb",
+                    "text": "-enable-pretty-printing",
+                    "ignoreFailures": true
+                }
+            ]
+        },
+        {
+            "name": "Python: Debug Tests",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "purpose": [
+                "debug-test"
+            ],
+            "console": "integratedTerminal",
+            "env": {},
+            "justMyCode": false
+        }
+    ]
+}