powertrain-build 1.13.1__py3-none-any.whl → 1.13.3.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- powertrain_build/__init__.py +40 -40
- powertrain_build/__main__.py +6 -6
- powertrain_build/a2l.py +582 -582
- powertrain_build/a2l_merge.py +650 -650
- powertrain_build/a2l_templates.py +717 -717
- powertrain_build/build.py +985 -985
- powertrain_build/build_defs.py +309 -309
- powertrain_build/build_proj_config.py +690 -690
- powertrain_build/check_interface.py +575 -575
- powertrain_build/cli.py +141 -141
- powertrain_build/config.py +542 -542
- powertrain_build/core.py +395 -395
- powertrain_build/core_dummy.py +343 -343
- powertrain_build/create_conversion_table.py +73 -73
- powertrain_build/dids.py +916 -916
- powertrain_build/dummy.py +157 -157
- powertrain_build/dummy_spm.py +252 -252
- powertrain_build/environmentcheck.py +52 -52
- powertrain_build/ext_dbg.py +255 -255
- powertrain_build/ext_var.py +327 -327
- powertrain_build/feature_configs.py +301 -301
- powertrain_build/gen_allsysteminfo.py +227 -227
- powertrain_build/gen_label_split.py +449 -449
- powertrain_build/handcode_replacer.py +124 -124
- powertrain_build/html_report.py +133 -133
- powertrain_build/interface/__init__.py +4 -4
- powertrain_build/interface/application.py +511 -511
- powertrain_build/interface/base.py +500 -500
- powertrain_build/interface/csp_api.py +490 -490
- powertrain_build/interface/device_proxy.py +677 -677
- powertrain_build/interface/ems.py +67 -67
- powertrain_build/interface/export_global_vars.py +121 -121
- powertrain_build/interface/generate_adapters.py +132 -132
- powertrain_build/interface/generate_hi_interface.py +87 -87
- powertrain_build/interface/generate_service.py +69 -69
- powertrain_build/interface/generate_wrappers.py +147 -147
- powertrain_build/interface/generation_utils.py +142 -142
- powertrain_build/interface/hal.py +194 -194
- powertrain_build/interface/model_yaml_verification.py +348 -348
- powertrain_build/interface/service.py +296 -296
- powertrain_build/interface/simulink.py +249 -249
- powertrain_build/interface/update_call_sources.py +180 -180
- powertrain_build/interface/update_model_yaml.py +186 -186
- powertrain_build/interface/zone_controller.py +362 -362
- powertrain_build/lib/__init__.py +4 -4
- powertrain_build/lib/helper_functions.py +127 -127
- powertrain_build/lib/logger.py +55 -55
- powertrain_build/matlab_scripts/CodeGen/BuildAutomationPyBuild.m +78 -78
- powertrain_build/matlab_scripts/CodeGen/Generate_A2L.m +154 -154
- powertrain_build/matlab_scripts/CodeGen/generateTLUnit.m +239 -239
- powertrain_build/matlab_scripts/CodeGen/getAsilClassification.m +28 -28
- powertrain_build/matlab_scripts/CodeGen/modelConfiguredForTL.m +28 -28
- powertrain_build/matlab_scripts/CodeGen/moveDefOutports.m +88 -88
- powertrain_build/matlab_scripts/CodeGen/parseCalMeasData.m +410 -410
- powertrain_build/matlab_scripts/CodeGen/parseCoreIdentifiers.m +139 -139
- powertrain_build/matlab_scripts/CodeGen/parseDIDs.m +141 -141
- powertrain_build/matlab_scripts/CodeGen/parseInPorts.m +106 -106
- powertrain_build/matlab_scripts/CodeGen/parseIncludeConfigs.m +25 -25
- powertrain_build/matlab_scripts/CodeGen/parseModelInfo.m +38 -38
- powertrain_build/matlab_scripts/CodeGen/parseNVM.m +81 -81
- powertrain_build/matlab_scripts/CodeGen/parseOutPorts.m +120 -120
- powertrain_build/matlab_scripts/CodeGen/parsePreProcBlks.m +23 -23
- powertrain_build/matlab_scripts/CodeGen/struct2JSON.m +128 -128
- powertrain_build/matlab_scripts/CodeGen/updateCodeSwConfig.m +31 -31
- powertrain_build/matlab_scripts/Init_PyBuild.m +91 -91
- powertrain_build/matlab_scripts/__init__.py +2 -2
- powertrain_build/matlab_scripts/helperFunctions/Get_Full_Name.m +46 -46
- powertrain_build/matlab_scripts/helperFunctions/Get_SrcLines.m +12 -12
- powertrain_build/matlab_scripts/helperFunctions/Init_Models.m +78 -78
- powertrain_build/matlab_scripts/helperFunctions/Init_Projects.m +67 -67
- powertrain_build/matlab_scripts/helperFunctions/Read_Units.m +34 -34
- powertrain_build/matlab_scripts/helperFunctions/SetProjectTimeSamples.m +26 -26
- powertrain_build/matlab_scripts/helperFunctions/Strip_Suffix.m +16 -16
- powertrain_build/matlab_scripts/helperFunctions/followLink.m +118 -118
- powertrain_build/matlab_scripts/helperFunctions/getCodeSwitches.m +50 -50
- powertrain_build/matlab_scripts/helperFunctions/getConsumerBlocks.m +30 -30
- powertrain_build/matlab_scripts/helperFunctions/getDefBlock.m +39 -39
- powertrain_build/matlab_scripts/helperFunctions/getDefOutport.m +58 -58
- powertrain_build/matlab_scripts/helperFunctions/getDstBlocks.m +19 -19
- powertrain_build/matlab_scripts/helperFunctions/getDstLines.m +13 -13
- powertrain_build/matlab_scripts/helperFunctions/getInterfaceSignals.m +37 -37
- powertrain_build/matlab_scripts/helperFunctions/getName.m +37 -37
- powertrain_build/matlab_scripts/helperFunctions/getPath.m +6 -6
- powertrain_build/matlab_scripts/helperFunctions/getProperValue.m +21 -21
- powertrain_build/matlab_scripts/helperFunctions/getSrcBlocks.m +19 -19
- powertrain_build/matlab_scripts/helperFunctions/getSrcLines.m +13 -13
- powertrain_build/matlab_scripts/helperFunctions/loadLibraries.m +10 -10
- powertrain_build/matlab_scripts/helperFunctions/loadjson.m +6 -6
- powertrain_build/matlab_scripts/helperFunctions/modifyEnumStructField.m +21 -21
- powertrain_build/matlab_scripts/helperFunctions/removeConfigDuplicates.m +31 -31
- powertrain_build/matlab_scripts/helperFunctions/sortSystemByClass.m +26 -26
- powertrain_build/matlab_scripts/helperFunctions/tl_getfast.m +89 -89
- powertrain_build/matlab_scripts/helperFunctions/topLevelSystem.m +20 -20
- powertrain_build/matlab_scripts/helperFunctions/updateModels.m +131 -131
- powertrain_build/memory_section.py +224 -224
- powertrain_build/nvm_def.py +729 -729
- powertrain_build/problem_logger.py +86 -86
- powertrain_build/pt_matlab.py +430 -430
- powertrain_build/pt_win32.py +144 -144
- powertrain_build/replace_compu_tab_ref.py +105 -105
- powertrain_build/rte_dummy.py +254 -254
- powertrain_build/sched_funcs.py +209 -207
- powertrain_build/signal.py +7 -7
- powertrain_build/signal_if_html_rep.py +221 -221
- powertrain_build/signal_if_html_rep_all.py +302 -302
- powertrain_build/signal_incons_html_rep.py +180 -180
- powertrain_build/signal_incons_html_rep_all.py +366 -366
- powertrain_build/signal_incons_html_rep_base.py +168 -168
- powertrain_build/signal_inconsistency_check.py +641 -641
- powertrain_build/signal_interfaces.py +864 -864
- powertrain_build/templates/Index_SigCheck_All.html +22 -22
- powertrain_build/templates/Index_SigIf_All.html +19 -19
- powertrain_build/types.py +218 -218
- powertrain_build/unit_configs.py +419 -419
- powertrain_build/user_defined_types.py +660 -660
- powertrain_build/versioncheck.py +66 -66
- powertrain_build/wrapper.py +512 -512
- powertrain_build/xlrd_csv.py +87 -87
- powertrain_build/zone_controller/__init__.py +4 -4
- powertrain_build/zone_controller/calibration.py +176 -176
- powertrain_build/zone_controller/composition_yaml.py +880 -878
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/METADATA +100 -100
- powertrain_build-1.13.3.dev3.dist-info/RECORD +130 -0
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/WHEEL +1 -1
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/licenses/LICENSE +202 -202
- powertrain_build-1.13.3.dev3.dist-info/pbr.json +1 -0
- powertrain_build-1.13.1.dist-info/RECORD +0 -130
- powertrain_build-1.13.1.dist-info/pbr.json +0 -1
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/entry_points.txt +0 -0
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/licenses/AUTHORS +0 -0
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/licenses/NOTICE +0 -0
- {powertrain_build-1.13.1.dist-info → powertrain_build-1.13.3.dev3.dist-info}/top_level.txt +0 -0
powertrain_build/lib/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-# Copyright 2024 Volvo Car Corporation
-# Licensed under Apache 2.0.
-
-"""powertrain_build.lib."""
+# Copyright 2024 Volvo Car Corporation
+# Licensed under Apache 2.0.
+
+"""powertrain_build.lib."""
powertrain_build/lib/helper_functions.py
CHANGED
@@ -1,127 +1,127 @@
-# Copyright 2024 Volvo Car Corporation
-# Licensed under Apache 2.0.
-
-"""Module for various helper functions."""
-import json
-import collections
-from pathlib import Path
-from subprocess import getoutput
-
-
-def get_repo_root():
-    """ Return absolute path to repository where script is executed, regardless
-    of the current script's location.
-
-    Returns:
-        path (str): Absolute, canonical path to the top-level repository.
-        if not a git repository, returns current working dir
-
-    """
-    try:
-        root = Path(getoutput('git rev-parse --show-toplevel')).resolve()
-    except (FileNotFoundError, OSError):
-        root = Path.cwd().resolve()
-    return str(root)
-
-
-def create_dir(path: Path):
-    """If the directory for a given directory path does not exist, create it.
-    Including parent directories.
-
-    Args:
-        path (Path): Path to directory.
-    Returns:
-        path (Path): Path to directory.
-    """
-    if not path.is_dir():
-        path.mkdir(parents=True)
-    return path
-
-
-def merge_dicts(dict1, dict2, handle_collision=lambda a, b: a, merge_recursively=False):
-    """Merge two dicts.
-
-    Args:
-        dict1 (dict): dict to merge
-        dict2 (dict): dict to merge
-        handle_collision (function(arg1, arg2)): function to resolve key collisions,
-            default keeps original value in dict1
-        merge_recursively (bool): if set to True it merges nested dicts recursively with handle_collision
-            resolving collisions with non-dict types.
-    Returns:
-        dict: the result of the two merged dicts
-    """
-    result = dict1.copy()
-    for key, value in dict2.items():
-        if key not in result:
-            result.update({key: value})
-        elif isinstance(result[key], dict) and \
-                isinstance(value, dict) and \
-                merge_recursively:
-            result[key] = merge_dicts(result[key], value, handle_collision, merge_recursively)
-        else:
-            result[key] = handle_collision(result[key], value)
-    return result
-
-
-def deep_dict_update(base, add):
-    """Recursively update a dict that may contain sub-dicts.
-
-    Args:
-        base (dict): The base dict will be updated with the contents
-            of the add dict
-        add (dict): This dict will be added to the base dict
-
-    Returns:
-        dict: the updated base dict is returned
-
-    """
-    for key, value in add.items():
-        if key not in base:
-            base[key] = value
-        elif isinstance(value, dict):
-            deep_dict_update(base[key], value)
-    return base
-
-
-def deep_json_update(json_file, dict_to_merge):
-    """ Recursively update a json file with the content of a dict.
-
-    Args:
-        json_file (path): json file.
-        dict_to_merge (dict): Dictionary that will be merged into json file, overwriting values and adding keys.
-    """
-    with open(json_file, 'r', encoding="utf-8") as fle:
-        json_dict = json.load(fle)
-    merged_dict = merge_dicts(
-        json_dict,
-        dict_to_merge,
-        handle_collision=lambda a, b: b,
-        merge_recursively=True
-    )
-    with open(json_file, 'w', encoding="utf-8") as fle:
-        json.dump(merged_dict, fle, indent=2)
-
-
-def recursive_default_dict():
-    """Returns recursively defined default dict. This allows people to insert
-    arbitrarily complex nested data into the dict without getting KeyErrors.
-
-    Returns:
-        defaultdict(self): A new defaultdict instance, recursively defined.
-    """
-    return collections.defaultdict(recursive_default_dict)
-
-
-def to_normal_dict(weird_dict):
-    """Converts nested dict to normal, suitable for YAML/JSON dumping.
-
-    Args:
-        weird_dict (dict): Any dict-like item that can be converted to a dict.
-    Returns:
-        dict(nested): An identical nested dict structure, but using real dicts.
-    """
-    for key, value in weird_dict.items():
-        if isinstance(value, dict):
-            weird_dict[key] = to_normal_dict(value)
-    return dict(weird_dict)
+# Copyright 2024 Volvo Car Corporation
+# Licensed under Apache 2.0.
+
+"""Module for various helper functions."""
+import json
+import collections
+from pathlib import Path
+from subprocess import getoutput
+
+
+def get_repo_root():
+    """ Return absolute path to repository where script is executed, regardless
+    of the current script's location.
+
+    Returns:
+        path (str): Absolute, canonical path to the top-level repository.
+        if not a git repository, returns current working dir
+
+    """
+    try:
+        root = Path(getoutput('git rev-parse --show-toplevel')).resolve()
+    except (FileNotFoundError, OSError):
+        root = Path.cwd().resolve()
+    return str(root)
+
+
+def create_dir(path: Path):
+    """If the directory for a given directory path does not exist, create it.
+    Including parent directories.
+
+    Args:
+        path (Path): Path to directory.
+    Returns:
+        path (Path): Path to directory.
+    """
+    if not path.is_dir():
+        path.mkdir(parents=True)
+    return path
+
+
+def merge_dicts(dict1, dict2, handle_collision=lambda a, b: a, merge_recursively=False):
+    """Merge two dicts.
+
+    Args:
+        dict1 (dict): dict to merge
+        dict2 (dict): dict to merge
+        handle_collision (function(arg1, arg2)): function to resolve key collisions,
+            default keeps original value in dict1
+        merge_recursively (bool): if set to True it merges nested dicts recursively with handle_collision
+            resolving collisions with non-dict types.
+    Returns:
+        dict: the result of the two merged dicts
+    """
+    result = dict1.copy()
+    for key, value in dict2.items():
+        if key not in result:
+            result.update({key: value})
+        elif isinstance(result[key], dict) and \
+                isinstance(value, dict) and \
+                merge_recursively:
+            result[key] = merge_dicts(result[key], value, handle_collision, merge_recursively)
+        else:
+            result[key] = handle_collision(result[key], value)
+    return result
+
+
+def deep_dict_update(base, add):
+    """Recursively update a dict that may contain sub-dicts.
+
+    Args:
+        base (dict): The base dict will be updated with the contents
+            of the add dict
+        add (dict): This dict will be added to the base dict
+
+    Returns:
+        dict: the updated base dict is returned
+
+    """
+    for key, value in add.items():
+        if key not in base:
+            base[key] = value
+        elif isinstance(value, dict):
+            deep_dict_update(base[key], value)
+    return base
+
+
+def deep_json_update(json_file, dict_to_merge):
+    """ Recursively update a json file with the content of a dict.
+
+    Args:
+        json_file (path): json file.
+        dict_to_merge (dict): Dictionary that will be merged into json file, overwriting values and adding keys.
+    """
+    with open(json_file, 'r', encoding="utf-8") as fle:
+        json_dict = json.load(fle)
+    merged_dict = merge_dicts(
+        json_dict,
+        dict_to_merge,
+        handle_collision=lambda a, b: b,
+        merge_recursively=True
+    )
+    with open(json_file, 'w', encoding="utf-8") as fle:
+        json.dump(merged_dict, fle, indent=2)
+
+
+def recursive_default_dict():
+    """Returns recursively defined default dict. This allows people to insert
+    arbitrarily complex nested data into the dict without getting KeyErrors.
+
+    Returns:
+        defaultdict(self): A new defaultdict instance, recursively defined.
+    """
+    return collections.defaultdict(recursive_default_dict)
+
+
+def to_normal_dict(weird_dict):
+    """Converts nested dict to normal, suitable for YAML/JSON dumping.
+
+    Args:
+        weird_dict (dict): Any dict-like item that can be converted to a dict.
+    Returns:
+        dict(nested): An identical nested dict structure, but using real dicts.
+    """
+    for key, value in weird_dict.items():
+        if isinstance(value, dict):
+            weird_dict[key] = to_normal_dict(value)
+    return dict(weird_dict)
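For orientation, the following is a minimal usage sketch of the helper functions in the diff above. It is illustrative only: it assumes the wheel is installed so these functions are importable from powertrain_build.lib.helper_functions, and the dictionary contents and the 'ExampleSignal' name are made up for the example.

from powertrain_build.lib.helper_functions import merge_dicts, recursive_default_dict, to_normal_dict

base = {'a': 1, 'nested': {'x': 1}}
extra = {'a': 2, 'nested': {'y': 2}}

# Default collision handler keeps the value from the first dict.
merge_dicts(base, extra, merge_recursively=True)
# -> {'a': 1, 'nested': {'x': 1, 'y': 2}}

# Preferring the second value mirrors the handler that deep_json_update passes internally.
merge_dicts(base, extra, handle_collision=lambda a, b: b, merge_recursively=True)
# -> {'a': 2, 'nested': {'x': 1, 'y': 2}}

# Arbitrarily deep inserts without KeyErrors, then plain dicts for JSON/YAML dumping.
tree = recursive_default_dict()
tree['signals']['ExampleSignal']['unit'] = 'rpm'
to_normal_dict(tree)
# -> {'signals': {'ExampleSignal': {'unit': 'rpm'}}}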
powertrain_build/lib/logger.py
CHANGED
@@ -1,55 +1,55 @@
-# Copyright 2024 Volvo Car Corporation
-# Licensed under Apache 2.0.
-
-"""Module for logging."""
-import os
-import sys
-import logging
-
-LEVEL_NOTSET = 'NOTSET'
-LEVEL_DEBUG = 'DEBUG'
-LEVEL_INFO = 'INFO'
-LEVEL_WARNING = 'WARNING'
-LEVEL_ERROR = 'ERROR'
-LEVEL_CRITICAL = 'CRITICAL'
-
-LEVELS = {
-    LEVEL_NOTSET: logging.NOTSET,
-    LEVEL_DEBUG: logging.DEBUG,
-    LEVEL_INFO: logging.INFO,
-    LEVEL_WARNING: logging.WARNING,
-    LEVEL_ERROR: logging.ERROR,
-    LEVEL_CRITICAL: logging.CRITICAL
-}
-
-
-def parse_log_level(log_level_name):
-    """Convert textual log_level_name to numerical ones defined in logging module."""
-    level = log_level_name.upper()
-    if level not in LEVELS:
-        print(f'Log level "{log_level_name}" invalid, valid list: {", ".join(LEVELS.keys())}', file=sys.stderr)
-        level = LEVEL_DEBUG
-    return LEVELS[level]
-
-
-def create_logger(name, handler=None, log_format=None):
-    """Create a logger.
-
-    If the handler already have a log format, it will be replaced.
-
-    Args:
-        name (str): Name of the logger
-        handler (obj): Handler for the logger. Default used if not supplied.
-        log_format (str): Format for the handler. Default used if not supplied.'
-    Returns:
-        logger (obj): A logger with a handler and log format
-    """
-    new_logger = logging.getLogger(name)
-    new_logger.setLevel(parse_log_level(os.getenv('LOG_LEVEL', LEVEL_INFO)))
-    if handler is None:
-        handler = logging.StreamHandler(sys.stdout)
-    if log_format is None:
-        log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
-    handler.setFormatter(logging.Formatter(log_format))
-    new_logger.addHandler(handler)
-    return new_logger
+# Copyright 2024 Volvo Car Corporation
+# Licensed under Apache 2.0.
+
+"""Module for logging."""
+import os
+import sys
+import logging
+
+LEVEL_NOTSET = 'NOTSET'
+LEVEL_DEBUG = 'DEBUG'
+LEVEL_INFO = 'INFO'
+LEVEL_WARNING = 'WARNING'
+LEVEL_ERROR = 'ERROR'
+LEVEL_CRITICAL = 'CRITICAL'
+
+LEVELS = {
+    LEVEL_NOTSET: logging.NOTSET,
+    LEVEL_DEBUG: logging.DEBUG,
+    LEVEL_INFO: logging.INFO,
+    LEVEL_WARNING: logging.WARNING,
+    LEVEL_ERROR: logging.ERROR,
+    LEVEL_CRITICAL: logging.CRITICAL
+}
+
+
+def parse_log_level(log_level_name):
+    """Convert textual log_level_name to numerical ones defined in logging module."""
+    level = log_level_name.upper()
+    if level not in LEVELS:
+        print(f'Log level "{log_level_name}" invalid, valid list: {", ".join(LEVELS.keys())}', file=sys.stderr)
+        level = LEVEL_DEBUG
+    return LEVELS[level]
+
+
+def create_logger(name, handler=None, log_format=None):
+    """Create a logger.
+
+    If the handler already have a log format, it will be replaced.
+
+    Args:
+        name (str): Name of the logger
+        handler (obj): Handler for the logger. Default used if not supplied.
+        log_format (str): Format for the handler. Default used if not supplied.'
+    Returns:
+        logger (obj): A logger with a handler and log format
+    """
+    new_logger = logging.getLogger(name)
+    new_logger.setLevel(parse_log_level(os.getenv('LOG_LEVEL', LEVEL_INFO)))
+    if handler is None:
+        handler = logging.StreamHandler(sys.stdout)
+    if log_format is None:
+        log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+    handler.setFormatter(logging.Formatter(log_format))
+    new_logger.addHandler(handler)
+    return new_logger
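Similarly, a brief sketch of how the logger module above is typically used. This is illustrative only: the import path powertrain_build.lib.logger is assumed from the file layout, and the logger name is arbitrary.

import logging
from powertrain_build.lib.logger import create_logger, parse_log_level

# The logger level comes from the LOG_LEVEL environment variable (default INFO);
# unrecognised names fall back to DEBUG after a message on stderr.
assert parse_log_level('warning') == logging.WARNING

log = create_logger('powertrain_build.example')  # stdout StreamHandler, default format
log.info('hello from powertrain_build')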
powertrain_build/matlab_scripts/CodeGen/BuildAutomationPyBuild.m
CHANGED
@@ -1,78 +1,78 @@
-% Copyright 2024 Volvo Car Corporation
-% Licensed under Apache 2.0.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%
-% Author: Henrik Wahlqvist
-% Date: 31-01-2019
-% Purpose: This is for automatically generating c-files from MATLAB models.
-% This program can also be used as daily PyBuild code generation.
-%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-function BuildAutomationPyBuild(mode, exitOnFailure, modelList)
-    try
-        addpath(genpath([pwd '/Projects']));
-        addpath(genpath([pwd '/matlab-scripts']));
-
-        initFile = 'Init_PyBuild(false);';
-        disp(['Running init file: ' initFile])
-        evalin('base', initFile);
-
-        %Update all models unless a list is provided:
-        if ~exist('modelList', 'var')
-            disp('Updating all models...');
-            modelList = gatherAllModels();
-        end
-
-        updateModels(mode, pwd, modelList);
-        disp('Done.');
-
-    catch err
-        disp(getReport(err))
-        if exitOnFailure
-            quit force;
-        end
-    end
-    if exitOnFailure
-        exit;
-    end
-    bdclose Vcc_Lib;
-end
-
-function models = gatherAllModels()
-    % Function for gathering all models in the repo.
-    startdir = pwd;
-    models = {};
-    modelsFolder = [startdir '/Models/'];
-    env_ssp = getenv('SSP');
-    if isempty(env_ssp)
-        disp('ALL models')
-        ssps = dir([modelsFolder '*']);
-    else
-        ssp_dir = [modelsFolder env_ssp '*']
-        disp(['All models in ' ssp_dir])
-        ssps = dir(ssp_dir);
-    end
-
-    for i=1:length(ssps)
-        if ~ssps(i).isdir
-            continue;
-        end
-        ssp = ssps(i).name;
-        currSspFolder = [modelsFolder ssp '/'];
-        cd(currSspFolder)
-        modelsInSsp = dir('Vc*');
-        for j=1:length(modelsInSsp)
-            % Make sure it is a directory
-            if ~modelsInSsp(j).isdir
-                continue;
-            end
-            model = modelsInSsp(j).name;
-            % Make sure the directory contains an .mdl file
-            if isfile([model '/' model '.mdl'])
-                models = [models ['Models/' ssp '/' model '/' model '.mdl']];
-            end
-        end
-        cd(startdir);
-    end
-end
+% Copyright 2024 Volvo Car Corporation
+% Licensed under Apache 2.0.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% Author: Henrik Wahlqvist
+% Date: 31-01-2019
+% Purpose: This is for automatically generating c-files from MATLAB models.
+% This program can also be used as daily PyBuild code generation.
+%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+function BuildAutomationPyBuild(mode, exitOnFailure, modelList)
+    try
+        addpath(genpath([pwd '/Projects']));
+        addpath(genpath([pwd '/matlab-scripts']));
+
+        initFile = 'Init_PyBuild(false);';
+        disp(['Running init file: ' initFile])
+        evalin('base', initFile);
+
+        %Update all models unless a list is provided:
+        if ~exist('modelList', 'var')
+            disp('Updating all models...');
+            modelList = gatherAllModels();
+        end
+
+        updateModels(mode, pwd, modelList);
+        disp('Done.');
+
+    catch err
+        disp(getReport(err))
+        if exitOnFailure
+            quit force;
+        end
+    end
+    if exitOnFailure
+        exit;
+    end
+    bdclose Vcc_Lib;
+end
+
+function models = gatherAllModels()
+    % Function for gathering all models in the repo.
+    startdir = pwd;
+    models = {};
+    modelsFolder = [startdir '/Models/'];
+    env_ssp = getenv('SSP');
+    if isempty(env_ssp)
+        disp('ALL models')
+        ssps = dir([modelsFolder '*']);
+    else
+        ssp_dir = [modelsFolder env_ssp '*']
+        disp(['All models in ' ssp_dir])
+        ssps = dir(ssp_dir);
+    end
+
+    for i=1:length(ssps)
+        if ~ssps(i).isdir
+            continue;
+        end
+        ssp = ssps(i).name;
+        currSspFolder = [modelsFolder ssp '/'];
+        cd(currSspFolder)
+        modelsInSsp = dir('Vc*');
+        for j=1:length(modelsInSsp)
+            % Make sure it is a directory
+            if ~modelsInSsp(j).isdir
+                continue;
+            end
+            model = modelsInSsp(j).name;
+            % Make sure the directory contains an .mdl file
+            if isfile([model '/' model '.mdl'])
+                models = [models ['Models/' ssp '/' model '/' model '.mdl']];
+            end
+        end
+        cd(startdir);
+    end
+end