fameio 3.4.0__py3-none-any.whl → 3.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. fameio/__init__.py +2 -1
  2. fameio/cli/__init__.py +2 -0
  3. fameio/cli/convert_results.py +8 -0
  4. fameio/cli/make_config.py +2 -0
  5. fameio/cli/options.py +4 -0
  6. fameio/cli/parser.py +17 -1
  7. fameio/cli/reformat.py +2 -0
  8. fameio/input/__init__.py +2 -1
  9. fameio/input/loader/__init__.py +1 -0
  10. fameio/input/loader/controller.py +12 -0
  11. fameio/input/loader/loader.py +2 -0
  12. fameio/input/metadata.py +2 -0
  13. fameio/input/resolver.py +2 -0
  14. fameio/input/scenario/__init__.py +2 -0
  15. fameio/input/scenario/agent.py +2 -0
  16. fameio/input/scenario/attribute.py +2 -0
  17. fameio/input/scenario/contract.py +2 -0
  18. fameio/input/scenario/exception.py +2 -0
  19. fameio/input/scenario/fameiofactory.py +2 -0
  20. fameio/input/scenario/generalproperties.py +2 -0
  21. fameio/input/scenario/scenario.py +2 -0
  22. fameio/input/scenario/stringset.py +2 -0
  23. fameio/input/schema/__init__.py +1 -0
  24. fameio/input/schema/agenttype.py +2 -0
  25. fameio/input/schema/attribute.py +2 -0
  26. fameio/input/schema/java_packages.py +2 -0
  27. fameio/input/schema/schema.py +8 -3
  28. fameio/input/validator.py +2 -0
  29. fameio/input/writer.py +16 -0
  30. fameio/logs.py +2 -1
  31. fameio/output/__init__.py +1 -0
  32. fameio/output/agent_type.py +14 -0
  33. fameio/output/conversion.py +2 -0
  34. fameio/output/csv_writer.py +4 -2
  35. fameio/output/data_transformer.py +2 -0
  36. fameio/output/execution_dao.py +5 -0
  37. fameio/output/input_dao.py +5 -3
  38. fameio/output/metadata/__init__.py +10 -0
  39. fameio/output/metadata/compiler.py +75 -0
  40. fameio/output/metadata/json_writer.py +37 -0
  41. fameio/output/metadata/locator.py +242 -0
  42. fameio/output/metadata/oeo_template.py +93 -0
  43. fameio/output/metadata/template_reader.py +65 -0
  44. fameio/output/output_dao.py +2 -0
  45. fameio/output/reader.py +1 -0
  46. fameio/output/yaml_writer.py +3 -1
  47. fameio/scripts/__init__.py +4 -0
  48. fameio/scripts/convert_results.py +35 -2
  49. fameio/scripts/exception.py +1 -0
  50. fameio/series.py +14 -6
  51. fameio/time.py +1 -0
  52. fameio/tools.py +1 -0
  53. fameio-3.5.0.dist-info/LICENSES/CC-BY-ND-4.0.txt +392 -0
  54. fameio-3.5.0.dist-info/METADATA +99 -0
  55. fameio-3.5.0.dist-info/RECORD +67 -0
  56. fameio-3.4.0.dist-info/METADATA +0 -990
  57. fameio-3.4.0.dist-info/RECORD +0 -60
  58. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/LICENSE.txt +0 -0
  59. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  60. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  61. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  62. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/WHEEL +0 -0
  63. {fameio-3.4.0.dist-info → fameio-3.5.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,75 @@
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ """Compiling metadata files accompanying the output CSV files."""
5
+
6
+ from __future__ import annotations
7
+
8
+ import ast
9
+ from typing import Final, Any
10
+
11
+ from fameprotobuf.input_file_pb2 import InputData
12
+
13
+ from fameio.input.metadata import Metadata
14
+ from fameio.logs import log_error, log
15
+ from fameio.output.metadata import MetadataCompilationError
16
+ from fameio.output.metadata.locator import Locator
17
+ from fameio.tools import keys_to_lower
18
+
19
+
20
class MetadataCompiler(Locator):
    """Compiles metadata for output files based on ExecutionData and InputData."""

    ENTRY_SCHEMA: Final[str] = "Schema".lower()
    ENTRY_SCENARIO: Final[str] = "Scenario".lower()
    ENTRY_EXECUTION: Final[str] = "Execution".lower()
    SEPARATOR: Final[str] = ":"

    _ERR_MALFORMED_DICT_STRING = "Input data reading failed: Malformed string representation of metadata dictionaries."
    _INFO_NOT_FOUND = "Could not find element at '{}' in input section of provided file."

    def __init__(
        self, execution_data: dict[str, Any], input_data: InputData, agent_columns: dict[str, list[str]]
    ) -> None:
        """Initialises a new MetadataCompiler.

        Args:
            execution_data: to read execution metadata from
            input_data: to read schema and scenario metadata from
            agent_columns: agents and their output columns

        Raises:
            MetadataCompilationError: if schema or scenario metadata strings are malformed, logged with level "ERROR"
        """
        super().__init__(agent_columns)
        try:
            # schema and scenario metadata arrive as string representations of Python dicts;
            # `ast.literal_eval` parses them without executing arbitrary code
            self._data: dict[str, dict] = {
                self.ENTRY_SCHEMA: ast.literal_eval(input_data.schema),
                self.ENTRY_SCENARIO: {
                    Metadata.KEY_METADATA: ast.literal_eval(input_data.metadata) if input_data.metadata else {}
                },
                self.ENTRY_EXECUTION: execution_data,
            }
        except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError) as e:
            raise log_error(MetadataCompilationError(self._ERR_MALFORMED_DICT_STRING)) from e

    def _replace(self, data_identifier: str) -> Any | None:
        """Returns data stored at the address encoded in `data_identifier`, or None if it cannot be found.

        Args:
            data_identifier: placeholder string, e.g. "<schema:metadata:name>"; the first and last characters
                are assumed to be the placeholder delimiters and are stripped before resolution

        Returns:
            the addressed data (of any type), or None if the data source is unknown or the address is missing
        """
        identifier = data_identifier[1:-1]
        address = identifier.split(self.SEPARATOR)
        data_source = address[0].lower()
        try:
            if data_source in self._data:
                return self._get_from(self._data[data_source], address[1:])
        except KeyError:
            # missing addresses are expected for optional metadata: log at INFO level only
            log().info(self._INFO_NOT_FOUND.format(self.SEPARATOR.join(address)))
            return None
        return None

    @staticmethod
    def _get_from(base: dict, address: list[str]) -> Any:
        """Returns element in `base` at given `address`.

        Raises:
            KeyError: if element cannot be found; error not logged
        """
        element = base
        for entry in address:
            try:
                element = keys_to_lower(element)[entry.lower()]
            except (AttributeError, TypeError) as e:
                # an intermediate element is not a dictionary, so the address cannot be resolved;
                # convert to KeyError so callers see the documented "not found" signal instead of a crash
                raise KeyError(entry) from e
        return element
@@ -0,0 +1,37 @@
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ """Writing of data to JSON files."""
5
+
6
+ import json
7
+ from pathlib import Path
8
+
9
+ from fameio.logs import log, log_error
10
+ from fameio.output import OutputError
11
+
12
+ METADATA_FILE_NAME = "metadata.json"
13
+
14
+ _ERR_OPEN_FILE = "Could not open file for writing: '{}'"
15
+ _INFO_DESTINATION = "Saving JSON to file to {}"
16
+
17
+
18
class JsonWriterError(OutputError):
    """An error occurred during writing a JSON file; raised by :func:`data_to_json_file`."""
20
+
21
+
22
def data_to_json_file(data: dict, base_path: Path) -> None:
    """Save the given data to a JSON file at given path.

    Args:
        data: to be saved to JSON file
        base_path: directory in which the JSON file will be created

    Raises:
        JsonWriterError: if file could not be opened or written, logged with level "ERROR"
    """
    log().info(_INFO_DESTINATION.format(base_path))
    # build the full target path once, so errors can report the file that actually failed
    # rather than only its parent directory
    file_path = Path(base_path, METADATA_FILE_NAME)
    try:
        with open(file_path, "w", encoding="utf-8") as f:
            json.dump(data, f)
    except OSError as e:
        raise log_error(JsonWriterError(_ERR_OPEN_FILE.format(file_path))) from e
@@ -0,0 +1,242 @@
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ """Locating replacement strings in complex template dictionaries."""
5
+
6
+ from __future__ import annotations
7
+
8
+ import re
9
+ from abc import ABC, abstractmethod
10
+ from typing import Any, Final
11
+
12
+ from fameio.logs import log_error
13
+ from fameio.output.metadata import MetadataCompilationError
14
+ from fameio.tools import keys_to_lower
15
+
16
+
17
class Locator(ABC):
    """Locates replacement strings within a given template and organises their replacement."""

    # keys of the top-level template sections (compared lower-case)
    KEY_BASE: Final[str] = "base".lower()
    KEY_PER_AGENT: Final[str] = "perAgent".lower()
    KEY_PER_COLUMN: Final[str] = "perColumn".lower()

    # delimiters of data placeholders, e.g. "<schema:metadata:name>"
    PLACEHOLDER_START: Final[str] = "<"
    PLACEHOLDER_END: Final[str] = ">"
    _PLACEHOLDER_PATTERN: Final[re.Pattern] = re.compile(PLACEHOLDER_START + ".*?" + PLACEHOLDER_END)

    KEY_AGENT: Final[str] = "Agent".lower()
    KEY_COLUMN: Final[str] = "Column".lower()

    # markers that request iteration, e.g. "{{perAgent}}" / "{{perColumn}}"
    ITERATION_START: Final[str] = "{{"
    ITERATION_END: Final[str] = "}}"
    _PER_AGENT_PATTERN: Final[str] = ITERATION_START + KEY_PER_AGENT + ITERATION_END
    _PER_COLUMN_PATTERN: Final[str] = ITERATION_START + KEY_PER_COLUMN + ITERATION_END

    # "(agent)" / "(column)" iterable markers; parentheses must be regex-escaped
    _ESC = "\\"
    ITERABLE_START: Final[str] = "("
    ITERABLE_END: Final[str] = ")"
    _AGENT_PATTERN: Final[re.Pattern] = re.compile(
        f"{_ESC}{ITERABLE_START}{KEY_AGENT}{_ESC}{ITERABLE_END}", re.IGNORECASE
    )
    _COLUMN_PATTERN: Final[re.Pattern] = re.compile(
        f"{_ESC}{ITERABLE_START}{KEY_COLUMN}{_ESC}{ITERABLE_END}", re.IGNORECASE
    )

    _ERR_MUST_BE_DICT = "Element '{}' in metadata template must be a dictionary."

    def __init__(self, agent_columns: dict[str, list[str]]) -> None:
        """Initialise a new Locator.

        Args:
            agent_columns: agents and their output columns
        """
        self._agent_columns: dict = agent_columns
        # sub-templates extracted from the template passed to `locate_and_replace`
        self._per_agent_template: dict = {}
        self._per_column_template: dict = {}
        # agent/column currently iterated over; used to resolve "(agent)" / "(column)" markers
        self._current_agent: str = ""
        self._current_column: str = ""

    def locate_and_replace(self, template: dict) -> dict:
        """Returns copy of given `template` with filled-in metadata to each placeholder - if available.

        Args:
            template: dict with placeholders to be filled

        Returns:
            template with filled in placeholders (if any)

        Raises:
            MetadataCompilationError: if template is ill-formatted, logged with level "ERROR"
        """
        template = keys_to_lower(template)
        per_agent_template = template.get(self.KEY_PER_AGENT, {})
        self._ensure_is_dict(per_agent_template, self.KEY_PER_AGENT)
        self._per_agent_template = per_agent_template

        per_column_template = template.get(self.KEY_PER_COLUMN, {})
        self._ensure_is_dict(per_column_template, self.KEY_PER_COLUMN)
        self._per_column_template = per_column_template

        # reset iteration state before processing the base section
        self._current_column = ""
        self._current_agent = ""

        return self._fill_dict(template.get(self.KEY_BASE, {}))

    def _ensure_is_dict(self, item: Any, key: str) -> None:
        """Raises an error if given `item` is not a dictionary.

        Args:
            item: to be tested if it is a dictionary
            key: to be specified in error message

        Raises:
            MetadataCompilationError: if given `item` is not a dictionary, logged with level "ERROR"
        """
        if not isinstance(item, dict):
            raise log_error(MetadataCompilationError(self._ERR_MUST_BE_DICT.format(key)))

    def _fill_dict(self, template: dict) -> dict:
        """Fills in metadata to each value of the given dictionary `template`.

        Args:
            template: dict with placeholders to be filled

        Returns:
            template with filled in placeholders (if any)
        """
        result: dict = {}
        for key, value in template.items():
            # recurse into nested containers; scalars are handled by `_fill_value`
            if isinstance(value, dict):
                result[key] = self._fill_dict(value)
            elif isinstance(value, list):
                result[key] = self._fill_list(value)
            else:
                result[key] = self._fill_value(value)
        return result

    def _fill_list(self, values: list) -> list:
        """Fills in metadata to each value of the given `values` list.

        Args:
            values: list of elements with potential placeholders to be filled

        Returns:
            values, potentially with filled-in placeholders
        """
        result: list = []
        for value in values:
            if isinstance(value, dict):
                result.append(self._fill_dict(value))
            elif isinstance(value, list):
                result.append(self._fill_list(value))
            else:
                filled_value = self._fill_value(value)
                # a replacement may itself be a list (e.g. a per-agent/per-column expansion):
                # splice its elements into the surrounding list instead of nesting it
                if isinstance(filled_value, list):
                    result.extend(filled_value)
                else:
                    result.append(filled_value)
        return result

    def _fill_value(self, value: str | int | float) -> Any:
        """Checks for placeholders, iterables, or iteration template markers to replace.

        Returns replacement or original value if replacement cannot be found.

        Args:
            value: to replace or return if no replacement is needed / available

        Returns:
            replacement or original value if replacement is not needed / available
        """
        # numeric values can never contain placeholders
        if isinstance(value, (int, float)):
            return value
        # substitute "(agent)" / "(column)" first, so they can appear inside "<...>" addresses
        if self._is_agent_replacement(value):
            value = self._replace_agent(value)
        if self._is_column_replacement(value):
            value = self._replace_column(value)
        if self._has_basic_placeholder(value):
            return self._replace_all(value)
        if self._is_per_column(value):
            return self._per_column()
        if self._is_per_agent(value):
            return self._per_agent()
        return value

    def _has_basic_placeholder(self, value: str) -> bool:
        """Returns true if given value contains placeholder symbols."""
        return self.PLACEHOLDER_START in value and self.PLACEHOLDER_END in value

    def _replace_all(self, value: str) -> str | Any:
        """Replaces all placeholders in given `value` and returns the string with all replacements.

        If the whole string is a single placeholder, replace it completely with the replacement content - of any type.
        Otherwise, replace the placeholders within the string with the replacement content.
        """
        matches = re.findall(self._PLACEHOLDER_PATTERN, value)
        if len(matches) == 1:
            if re.fullmatch(self._PLACEHOLDER_PATTERN, value):
                # whole-string placeholder: keep the replacement's original type (dict, list, number, ...)
                return self._replace(value)
        # embedded placeholder(s): replacements are stringified into the surrounding text
        return re.sub(self._PLACEHOLDER_PATTERN, self._callback_wrapper, value)

    @abstractmethod
    def _replace(self, data_identifier: str) -> Any | None:
        """Returns replacement for given data identifier, or None if replacement cannot be found.

        Args:
            data_identifier: locator for the replacement data

        Returns:
            either the found replacement data (of any type), or None if no data is found
        """

    def _callback_wrapper(self, match: re.Match) -> str:
        """Extracts replacement string from given match and calls for its replacement."""
        return str(self._replace(match.group(0)))

    def _is_agent_replacement(self, value: str) -> bool:
        """Returns true if given value contains the agent iterable marker '(agent)'."""
        return re.search(self._AGENT_PATTERN, value) is not None

    def _replace_agent(self, value) -> str:
        """Replace all occurrences of agent iterable pattern in given string."""
        return re.sub(self._AGENT_PATTERN, str(self._current_agent), value)

    def _is_column_replacement(self, value: str) -> bool:
        """Returns true if given value contains the column iterable marker '(column)'."""
        return re.search(self._COLUMN_PATTERN, value) is not None

    def _replace_column(self, value) -> str:
        """Replace all occurrences of column iterable pattern in given string."""
        return re.sub(self._COLUMN_PATTERN, str(self._current_column), value)

    def _is_per_column(self, value: str) -> bool:
        """Returns true if given value is the 'perColumn' section."""
        return value.strip().lower() == self._PER_COLUMN_PATTERN

    def _per_column(self) -> list:
        """Returns list of metadata for all columns of current agent.

        Returns empty list if the per-column template is missing, or the current agent has no columns.
        """
        column_metadata = []
        if len(self._per_column_template) > 0:
            for column in self._agent_columns[self._current_agent]:
                template = self._per_column_template
                # track current column so "(column)" markers resolve during `_fill_dict`
                self._current_column = column
                column_metadata.append(self._fill_dict(template))
        return column_metadata

    def _is_per_agent(self, value: str) -> bool:
        """Returns true if given value is the 'perAgent' section."""
        return value.strip().lower() == self._PER_AGENT_PATTERN

    def _per_agent(self) -> list:
        """Returns list of metadata for all agents.

        Returns empty list if either the per-agent template is missing, or no agents are registered for output.
        """
        agent_metadata = []
        if len(self._per_agent_template) > 0:
            for agent in self._agent_columns.keys():
                template = self._per_agent_template
                # track current agent so "(agent)" markers and `_per_column` resolve during `_fill_dict`
                self._current_agent = agent
                agent_metadata.append(self._fill_dict(template))
        return agent_metadata
@@ -0,0 +1,93 @@
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+
5
# Built-in metadata template following the OEMetadata standard of the Open Energy Platform.
# Placeholder syntax (resolved by the metadata Locator/Compiler):
#   "<source:path:to:element>"       - replaced with data from schema, scenario, or execution metadata
#   "(agent)" / "(column)"           - replaced with the currently iterated agent type / output column
#   "{{perAgent}}" / "{{perColumn}}" - expanded to one entry per agent type / per output column
OEO_TEMPLATE = {
    "base": {
        "@context": "https://raw.githubusercontent.com/OpenEnergyPlatform/oemetadata/production/oemetadata/latest/context.json",
        "name": "<schema:metadata:name>_result_metadata",
        "title": "Results for <schema:metadata:name> run <scenario:metadata:RunId>.",
        "description": """Input compiled with FAME-Io <execution:InputCompilation:SoftwareVersions:FameIo>.
Simulated by FAME-Core <execution:ModelRun:SoftwareVersions:FameCore>.
Extracted by FAME-Io <execution:OutputExtraction:SoftwareVersions:FameIo>.""",
        "resources": ["{{perAgent}}"],  # expanded to one resource block per agent type (see "perAgent" below)
        "metaMetadata": {
            "metadataVersion": "OEMetadata-2.0.4",
            "metadataLicense": {
                "name": "CC0-1.0",
                "title": "Creative Commons Zero v1.0 Universal",
                "path": "https://creativecommons.org/publicdomain/zero/1.0",
            },
        },
    },
    # template applied once per agent type that produced output
    "perAgent": {
        "name": "(agent).*.csv",
        "topics": [],
        "title": "Results for (agent)",
        "path": "./(agent).*.csv",
        "description": "Simulation outputs by simulated time of agent type '(agent)'",
        "languages": ["en-GB"],
        "subject": "<schema:AgentTypes:(agent):metadata:subject>",
        "keywords": "<schema:AgentTypes:(agent):metadata:keywords>",
        "publicationDate": "<scenario:metadata:output:publicationDate>",
        "embargoPeriod": "<scenario:metadata:output:embargoPeriod>",
        "context": {
            "title": "<schema:metadata:title>",
            "homepage": "<schema:metadata:homepage>",
            "documentation": "<schema:metadata:documentation>",
            "sourceCode": "<schema:metadata:sourceCode>",
            "publisher": "<schema:metadata:publisher>",
            "publisherLogo": "<schema:metadata:publisherLogo>",
            "contact": "<schema:metadata:contact>",
            "fundingAgency": "<schema:metadata:fundingAgency>",
            "fundingAgencyLogo": "<schema:metadata:fundingAgencyLogo>",
            "grantNo": "<schema:metadata:grantNo>",
        },
        "spatial": {},
        "temporal": {},
        "sources": [],
        "licenses": "<scenario:metadata:output:licenses>",
        "contributors": "<schema:metadata:contributors>",
        "type": "table",
        "format": "CSV",
        "encoding": "UTF-8",
        "schema": {
            "fields": [
                # the two fixed index columns present in every agent output file
                {
                    "name": "AgentId",
                    "description": "Unique ID of the agent in the simulation",
                    "type": "integer",
                    "nullable": False,
                    "unit": "n/a",
                    "isAbout": [],
                    "valueReference": [],
                },
                {
                    "name": "TimeStep",
                    "description": "Simulated time these values are associated with",
                    "type": "integer or time stamp",
                    "nullable": False,
                    "unit": "n/a",
                    "isAbout": [
                        {"name": "TimeStamp", "@id": "https://openenergyplatform.org/ontology/oeo/OEO_00140043"}
                    ],
                    "valueReference": [],
                },
                "{{perColumn}}",  # expanded to one field block per output column (see "perColumn" below)
            ],
            "primaryKey": ["AgentId", "TimeStep"],
            "foreignKeys": [],
            "dialect": {"delimiter": ";", "decimalSeparator": "."},
            "review": {},
        },
    },
    # template applied once per output column of the current agent type
    "perColumn": {
        "name": "(column)",
        "description": "<schema:AgentTypes:(agent):outputs:(column):metadata:description>",
        "type": "decimal",
        "nullable": True,
        "unit": "<schema:AgentTypes:(agent):outputs:(column):metadata:unit>",
        "isAbout": "<schema:AgentTypes:(agent):outputs:(column):metadata:isAbout>",
        "valueReference": "<schema:AgentTypes:(agent):outputs:(column):metadata:valueReference>",
    },
}
@@ -0,0 +1,65 @@
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ """Reading of metadata templates in YAML or JSON format."""
5
+
6
+ from __future__ import annotations
7
+
8
+ from pathlib import Path
9
+
10
+ from fameio.input import YamlLoaderError
11
+ from fameio.input.loader import ALLOWED_SUFFIXES as YAML_SUFFIXES, load_yaml
12
+ from fameio.logs import log_error
13
+ from fameio.output.metadata import MetadataCompilationError
14
+
15
+ JSON_SUFFIX: str = ".json"
16
+ ENCODING = "UTF-8"
17
+
18
+ _ERR_UNKNOWN_ENDING = "Template file ending '{}' corresponds neither to a JSON or YAML file."
19
+ _ERR_READING_FILE = "Could not read template file: '{}'"
20
+
21
+
22
def read_template_file(file: Path) -> dict:
    """Reads and returns metadata template `file` encoded in UTF-8.

    Args:
        file: to be read

    Returns:
        dictionary content of the file

    Raises:
        MetadataCompilationError: if template file has unknown type, could not be opened/read, logged with level "ERROR"
    """
    suffix = file.suffix.lower()
    # reject anything that is neither YAML nor JSON before attempting to parse
    if not (_has_yaml_ending(suffix) or _has_json_ending(suffix)):
        raise log_error(MetadataCompilationError(_ERR_UNKNOWN_ENDING.format(suffix)))
    # JSON is a subset of YAML, so both file types are parsed by the YAML reader
    return _read_yaml(file)
38
+
39
+
40
def _has_yaml_ending(file_ending: str) -> bool:
    """Tell whether given `file_ending` is one of the accepted YAML suffixes."""
    return any(file_ending == suffix for suffix in YAML_SUFFIXES)
43
+
44
+
45
def _has_json_ending(file_ending: str) -> bool:
    """Tell whether given `file_ending` denotes a JSON file."""
    return JSON_SUFFIX == file_ending
48
+
49
+
50
def _read_yaml(file: Path) -> dict:
    """Returns content of the provided yaml file.

    Args:
        file: to be opened and read

    Returns:
        file content as dict

    Raises:
        MetadataCompilationError: if file could not be opened or read, logged with level "ERROR"
    """
    try:
        content = load_yaml(file, encoding=ENCODING)
    except YamlLoaderError as e:
        # wrap loader failures in the metadata-specific error type for callers
        raise log_error(MetadataCompilationError(_ERR_READING_FILE.format(file))) from e
    return content
@@ -1,6 +1,8 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ """Accessing output content of protobuf messages."""
5
+
4
6
  from __future__ import annotations
5
7
 
6
8
  from typing import Iterable
fameio/output/reader.py CHANGED
@@ -1,6 +1,7 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ """Reading of protobuf files."""
4
5
  from __future__ import annotations
5
6
 
6
7
  import struct
@@ -1,6 +1,8 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ """Writing of data to YAML files."""
5
+
4
6
  from pathlib import Path
5
7
 
6
8
  import yaml
@@ -8,7 +10,7 @@ import yaml
8
10
  from fameio.logs import log, log_error
9
11
  from fameio.output import OutputError
10
12
 
11
- _ERR_OPEN_FILE = "Could not open file for reading: '{}'"
13
+ _ERR_OPEN_FILE = "Could not open file for writing: '{}'"
12
14
 
13
15
  _INFO_DESTINATION = "Saving scenario to file at {}"
14
16
 
@@ -1,4 +1,5 @@
1
1
  #!/usr/bin/env python
2
+ """Main scripts callable from command line."""
2
3
  import sys
3
4
 
4
5
  from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
@@ -13,6 +14,7 @@ from fameio.cli.reformat import handle_args as handle_reformat_args
13
14
 
14
15
  # noinspection PyPep8Naming
15
16
  def makeFameRunConfig():
17
+ """Compiles FAME simulation input files in protobuf format."""
16
18
  cli_config = handle_make_config_args(sys.argv[1:])
17
19
  try:
18
20
  make_config(cli_config)
@@ -22,6 +24,7 @@ def makeFameRunConfig():
22
24
 
23
25
  # noinspection PyPep8Naming
24
26
  def convertFameResults():
27
+ """Converts a protobuf file to human-readable outputs."""
25
28
  cli_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
26
29
  try:
27
30
  convert_results(cli_config)
@@ -31,6 +34,7 @@ def convertFameResults():
31
34
 
32
35
  # noinspection PyPep8Naming
33
36
  def reformatTimeSeries():
37
+ """Reformats a timeseries file to speed up its future usage in scenarios."""
34
38
  cli_config = handle_reformat_args(sys.argv[1:])
35
39
  try:
36
40
  reformat(cli_config)
@@ -19,6 +19,10 @@ from fameio.output.csv_writer import CsvWriter
19
19
  from fameio.output.data_transformer import DataTransformer, INDEX
20
20
  from fameio.output.execution_dao import ExecutionDao
21
21
  from fameio.output.input_dao import InputDao
22
+ from fameio.output.metadata.compiler import MetadataCompiler
23
+ from fameio.output.metadata.json_writer import data_to_json_file
24
+ from fameio.output.metadata.oeo_template import OEO_TEMPLATE
25
+ from fameio.output.metadata.template_reader import read_template_file
22
26
  from fameio.output.output_dao import OutputDAO
23
27
  from fameio.output.reader import Reader
24
28
  from fameio.output.yaml_writer import data_to_yaml_file
@@ -73,7 +77,7 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
73
77
  execution_dao = ExecutionDao()
74
78
  while data_storages := reader.read():
75
79
  execution_dao.store_execution_metadata(data_storages)
76
- if config[Options.INPUT_RECOVERY]:
80
+ if config[Options.INPUT_RECOVERY] or config[Options.METADATA]:
77
81
  input_dao.store_inputs(data_storages)
78
82
  output = OutputDAO(data_storages, agent_type_log)
79
83
  for agent_name in output.get_sorted_agents_to_extract():
@@ -95,6 +99,8 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
95
99
  log().warning(_WARN_OUTPUT_SUPPRESSED.format(agent_type_log.get_agents_with_output()))
96
100
  else:
97
101
  log().warning(_WARN_OUTPUT_MISSING)
102
+ elif config[Options.METADATA]:
103
+ write_metadata(config, input_dao, execution_dao, agent_type_log)
98
104
  log().info("Data conversion completed.")
99
105
 
100
106
 
@@ -143,8 +149,35 @@ def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer:
143
149
  output_writer.write_to_files(agent_name, parsed_data)
144
150
 
145
151
 
152
def write_metadata(
    config: dict[Options, Any], input_dao: InputDao, execution_dao: ExecutionDao, agent_type_log: AgentTypeLog
) -> None:
    """Reads metadata templates, fills in available metadata, and writes output to a JSON file.

    Args:
        config: to determine metadata template and output path
        input_dao: contains input data
        execution_dao: contains execution metadata
        agent_type_log: contains log about which agent output was created

    Raises:
        OutputError: in case templates could not be read or filled-in, or JSON writing failed, logged with level "ERROR"
    """
    compiler = MetadataCompiler(
        input_data=input_dao.get_input_data(),
        execution_data=execution_dao.get_metadata_dict(),
        agent_columns=agent_type_log.get_agent_columns(),
    )

    # fall back to the built-in OEO template when no template file is configured
    template_file = config[Options.METADATA_TEMPLATE]
    template = OEO_TEMPLATE if template_file is None else read_template_file(template_file)
    metadata = compiler.locate_and_replace(template)
    # default to the current working directory when no output path is configured
    base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else Path(".")
    data_to_json_file(metadata, base_path)
177
+
178
+
146
179
  def run(config: dict[Options, Any] | None = None) -> None:
147
- """Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s).
180
+ """Reads configured file in protobuf format and extracts its content to CSV and YAML file(s).
148
181
 
149
182
  Args:
150
183
  config: script configuration options
@@ -1,6 +1,7 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ """Defines exceptions on the script level."""
4
5
 
5
6
 
6
7
  class ScriptError(Exception):