fameio 3.4.0__py3-none-any.whl → 3.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fameio/__init__.py +2 -1
- fameio/cli/__init__.py +2 -0
- fameio/cli/convert_results.py +8 -0
- fameio/cli/make_config.py +2 -0
- fameio/cli/options.py +4 -0
- fameio/cli/parser.py +17 -1
- fameio/cli/reformat.py +2 -0
- fameio/input/__init__.py +2 -1
- fameio/input/loader/__init__.py +1 -0
- fameio/input/loader/controller.py +20 -6
- fameio/input/loader/loader.py +2 -0
- fameio/input/metadata.py +2 -0
- fameio/input/resolver.py +2 -0
- fameio/input/scenario/__init__.py +2 -0
- fameio/input/scenario/agent.py +2 -0
- fameio/input/scenario/attribute.py +2 -0
- fameio/input/scenario/contract.py +2 -0
- fameio/input/scenario/exception.py +2 -0
- fameio/input/scenario/fameiofactory.py +2 -0
- fameio/input/scenario/generalproperties.py +2 -0
- fameio/input/scenario/scenario.py +5 -3
- fameio/input/scenario/stringset.py +2 -0
- fameio/input/schema/__init__.py +1 -0
- fameio/input/schema/agenttype.py +2 -0
- fameio/input/schema/attribute.py +2 -0
- fameio/input/schema/java_packages.py +2 -0
- fameio/input/schema/schema.py +8 -3
- fameio/input/validator.py +2 -0
- fameio/input/writer.py +16 -0
- fameio/logs.py +2 -1
- fameio/output/__init__.py +1 -0
- fameio/output/agent_type.py +24 -12
- fameio/output/conversion.py +2 -0
- fameio/output/csv_writer.py +10 -36
- fameio/output/data_transformer.py +2 -0
- fameio/output/execution_dao.py +5 -0
- fameio/output/files.py +55 -0
- fameio/output/input_dao.py +59 -15
- fameio/output/metadata/__init__.py +10 -0
- fameio/output/metadata/compiler.py +75 -0
- fameio/output/metadata/json_writer.py +36 -0
- fameio/output/metadata/locator.py +242 -0
- fameio/output/metadata/oeo_template.py +93 -0
- fameio/output/metadata/template_reader.py +65 -0
- fameio/output/output_dao.py +2 -0
- fameio/output/reader.py +1 -0
- fameio/output/yaml_writer.py +3 -1
- fameio/scripts/REUSE.toml +6 -0
- fameio/scripts/__init__.py +4 -0
- fameio/scripts/convert_results.py +46 -12
- fameio/scripts/exception.py +1 -0
- fameio/scripts/reformat.py +25 -1
- fameio/series.py +16 -7
- fameio/time.py +1 -0
- fameio/tools.py +1 -0
- fameio-3.5.1.dist-info/LICENSES/CC-BY-ND-4.0.txt +392 -0
- fameio-3.5.1.dist-info/METADATA +99 -0
- fameio-3.5.1.dist-info/RECORD +65 -0
- fameio/scripts/__init__.py.license +0 -3
- fameio/scripts/convert_results.py.license +0 -3
- fameio/scripts/make_config.py.license +0 -3
- fameio/scripts/reformat.py.license +0 -3
- fameio-3.4.0.dist-info/METADATA +0 -990
- fameio-3.4.0.dist-info/RECORD +0 -60
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSE.txt +0 -0
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/Apache-2.0.txt +0 -0
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/CC0-1.0.txt +0 -0
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/WHEEL +0 -0
- {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,242 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
"""Locating replacement strings in complex template dictionaries."""
|
5
|
+
|
6
|
+
from __future__ import annotations
|
7
|
+
|
8
|
+
import re
|
9
|
+
from abc import ABC, abstractmethod
|
10
|
+
from typing import Any, Final
|
11
|
+
|
12
|
+
from fameio.logs import log_error
|
13
|
+
from fameio.output.metadata import MetadataCompilationError
|
14
|
+
from fameio.tools import keys_to_lower
|
15
|
+
|
16
|
+
|
17
|
+
class Locator(ABC):
    """Locates replacement strings within a given template and organises their replacement."""

    KEY_BASE: Final[str] = "base".lower()
    KEY_PER_AGENT: Final[str] = "perAgent".lower()
    KEY_PER_COLUMN: Final[str] = "perColumn".lower()

    # placeholders look like "<path:to:data>"
    PLACEHOLDER_START: Final[str] = "<"
    PLACEHOLDER_END: Final[str] = ">"
    _PLACEHOLDER_PATTERN: Final[re.Pattern] = re.compile(PLACEHOLDER_START + ".*?" + PLACEHOLDER_END)

    KEY_AGENT: Final[str] = "Agent".lower()
    KEY_COLUMN: Final[str] = "Column".lower()

    # iteration markers look like "{{peragent}}" / "{{percolumn}}" (compared lowercase)
    ITERATION_START: Final[str] = "{{"
    ITERATION_END: Final[str] = "}}"
    _PER_AGENT_PATTERN: Final[str] = ITERATION_START + KEY_PER_AGENT + ITERATION_END
    _PER_COLUMN_PATTERN: Final[str] = ITERATION_START + KEY_PER_COLUMN + ITERATION_END

    # iterables look like "(agent)" / "(column)"; parentheses must be escaped for regex usage
    _ESC = "\\"
    ITERABLE_START: Final[str] = "("
    ITERABLE_END: Final[str] = ")"
    _AGENT_PATTERN: Final[re.Pattern] = re.compile(
        f"{_ESC}{ITERABLE_START}{KEY_AGENT}{_ESC}{ITERABLE_END}", re.IGNORECASE
    )
    _COLUMN_PATTERN: Final[re.Pattern] = re.compile(
        f"{_ESC}{ITERABLE_START}{KEY_COLUMN}{_ESC}{ITERABLE_END}", re.IGNORECASE
    )

    _ERR_MUST_BE_DICT = "Element '{}' in metadata template must be a dictionary."

    def __init__(self, agent_columns: dict[str, list[str]]) -> None:
        """Initialise a new Locator.

        Args:
            agent_columns: agents and their output columns
        """
        self._agent_columns: dict = agent_columns
        self._per_agent_template: dict = {}
        self._per_column_template: dict = {}
        # track which agent/column is currently iterated, so "(agent)"/"(column)" can be substituted
        self._current_agent: str = ""
        self._current_column: str = ""

    def locate_and_replace(self, template: dict) -> dict:
        """Returns copy of given `template` with filled-in metadata to each placeholder - if available.

        Args:
            template: dict with placeholders to be filled

        Returns:
            template with filled in placeholders (if any)

        Raises:
            MetadataCompilationError: if template is ill-formatted, logged with level "ERROR"
        """
        template = keys_to_lower(template)
        per_agent_template = template.get(self.KEY_PER_AGENT, {})
        self._ensure_is_dict(per_agent_template, self.KEY_PER_AGENT)
        self._per_agent_template = per_agent_template

        per_column_template = template.get(self.KEY_PER_COLUMN, {})
        self._ensure_is_dict(per_column_template, self.KEY_PER_COLUMN)
        self._per_column_template = per_column_template

        # reset iteration state: filling always starts outside any per-agent/per-column section
        self._current_column = ""
        self._current_agent = ""

        return self._fill_dict(template.get(self.KEY_BASE, {}))

    def _ensure_is_dict(self, item: Any, key: str) -> None:
        """Raises an error if given `item` is not a dictionary.

        Args:
            item: to be tested if it is a dictionary
            key: to be specified in error message

        Raises:
            MetadataCompilationError: if given `item` is not a dictionary, logged with level "ERROR"
        """
        if not isinstance(item, dict):
            raise log_error(MetadataCompilationError(self._ERR_MUST_BE_DICT.format(key)))

    def _fill_dict(self, template: dict) -> dict:
        """Fills in metadata to each value of the given dictionary `template`.

        Args:
            template: dict with placeholders to be filled

        Returns:
            template with filled in placeholders (if any)
        """
        result: dict = {}
        for key, value in template.items():
            if isinstance(value, dict):
                result[key] = self._fill_dict(value)
            elif isinstance(value, list):
                result[key] = self._fill_list(value)
            else:
                result[key] = self._fill_value(value)
        return result

    def _fill_list(self, values: list) -> list:
        """Fills in metadata to each value of the given `values` list.

        Args:
            values: list of elements with potential placeholders to be filled

        Returns:
            values, potentially with filled-in placeholders
        """
        result: list = []
        for value in values:
            if isinstance(value, dict):
                result.append(self._fill_dict(value))
            elif isinstance(value, list):
                result.append(self._fill_list(value))
            else:
                filled_value = self._fill_value(value)
                # an iteration marker (e.g. "{{perAgent}}") expands to a list: splice it into the parent list
                if isinstance(filled_value, list):
                    result.extend(filled_value)
                else:
                    result.append(filled_value)
        return result

    def _fill_value(self, value: Any) -> Any:
        """Checks for placeholders, iterables, or iteration template markers to replace.

        Returns replacement or original value if replacement cannot be found.

        Args:
            value: to replace or return if no replacement is needed / available

        Returns:
            replacement or original value if replacement is not needed / available
        """
        # only strings can contain placeholders; other scalars (int, float, bool, None) pass through
        # unchanged - previously, e.g., a None template value raised a TypeError in the `in` checks below
        if not isinstance(value, str):
            return value
        if self._is_agent_replacement(value):
            value = self._replace_agent(value)
        if self._is_column_replacement(value):
            value = self._replace_column(value)
        if self._has_basic_placeholder(value):
            return self._replace_all(value)
        if self._is_per_column(value):
            return self._per_column()
        if self._is_per_agent(value):
            return self._per_agent()
        return value

    def _has_basic_placeholder(self, value: str) -> bool:
        """Returns true if given value contains placeholder symbols."""
        return self.PLACEHOLDER_START in value and self.PLACEHOLDER_END in value

    def _replace_all(self, value: str) -> str | Any:
        """Replaces all placeholders in given `value` and returns the string with all replacements.

        If the whole string is a single placeholder, replace it completely with the replacement content - of any type.
        Otherwise, replace the placeholders within the string with the replacement content.
        """
        matches = re.findall(self._PLACEHOLDER_PATTERN, value)
        if len(matches) == 1:
            if re.fullmatch(self._PLACEHOLDER_PATTERN, value):
                # single full-string placeholder: return replacement as-is to preserve its type
                return self._replace(value)
        return re.sub(self._PLACEHOLDER_PATTERN, self._callback_wrapper, value)

    @abstractmethod
    def _replace(self, data_identifier: str) -> Any | None:
        """Returns replacement for given data identifier, or None if replacement cannot be found.

        Args:
            data_identifier: locator for the replacement data

        Returns:
            either the found replacement data (of any type), or None if no data is found
        """

    def _callback_wrapper(self, match: re.Match) -> str:
        """Extracts replacement string from given match and calls for its replacement."""
        return str(self._replace(match.group(0)))

    def _is_agent_replacement(self, value: str) -> bool:
        """Returns true if given value contains the agent iterable placeholder."""
        return re.search(self._AGENT_PATTERN, value) is not None

    def _replace_agent(self, value: str) -> str:
        """Replace all occurrences of agent iterable pattern in given string."""
        return re.sub(self._AGENT_PATTERN, str(self._current_agent), value)

    def _is_column_replacement(self, value: str) -> bool:
        """Returns true if given value contains the column iterable placeholder."""
        return re.search(self._COLUMN_PATTERN, value) is not None

    def _replace_column(self, value: str) -> str:
        """Replace all occurrences of column iterable pattern in given string."""
        return re.sub(self._COLUMN_PATTERN, str(self._current_column), value)

    def _is_per_column(self, value: str) -> bool:
        """Returns true if given value is the 'perColumn' section."""
        return value.strip().lower() == self._PER_COLUMN_PATTERN

    def _per_column(self) -> list:
        """Returns list of metadata for all columns of current agent."""
        column_metadata = []
        if len(self._per_column_template) > 0:
            template = self._per_column_template
            # NOTE(review): raises KeyError if "{{perColumn}}" is used outside a per-agent context
            # for an unknown agent - presumably templates always nest it under "perAgent"; verify
            for column in self._agent_columns[self._current_agent]:
                self._current_column = column
                column_metadata.append(self._fill_dict(template))
        return column_metadata

    def _is_per_agent(self, value: str) -> bool:
        """Returns true if given value is the 'perAgent' section."""
        return value.strip().lower() == self._PER_AGENT_PATTERN

    def _per_agent(self) -> list:
        """Returns list of metadata for all agents.

        Returns empty list if either the per-agent template is missing, or no agents are registered for output.
        """
        agent_metadata = []
        if len(self._per_agent_template) > 0:
            template = self._per_agent_template
            for agent in self._agent_columns.keys():
                self._current_agent = agent
                agent_metadata.append(self._fill_dict(template))
        return agent_metadata
|
@@ -0,0 +1,93 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
# Default metadata template following the OEMetadata v2 standard of the Open Energy Platform.
# Placeholders in angle brackets (e.g. "<schema:metadata:name>") are filled in by the metadata compiler;
# "{{perAgent}}" / "{{perColumn}}" mark where the "perAgent" / "perColumn" sub-template is repeated
# once per agent / per output column; "(agent)" / "(column)" are replaced by the name of the
# agent / column currently iterated.
OEO_TEMPLATE = {
    # top-level document structure: placeholders resolved once per simulation run
    "base": {
        "@context": "https://raw.githubusercontent.com/OpenEnergyPlatform/oemetadata/production/oemetadata/latest/context.json",
        "name": "<schema:metadata:name>_result_metadata",
        "title": "Results for <schema:metadata:name> run <scenario:metadata:RunId>.",
        "description": """Input compiled with FAME-Io <execution:InputCompilation:SoftwareVersions:FameIo>.
Simulated by FAME-Core <execution:ModelRun:SoftwareVersions:FameCore>.
Extracted by FAME-Io <execution:OutputExtraction:SoftwareVersions:FameIo>.""",
        # expands to one resource entry (from "perAgent" below) for each agent type with output
        "resources": ["{{perAgent}}"],
        "metaMetadata": {
            "metadataVersion": "OEMetadata-2.0.4",
            "metadataLicense": {
                "name": "CC0-1.0",
                "title": "Creative Commons Zero v1.0 Universal",
                "path": "https://creativecommons.org/publicdomain/zero/1.0",
            },
        },
    },
    # sub-template repeated once per agent type; describes the agent's CSV result file
    "perAgent": {
        "name": "(agent).*.csv",
        "topics": [],
        "title": "Results for (agent)",
        "path": "./(agent).*.csv",
        "description": "Simulation outputs by simulated time of agent type '(agent)'",
        "languages": ["en-GB"],
        "subject": "<schema:AgentTypes:(agent):metadata:subject>",
        "keywords": "<schema:AgentTypes:(agent):metadata:keywords>",
        "publicationDate": "<scenario:metadata:output:publicationDate>",
        "embargoPeriod": "<scenario:metadata:output:embargoPeriod>",
        "context": {
            "title": "<schema:metadata:title>",
            "homepage": "<schema:metadata:homepage>",
            "documentation": "<schema:metadata:documentation>",
            "sourceCode": "<schema:metadata:sourceCode>",
            "publisher": "<schema:metadata:publisher>",
            "publisherLogo": "<schema:metadata:publisherLogo>",
            "contact": "<schema:metadata:contact>",
            "fundingAgency": "<schema:metadata:fundingAgency>",
            "fundingAgencyLogo": "<schema:metadata:fundingAgencyLogo>",
            "grantNo": "<schema:metadata:grantNo>",
        },
        "spatial": {},
        "temporal": {},
        "sources": [],
        "licenses": "<scenario:metadata:output:licenses>",
        "contributors": "<schema:metadata:contributors>",
        "type": "table",
        "format": "CSV",
        "encoding": "UTF-8",
        "schema": {
            "fields": [
                # the two fixed index columns present in every agent output file
                {
                    "name": "AgentId",
                    "description": "Unique ID of the agent in the simulation",
                    "type": "integer",
                    "nullable": False,
                    "unit": "n/a",
                    "isAbout": [],
                    "valueReference": [],
                },
                {
                    "name": "TimeStep",
                    "description": "Simulated time these values are associated with",
                    "type": "integer or time stamp",
                    "nullable": False,
                    "unit": "n/a",
                    "isAbout": [
                        {"name": "TimeStamp", "@id": "https://openenergyplatform.org/ontology/oeo/OEO_00140043"}
                    ],
                    "valueReference": [],
                },
                # expands to one field entry (from "perColumn" below) per output column of this agent
                "{{perColumn}}",
            ],
            "primaryKey": ["AgentId", "TimeStep"],
            "foreignKeys": [],
            "dialect": {"delimiter": ";", "decimalSeparator": "."},
            "review": {},
        },
    },
    # sub-template repeated once per output column of the currently iterated agent
    "perColumn": {
        "name": "(column)",
        "description": "<schema:AgentTypes:(agent):outputs:(column):metadata:description>",
        "type": "decimal",
        "nullable": True,
        "unit": "<schema:AgentTypes:(agent):outputs:(column):metadata:unit>",
        "isAbout": "<schema:AgentTypes:(agent):outputs:(column):metadata:isAbout>",
        "valueReference": "<schema:AgentTypes:(agent):outputs:(column):metadata:valueReference>",
    },
}
|
@@ -0,0 +1,65 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
"""Reading of metadata templates in YAML or JSON format."""
|
5
|
+
|
6
|
+
from __future__ import annotations
|
7
|
+
|
8
|
+
from pathlib import Path
|
9
|
+
|
10
|
+
from fameio.input import YamlLoaderError
|
11
|
+
from fameio.input.loader import ALLOWED_SUFFIXES as YAML_SUFFIXES, load_yaml
|
12
|
+
from fameio.logs import log_error
|
13
|
+
from fameio.output.metadata import MetadataCompilationError
|
14
|
+
|
15
|
+
JSON_SUFFIX: str = ".json"
|
16
|
+
ENCODING = "UTF-8"
|
17
|
+
|
18
|
+
_ERR_UNKNOWN_ENDING = "Template file ending '{}' corresponds neither to a JSON or YAML file."
|
19
|
+
_ERR_READING_FILE = "Could not read template file: '{}'"
|
20
|
+
|
21
|
+
|
22
|
+
def read_template_file(file: Path) -> dict:
    """Reads and returns metadata template `file` encoded in UTF-8.

    Args:
        file: to be read

    Returns:
        dictionary content of the file

    Raises:
        MetadataCompilationError: if template file has unknown type, could not be opened/read, logged with level "ERROR"
    """
    suffix = file.suffix.lower()
    # reject any file that is neither YAML nor JSON before attempting to parse it
    if not (_has_yaml_ending(suffix) or _has_json_ending(suffix)):
        raise log_error(MetadataCompilationError(_ERR_UNKNOWN_ENDING.format(suffix)))
    # JSON is a subset of YAML, thus both file types are parsed by the YAML loader
    return _read_yaml(file)
|
38
|
+
|
39
|
+
|
40
|
+
def _has_yaml_ending(file_ending: str) -> bool:
    """Tells whether `file_ending` is one of the accepted YAML file suffixes."""
    return any(file_ending == suffix for suffix in YAML_SUFFIXES)
|
43
|
+
|
44
|
+
|
45
|
+
def _has_json_ending(file_ending: str) -> bool:
    """Tells whether `file_ending` is the accepted JSON file suffix."""
    return JSON_SUFFIX == file_ending
|
48
|
+
|
49
|
+
|
50
|
+
def _read_yaml(file: Path) -> dict:
    """Loads the provided yaml `file` and returns its content.

    Args:
        file: to be opened and read

    Returns:
        file content as dict

    Raises:
        MetadataCompilationError: if file could not be opened or read, logged with level "ERROR"
    """
    try:
        content = load_yaml(file, encoding=ENCODING)
    except YamlLoaderError as e:
        # wrap loader failure in an output-side error so callers need not know about input-side exceptions
        raise log_error(MetadataCompilationError(_ERR_READING_FILE.format(file))) from e
    return content
|
fameio/output/output_dao.py
CHANGED
fameio/output/reader.py
CHANGED
fameio/output/yaml_writer.py
CHANGED
@@ -1,6 +1,8 @@
|
|
1
1
|
# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
|
2
2
|
#
|
3
3
|
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
"""Writing of data to YAML files."""
|
5
|
+
|
4
6
|
from pathlib import Path
|
5
7
|
|
6
8
|
import yaml
|
@@ -8,7 +10,7 @@ import yaml
|
|
8
10
|
from fameio.logs import log, log_error
|
9
11
|
from fameio.output import OutputError
|
10
12
|
|
11
|
-
_ERR_OPEN_FILE = "Could not open file for
|
13
|
+
_ERR_OPEN_FILE = "Could not open file for writing: '{}'"
|
12
14
|
|
13
15
|
_INFO_DESTINATION = "Saving scenario to file at {}"
|
14
16
|
|
fameio/scripts/__init__.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1
1
|
#!/usr/bin/env python
|
2
|
+
"""Main scripts callable from command line."""
|
2
3
|
import sys
|
3
4
|
|
4
5
|
from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
|
@@ -13,6 +14,7 @@ from fameio.cli.reformat import handle_args as handle_reformat_args
|
|
13
14
|
|
14
15
|
# noinspection PyPep8Naming
|
15
16
|
def makeFameRunConfig():
|
17
|
+
"""Compiles FAME simulation input files in protobuf format."""
|
16
18
|
cli_config = handle_make_config_args(sys.argv[1:])
|
17
19
|
try:
|
18
20
|
make_config(cli_config)
|
@@ -22,6 +24,7 @@ def makeFameRunConfig():
|
|
22
24
|
|
23
25
|
# noinspection PyPep8Naming
|
24
26
|
def convertFameResults():
|
27
|
+
"""Converts a protobuf file to human-readable outputs."""
|
25
28
|
cli_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
|
26
29
|
try:
|
27
30
|
convert_results(cli_config)
|
@@ -31,6 +34,7 @@ def convertFameResults():
|
|
31
34
|
|
32
35
|
# noinspection PyPep8Naming
|
33
36
|
def reformatTimeSeries():
|
37
|
+
"""Reformats a timeseries file to speed up its future usage in scenarios."""
|
34
38
|
cli_config = handle_reformat_args(sys.argv[1:])
|
35
39
|
try:
|
36
40
|
reformat(cli_config)
|
@@ -3,7 +3,7 @@ from __future__ import annotations
|
|
3
3
|
|
4
4
|
import sys
|
5
5
|
from pathlib import Path
|
6
|
-
from typing import Any, BinaryIO
|
6
|
+
from typing import Any, BinaryIO, Optional
|
7
7
|
|
8
8
|
import pandas as pd
|
9
9
|
|
@@ -18,7 +18,17 @@ from fameio.output.conversion import apply_time_option, apply_time_merging
|
|
18
18
|
from fameio.output.csv_writer import CsvWriter
|
19
19
|
from fameio.output.data_transformer import DataTransformer, INDEX
|
20
20
|
from fameio.output.execution_dao import ExecutionDao
|
21
|
+
from fameio.output.files import (
|
22
|
+
get_output_folder_name,
|
23
|
+
create_output_folder,
|
24
|
+
RECOVERED_INPUT_PATH,
|
25
|
+
RECOVERED_SCENARIO_PATH,
|
26
|
+
)
|
21
27
|
from fameio.output.input_dao import InputDao
|
28
|
+
from fameio.output.metadata.compiler import MetadataCompiler
|
29
|
+
from fameio.output.metadata.json_writer import data_to_json_file
|
30
|
+
from fameio.output.metadata.oeo_template import OEO_TEMPLATE
|
31
|
+
from fameio.output.metadata.template_reader import read_template_file
|
22
32
|
from fameio.output.output_dao import OutputDAO
|
23
33
|
from fameio.output.reader import Reader
|
24
34
|
from fameio.output.yaml_writer import data_to_yaml_file
|
@@ -65,7 +75,10 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
|
|
65
75
|
OutputError: if file could not be opened or converted, logged with level "ERROR"
|
66
76
|
"""
|
67
77
|
log().info("Reading and extracting data...")
|
68
|
-
|
78
|
+
output_path = get_output_folder_name(config[Options.OUTPUT], file_path)
|
79
|
+
create_output_folder(output_path)
|
80
|
+
|
81
|
+
output_writer = CsvWriter(output_path, config[Options.SINGLE_AGENT_EXPORT])
|
69
82
|
agent_type_log = AgentTypeLog(_agent_name_filter_list=config[Options.AGENT_LIST])
|
70
83
|
data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
|
71
84
|
reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
|
@@ -73,7 +86,7 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
|
|
73
86
|
execution_dao = ExecutionDao()
|
74
87
|
while data_storages := reader.read():
|
75
88
|
execution_dao.store_execution_metadata(data_storages)
|
76
|
-
if config[Options.INPUT_RECOVERY]:
|
89
|
+
if config[Options.INPUT_RECOVERY] or config[Options.METADATA]:
|
77
90
|
input_dao.store_inputs(data_storages)
|
78
91
|
output = OutputDAO(data_storages, agent_type_log)
|
79
92
|
for agent_name in output.get_sorted_agents_to_extract():
|
@@ -86,7 +99,7 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
|
|
86
99
|
output_writer.write_to_files(agent_name, data_frames)
|
87
100
|
|
88
101
|
if config[Options.INPUT_RECOVERY]:
|
89
|
-
_recover_inputs(
|
102
|
+
_recover_inputs(output_path, input_dao, execution_dao.get_fameio_version())
|
90
103
|
if config[Options.MEMORY_SAVING]:
|
91
104
|
_memory_saving_apply_conversions(config, output_writer)
|
92
105
|
|
@@ -95,14 +108,21 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
|
|
95
108
|
log().warning(_WARN_OUTPUT_SUPPRESSED.format(agent_type_log.get_agents_with_output()))
|
96
109
|
else:
|
97
110
|
log().warning(_WARN_OUTPUT_MISSING)
|
111
|
+
elif config[Options.METADATA]:
|
112
|
+
compiler = MetadataCompiler(
|
113
|
+
input_data=input_dao.get_input_data(),
|
114
|
+
execution_data=execution_dao.get_metadata_dict(),
|
115
|
+
agent_columns=agent_type_log.get_agent_columns(),
|
116
|
+
)
|
117
|
+
write_metadata(output_path, config[Options.METADATA_TEMPLATE], compiler)
|
98
118
|
log().info("Data conversion completed.")
|
99
119
|
|
100
120
|
|
101
|
-
def _recover_inputs(
|
121
|
+
def _recover_inputs(output_path: Path, input_dao: InputDao, fameio_version: str) -> None:
|
102
122
|
"""Reads scenario configuration from provided `input_dao`.
|
103
123
|
|
104
124
|
Args:
|
105
|
-
|
125
|
+
output_path: path to output files
|
106
126
|
input_dao: to recover the input data from
|
107
127
|
fameio_version: version of fameio that was used to create the input data
|
108
128
|
|
@@ -114,12 +134,10 @@ def _recover_inputs(config: dict[Options, Any], input_dao: InputDao, fameio_vers
|
|
114
134
|
timeseries, scenario = input_dao.recover_inputs()
|
115
135
|
except InputError as ex:
|
116
136
|
raise log_error(OutputError(_ERR_RECOVER_INPUT.format(fameio_version))) from ex
|
117
|
-
|
118
|
-
series_writer = CsvWriter(
|
119
|
-
config_output=Path(base_path, "./recovered"), input_file_path=Path("./"), single_export=False
|
120
|
-
)
|
137
|
+
|
138
|
+
series_writer = CsvWriter(output_folder=Path(output_path, RECOVERED_INPUT_PATH), single_export=False)
|
121
139
|
series_writer.write_all_time_series_to_disk(timeseries)
|
122
|
-
data_to_yaml_file(scenario.to_dict(), Path(
|
140
|
+
data_to_yaml_file(scenario.to_dict(), Path(output_path, RECOVERED_SCENARIO_PATH))
|
123
141
|
|
124
142
|
|
125
143
|
def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer: CsvWriter) -> None:
|
@@ -143,8 +161,24 @@ def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer:
|
|
143
161
|
output_writer.write_to_files(agent_name, parsed_data)
|
144
162
|
|
145
163
|
|
164
|
+
def write_metadata(output_path: Path, template_file: Optional[Path], compiler: MetadataCompiler):
    """Compiles metadata from a template and writes the result to a JSON file.

    The built-in OEO template is used whenever no `template_file` is provided.

    Args:
        output_path: path to output folder
        template_file: path to metadata template (None allowed)
        compiler: to compile metadata with

    Raises:
        OutputError: in case templates could not be read or filled-in, or JSON writing failed, logged with level "ERROR"
    """
    if template_file is None:
        template = OEO_TEMPLATE
    else:
        template = read_template_file(template_file)
    compiled_metadata = compiler.locate_and_replace(template)
    data_to_json_file(compiled_metadata, output_path)
|
178
|
+
|
179
|
+
|
146
180
|
def run(config: dict[Options, Any] | None = None) -> None:
|
147
|
-
"""Reads configured file in protobuf format and extracts its content to
|
181
|
+
"""Reads configured file in protobuf format and extracts its content to CSV and YAML file(s).
|
148
182
|
|
149
183
|
Args:
|
150
184
|
config: script configuration options
|
fameio/scripts/exception.py
CHANGED
fameio/scripts/reformat.py
CHANGED
@@ -19,6 +19,7 @@ FILE_NAME_APPENDIX = "_reformatted"
|
|
19
19
|
_ERR_FAIL = "Timeseries reformatting script failed."
|
20
20
|
_ERR_NO_FILES = "No file found matching this pattern: '{}'"
|
21
21
|
_ERR_FILE_CONVERSION = "Could not reformat file: '{}'"
|
22
|
+
_ERR_FILES_FAILED = "Could not reformat these files: '{}'"
|
22
23
|
|
23
24
|
|
24
25
|
def reformat_file(file: Path, replace: bool) -> None:
|
@@ -58,9 +59,32 @@ def run(config: dict[Options, Any] | None = None) -> None:
|
|
58
59
|
raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN]))) from ex
|
59
60
|
if not files:
|
60
61
|
raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN])))
|
62
|
+
|
63
|
+
erroneous_files = reformat_files(files, config[Options.REPLACE])
|
64
|
+
if len(erroneous_files) > 0:
|
65
|
+
log_error(ScriptError(_ERR_FILES_FAILED.format(erroneous_files)))
|
66
|
+
else:
|
67
|
+
log_and_print("All files reformatted.")
|
68
|
+
|
69
|
+
|
70
|
+
def reformat_files(files: list[Path], replace: bool) -> list[str]:
    """Reformats given files and potentially replaces them.

    Args:
        files: list of files to be reformatted
        replace: if true, original files are replaced

    Returns:
        list of files that could not be reformatted
    """
    failed: list[str] = []
    for file in files:
        log_and_print(f"Reformatting file: {file}")
        try:
            reformat_file(file, replace)
        except ScriptError:
            # record the failure and continue with the remaining files
            failed.append(file.as_posix())
    return failed
|
64
88
|
|
65
89
|
|
66
90
|
if __name__ == "__main__":
|