fameio 3.4.0__py3-none-any.whl → 3.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. fameio/__init__.py +2 -1
  2. fameio/cli/__init__.py +2 -0
  3. fameio/cli/convert_results.py +8 -0
  4. fameio/cli/make_config.py +2 -0
  5. fameio/cli/options.py +4 -0
  6. fameio/cli/parser.py +17 -1
  7. fameio/cli/reformat.py +2 -0
  8. fameio/input/__init__.py +2 -1
  9. fameio/input/loader/__init__.py +1 -0
  10. fameio/input/loader/controller.py +20 -6
  11. fameio/input/loader/loader.py +2 -0
  12. fameio/input/metadata.py +2 -0
  13. fameio/input/resolver.py +2 -0
  14. fameio/input/scenario/__init__.py +2 -0
  15. fameio/input/scenario/agent.py +2 -0
  16. fameio/input/scenario/attribute.py +2 -0
  17. fameio/input/scenario/contract.py +2 -0
  18. fameio/input/scenario/exception.py +2 -0
  19. fameio/input/scenario/fameiofactory.py +2 -0
  20. fameio/input/scenario/generalproperties.py +2 -0
  21. fameio/input/scenario/scenario.py +5 -3
  22. fameio/input/scenario/stringset.py +2 -0
  23. fameio/input/schema/__init__.py +1 -0
  24. fameio/input/schema/agenttype.py +2 -0
  25. fameio/input/schema/attribute.py +2 -0
  26. fameio/input/schema/java_packages.py +2 -0
  27. fameio/input/schema/schema.py +8 -3
  28. fameio/input/validator.py +2 -0
  29. fameio/input/writer.py +16 -0
  30. fameio/logs.py +2 -1
  31. fameio/output/__init__.py +1 -0
  32. fameio/output/agent_type.py +24 -12
  33. fameio/output/conversion.py +2 -0
  34. fameio/output/csv_writer.py +10 -36
  35. fameio/output/data_transformer.py +2 -0
  36. fameio/output/execution_dao.py +5 -0
  37. fameio/output/files.py +55 -0
  38. fameio/output/input_dao.py +59 -15
  39. fameio/output/metadata/__init__.py +10 -0
  40. fameio/output/metadata/compiler.py +75 -0
  41. fameio/output/metadata/json_writer.py +36 -0
  42. fameio/output/metadata/locator.py +242 -0
  43. fameio/output/metadata/oeo_template.py +93 -0
  44. fameio/output/metadata/template_reader.py +65 -0
  45. fameio/output/output_dao.py +2 -0
  46. fameio/output/reader.py +1 -0
  47. fameio/output/yaml_writer.py +3 -1
  48. fameio/scripts/REUSE.toml +6 -0
  49. fameio/scripts/__init__.py +4 -0
  50. fameio/scripts/convert_results.py +46 -12
  51. fameio/scripts/exception.py +1 -0
  52. fameio/scripts/reformat.py +25 -1
  53. fameio/series.py +16 -7
  54. fameio/time.py +1 -0
  55. fameio/tools.py +1 -0
  56. fameio-3.5.1.dist-info/LICENSES/CC-BY-ND-4.0.txt +392 -0
  57. fameio-3.5.1.dist-info/METADATA +99 -0
  58. fameio-3.5.1.dist-info/RECORD +65 -0
  59. fameio/scripts/__init__.py.license +0 -3
  60. fameio/scripts/convert_results.py.license +0 -3
  61. fameio/scripts/make_config.py.license +0 -3
  62. fameio/scripts/reformat.py.license +0 -3
  63. fameio-3.4.0.dist-info/METADATA +0 -990
  64. fameio-3.4.0.dist-info/RECORD +0 -60
  65. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSE.txt +0 -0
  66. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  67. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  68. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  69. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/WHEEL +0 -0
  70. {fameio-3.4.0.dist-info → fameio-3.5.1.dist-info}/entry_points.txt +0 -0
fameio/output/csv_writer.py CHANGED
@@ -1,13 +1,15 @@
 # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+"""Writing of dataframes to CSV files."""
+
 from __future__ import annotations
 
 from pathlib import Path
 
 import pandas as pd
 
-from fameio.logs import log, log_error
+from fameio.logs import log_error
 from fameio.output import OutputError
 from fameio.output.data_transformer import INDEX
 from fameio.series import TimeSeriesManager
@@ -21,49 +23,21 @@ class CsvWriterError(OutputError):
 class CsvWriter:
     """Writes dataframes to different csv files."""
 
-    _ERR_DIR_CREATE = "Could not create directory for output files: '{}'"
     _ERR_FILE_OPEN = "Could not open file for writing: '{}'"
     _ERR_FILE_WRITE = "Could not write to file '{}' due to: {}"
 
-    _INFO_USING_PATH = "Using specified output path: {}"
-    _INFO_USING_DERIVED_PATH = "No output path specified - writing to new local folder: {}"
-
     CSV_FILE_SUFFIX = ".csv"
 
-    def __init__(self, config_output: Path, input_file_path: Path, single_export: bool) -> None:
+    def __init__(self, output_folder: Path, single_export: bool) -> None:
         """Constructs a new CsvWriter.
 
-        Raises:
-            CsvWriterError: if output folder could not be created, logged with level "ERROR"
+        Args:
+            output_folder: to write the output files to
+            single_export: if true, one output file per unique agent is created
         """
         self._single_export = single_export
-        self._output_folder = self._get_output_folder_name(config_output, input_file_path)
+        self._output_folder = output_folder
         self._files: dict[str, Path] = {}
-        self._create_output_folder()
-
-    @staticmethod
-    def _get_output_folder_name(config_output: Path, input_file_path: Path) -> Path:
-        """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`."""
-        if config_output:
-            output_folder_name: str | Path = config_output
-            log().info(CsvWriter._INFO_USING_PATH.format(config_output))
-        else:
-            output_folder_name = input_file_path.stem
-            log().info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
-        return Path(output_folder_name)
-
-    def _create_output_folder(self) -> None:
-        """Creates output folder if not yet present.
-
-        Raises:
-            CsvWriterError: if output folder could not be created, logged with level "ERROR"
-        """
-        log().debug("Creating output folder if required...")
-        if not self._output_folder.is_dir():
-            try:
-                self._output_folder.mkdir(parents=True)
-            except OSError as e:
-                raise log_error(CsvWriterError(self._ERR_DIR_CREATE.format(self._output_folder))) from e
 
     def write_to_files(self, agent_name: str, data: dict[None | str, pd.DataFrame]) -> None:
         """Writes `data` for given `agent_name` to .csv file(s).
@@ -115,7 +89,7 @@ class CsvWriter:
 
     @staticmethod
    def _dataframe_to_csv(data: pd.DataFrame, file: Path, header: bool, index: bool, mode: str) -> None:
-        """Write given data to specified CSV file with specified parameters using semicolon separators.
+        """Write given data to specified CSV file in UTF8 encoding with specified parameters using semicolon separators.
 
         Args:
             data: to be written
@@ -128,7 +102,7 @@ class CsvWriter:
             CsvWriterError: if data could not be written to disk, logged on level "ERROR"
         """
         try:
-            data.to_csv(path_or_buf=file, sep=";", header=header, index=index, mode=mode)
+            data.to_csv(path_or_buf=file, sep=";", header=header, index=index, mode=mode, encoding="UTF-8")
         except OSError as e:
             raise log_error(CsvWriterError(CsvWriter._ERR_FILE_OPEN.format(file))) from e
         except UnicodeError as e:
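
Note: the constructor no longer derives or creates the output folder itself; that logic moved to the new module fameio/output/files.py (shown further below). A minimal sketch of how a caller might wire the two together; the call site is illustrative, not quoted from fameio's scripts:

    from pathlib import Path

    from fameio.output.csv_writer import CsvWriter
    from fameio.output.files import create_output_folder, get_output_folder_name

    # Derive the folder name from a configured path (here: none) or the input file stem...
    output_folder = get_output_folder_name(None, Path("run.pb"))  # -> Path("run")
    # ...create it up front, then hand the ready-made folder to the writer.
    create_output_folder(output_folder)
    writer = CsvWriter(output_folder, single_export=False)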
fameio/output/data_transformer.py CHANGED
@@ -1,6 +1,8 @@
 # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+"""Transformation of (complex) time series outputs from agents."""
+
 from __future__ import annotations
 
 from abc import ABC
fameio/output/execution_dao.py CHANGED
@@ -1,8 +1,11 @@
 # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+"""Accessing execution content of protobuf messages."""
+
 from __future__ import annotations
 
+from importlib import metadata
 from typing import Any, Final
 
 from fameprotobuf.data_storage_pb2 import DataStorage
@@ -47,6 +50,7 @@ class ExecutionDao:
 
     KEY_COMPILATION: Final[str] = "InputCompilation"
     KEY_RUN: Final[str] = "ModelRun"
+    KEY_EXTRACTION: Final[str] = "OutputExtraction"
     KEY_VERSIONS: Final[str] = "SoftwareVersions"
     KEY_PROCESSES: Final[str] = "ProcessConfiguration"
     KEY_STATISTICS: Final[str] = "Statistics"
@@ -146,6 +150,7 @@ class ExecutionDao:
                 self.KEY_PROCESSES: self._get_dict(self._run_configuration, PROCESS_MAP),
                 self.KEY_STATISTICS: self._get_dict(self._run_simulation, STATISTICS_MAP),
            },
+            self.KEY_EXTRACTION: {self.KEY_VERSIONS: {"FameIo": metadata.version("fameio")}},
        }
        return result
 
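Note: the new "OutputExtraction" entry records which fameio version extracted the outputs, using importlib.metadata from the standard library. For illustration, the underlying call:

    from importlib import metadata

    # Reads the installed distribution's version from its package metadata.
    print(metadata.version("fameio"))  # e.g. "3.5.1" for this release
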
fameio/output/files.py ADDED
@@ -0,0 +1,55 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+"""Finding output folders and files, creating the output folder."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Final, Optional
+
+from fameio.logs import log, log_error
+from fameio.output import OutputError
+
+_ERR_DIR_CREATE = "Could not create directory for output files: '{}'"
+
+_INFO_USING_PATH = "Using specified output path: '{}'"
+_INFO_USING_DERIVED_PATH = "No output path specified - writing to new local folder: '{}'"
+
+_DEBUG_NEW_FOLDER = "Output folder '{}' not present, trying to create it..."
+_DEBUG_EXISTING_FOLDER = "Output folder '{}' already exists..."
+
+RECOVERED_INPUT_PATH: Final[str] = "./recovered"
+RECOVERED_SCENARIO_PATH: Final[str] = "./recovered/scenario.yaml"
+METADATA_FILE_NAME: Final[str] = "metadata.json"
+
+
+class OutputPathError(OutputError):
+    """An error that occurred during creation of the output path."""
+
+
+def get_output_folder_name(config_output: Optional[Path | str], input_file_path: Path) -> Path:
+    """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`."""
+    if config_output:
+        output_folder_name = config_output
+        log().info(_INFO_USING_PATH.format(config_output))
+    else:
+        output_folder_name = input_file_path.stem
+        log().info(_INFO_USING_DERIVED_PATH.format(output_folder_name))
+    return Path(output_folder_name)
+
+
+def create_output_folder(output_path: Path) -> None:
+    """Creates output folder if not yet present.
+
+    Raises:
+        OutputPathError: if output folder could not be created, logged with level "ERROR"
+    """
+    if not output_path.is_dir():
+        log().debug(_DEBUG_NEW_FOLDER.format(output_path))
+        try:
+            output_path.mkdir(parents=True)
+        except OSError as e:
+            raise log_error(OutputPathError(_ERR_DIR_CREATE.format(output_path))) from e
+    else:
+        log().debug(_DEBUG_EXISTING_FOLDER.format(output_path))
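
Note on the new helpers: create_output_folder is a no-op for existing directories and wraps any OSError in OutputPathError, which subclasses OutputError, so callers can handle all output failures uniformly. A hedged sketch (the target path is made up):

    from pathlib import Path

    from fameio.output import OutputError
    from fameio.output.files import create_output_folder

    try:
        create_output_folder(Path("results/run_01"))  # hypothetical output path
    except OutputError:  # catches OutputPathError and other output errors alike
        raise SystemExit("Could not prepare output folder")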
fameio/output/input_dao.py CHANGED
@@ -1,16 +1,18 @@
 # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+"""Accessing input content of protobuf messages."""
+
 import ast
-from typing import Any
+from typing import Any, Optional
 
 from fameprotobuf.data_storage_pb2 import DataStorage
 from fameprotobuf.field_pb2 import NestedField
 from fameprotobuf.input_file_pb2 import InputData
 
-from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario
+from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario, StringSet, Attribute
 from fameio.input.schema import Schema, AttributeSpecs, AttributeType
-from fameio.logs import log_error
+from fameio.logs import log_error, log
 from fameio.output import OutputError
 from fameio.series import TimeSeriesManager, TimeSeriesError
 
@@ -26,6 +28,8 @@ class InputDao:
     _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."
     _ERR_NO_SCHEMA = "No schema found on file - cannot recover inputs."
     _ERR_SERIES_MISSING = "References time series '{}' was not registered on file."
+    _ERR_SCENARIO_METADATA = "Proceeding without metadata for scenario - could not be extracted due to: {}"
+    _ERR_STRING_SET_METADATA = "Proceeding without metadata for string set '{}' - could not be extracted due to: {}"
 
     _FIELD_NAME_MAP: dict = {
         AttributeType.STRING: "string_values",
@@ -61,9 +65,10 @@ class InputDao:
             InputConversionError: if inputs could not be recovered, logged with level "ERROR"
             InputError: if scenario in file is incompatible with this version of fameio, logged with level "ERROR"
         """
-        input_data = self._get_input_data()
+        input_data = self.get_input_data()
         schema = self._get_schema(input_data)
-        scenario = Scenario(schema, self._get_general_properties(input_data))
+        metadata = self._metadata_to_dict(input_data.metadata)
+        scenario = Scenario(schema, self._get_general_properties(input_data), metadata)
         for contract in self._get_contracts(input_data):
             scenario.add_contract(contract)
 
@@ -71,10 +76,13 @@ class InputDao:
         for agent in self._get_agents(input_data, schema):
             scenario.add_agent(agent)
 
+        for name, string_set in self._get_string_sets(input_data).items():
+            scenario.add_string_set(name, string_set)
+
         return self._timeseries_manager, scenario
 
-    def _get_input_data(self) -> InputData:
-        """Check that exactly one previously extracted input data exist, otherwise raises an exception.
+    def get_input_data(self) -> InputData:
+        """Check that exactly one previously extracted input data exist and returns them; otherwise raises an exception.
 
         Returns:
             the previously extracted input data
@@ -93,6 +101,16 @@ class InputDao:
         """Read and return Schema from given `input_data`."""
         return Schema.from_string(input_data.schema)
 
+    @staticmethod
+    def _metadata_to_dict(metadata: Optional[str] = None) -> dict:
+        """Convert given metadata `metadata to dict`, proceeds on error but logs given `message`"""
+        if metadata:
+            try:
+                return ast.literal_eval(metadata)
+            except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError) as e:
+                log().error(InputDao._ERR_SCENARIO_METADATA.format(e))
+        return {}
+
     @staticmethod
     def _get_general_properties(input_data: InputData) -> GeneralProperties:
         """Read and return GeneralProperties from given `input_data`."""
@@ -119,6 +137,20 @@ class InputDao:
             for contract in input_data.contracts
         ]
 
+    @staticmethod
+    def _get_string_sets(input_data: InputData) -> dict[str, StringSet]:
+        """Read and return StringSets from given `input_data`."""
+        string_sets = {}
+        for dao in input_data.string_sets:
+            values = {
+                entry.name: {StringSet.KEY_METADATA: InputDao._metadata_to_dict(entry.metadata)} for entry in dao.values
+            }
+            metadata = InputDao._metadata_to_dict(dao.metadata)
+            string_sets[dao.name] = StringSet.from_dict(
+                {StringSet.KEY_VALUES: values, StringSet.KEY_METADATA: metadata}
+            )
+        return string_sets
+
     def _init_timeseries(self, input_data: InputData) -> None:
         """Read timeseries from given `input_data` and initialise TimeSeriesManager."""
         self._timeseries_manager.reconstruct_time_series(list(input_data.time_series))
@@ -144,14 +176,14 @@ class InputDao:
                 type_name=agent_dao.class_name,
                 metadata=ast.literal_eval(agent_dao.metadata) if agent_dao.metadata else None,
             )
-            attribute_dict = self._get_attributes(
+            attributes_dict = self._get_attributes_dict(
                 list(agent_dao.fields), schema.agent_types[agent_dao.class_name].attributes
             )
-            agent.init_attributes_from_dict(attribute_dict)
+            agent.init_attributes_from_dict(attributes_dict)
             agents.append(agent)
         return agents
 
-    def _get_attributes(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, Any]:
+    def _get_attributes_dict(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, dict]:
         """Read and return all Attributes as Dictionary from given list of fields.
 
         Args:
@@ -159,14 +191,15 @@ class InputDao:
             schematics: description of the attributes associated by name
 
         Returns:
-            all recovered attributes and their associated values
+            all recovered attributes and their associated values as dictionary
 
         Raises:
             InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
         """
-        attributes: dict[str, Any] = {}
+        attributes: dict[str, dict[str, Any]] = {}
         for field in fields:
-            attributes[field.field_name] = self._get_field_value(field, schematics[field.field_name])
+            value = self._get_field_value(field, schematics[field.field_name])
+            attributes[field.field_name] = value if not field.metadata else self._get_field_dict(value, field.metadata)
         return attributes
 
     def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
@@ -190,9 +223,20 @@ class InputDao:
             raise log_error(InputConversionError(self._ERR_SERIES_MISSING.format(field.series_id))) from e
         if attribute_type is AttributeType.BLOCK:
             if schematic.is_list:
-                return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
-            return self._get_attributes(list(field.fields), schematic.nested_attributes)
+                return [
+                    self._get_attributes_dict(list(entry.fields), schematic.nested_attributes) for entry in field.fields
+                ]
+            return self._get_attributes_dict(list(field.fields), schematic.nested_attributes)
         value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
         if schematic.is_list:
             return list(value)
         return list(value)[0]
+
+    def _get_field_dict(self, field_value: Any, metadata: str) -> dict[str, Any]:
+        """Returns dict with metadata and `field_value` associated with either singular or plural key, if is list."""
+        result: dict[str, Any] = {Attribute.KEY_METADATA: self._metadata_to_dict(metadata)}
+        if isinstance(field_value, list):
+            result[Attribute.KEY_VALUES] = field_value
+        else:
+            result[Attribute.KEY_VALUE] = field_value
+        return result
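
Note: the metadata recovery relies on ast.literal_eval, which safely evaluates the string-serialised dictionaries stored in the protobuf file and raises one of the caught exception types for anything malformed. A self-contained illustration of that contract (example strings are made up):

    import ast

    assert ast.literal_eval("{'author': 'DLR'}") == {"author": "DLR"}

    try:
        ast.literal_eval("{'unclosed': ")  # malformed -> SyntaxError
    except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
        pass  # InputDao._metadata_to_dict logs this and falls back to {}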
fameio/output/metadata/__init__.py ADDED
@@ -0,0 +1,10 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: CC0-1.0
+"""Classes and modules to compile metadata associated with output files."""
+
+from fameio.output import OutputError
+
+
+class MetadataCompilationError(OutputError):
+    """An error occurred while compiling output metadata."""
fameio/output/metadata/compiler.py ADDED
@@ -0,0 +1,75 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+"""Compiling metadata files accompanying the output CSV files."""
+
+from __future__ import annotations
+
+import ast
+from typing import Final, Any
+
+from fameprotobuf.input_file_pb2 import InputData
+
+from fameio.input.metadata import Metadata
+from fameio.logs import log_error, log
+from fameio.output.metadata import MetadataCompilationError
+from fameio.output.metadata.locator import Locator
+from fameio.tools import keys_to_lower
+
+
+class MetadataCompiler(Locator):
+    """Compiles metadata for output files based on ExecutionData and InputData."""
+
+    ENTRY_SCHEMA: Final[str] = "Schema".lower()
+    ENTRY_SCENARIO: Final[str] = "Scenario".lower()
+    ENTRY_EXECUTION: Final[str] = "Execution".lower()
+    SEPARATOR: Final[str] = ":"
+
+    _ERR_MALFORMED_DICT_STRING = "Input data reading failed: Malformed string representation of metadata dictionaries."
+    _INFO_NOT_FOUND = "Could not find element at '{}' in input section of provided file."
+
+    def __init__(
+        self, execution_data: dict[str, Any], input_data: InputData, agent_columns: dict[str, list[str]]
+    ) -> None:
+        """Initialises a new MetadataCompiler.
+
+        Args:
+            execution_data: to read execution metadata from
+            input_data: to read schema and scenario metadata from
+            agent_columns: agents and their output columns
+        """
+        super().__init__(agent_columns)
+        try:
+            self._data: dict[str, dict] = {
+                self.ENTRY_SCHEMA: ast.literal_eval(input_data.schema),
+                self.ENTRY_SCENARIO: {
+                    Metadata.KEY_METADATA: ast.literal_eval(input_data.metadata) if input_data.metadata else {}
+                },
+                self.ENTRY_EXECUTION: execution_data,
+            }
+        except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError) as e:
+            raise log_error(MetadataCompilationError(self._ERR_MALFORMED_DICT_STRING)) from e
+
+    def _replace(self, data_identifier: str) -> Any | None:
+        identifier = data_identifier[1:-1]
+        address = identifier.split(self.SEPARATOR)
+        data_source = address[0].lower()
+        try:
+            if data_source in self._data:
+                return self._get_from(self._data[data_source], address[1:])
+        except KeyError:
+            log().info(self._INFO_NOT_FOUND.format(self.SEPARATOR.join(address)))
+            return None
+        return None
+
+    @staticmethod
+    def _get_from(base: dict, address: list[str]) -> Any:
+        """Returns element in `base` at given `address`.
+
+        Raises:
+            KeyError: if element cannot be found; error not logged
+        """
+        element = base
+        for entry in address:
+            element = keys_to_lower(element)[entry.lower()]
+        return element
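
Note: judging from _replace, placeholders look like "{Source:Key:...}": the surrounding braces are stripped, the remainder split on ":", the first token picks the schema, scenario, or execution data, and the remaining tokens walk nested dicts case-insensitively via keys_to_lower. A toy walk-through under that assumption (locator.py itself is not shown in this diff):

    scenario = {"metadata": {"Author": "DLR"}}  # stands in for self._data["scenario"]
    address = "Scenario:Metadata:Author".split(":")[1:]  # -> ["Metadata", "Author"]

    element = scenario
    for entry in address:
        # mirrors keys_to_lower: compare keys case-insensitively at each level
        element = {key.lower(): value for key, value in element.items()}[entry.lower()]
    print(element)  # -> "DLR"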
fameio/output/metadata/json_writer.py ADDED
@@ -0,0 +1,36 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+"""Writing of data to JSON files."""
+
+import json
+from pathlib import Path
+
+from fameio.logs import log, log_error
+from fameio.output import OutputError
+from fameio.output.files import METADATA_FILE_NAME
+
+_ERR_OPEN_FILE = "Could not open file for writing: '{}'"
+_INFO_DESTINATION = "Saving JSON to file to {}"
+
+
+class JsonWriterError(OutputError):
+    """An error occurred during writing a JSON file."""
+
+
+def data_to_json_file(data: dict, base_path: Path) -> None:
+    """Save the given data to a JSON file at given path.
+
+    Args:
+        data: to be saved to JSON file
+        base_path: at which the JSON file will be created
+
+    Raises:
+        JsonWriterError: if file could not be opened or written, logged with level "ERROR"
+    """
+    log().info(_INFO_DESTINATION.format(base_path))
+    try:
+        with open(Path(base_path, METADATA_FILE_NAME), "w", encoding="utf-8") as f:
+            json.dump(data, f)
+    except OSError as e:
+        raise log_error(JsonWriterError(_ERR_OPEN_FILE.format(base_path))) from e
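
Note: the file name is fixed to metadata.json (METADATA_FILE_NAME from fameio/output/files.py), so callers pass only the base folder, which must already exist. A minimal usage sketch (the folder name below is made up):

    from pathlib import Path

    from fameio.output.metadata.json_writer import data_to_json_file

    # Writes run/metadata.json in UTF-8; raises JsonWriterError if the folder is missing.
    data_to_json_file({"FameIo": "3.5.1"}, Path("run"))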