fameio 3.5.0 → 3.5.1 (py3-none-any.whl)

This diff compares the contents of two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
fameio/input/loader/controller.py CHANGED
@@ -31,7 +31,7 @@ class LoaderController:
     NODE_SPLIT_STRING: Final[str] = ":"
 
     _ERR_FILE_OPEN_ERROR = "Could not open file: '{}'"
-    _ERR_FILE_LOAD_ERROR = "Could not parse YAML file due to errors in (line:column): ({}:{})"
+    _ERR_FILE_LOAD_ERROR = "Could not parse file '{}' due to error in (line:column): ({}:{})"
     _ERR_NODE_MISSING = "'!include_node [{}, {}]': Cannot find '{}'"
     _ERR_NOT_LIST = "!include can only combine list-like elements from multiple files!"
     _WARN_NOTHING_TO_INCLUDE = "Could not find any files matching this '!include' directive '{}'"
@@ -51,11 +51,11 @@ class LoaderController:
         self._path_resolver = path_resolver
         self._encoding: str | None = encoding
 
-    def load(self, yaml_file_path: Path) -> dict:
+    def load(self, file_path: Path) -> dict:
         """Spawns a new FameYamlLoader, loads the given `yaml_file_path` and returns its content.
 
         Args:
-            yaml_file_path: path to YAML file that is to be loaded
+            file_path: path to YAML file that is to be loaded
 
         Returns:
             dictionary representation of loaded file
@@ -64,14 +64,16 @@ class LoaderController:
             YamlLoaderError: if file could not be read, logged with level "CRITICAL"
         """
         try:
-            with open(yaml_file_path, "r", encoding=self._encoding) as configfile:
+            with open(file_path, "r", encoding=self._encoding) as configfile:
                 try:
                     data = yaml.load(configfile, self._spawn_loader_builder())  # type: ignore[arg-type]
                 except yaml.YAMLError as e:
                     line, column = self._get_problem_position(e)
-                    raise log_critical(YamlLoaderError(self._ERR_FILE_LOAD_ERROR.format(line, column))) from e
+                    raise log_critical(
+                        YamlLoaderError(self._ERR_FILE_LOAD_ERROR.format(file_path, line, column))
+                    ) from e
         except OSError as e:
-            raise log_critical(YamlLoaderError(self._ERR_FILE_OPEN_ERROR.format(yaml_file_path))) from e
+            raise log_critical(YamlLoaderError(self._ERR_FILE_OPEN_ERROR.format(file_path))) from e
         return data
 
     @staticmethod
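The parse-error message now names the offending file in addition to the error position. A minimal standalone sketch (not part of the package, with hypothetical values) of how the new three-placeholder message renders:

ERR_FILE_LOAD = "Could not parse file '{}' due to error in (line:column): ({}:{})"
print(ERR_FILE_LOAD.format("scenario.yaml", 12, 3))
# Could not parse file 'scenario.yaml' due to error in (line:column): (12:3)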
fameio/input/scenario/scenario.py CHANGED
@@ -5,7 +5,7 @@
 
 from __future__ import annotations
 
-from typing import Final, Any
+from typing import Final, Any, Optional
 
 from fameio.input import SchemaError
 from fameio.input.metadata import Metadata
@@ -34,8 +34,8 @@ class Scenario(Metadata):
     _ERR_MULTI_CONTRACT = "Could not create scenario: Definition of Contracts has errors: {}"
     _ERR_CONTRACT = "Could not create scenario: Definition of Contract has errors: {}"
 
-    def __init__(self, schema: Schema, general_props: GeneralProperties) -> None:
-        super().__init__()
+    def __init__(self, schema: Schema, general_props: GeneralProperties, metadata: Optional[dict] = None) -> None:
+        super().__init__({Metadata.KEY_METADATA: metadata})
         self._schema = schema
         self._general_props = general_props
         self._string_sets: dict[str, StringSet] = {}
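Scenario now accepts an optional metadata dictionary and hands it to its Metadata base class. A hedged usage sketch; `schema` and `general_props` stand in for previously constructed Schema and GeneralProperties instances, and the metadata dict itself is a hypothetical example:

from fameio.input.scenario import Scenario

# `schema` and `general_props` are assumed to exist (construction not shown)
scenario = Scenario(schema, general_props, metadata={"description": "recovered run"})
# Omitting the new argument defaults metadata to None.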
fameio/output/agent_type.py CHANGED
@@ -77,24 +77,22 @@ class AgentTypeLog:
         self._agents_with_output: list[str] = []
 
     def update_agents(self, new_types: dict[str, Output.AgentType]) -> None:
-        """Saves `new_types` if they are requested for extraction.
+        """Saves new types of agents for later inspection.
 
-        If any new agent types are provided, checks if they are requested for extraction, and, if so, saves them.
-        Agent types not requested for extraction are ignored.
+        If any new agent types are provided, registers them as "agents with output"
+        Then, checks if they are requested for extraction, and, if so, saves them as "requested agent types".
 
         Args:
-            new_types: to be saved (if requested for extraction)
+            new_types: to be logged
 
         Raises:
             AgentTypeError: if agent type was already registered, logged with level "ERROR"
         """
-        if not new_types:
-            return
-
-        self._agents_with_output.extend(list(new_types.keys()))
-        filtered_types = self._filter_agents_by_name(new_types)
-        self._ensure_no_duplication(filtered_types)
-        self._requested_agent_types.update(filtered_types)
+        if new_types is not None and len(new_types) > 0:
+            self._agents_with_output.extend(list(new_types.keys()))
+            filtered_types = self._filter_agents_by_name(new_types)
+            self._ensure_no_duplication(filtered_types)
+            self._requested_agent_types.update(filtered_types)
 
     def _filter_agents_by_name(self, new_types: dict[str, Output.AgentType]) -> dict[str, Output.AgentType]:
         """Removes and entries from `new_types` not on `agent_name_filter_list`.
@@ -127,7 +125,7 @@ class AgentTypeLog:
             raise log_error(AgentTypeError(self._ERR_DOUBLE_DEFINITION.format(agent_name)))
 
     def has_any_agent_type(self) -> bool:
-        """Returns True if any agent type was registered so far present."""
+        """Returns True if any agent type was registered so far."""
         return len(self._requested_agent_types) > 0
 
     def get_agent_type(self, agent_type_name: str) -> AgentType:
@@ -155,13 +153,13 @@ class AgentTypeLog:
         return self._agents_with_output
 
     def get_agent_columns(self) -> dict[str, list[str]]:
-        """Returns all agents with output mapped to their simple output columns.
+        """Returns all agents that were not filtered, with their output mapped to their simple output columns.
 
         Raises:
             AgentTypeError: if - somehow - an agent type is not registered but has data, logged with level "ERROR"
         """
         result = {}
-        for agent in self.get_agents_with_output():
-            agent_type = self.get_agent_type(agent)
-            result[agent] = list(agent_type.get_simple_column_map().values())
+        for agent_name in self._requested_agent_types:
+            agent_type = self.get_agent_type(agent_name)
+            result[agent_name] = list(agent_type.get_simple_column_map().values())
         return result
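get_agent_columns now iterates the requested (name-filtered) agent types instead of every agent with output, so filtered-out agents no longer show up in the result. An illustrative sketch with plain data, mirroring just the iteration change:

agents_with_output = ["AgentA", "AgentB"]    # everything seen on file
requested_agent_types = {"AgentA": None}     # only agents passing the name filter

print([name for name in agents_with_output])     # old basis: ['AgentA', 'AgentB']
print([name for name in requested_agent_types])  # new basis: ['AgentA']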
fameio/output/csv_writer.py CHANGED
@@ -9,7 +9,7 @@ from pathlib import Path
 
 import pandas as pd
 
-from fameio.logs import log, log_error
+from fameio.logs import log_error
 from fameio.output import OutputError
 from fameio.output.data_transformer import INDEX
 from fameio.series import TimeSeriesManager
@@ -23,49 +23,21 @@ class CsvWriterError(OutputError):
 class CsvWriter:
     """Writes dataframes to different csv files."""
 
-    _ERR_DIR_CREATE = "Could not create directory for output files: '{}'"
     _ERR_FILE_OPEN = "Could not open file for writing: '{}'"
     _ERR_FILE_WRITE = "Could not write to file '{}' due to: {}"
 
-    _INFO_USING_PATH = "Using specified output path: {}"
-    _INFO_USING_DERIVED_PATH = "No output path specified - writing to new local folder: {}"
-
     CSV_FILE_SUFFIX = ".csv"
 
-    def __init__(self, config_output: Path, input_file_path: Path, single_export: bool) -> None:
+    def __init__(self, output_folder: Path, single_export: bool) -> None:
         """Constructs a new CsvWriter.
 
-        Raises:
-            CsvWriterError: if output folder could not be created, logged with level "ERROR"
+        Args:
+            output_folder: to write the output files to
+            single_export: if true, one output file per unique agent is created
         """
         self._single_export = single_export
-        self._output_folder = self._get_output_folder_name(config_output, input_file_path)
+        self._output_folder = output_folder
         self._files: dict[str, Path] = {}
-        self._create_output_folder()
-
-    @staticmethod
-    def _get_output_folder_name(config_output: Path, input_file_path: Path) -> Path:
-        """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`."""
-        if config_output:
-            output_folder_name: str | Path = config_output
-            log().info(CsvWriter._INFO_USING_PATH.format(config_output))
-        else:
-            output_folder_name = input_file_path.stem
-            log().info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
-        return Path(output_folder_name)
-
-    def _create_output_folder(self) -> None:
-        """Creates output folder if not yet present.
-
-        Raises:
-            CsvWriterError: if output folder could not be created, logged with level "ERROR"
-        """
-        log().debug("Creating output folder if required...")
-        if not self._output_folder.is_dir():
-            try:
-                self._output_folder.mkdir(parents=True)
-            except OSError as e:
-                raise log_error(CsvWriterError(self._ERR_DIR_CREATE.format(self._output_folder))) from e
 
     def write_to_files(self, agent_name: str, data: dict[None | str, pd.DataFrame]) -> None:
         """Writes `data` for given `agent_name` to .csv file(s).
fameio/output/files.py ADDED
@@ -0,0 +1,55 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+"""Finding output folders and files, creating the output folder."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Final, Optional
+
+from fameio.logs import log, log_error
+from fameio.output import OutputError
+
+_ERR_DIR_CREATE = "Could not create directory for output files: '{}'"
+
+_INFO_USING_PATH = "Using specified output path: '{}'"
+_INFO_USING_DERIVED_PATH = "No output path specified - writing to new local folder: '{}'"
+
+_DEBUG_NEW_FOLDER = "Output folder '{}' not present, trying to create it..."
+_DEBUG_EXISTING_FOLDER = "Output folder '{}' already exists..."
+
+RECOVERED_INPUT_PATH: Final[str] = "./recovered"
+RECOVERED_SCENARIO_PATH: Final[str] = "./recovered/scenario.yaml"
+METADATA_FILE_NAME: Final[str] = "metadata.json"
+
+
+class OutputPathError(OutputError):
+    """An error that occurred during creation of the output path."""
+
+
+def get_output_folder_name(config_output: Optional[Path | str], input_file_path: Path) -> Path:
+    """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`."""
+    if config_output:
+        output_folder_name = config_output
+        log().info(_INFO_USING_PATH.format(config_output))
+    else:
+        output_folder_name = input_file_path.stem
+        log().info(_INFO_USING_DERIVED_PATH.format(output_folder_name))
+    return Path(output_folder_name)
+
+
+def create_output_folder(output_path: Path) -> None:
+    """Creates output folder if not yet present.
+
+    Raises:
+        OutputPathError: if output folder could not be created, logged with level "ERROR"
+    """
+    if not output_path.is_dir():
+        log().debug(_DEBUG_NEW_FOLDER.format(output_path))
+        try:
+            output_path.mkdir(parents=True)
+        except OSError as e:
+            raise log_error(OutputPathError(_ERR_DIR_CREATE.format(output_path))) from e
+    else:
+        log().debug(_DEBUG_EXISTING_FOLDER.format(output_path))
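A short usage sketch of the new helpers, with a hypothetical input file name; given no explicit output path, the folder name falls back to the input file's stem:

from pathlib import Path
from fameio.output.files import get_output_folder_name, create_output_folder

output_path = get_output_folder_name(None, Path("run.pb"))  # -> Path("run")
create_output_folder(output_path)  # creates the folder only if missing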
fameio/output/input_dao.py CHANGED
@@ -4,15 +4,15 @@
 """Accessing input content of protobuf messages."""
 
 import ast
-from typing import Any
+from typing import Any, Optional
 
 from fameprotobuf.data_storage_pb2 import DataStorage
 from fameprotobuf.field_pb2 import NestedField
 from fameprotobuf.input_file_pb2 import InputData
 
-from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario
+from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario, StringSet, Attribute
 from fameio.input.schema import Schema, AttributeSpecs, AttributeType
-from fameio.logs import log_error
+from fameio.logs import log_error, log
 from fameio.output import OutputError
 from fameio.series import TimeSeriesManager, TimeSeriesError
 
@@ -28,6 +28,8 @@ class InputDao:
     _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."
     _ERR_NO_SCHEMA = "No schema found on file - cannot recover inputs."
     _ERR_SERIES_MISSING = "References time series '{}' was not registered on file."
+    _ERR_SCENARIO_METADATA = "Proceeding without metadata for scenario - could not be extracted due to: {}"
+    _ERR_STRING_SET_METADATA = "Proceeding without metadata for string set '{}' - could not be extracted due to: {}"
 
     _FIELD_NAME_MAP: dict = {
         AttributeType.STRING: "string_values",
@@ -65,7 +67,8 @@ class InputDao:
         """
         input_data = self.get_input_data()
         schema = self._get_schema(input_data)
-        scenario = Scenario(schema, self._get_general_properties(input_data))
+        metadata = self._metadata_to_dict(input_data.metadata)
+        scenario = Scenario(schema, self._get_general_properties(input_data), metadata)
         for contract in self._get_contracts(input_data):
             scenario.add_contract(contract)
 
@@ -73,6 +76,9 @@ class InputDao:
         for agent in self._get_agents(input_data, schema):
             scenario.add_agent(agent)
 
+        for name, string_set in self._get_string_sets(input_data).items():
+            scenario.add_string_set(name, string_set)
+
         return self._timeseries_manager, scenario
 
     def get_input_data(self) -> InputData:
@@ -95,6 +101,16 @@ class InputDao:
         """Read and return Schema from given `input_data`."""
         return Schema.from_string(input_data.schema)
 
+    @staticmethod
+    def _metadata_to_dict(metadata: Optional[str] = None) -> dict:
+        """Convert given metadata `metadata to dict`, proceeds on error but logs given `message`"""
+        if metadata:
+            try:
+                return ast.literal_eval(metadata)
+            except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError) as e:
+                log().error(InputDao._ERR_SCENARIO_METADATA.format(e))
+        return {}
+
     @staticmethod
     def _get_general_properties(input_data: InputData) -> GeneralProperties:
         """Read and return GeneralProperties from given `input_data`."""
@@ -121,6 +137,20 @@ class InputDao:
             for contract in input_data.contracts
         ]
 
+    @staticmethod
+    def _get_string_sets(input_data: InputData) -> dict[str, StringSet]:
+        """Read and return StringSets from given `input_data`."""
+        string_sets = {}
+        for dao in input_data.string_sets:
+            values = {
+                entry.name: {StringSet.KEY_METADATA: InputDao._metadata_to_dict(entry.metadata)} for entry in dao.values
+            }
+            metadata = InputDao._metadata_to_dict(dao.metadata)
+            string_sets[dao.name] = StringSet.from_dict(
+                {StringSet.KEY_VALUES: values, StringSet.KEY_METADATA: metadata}
+            )
+        return string_sets
+
     def _init_timeseries(self, input_data: InputData) -> None:
         """Read timeseries from given `input_data` and initialise TimeSeriesManager."""
         self._timeseries_manager.reconstruct_time_series(list(input_data.time_series))
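The dictionary fed into StringSet.from_dict pairs each value name with its recovered metadata. A hedged sketch of the resulting shape, using the key constants referenced above with hypothetical values:

from fameio.input.scenario import StringSet

string_set = StringSet.from_dict({
    StringSet.KEY_VALUES: {"coal": {StringSet.KEY_METADATA: {"color": "black"}}},
    StringSet.KEY_METADATA: {"source": "recovered"},
})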
@@ -146,14 +176,14 @@ class InputDao:
                 type_name=agent_dao.class_name,
                 metadata=ast.literal_eval(agent_dao.metadata) if agent_dao.metadata else None,
             )
-            attribute_dict = self._get_attributes(
+            attributes_dict = self._get_attributes_dict(
                 list(agent_dao.fields), schema.agent_types[agent_dao.class_name].attributes
             )
-            agent.init_attributes_from_dict(attribute_dict)
+            agent.init_attributes_from_dict(attributes_dict)
             agents.append(agent)
         return agents
 
-    def _get_attributes(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, Any]:
+    def _get_attributes_dict(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, dict]:
         """Read and return all Attributes as Dictionary from given list of fields.
 
         Args:
@@ -161,14 +191,15 @@ class InputDao:
             schematics: description of the attributes associated by name
 
         Returns:
-            all recovered attributes and their associated values
+            all recovered attributes and their associated values as dictionary
 
         Raises:
             InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
         """
-        attributes: dict[str, Any] = {}
+        attributes: dict[str, dict[str, Any]] = {}
         for field in fields:
-            attributes[field.field_name] = self._get_field_value(field, schematics[field.field_name])
+            value = self._get_field_value(field, schematics[field.field_name])
+            attributes[field.field_name] = value if not field.metadata else self._get_field_dict(value, field.metadata)
         return attributes
 
     def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
@@ -192,9 +223,20 @@ class InputDao:
             raise log_error(InputConversionError(self._ERR_SERIES_MISSING.format(field.series_id))) from e
         if attribute_type is AttributeType.BLOCK:
            if schematic.is_list:
-                return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
-            return self._get_attributes(list(field.fields), schematic.nested_attributes)
+                return [
+                    self._get_attributes_dict(list(entry.fields), schematic.nested_attributes) for entry in field.fields
+                ]
+            return self._get_attributes_dict(list(field.fields), schematic.nested_attributes)
         value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
         if schematic.is_list:
             return list(value)
         return list(value)[0]
+
+    def _get_field_dict(self, field_value: Any, metadata: str) -> dict[str, Any]:
+        """Returns dict with metadata and `field_value` associated with either singular or plural key, if is list."""
+        result: dict[str, Any] = {Attribute.KEY_METADATA: self._metadata_to_dict(metadata)}
+        if isinstance(field_value, list):
+            result[Attribute.KEY_VALUES] = field_value
+        else:
+            result[Attribute.KEY_VALUE] = field_value
+        return result
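_get_field_dict wraps a recovered value and its metadata into the dictionary layout expected by Attribute, choosing the singular or plural value key. A sketch of the two shapes, using the Attribute key constants referenced above with hypothetical metadata and values:

from fameio.input.scenario import Attribute

scalar_shape = {Attribute.KEY_METADATA: {"unit": "EUR/MWh"}, Attribute.KEY_VALUE: 42.0}
list_shape = {Attribute.KEY_METADATA: {"unit": "EUR/MWh"}, Attribute.KEY_VALUES: [1.0, 2.0]}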
fameio/output/metadata/json_writer.py CHANGED
@@ -8,8 +8,7 @@ from pathlib import Path
 
 from fameio.logs import log, log_error
 from fameio.output import OutputError
-
-METADATA_FILE_NAME = "metadata.json"
+from fameio.output.files import METADATA_FILE_NAME
 
 _ERR_OPEN_FILE = "Could not open file for writing: '{}'"
 _INFO_DESTINATION = "Saving JSON to file to {}"
fameio/scripts/REUSE.toml ADDED
@@ -0,0 +1,6 @@
+version = 1
+
+[[annotations]]
+path = ["*.py"]
+SPDX-FileCopyrightText = "2025 German Aerospace Center <fame@dlr.de>"
+SPDX-License-Identifier = "Apache-2.0"
fameio/scripts/convert_results.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
 
 import sys
 from pathlib import Path
-from typing import Any, BinaryIO
+from typing import Any, BinaryIO, Optional
 
 import pandas as pd
 
@@ -18,6 +18,12 @@ from fameio.output.conversion import apply_time_option, apply_time_merging
 from fameio.output.csv_writer import CsvWriter
 from fameio.output.data_transformer import DataTransformer, INDEX
 from fameio.output.execution_dao import ExecutionDao
+from fameio.output.files import (
+    get_output_folder_name,
+    create_output_folder,
+    RECOVERED_INPUT_PATH,
+    RECOVERED_SCENARIO_PATH,
+)
 from fameio.output.input_dao import InputDao
 from fameio.output.metadata.compiler import MetadataCompiler
 from fameio.output.metadata.json_writer import data_to_json_file
@@ -69,7 +75,10 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
         OutputError: if file could not be opened or converted, logged with level "ERROR"
     """
     log().info("Reading and extracting data...")
-    output_writer = CsvWriter(config[Options.OUTPUT], file_path, config[Options.SINGLE_AGENT_EXPORT])
+    output_path = get_output_folder_name(config[Options.OUTPUT], file_path)
+    create_output_folder(output_path)
+
+    output_writer = CsvWriter(output_path, config[Options.SINGLE_AGENT_EXPORT])
     agent_type_log = AgentTypeLog(_agent_name_filter_list=config[Options.AGENT_LIST])
     data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
     reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
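The output folder is now resolved and created once, then threaded through to the CSV writer, input recovery, and metadata writing (see the following hunks). A condensed sketch of that flow, with names exactly as in the code above:

output_path = get_output_folder_name(config[Options.OUTPUT], file_path)
create_output_folder(output_path)
output_writer = CsvWriter(output_path, config[Options.SINGLE_AGENT_EXPORT])
# later, the same path is reused:
#   _recover_inputs(output_path, input_dao, execution_dao.get_fameio_version())
#   write_metadata(output_path, config[Options.METADATA_TEMPLATE], compiler)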
@@ -90,7 +99,7 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
             output_writer.write_to_files(agent_name, data_frames)
 
     if config[Options.INPUT_RECOVERY]:
-        _recover_inputs(config, input_dao, execution_dao.get_fameio_version())
+        _recover_inputs(output_path, input_dao, execution_dao.get_fameio_version())
     if config[Options.MEMORY_SAVING]:
         _memory_saving_apply_conversions(config, output_writer)
 
@@ -100,15 +109,20 @@ def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO,
         else:
             log().warning(_WARN_OUTPUT_MISSING)
     elif config[Options.METADATA]:
-        write_metadata(config, input_dao, execution_dao, agent_type_log)
+        compiler = MetadataCompiler(
+            input_data=input_dao.get_input_data(),
+            execution_data=execution_dao.get_metadata_dict(),
+            agent_columns=agent_type_log.get_agent_columns(),
+        )
+        write_metadata(output_path, config[Options.METADATA_TEMPLATE], compiler)
     log().info("Data conversion completed.")
 
 
-def _recover_inputs(config: dict[Options, Any], input_dao: InputDao, fameio_version: str) -> None:
+def _recover_inputs(output_path: Path, input_dao: InputDao, fameio_version: str) -> None:
     """Reads scenario configuration from provided `input_dao`.
 
     Args:
-        config: script configuration options
+        output_path: path to output files
         input_dao: to recover the input data from
         fameio_version: version of fameio that was used to create the input data
 
@@ -120,12 +134,10 @@ def _recover_inputs(config: dict[Options, Any], input_dao: InputDao, fameio_vers
         timeseries, scenario = input_dao.recover_inputs()
     except InputError as ex:
         raise log_error(OutputError(_ERR_RECOVER_INPUT.format(fameio_version))) from ex
-    base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
-    series_writer = CsvWriter(
-        config_output=Path(base_path, "./recovered"), input_file_path=Path("./"), single_export=False
-    )
+
+    series_writer = CsvWriter(output_folder=Path(output_path, RECOVERED_INPUT_PATH), single_export=False)
     series_writer.write_all_time_series_to_disk(timeseries)
-    data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))
+    data_to_yaml_file(scenario.to_dict(), Path(output_path, RECOVERED_SCENARIO_PATH))
 
 
 def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer: CsvWriter) -> None:
@@ -149,31 +161,20 @@ def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer:
         output_writer.write_to_files(agent_name, parsed_data)
 
 
-def write_metadata(
-    config: dict[Options, Any], input_dao: InputDao, execution_dao: ExecutionDao, agent_type_log: AgentTypeLog
-):
+def write_metadata(output_path: Path, template_file: Optional[Path], compiler: MetadataCompiler):
     """Reads metadata templates, fills in available metadata, and writes output to a JSON file.
 
     Args:
-        config: to determined metadata template, and output path
-        input_dao: contains input data
-        execution_dao: contains execution metadata
-        agent_type_log: contains log about which agent output was created
+        output_path: path to output folder
+        template_file: path to metadata template (None allowed)
+        compiler: to compile metadata with
 
     Raises:
         OutputError: in case templates could not be read or filled-in, or JSON writing failed, logged with level "ERROR"
     """
-    compiler = MetadataCompiler(
-        input_data=input_dao.get_input_data(),
-        execution_data=execution_dao.get_metadata_dict(),
-        agent_columns=agent_type_log.get_agent_columns(),
-    )
-
-    template_file = config[Options.METADATA_TEMPLATE]
     template = OEO_TEMPLATE if template_file is None else read_template_file(template_file)
-    metadata = compiler.locate_and_replace(template)
-    base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else Path(".")
-    data_to_json_file(metadata, base_path)
+    output_metadata = compiler.locate_and_replace(template)
+    data_to_json_file(output_metadata, output_path)
 
 
 def run(config: dict[Options, Any] | None = None) -> None:
fameio/scripts/reformat.py CHANGED
@@ -19,6 +19,7 @@ FILE_NAME_APPENDIX = "_reformatted"
 _ERR_FAIL = "Timeseries reformatting script failed."
 _ERR_NO_FILES = "No file found matching this pattern: '{}'"
 _ERR_FILE_CONVERSION = "Could not reformat file: '{}'"
+_ERR_FILES_FAILED = "Could not reformat these files: '{}'"
 
 
 def reformat_file(file: Path, replace: bool) -> None:
@@ -58,9 +59,32 @@ def run(config: dict[Options, Any] | None = None) -> None:
         raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN]))) from ex
     if not files:
         raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN])))
+
+    erroneous_files = reformat_files(files, config[Options.REPLACE])
+    if len(erroneous_files) > 0:
+        log_error(ScriptError(_ERR_FILES_FAILED.format(erroneous_files)))
+    else:
+        log_and_print("All files reformatted.")
+
+
+def reformat_files(files: list[Path], replace: bool) -> list[str]:
+    """Reformats given files and potentially replaces them.
+
+    Args:
+        files: list of files to be reformatted
+        replace: if true, original files are replaced
+
+    Returns:
+        list of files that could not be reformatted
+    """
+    erroneous_files: list[str] = []
     for file in files:
         log_and_print(f"Reformatting file: {file}")
-        reformat_file(file, config[Options.REPLACE])
+        try:
+            reformat_file(file, replace)
+        except ScriptError:
+            erroneous_files.append(file.as_posix())
+    return erroneous_files
 
 
 if __name__ == "__main__":
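reformat_files collects failures instead of aborting on the first broken file. A hedged usage sketch with hypothetical file names:

from pathlib import Path
from fameio.scripts.reformat import reformat_files

failed = reformat_files([Path("a.csv"), Path("b.csv")], replace=False)
if failed:
    print(f"Could not reformat: {failed}")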
fameio/series.py CHANGED
@@ -13,6 +13,7 @@ from typing import Any
 import pandas as pd
 from fameprotobuf.input_file_pb2 import InputData
 from google.protobuf.internal.wire_format import INT64_MIN, INT64_MAX
+from pandas.errors import EmptyDataError, ParserError
 
 from fameio.input import InputError
 from fameio.input.resolver import PathResolver
@@ -141,7 +142,7 @@ class TimeSeriesManager:
         """
         try:
             return pd.read_csv(file, sep=";", header=None, comment="#")
-        except OSError as e:
+        except (OSError, EmptyDataError, ParserError) as e:
             raise log_error(TimeSeriesError(e)) from e
 
     @staticmethod
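Broadening the except clause matters because pandas raises its own errors for unreadable CSV content; previously these escaped as raw pandas exceptions instead of TimeSeriesError. A standalone sketch triggering one of the newly caught cases:

import io

import pandas as pd
from pandas.errors import EmptyDataError

try:
    pd.read_csv(io.StringIO(""), sep=";", header=None, comment="#")  # empty input
except EmptyDataError as e:
    print(f"now wrapped into TimeSeriesError: {e}")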
fameio-3.5.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fameio
-Version: 3.5.0
+Version: 3.5.1
 Summary: Tools for input preparation and output digestion of FAME models
 License: Apache-2.0
 Keywords: FAME,fameio,agent-based modelling,energy systems
fameio-3.5.1.dist-info/RECORD CHANGED
@@ -7,7 +7,7 @@ fameio/cli/parser.py,sha256=Ux_SW13y4UhfAEfe02qnODSFVBjW_lndPCSqqp2Prm0,11357
 fameio/cli/reformat.py,sha256=hnVsUyyMly3GN4TShl1QCVvb2w1d5gGgbUJUOWyN3Hw,2288
 fameio/input/__init__.py,sha256=UcBQmK24luQzLUXNaYiJBZcCmYiy8cF2RtM0RJ-Qr9w,615
 fameio/input/loader/__init__.py,sha256=YlxCAEJnORf6Dbab__m1Qr3ZOM55XvtczgFicO-erzc,2812
-fameio/input/loader/controller.py,sha256=-azfGvOiEmMKpRzYFpEjiMcsLrAGbuXK_auPqSLiE3I,8182
+fameio/input/loader/controller.py,sha256=QacBjHH7oILviq3hP9Z_WIfZW1Ki5yaAmkJqLlIZowQ,8220
 fameio/input/loader/loader.py,sha256=CZxvbdwyPVOogKunH6vUn-OXxNLCCGqVoTqK4rWBKmg,5397
 fameio/input/metadata.py,sha256=VB96o-MMi6WPtzLSM9P0Lrdn0VC6zClXziWrbuovjsk,6638
 fameio/input/resolver.py,sha256=ZJnVICipcMSHp1KYXLeK-nZBXIFQBjmPh3V1Vvqb6nk,1936
@@ -18,7 +18,7 @@ fameio/input/scenario/contract.py,sha256=zMxJag_1pQwgthMCDxabmyqrSBKXd4Xpa60e-C5
 fameio/input/scenario/exception.py,sha256=VFo9wP7J9W0wx_S7ynnTsIcobVB4H82iklH1jUob7to,1998
 fameio/input/scenario/fameiofactory.py,sha256=xjc1s1FTwbnS84pheqcGNllOJsN1fs7--J36RKO7VYI,2854
 fameio/input/scenario/generalproperties.py,sha256=hfcp3fbE7ZtdXo9RrVTiObzHS05jxXf3A5kQP-FSMK8,3824
-fameio/input/scenario/scenario.py,sha256=HOCeYj0i2aoLTpmwoywj0LEnfWjjJN_AHete9ZsbPTw,8422
+fameio/input/scenario/scenario.py,sha256=KtBVQshth7X5nA0SoN7nfbDPLmBU9z5vXQeVIh-tR2c,8498
 fameio/input/scenario/stringset.py,sha256=wDMh0teCX1oiANafrua-_r1DyufL8zfXYQ9e6wlYFdE,2576
 fameio/input/schema/__init__.py,sha256=PAqAvShdyhbxIa95WhJAVsiqeAFXYzb_nJiAHky84cU,401
 fameio/input/schema/agenttype.py,sha256=cnYgAv70ylqnLZJiDar7pqBUGhcr4RqkrC7BZetmgHo,5735
@@ -29,15 +29,16 @@ fameio/input/validator.py,sha256=55dE89u3Y6gYF3ClWXqKdctiVj6CvSU8l6kc1nJ6GEI,241
 fameio/input/writer.py,sha256=s-BarcsB6c8WqKBx3ctIp58EzqZWHceevMId_A-9df0,16635
 fameio/logs.py,sha256=HgdiotKyp9X308RFXhWAlLVzJjcDSBWZzI-vPRoftls,4595
 fameio/output/__init__.py,sha256=euHuzU3Uq4qWHOJvewNxobqYUha5mxidLuGTkus_Q2o,320
-fameio/output/agent_type.py,sha256=V4wsleNQGp6XLMURHLzYvSGy7K0e9_BiC9WVnBlWmzU,7133
+fameio/output/agent_type.py,sha256=fuduSoIk8pw_33ZigMkI3WvC5IQieTDoThEn1C1QLKw,7174
 fameio/output/conversion.py,sha256=Z-x9vLqygFIBIpKGQ-z-qvOWyTobowug1QuI6BhS9dA,4769
-fameio/output/csv_writer.py,sha256=AhUfZSoRemhQ4Zsd8RK3t-LiZrQAaekLkT5PcqSDr_Q,8183
+fameio/output/csv_writer.py,sha256=bzpLc5iNGJlCU4vDFJel7air3E2Hk8jFYBgiXJu0HME,6745
 fameio/output/data_transformer.py,sha256=--sUTADUL66F4zUmw6dOFuBoaJ8gdo_p22HLU8j-ZEQ,5321
 fameio/output/execution_dao.py,sha256=mP1YiTh3_TNgocrJzYqAkdo42oyx7etVTcaQVoOb5QE,7042
-fameio/output/input_dao.py,sha256=fs2SMvI7_aIIwAfMZdyHKGCgumFcFkJE9OGkKHlp4Jk,8530
+fameio/output/files.py,sha256=v01a6KWjYIvT0x9pcDQF4MAwsQrpfcaI4fjjx2OtBTc,2100
+fameio/output/input_dao.py,sha256=HHMJYt0GVjc7GggsvNSxiSTEdm511MEeA9OHQE_xr1A,10781
 fameio/output/metadata/__init__.py,sha256=JbKAcupner2ol7u5p2UyI0x-19OCd_INs24pW_VKmzc,339
 fameio/output/metadata/compiler.py,sha256=r1p_yXqRlS32vo3R0yzdQdBYz5x19WE9I9DKNCRicbY,2958
-fameio/output/metadata/json_writer.py,sha256=i-y_AuPLUiXaVhbZdVBM6gbLzvY8cC7AMdUIdG8Ky_0,1159
+fameio/output/metadata/json_writer.py,sha256=58Uuts78R7thAHkCn3mQvJ4uspbPXt8poGck_FKLr6A,1171
 fameio/output/metadata/locator.py,sha256=_ky_aRdY-ktg7FtTRpPgojsu2VwUcFSx7tSYc7vU87w,9859
 fameio/output/metadata/oeo_template.py,sha256=LfxdnJ1go5OjckfmoeAuZ6ltYzEbH_gHjofOBnqmOQI,4160
 fameio/output/metadata/template_reader.py,sha256=A51XrHkr6ck2qHI3ZXJT0AxxXNl_-3Eyw0ada5ZnZws,2071
@@ -45,23 +46,20 @@ fameio/output/output_dao.py,sha256=eS3BwnK8-3GY2mjh1A2ypdlq1bwBTWSxyWnxtqRSOVA,4
 fameio/output/reader.py,sha256=AgCFDh_vBbLjLxN85OdC_AMOHC3Zst-U69FEa3sdLN4,6603
 fameio/output/yaml_writer.py,sha256=tXNmMHFwpteQwpuREULjNCUQkxJ-OvtKe4Ep_GvGkSg,1131
 fameio/scripts/__init__.py,sha256=8E2JBeaPRM4fmXC3zIugjaUNA1NIBugs5IHc5pZMiMc,1539
-fameio/scripts/__init__.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
-fameio/scripts/convert_results.py,sha256=AJKn5x8AFMv7VFBhdat9FoJXd3B7rx5neDaJZMcR3S0,9569
-fameio/scripts/convert_results.py.license,sha256=EXKiZn-aoR7nO3auGMNGk9upHbobPLHAIBYUO0S6LUg,107
+fameio/scripts/convert_results.py,sha256=4Dzf-GYCtc8_XuWXG0LuDcog5SkgWItUJ0YZn__4WfY,9472
 fameio/scripts/exception.py,sha256=zaIJ3V-KaAOwggW72DSo-LmyN2jtdTG4R-oYi1liVBc,283
 fameio/scripts/make_config.py,sha256=O6GghQ1M1Y6bxlaS1c9joB066SCaCWvG_V-WTFqFreU,2016
-fameio/scripts/make_config.py.license,sha256=EXKiZn-aoR7nO3auGMNGk9upHbobPLHAIBYUO0S6LUg,107
-fameio/scripts/reformat.py,sha256=jYJsl0UkXtZyn2GyA-QVAARilkHa_ZBWa5CGNIGNDuo,2850
-fameio/scripts/reformat.py.license,sha256=EXKiZn-aoR7nO3auGMNGk9upHbobPLHAIBYUO0S6LUg,107
-fameio/series.py,sha256=KNDKe9CxScYSWacQKJGjwoBEK2I8mezYDAQJfmkU-f8,13987
+fameio/scripts/reformat.py,sha256=8E-zoUiQU_e8RI0a_h06QBt9NkrJwLjc4Mq824kFSTs,3627
+fameio/scripts/REUSE.toml,sha256=6UE87HbRv_xOtHCQULBwsFIhpgPBmDJdrErq92wt22U,160
+fameio/series.py,sha256=tiSqIOOnDtwej0YJxmucXHBdJztlVGFEfv0L9lDBxBw,14073
 fameio/time.py,sha256=jz-5ZXkIhrjcNuF0nNrQTShkyQFoeiyVOu5MEe7MPps,10253
 fameio/tools.py,sha256=A3UtAIb-gH6tCcxuTzoNYrj4COGXISxoU9Tn9M_fmSM,2544
-fameio-3.5.0.dist-info/entry_points.txt,sha256=IUbTceB_CLFOHulubEf9jgiCFsV2TchlzCssmjbiOKI,176
-fameio-3.5.0.dist-info/LICENSE.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
-fameio-3.5.0.dist-info/LICENSES/Apache-2.0.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
-fameio-3.5.0.dist-info/LICENSES/CC-BY-4.0.txt,sha256=y9WvMYKGt0ZW8UXf9QkZB8wj1tjJrQngKR7CSXeSukE,19051
-fameio-3.5.0.dist-info/LICENSES/CC-BY-ND-4.0.txt,sha256=i8xEGbjrLjDix0UDvo7U7-Mgb-riQ2jxX2Jozaz4U9k,18829
-fameio-3.5.0.dist-info/LICENSES/CC0-1.0.txt,sha256=9Ofzc7m5lpUDN-jUGkopOcLZC3cl6brz1QhKInF60yg,7169
-fameio-3.5.0.dist-info/METADATA,sha256=Ui_POdIBbt6Cv2KpCPKL7Y29HUsC6ri-GmL7Ob9zM8U,4796
-fameio-3.5.0.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-fameio-3.5.0.dist-info/RECORD,,
+fameio-3.5.1.dist-info/entry_points.txt,sha256=IUbTceB_CLFOHulubEf9jgiCFsV2TchlzCssmjbiOKI,176
+fameio-3.5.1.dist-info/LICENSE.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+fameio-3.5.1.dist-info/LICENSES/Apache-2.0.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+fameio-3.5.1.dist-info/LICENSES/CC-BY-4.0.txt,sha256=y9WvMYKGt0ZW8UXf9QkZB8wj1tjJrQngKR7CSXeSukE,19051
+fameio-3.5.1.dist-info/LICENSES/CC-BY-ND-4.0.txt,sha256=i8xEGbjrLjDix0UDvo7U7-Mgb-riQ2jxX2Jozaz4U9k,18829
+fameio-3.5.1.dist-info/LICENSES/CC0-1.0.txt,sha256=9Ofzc7m5lpUDN-jUGkopOcLZC3cl6brz1QhKInF60yg,7169
+fameio-3.5.1.dist-info/METADATA,sha256=BAtKp3BRkT3UnujpZj5hXszmpzWOTBwAUzOlM4OMA48,4796
+fameio-3.5.1.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+fameio-3.5.1.dist-info/RECORD,,
fameio/scripts/__init__.py.license DELETED
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
-
-SPDX-License-Identifier: Apache-2.0
fameio/scripts/convert_results.py.license DELETED
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
-
-SPDX-License-Identifier: Apache-2.0
fameio/scripts/make_config.py.license DELETED
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
-
-SPDX-License-Identifier: Apache-2.0
fameio/scripts/reformat.py.license DELETED
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
-
-SPDX-License-Identifier: Apache-2.0