fameio 3.1.1__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. fameio/cli/convert_results.py +10 -10
  2. fameio/cli/make_config.py +9 -9
  3. fameio/cli/options.py +6 -4
  4. fameio/cli/parser.py +87 -51
  5. fameio/cli/reformat.py +58 -0
  6. fameio/input/__init__.py +4 -4
  7. fameio/input/loader/__init__.py +13 -13
  8. fameio/input/loader/controller.py +64 -18
  9. fameio/input/loader/loader.py +25 -16
  10. fameio/input/metadata.py +57 -38
  11. fameio/input/resolver.py +9 -10
  12. fameio/input/scenario/agent.py +62 -26
  13. fameio/input/scenario/attribute.py +93 -40
  14. fameio/input/scenario/contract.py +160 -56
  15. fameio/input/scenario/exception.py +41 -18
  16. fameio/input/scenario/fameiofactory.py +57 -6
  17. fameio/input/scenario/generalproperties.py +22 -12
  18. fameio/input/scenario/scenario.py +117 -38
  19. fameio/input/scenario/stringset.py +29 -11
  20. fameio/input/schema/agenttype.py +27 -10
  21. fameio/input/schema/attribute.py +108 -45
  22. fameio/input/schema/java_packages.py +14 -12
  23. fameio/input/schema/schema.py +39 -15
  24. fameio/input/validator.py +198 -54
  25. fameio/input/writer.py +137 -46
  26. fameio/logs.py +28 -47
  27. fameio/output/__init__.py +5 -1
  28. fameio/output/agent_type.py +89 -28
  29. fameio/output/conversion.py +52 -37
  30. fameio/output/csv_writer.py +107 -27
  31. fameio/output/data_transformer.py +17 -24
  32. fameio/output/execution_dao.py +170 -0
  33. fameio/output/input_dao.py +71 -33
  34. fameio/output/output_dao.py +33 -11
  35. fameio/output/reader.py +64 -21
  36. fameio/output/yaml_writer.py +16 -8
  37. fameio/scripts/__init__.py +22 -4
  38. fameio/scripts/convert_results.py +126 -52
  39. fameio/scripts/convert_results.py.license +1 -1
  40. fameio/scripts/exception.py +7 -0
  41. fameio/scripts/make_config.py +34 -13
  42. fameio/scripts/make_config.py.license +1 -1
  43. fameio/scripts/reformat.py +71 -0
  44. fameio/scripts/reformat.py.license +3 -0
  45. fameio/series.py +174 -59
  46. fameio/time.py +79 -25
  47. fameio/tools.py +48 -8
  48. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/METADATA +50 -34
  49. fameio-3.3.0.dist-info/RECORD +60 -0
  50. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/WHEEL +1 -1
  51. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/entry_points.txt +1 -0
  52. CHANGELOG.md +0 -288
  53. fameio-3.1.1.dist-info/RECORD +0 -56
  54. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSE.txt +0 -0
  55. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  56. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  57. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0

fameio/scripts/convert_results.py  (+126 -52)
@@ -1,101 +1,175 @@
 #!/usr/bin/env python
+from __future__ import annotations
+
 import sys
 from pathlib import Path
+from typing import Any, BinaryIO
 
 import pandas as pd
 
+from fameio.cli import update_default_config
 from fameio.cli.convert_results import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
 from fameio.cli.options import Options
-from fameio.cli import update_default_config
-from fameio.logs import log_critical_and_raise, fameio_logger, log
+from fameio.input import InputError
+from fameio.logs import fameio_logger, log, log_error, log_critical
+from fameio.output import OutputError
 from fameio.output.agent_type import AgentTypeLog
 from fameio.output.conversion import apply_time_option, apply_time_merging
 from fameio.output.csv_writer import CsvWriter
 from fameio.output.data_transformer import DataTransformer, INDEX
+from fameio.output.execution_dao import ExecutionDao
 from fameio.output.input_dao import InputDao
 from fameio.output.output_dao import OutputDAO
 from fameio.output.reader import Reader
 from fameio.output.yaml_writer import data_to_yaml_file
+from fameio.scripts.exception import ScriptError
 
-ERR_OUT_OF_MEMORY = "Out of memory. Retry result conversion using `-m` or `--memory-saving` option."
-ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."
+_ERR_OUT_OF_MEMORY = "Out of memory. Retry result conversion using `-m` or `--memory-saving` option."
+_ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."
+_ERR_FILE_OPEN_FAIL = "Could not open file: '{}'"
+_ERR_RECOVER_INPUT = "Input recovery failed: File was created with `fameio=={}`. Use that version to recover inputs."
+_ERR_FAIL = "Results conversion script failed."
 
-WARN_OUTPUT_MISSING = "Provided file did not contain any output data, only input recovery available."
+_WARN_OUTPUT_SUPPRESSED = "All output data suppressed by agent filter, but there is data available for agent types: {}"
+_WARN_OUTPUT_MISSING = "Provided file did not contain any output data, only input recovery available."
+_INFO_MEMORY_SAVING = "Memory saving mode enabled: Disable on conversion of small files for performance improvements."
 
-INFO_MEMORY_SAVING = "Memory saving mode enabled: Disable on conversion of small files for performance improvements."
-INFO_RECOVERY = "Recovering inputs..."
-INFO_CONVERSION = "Applying time conversion and merging options to extracted files..."
 
+def _read_and_extract_data(config: dict[Options, Any]) -> None:
+    """Read protobuf file, extracts, converts, and saves the converted data.
 
-def _extract_data(config: dict) -> bool:
-    """Extracts, converts, and saves the converted data; Returns false if no result data was found"""
+    Args:
+        config: script configuration options
+
+    Raises:
+        OutputError: if file could not be opened or converted, logged with level "ERROR"
+    """
     file_path = Path(config[Options.FILE])
+    log().info("Opening file for reading...")
+    try:
+        with open(file_path, "rb") as file_stream:
+            _extract_and_convert_data(config, file_stream, file_path)
+    except OSError as ex:
+        raise log_error(OutputError(_ERR_FILE_OPEN_FAIL.format(file_path))) from ex
+
+
+def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO, file_path: Path) -> None:
+    """Extracts data from provided input file stream, converts it, and writes the result to output files.
+
+    Args:
+        config: script configuration options
+        file_stream: opened input file
+        file_path: path to input file
+
+    Raises:
+        OutputError: if file could not be opened or converted, logged with level "ERROR"
+    """
+    log().info("Reading and extracting data...")
     output_writer = CsvWriter(config[Options.OUTPUT], file_path, config[Options.SINGLE_AGENT_EXPORT])
-    agent_type_log = AgentTypeLog(requested_agents=config[Options.AGENT_LIST])
+    agent_type_log = AgentTypeLog(_agent_name_filter_list=config[Options.AGENT_LIST])
     data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
+    reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
+    input_dao = InputDao()
+    execution_dao = ExecutionDao()
+    while data_storages := reader.read():
+        execution_dao.store_execution_metadata(data_storages)
+        if config[Options.INPUT_RECOVERY]:
+            input_dao.store_inputs(data_storages)
+        output = OutputDAO(data_storages, agent_type_log)
+        for agent_name in output.get_sorted_agents_to_extract():
+            log().debug(f"Extracting data for {agent_name}...")
+            data_frames = output.get_agent_data(agent_name, data_transformer)
+            if not config[Options.MEMORY_SAVING]:
+                apply_time_merging(data_frames, config[Options.TIME_MERGING])
+                apply_time_option(data_frames, config[Options.TIME])
+            log().debug(f"Writing data for {agent_name}...")
+            output_writer.write_to_files(agent_name, data_frames)
+
+    if config[Options.INPUT_RECOVERY]:
+        _recover_inputs(config, input_dao, execution_dao.get_fameio_version())
+    if config[Options.MEMORY_SAVING]:
+        _memory_saving_apply_conversions(config, output_writer)
 
-    log().info("Reading and extracting data...")
-    with open(file_path, "rb") as file_stream:
-        reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
-        input_dao = InputDao()
-        while data_storages := reader.read():
-            if config[Options.INPUT_RECOVERY]:
-                input_dao.store_inputs(data_storages)
-            output = OutputDAO(data_storages, agent_type_log)
-            for agent_name in output.get_sorted_agents_to_extract():
-                log().debug(f"Extracting data for {agent_name}...")
-                data_frames = output.get_agent_data(agent_name, data_transformer)
-                if not config[Options.MEMORY_SAVING]:
-                    apply_time_merging(data_frames, config[Options.TIME_MERGING])
-                    apply_time_option(data_frames, config[Options.TIME])
-                log().debug(f"Writing data for {agent_name}...")
-                output_writer.write_to_files(agent_name, data_frames)
+    if not agent_type_log.has_any_agent_type():
+        if len(agent_type_log.get_agents_with_output()) > 0:
+            log().warning(_WARN_OUTPUT_SUPPRESSED.format(agent_type_log.get_agents_with_output()))
+        else:
+            log().warning(_WARN_OUTPUT_MISSING)
+    log().info("Data conversion completed.")
 
-    if config[Options.INPUT_RECOVERY]:
-        _recover_inputs(config, input_dao)
-    if config[Options.MEMORY_SAVING]:
-        _memory_saving_apply_conversions(config, output_writer)
-    log().info("Data conversion completed.")
-    return agent_type_log.has_any_agent_type()
 
+def _recover_inputs(config: dict[Options, Any], input_dao: InputDao, fameio_version: str) -> None:
+    """Reads scenario configuration from provided `input_dao`.
+
+    Args:
+        config: script configuration options
+        input_dao: to recover the input data from
+        fameio_version: version of fameio that was used to create the input data
 
-def _recover_inputs(config: dict, input_dao: InputDao) -> None:
-    """Reads scenario configuration from provided input_dao"""
-    log().info(INFO_RECOVERY)
-    timeseries, scenario = input_dao.recover_inputs()
+    Raises:
+        OutputError: if inputs could not be recovered or saved to files, logged with level "ERROR"
+    """
+    log().info("Recovering inputs...")
+    try:
+        timeseries, scenario = input_dao.recover_inputs()
+    except InputError as ex:
+        raise log_error(OutputError(_ERR_RECOVER_INPUT.format(fameio_version))) from ex
     base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
     series_writer = CsvWriter(
         config_output=Path(base_path, "./recovered"), input_file_path=Path("./"), single_export=False
     )
-    series_writer.write_time_series_to_disk(timeseries)
+    series_writer.write_all_time_series_to_disk(timeseries)
     data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))
 
 
-def _memory_saving_apply_conversions(config: dict, output_writer: CsvWriter) -> None:
-    """Rewrite result files in memory saving mode: apply time-merging and time conversion options on a per-file basis"""
-    log().info(INFO_CONVERSION)
+def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer: CsvWriter) -> None:
+    """Rewrite result files: applies time-merging and time conversion options on a per-file basis.
+
+    This is only required in memory saving mode.
+
+    Args:
+        config: script configuration options
+        output_writer: to rewrite the previously written files
+
+    Raises:
+        OutputError: in case files could not be read, converted, or re-written, logged with level "ERROR"
+    """
+    log().info("Applying time conversion and merging options to extracted files...")
     written_files = output_writer.pop_all_file_paths()
     for agent_name, file_path in written_files.items():
-        parsed_data = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
+        parsed_data: dict[str | None, pd.DataFrame] = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
         apply_time_merging(parsed_data, config[Options.TIME_MERGING])
         apply_time_option(parsed_data, config[Options.TIME])
         output_writer.write_to_files(agent_name, parsed_data)
 
 
-def run(config: dict = None) -> None:
-    """Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s)"""
+def run(config: dict[Options, Any] | None = None) -> None:
+    """Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s).
+
+    Args:
+        config: script configuration options
+
+    Raises:
+        ScriptError: if any kind of expected error or a memory error occurred, logged with level "CRITICAL"
+    """
     config = update_default_config(config, DEFAULT_CONFIG)
     fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
     if config[Options.MEMORY_SAVING]:
-        log().info(INFO_MEMORY_SAVING)
+        log().info(_INFO_MEMORY_SAVING)
+
     try:
-        found_result_data = _extract_data(config)
-        if not found_result_data:
-            log().warning(WARN_OUTPUT_MISSING)
-    except MemoryError:
-        log_critical_and_raise(MemoryError(ERR_MEMORY_SEVERE if config[Options.MEMORY_SAVING] else ERR_OUT_OF_MEMORY))
+        try:
+            _read_and_extract_data(config)
+        except MemoryError as ex:
+            error = OutputError(_ERR_MEMORY_SEVERE if config[Options.MEMORY_SAVING] else _ERR_OUT_OF_MEMORY)
+            raise log_critical(error) from ex
+    except OutputError as ex:
+        raise log_critical(ScriptError(_ERR_FAIL)) from ex
 
 
 if __name__ == "__main__":
-    run_config = handle_args(sys.argv[1:])
-    run(run_config)
+    cli_config = handle_args(sys.argv[1:])
+    try:
+        run(cli_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
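
Note on the hunk above: `run()` no longer returns a flag indicating whether result data was found; expected failures are chained into a single `ScriptError`, and the `__main__` block turns that into `SystemExit(1)`. A minimal sketch of calling the script programmatically, using only option keys that appear in the hunk, with hypothetical file names and the remaining options assumed to be filled in from `CLI_DEFAULTS` by `update_default_config()`:

    from pathlib import Path

    from fameio.cli.options import Options
    from fameio.scripts.convert_results import run
    from fameio.scripts.exception import ScriptError

    # Hypothetical paths; unset options fall back to the CLI defaults inside run()
    config = {
        Options.FILE: Path("results.pb"),     # protobuf results file (assumed name)
        Options.OUTPUT: Path("./converted"),  # folder for the extracted CSV/YAML files
        Options.MEMORY_SAVING: False,         # enable for very large result files
    }

    try:
        run(config)
    except ScriptError as error:
        # All expected conversion failures (file access, input recovery, memory) end up here
        print(f"Conversion failed: {error}")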

fameio/scripts/convert_results.py.license  (+1 -1)
@@ -1,3 +1,3 @@
-SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 
 SPDX-License-Identifier: Apache-2.0

fameio/scripts/exception.py  (new file, +7 -0)
@@ -0,0 +1,7 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+
+class ScriptError(Exception):
+    """Any kind of expected error that occurred during execution of FAME-Io scripts."""
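
`ScriptError` is the single expected failure type surfaced by the fameio scripts. In the hunks in this diff it is always raised via `raise log_critical(ScriptError(...)) from ex` (or `log_error(...)`), which implies the logging helpers return the exception passed to them so it can be re-raised with the original cause chained. A small sketch of that pattern, under that assumption:

    from fameio.logs import log_critical
    from fameio.scripts.exception import ScriptError

    def _convert() -> None:
        raise ValueError("stand-in for a lower-level InputError/OutputError")

    try:
        try:
            _convert()
        except ValueError as ex:
            # log_critical() is assumed to log the message and hand the exception back,
            # so the original error stays reachable via __cause__ after re-raising
            raise log_critical(ScriptError("Script failed.")) from ex
    except ScriptError as error:
        assert isinstance(error.__cause__, ValueError)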

fameio/scripts/make_config.py  (+34 -13)
@@ -1,34 +1,55 @@
 #!/usr/bin/env python
+from __future__ import annotations
+
 import sys
 from pathlib import Path
+from typing import Any
 
+from fameio.cli import update_default_config
 from fameio.cli.make_config import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
 from fameio.cli.options import Options
-from fameio.cli import update_default_config
+from fameio.input import InputError
 from fameio.input.loader import load_yaml, validate_yaml_file_suffix
-from fameio.logs import fameio_logger, log
 from fameio.input.scenario import Scenario
 from fameio.input.validator import SchemaValidator
 from fameio.input.writer import ProtoWriter
+from fameio.logs import fameio_logger, log, log_critical
+from fameio.scripts.exception import ScriptError
+
+_ERR_FAIL: str = "Creation of run configuration file failed."
+
+
+def run(config: dict[Options, Any] | None = None) -> None:
+    """Executes the main workflow of building a FAME configuration file.
 
+    Args:
+        config: configuration options
 
-def run(config: dict = None) -> None:
-    """Executes the main workflow for the building of a FAME configuration file"""
+    Raises:
+        ScriptError: if any kind of expected error occurred, logged with level "CRITICAL"
+    """
     config = update_default_config(config, DEFAULT_CONFIG)
     fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
 
-    file = config[Options.FILE]
-    validate_yaml_file_suffix(Path(file))
-    scenario = Scenario.from_dict(load_yaml(Path(file), encoding=config[Options.INPUT_ENCODING]))
-    SchemaValidator.check_agents_have_contracts(scenario)
+    try:
+        file = config[Options.FILE]
+        validate_yaml_file_suffix(Path(file))
+        scenario_definition = load_yaml(Path(file), encoding=config[Options.INPUT_ENCODING])
+        scenario = Scenario.from_dict(scenario_definition)
+        SchemaValidator.check_agents_have_contracts(scenario)
 
-    timeseries_manager = SchemaValidator.validate_scenario_and_timeseries(scenario)
-    writer = ProtoWriter(config[Options.OUTPUT], timeseries_manager)
-    writer.write_validated_scenario(scenario)
+        timeseries_manager = SchemaValidator.validate_scenario_and_timeseries(scenario)
+        writer = ProtoWriter(config[Options.OUTPUT], timeseries_manager)
+        writer.write_validated_scenario(scenario)
+    except InputError as ex:
+        raise log_critical(ScriptError(_ERR_FAIL)) from ex
 
     log().info("Configuration completed.")
 
 
 if __name__ == "__main__":
-    run_config = handle_args(sys.argv[1:])
-    run(run_config)
+    cli_config = handle_args(sys.argv[1:])
+    try:
+        run(cli_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
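
As with the results conversion script, `run()` here wraps every expected `InputError` into a `ScriptError` logged at CRITICAL level. A minimal programmatic call, with hypothetical file names and the remaining options assumed to come from `CLI_DEFAULTS`:

    from fameio.cli.options import Options
    from fameio.scripts.make_config import run
    from fameio.scripts.exception import ScriptError

    # Hypothetical paths: a scenario YAML to validate and the protobuf file to write
    config = {
        Options.FILE: "scenario.yaml",
        Options.OUTPUT: "config.pb",
    }

    try:
        run(config)
    except ScriptError:
        raise SystemExit(1)  # mirrors the __main__ block in the hunk above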

fameio/scripts/make_config.py.license  (+1 -1)
@@ -1,3 +1,3 @@
-SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 
 SPDX-License-Identifier: Apache-2.0

fameio/scripts/reformat.py  (new file, +71 -0)
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+from typing import Any
+
+from fameio.cli import update_default_config
+from fameio.cli.options import Options
+from fameio.cli.reformat import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
+from fameio.logs import fameio_logger, log_error, log_and_print
+from fameio.output.csv_writer import CsvWriter, CsvWriterError
+from fameio.scripts.exception import ScriptError
+from fameio.series import TimeSeriesManager, TimeSeriesError
+from fameio.tools import get_csv_files_with_pattern, extend_file_name
+
+FILE_NAME_APPENDIX = "_reformatted"
+
+_ERR_FAIL = "Timeseries reformatting script failed."
+_ERR_NO_FILES = "No file found matching this pattern: '{}'"
+_ERR_FILE_CONVERSION = "Could not reformat file: '{}'"
+
+
+def reformat_file(file: Path, replace: bool) -> None:
+    """Transforms content of specified CSV file to FAME format.
+
+    Args:
+        file: whose content is to be reformatted
+        replace: if true, original file will be replaced; otherwise, a new file will be created instead
+
+    Raises:
+        ScriptError: if file could not be read, file reformatting failed, or result file could not be written;
+            logged with level "ERROR"
+    """
+    try:
+        data = TimeSeriesManager.read_timeseries_file(file)
+        data = TimeSeriesManager.check_and_convert_series(data, str(file), warn=False)
+        target_path = file if replace else extend_file_name(file, FILE_NAME_APPENDIX)
+        CsvWriter.write_single_time_series_to_disk(data, target_path)
+    except (TimeSeriesError, CsvWriterError) as ex:
+        raise log_error(ScriptError(_ERR_FILE_CONVERSION.format(file))) from ex
+
+
+def run(config: dict[Options, Any] | None = None) -> None:
+    """Executes the workflow of transforming time series file(s).
+
+    Args:
+        config: configuration options
+
+    Raises:
+        ScriptError: if no file could be found, or if any file could not be transformed, logged with level "ERROR"
+    """
+    config = update_default_config(config, DEFAULT_CONFIG)
+    fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
+    try:
+        files = get_csv_files_with_pattern(Path("."), config[Options.FILE_PATTERN])
+    except ValueError as ex:
+        raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN]))) from ex
+    if not files:
+        raise log_error(ScriptError(_ERR_NO_FILES.format(config[Options.FILE_PATTERN])))
+    for file in files:
+        log_and_print(f"Reformatting file: {file}")
+        reformat_file(file, config[Options.REPLACE])
+
+
+if __name__ == "__main__":
+    cli_config = handle_args(sys.argv[1:])
+    try:
+        run(cli_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
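
The new reformat script globs CSV files relative to the current working directory and either overwrites them or writes "_reformatted" copies, depending on `Options.REPLACE`. A minimal sketch of invoking it programmatically, with a hypothetical file pattern and the unset options assumed to come from `CLI_DEFAULTS`:

    from fameio.cli.options import Options
    from fameio.scripts.reformat import run
    from fameio.scripts.exception import ScriptError

    # Hypothetical pattern; matched files keep their originals and gain a
    # "_reformatted" sibling because Options.REPLACE is False
    config = {
        Options.FILE_PATTERN: "*.csv",
        Options.REPLACE: False,
    }

    try:
        run(config)
    except ScriptError:
        raise SystemExit(1)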

fameio/scripts/reformat.py.license  (new file, +3 -0)
@@ -0,0 +1,3 @@
+SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+
+SPDX-License-Identifier: Apache-2.0