fameio 3.1.0__py3-none-any.whl → 3.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. CHANGELOG.md +10 -1
  2. fameio/cli/__init__.py +2 -3
  3. fameio/cli/options.py +3 -3
  4. fameio/cli/parser.py +3 -3
  5. fameio/input/__init__.py +0 -8
  6. fameio/input/loader/controller.py +5 -6
  7. fameio/input/metadata.py +4 -7
  8. fameio/input/scenario/__init__.py +7 -8
  9. fameio/input/scenario/agent.py +3 -4
  10. fameio/input/scenario/attribute.py +14 -14
  11. fameio/input/scenario/contract.py +9 -9
  12. fameio/input/scenario/exception.py +8 -11
  13. fameio/input/schema/__init__.py +5 -5
  14. fameio/input/schema/agenttype.py +8 -9
  15. fameio/input/schema/attribute.py +95 -74
  16. fameio/input/validator.py +46 -37
  17. fameio/input/writer.py +4 -6
  18. fameio/logs.py +37 -3
  19. fameio/output/agent_type.py +11 -8
  20. fameio/output/conversion.py +2 -2
  21. fameio/output/data_transformer.py +6 -8
  22. fameio/output/input_dao.py +7 -12
  23. fameio/output/reader.py +4 -6
  24. fameio/output/yaml_writer.py +3 -3
  25. fameio/scripts/convert_results.py +52 -33
  26. fameio/series.py +18 -17
  27. fameio/time.py +15 -21
  28. fameio/tools.py +2 -3
  29. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/METADATA +8 -8
  30. fameio-3.1.1.dist-info/RECORD +56 -0
  31. fameio-3.1.0.dist-info/RECORD +0 -56
  32. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSE.txt +0 -0
  33. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  34. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  35. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  36. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/WHEEL +0 -0
  37. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/entry_points.txt +0 -0
fameio/output/data_transformer.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  from __future__ import annotations
@@ -21,8 +21,8 @@ class DataTransformer(ABC):
  """Extracts and provides series data from parsed and processed output files for requested agents"""

  MODES = {
- ResolveOptions.IGNORE: lambda: DataTransformerIgnore(),
- ResolveOptions.SPLIT: lambda: DataTransformerSplit(),
+ ResolveOptions.IGNORE: lambda: DataTransformerIgnore(), # pylint: disable=unnecessary-lambda
+ ResolveOptions.SPLIT: lambda: DataTransformerSplit(), # pylint: disable=unnecessary-lambda
  }
  SIMPLE_COLUMN_INDEX = -1

@@ -95,13 +95,11 @@ class DataTransformer(ABC):

  @staticmethod
  def _merge_complex_column(column: Output.Series.Line.Column, values: list) -> None:
- """Does not merge complex column data"""
- pass
+ """Merges complex column data"""

  @staticmethod
- def _store_complex_values(column: Output.Series.Line.Column, container: dict[int, dict], index: tuple) -> None:
- """Does not store complex column data"""
- pass
+ def _store_complex_values(column: Output.Series.Line.Column, container: dict[int, dict], base_index: tuple) -> None:
+ """Stores complex column data"""

  @staticmethod
  def _get_column_map(agent_type: AgentType) -> dict[int, str]:
fameio/output/input_dao.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  import ast
@@ -17,8 +17,6 @@ from fameio.series import TimeSeriesManager
  class InputConversionError(Exception):
  """Indicates an error during reconstruction of input from its protobuf representation"""

- pass
-

  class InputDao:
  """Data access object for inputs saved in protobuf"""
@@ -150,16 +148,13 @@ class InputDao:
  def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
  """Extracts and returns value(s) of given `field`"""
  attribute_type: AttributeType = schematic.attr_type
- value = field.__getattribute__(self._FIELD_NAME_MAP[attribute_type])
+ value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
  if attribute_type is AttributeType.TIME_SERIES:
  return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
- elif attribute_type is AttributeType.BLOCK:
+ if attribute_type is AttributeType.BLOCK:
  if schematic.is_list:
  return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
- else:
- return self._get_attributes(list(field.fields), schematic.nested_attributes)
- else:
- if schematic.is_list:
- return list(value)
- else:
- return list(value)[0]
+ return self._get_attributes(list(field.fields), schematic.nested_attributes)
+ if schematic.is_list:
+ return list(value)
+ return list(value)[0]
fameio/output/reader.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  from __future__ import annotations
@@ -17,8 +17,6 @@ from fameio.logs import log, log_critical_and_raise
  class ProtobufReaderError(Exception):
  """Indicates an error while reading a protobuf file"""

- pass
-

  class Reader(ABC):
  """Abstract base class for protobuf file readers"""
@@ -40,7 +38,7 @@ class Reader(ABC):
  _READER_HEADERS = {
  None: lambda file, mode: Reader._raise_error(Reader._ERR_DEPRECATED_V0),
  fameio.FILE_HEADER_V1: lambda file, mode: Reader._raise_error(Reader._ERR_DEPRECATED_V1),
- fameio.FILE_HEADER_V2: lambda file, mode: ReaderV2(file, mode),
+ fameio.FILE_HEADER_V2: lambda file, mode: ReaderV2(file, mode), # pylint: disable=unnecessary-lambda
  }

  @staticmethod
@@ -108,8 +106,8 @@ class Reader(ABC):
  data_storage = DataStorage()
  try:
  data_storage.ParseFromString(message)
- except DecodeError:
- raise IOError(Reader._ERR_PARSING_FAILED)
+ except DecodeError as e:
+ raise IOError(Reader._ERR_PARSING_FAILED) from e
  return data_storage

fameio/output/yaml_writer.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  from pathlib import Path
@@ -21,7 +21,7 @@ def data_to_yaml_file(data: dict, file_path: Path) -> None:
  """
  log().info(INFO_DESTINATION.format(file_path))
  try:
- with open(file_path, "w") as f:
- yaml.dump(data, f, sort_keys=False)
+ with open(file_path, "w", encoding="utf-8") as f:
+ yaml.dump(data, f, sort_keys=False, encoding="utf-8")
  except Exception as e:
  raise RuntimeError(ERR_WRITE_EXCEPTION.format(file_path)) from e
fameio/scripts/convert_results.py CHANGED
@@ -17,27 +17,26 @@ from fameio.output.output_dao import OutputDAO
  from fameio.output.reader import Reader
  from fameio.output.yaml_writer import data_to_yaml_file

- ERR_MEMORY_ERROR = "Out of memory. Try using `-m` or `--memory-saving` option."
+ ERR_OUT_OF_MEMORY = "Out of memory. Retry result conversion using `-m` or `--memory-saving` option."
  ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."

+ WARN_OUTPUT_MISSING = "Provided file did not contain any output data, only input recovery available."

- def run(config: dict = None) -> None:
- """Reads file in protobuf format for configures FILE and extracts its content to .csv file(s)"""
- config = update_default_config(config, DEFAULT_CONFIG)
- fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
+ INFO_MEMORY_SAVING = "Memory saving mode enabled: Disable on conversion of small files for performance improvements."
+ INFO_RECOVERY = "Recovering inputs..."
+ INFO_CONVERSION = "Applying time conversion and merging options to extracted files..."

- file_path = config[Options.FILE]
- output_writer = CsvWriter(config[Options.OUTPUT], Path(file_path), config[Options.SINGLE_AGENT_EXPORT])
- file_stream = open(Path(file_path), "rb")

- if config[Options.MEMORY_SAVING]:
- log().info("Memory saving mode enabled: Disable on conversion of small files for performance improvements.")
-
- log().info("Reading and extracting data...")
- reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
+ def _extract_data(config: dict) -> bool:
+ """Extracts, converts, and saves the converted data; Returns false if no result data was found"""
+ file_path = Path(config[Options.FILE])
+ output_writer = CsvWriter(config[Options.OUTPUT], file_path, config[Options.SINGLE_AGENT_EXPORT])
  agent_type_log = AgentTypeLog(requested_agents=config[Options.AGENT_LIST])
  data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
- try:
+
+ log().info("Reading and extracting data...")
+ with open(file_path, "rb") as file_stream:
+ reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
  input_dao = InputDao()
  while data_storages := reader.read():
  if config[Options.INPUT_RECOVERY]:
@@ -53,28 +52,48 @@ def run(config: dict = None) -> None:
  output_writer.write_to_files(agent_name, data_frames)

  if config[Options.INPUT_RECOVERY]:
- log().info("Recovering inputs...")
- timeseries, scenario = input_dao.recover_inputs()
- base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
- series_writer = CsvWriter(Path(base_path, "./recovered"), Path("./"), False)
- series_writer.write_time_series_to_disk(timeseries)
- data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))
-
+ _recover_inputs(config, input_dao)
  if config[Options.MEMORY_SAVING]:
- written_files = output_writer.pop_all_file_paths()
- for agent_name, file_path in written_files.items():
- parsed_data = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
- apply_time_merging(parsed_data, config[Options.TIME_MERGING])
- apply_time_option(parsed_data, config[Options.TIME])
- output_writer.write_to_files(agent_name, parsed_data)
-
+ _memory_saving_apply_conversions(config, output_writer)
  log().info("Data conversion completed.")
- except MemoryError:
- log_critical_and_raise(MemoryError(ERR_MEMORY_SEVERE if Options.MEMORY_SAVING else ERR_MEMORY_ERROR))
+ return agent_type_log.has_any_agent_type()

- file_stream.close()
- if not agent_type_log.has_any_agent_type():
- log().error("Provided file did not contain any output data.")
+
+ def _recover_inputs(config: dict, input_dao: InputDao) -> None:
+ """Reads scenario configuration from provided input_dao"""
+ log().info(INFO_RECOVERY)
+ timeseries, scenario = input_dao.recover_inputs()
+ base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
+ series_writer = CsvWriter(
+ config_output=Path(base_path, "./recovered"), input_file_path=Path("./"), single_export=False
+ )
+ series_writer.write_time_series_to_disk(timeseries)
+ data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))
+
+
+ def _memory_saving_apply_conversions(config: dict, output_writer: CsvWriter) -> None:
+ """Rewrite result files in memory saving mode: apply time-merging and time conversion options on a per-file basis"""
+ log().info(INFO_CONVERSION)
+ written_files = output_writer.pop_all_file_paths()
+ for agent_name, file_path in written_files.items():
+ parsed_data = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
+ apply_time_merging(parsed_data, config[Options.TIME_MERGING])
+ apply_time_option(parsed_data, config[Options.TIME])
+ output_writer.write_to_files(agent_name, parsed_data)
+
+
+ def run(config: dict = None) -> None:
+ """Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s)"""
+ config = update_default_config(config, DEFAULT_CONFIG)
+ fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
+ if config[Options.MEMORY_SAVING]:
+ log().info(INFO_MEMORY_SAVING)
+ try:
+ found_result_data = _extract_data(config)
+ if not found_result_data:
+ log().warning(WARN_OUTPUT_MISSING)
+ except MemoryError:
+ log_critical_and_raise(MemoryError(ERR_MEMORY_SEVERE if config[Options.MEMORY_SAVING] else ERR_OUT_OF_MEMORY))


  if __name__ == "__main__":
fameio/series.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  import math
@@ -12,16 +12,17 @@ from fameprotobuf.input_file_pb2 import InputData
  from google.protobuf.internal.wire_format import INT64_MIN, INT64_MAX

  from fameio.input.resolver import PathResolver
- from fameio.logs import log_error_and_raise, log
+ from fameio.logs import log, log_error
  from fameio.time import ConversionError, FameTime
  from fameio.tools import clean_up_file_name


+ CSV_FILE_SUFFIX = ".csv"
+
+
  class TimeSeriesError(Exception):
  """Indicates that an error occurred during management of time series"""

- pass
-

  class Entry(Enum):
  ID = auto()
@@ -67,7 +68,7 @@ class TimeSeriesManager:

  def _time_series_is_registered(self, identifier: Union[str, int, float]) -> bool:
  """Returns True if the value was already registered"""
- return identifier in self._series_by_id.keys()
+ return identifier in self._series_by_id

  def _register_time_series(self, identifier: Union[str, int, float]) -> None:
  """Assigns an id to the given `identifier` and loads the time series into a dataframe"""
@@ -84,14 +85,14 @@ class TimeSeriesManager:
  try:
  return identifier, self._check_and_convert_series(data)
  except TypeError as e:
- log_error_and_raise(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_VALUE.format(identifier), e))
- except ConversionError:
- log_error_and_raise(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_KEY.format(identifier)))
+ raise log_error(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_VALUE.format(identifier), e)) from e
+ except ConversionError as e:
+ raise log_error(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_KEY.format(identifier), e)) from e
  else:
  message = self._ERR_FILE_NOT_FOUND.format(identifier)
  if self._is_number_string(identifier):
  message += self._ERR_NUMERIC_STRING
- log_error_and_raise(TimeSeriesError(message))
+ raise log_error(TimeSeriesError(message))
  else:
  return self._create_timeseries_from_value(identifier)

@@ -111,10 +112,10 @@ class TimeSeriesManager:
  """Returns the given `value` if it is a numeric value other than NaN"""
  try:
  value = float(value)
- except ValueError:
- log_error_and_raise(TypeError(TimeSeriesManager._ERR_NON_NUMERIC.format(value)))
+ except ValueError as e:
+ raise log_error(TypeError(TimeSeriesManager._ERR_NON_NUMERIC.format(value))) from e
  if math.isnan(value):
- log_error_and_raise(TypeError(TimeSeriesManager._ERR_NAN_VALUE))
+ raise log_error(TypeError(TimeSeriesManager._ERR_NAN_VALUE))
  return value

  @staticmethod
@@ -130,7 +131,7 @@ class TimeSeriesManager:
  def _create_timeseries_from_value(value: Union[int, float]) -> tuple[str, pd.DataFrame]:
  """Returns name and dataframe for a new static timeseries created from the given `value`"""
  if math.isnan(value):
- log_error_and_raise(TimeSeriesError(TimeSeriesManager._ERR_NAN_VALUE))
+ raise log_error(TimeSeriesError(TimeSeriesManager._ERR_NAN_VALUE))
  data = pd.DataFrame({0: [INT64_MIN, INT64_MAX], 1: [value, value]})
  return TimeSeriesManager._CONSTANT_IDENTIFIER.format(value), data

@@ -148,7 +149,7 @@ class TimeSeriesManager:
  TimeSeriesException: if identifier was not yet registered
  """
  if not self._time_series_is_registered(identifier):
- log_error_and_raise(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(identifier)))
+ raise log_error(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(identifier)))
  return self._series_by_id.get(identifier)[Entry.ID]

  def get_all_series(self) -> list[tuple[int, str, pd.DataFrame]]:
@@ -175,15 +176,15 @@ class TimeSeriesManager:
  self._series_by_id[one_series.series_id] = reconstructed

  def _get_cleaned_file_name(self, timeseries_name: str):
- if timeseries_name.lower().endswith(".csv"):
+ if Path(timeseries_name).suffix.lower() == CSV_FILE_SUFFIX:
  filename = Path(timeseries_name).name
  else:
- filename = clean_up_file_name(timeseries_name) + ".csv"
+ filename = clean_up_file_name(timeseries_name) + CSV_FILE_SUFFIX
  return str(Path(self._TIMESERIES_RECONSTRUCTION_PATH, filename))

  def get_reconstructed_series_by_id(self, series_id: int) -> str:
  """Return name or path for given `series_id` if series these are identified by their number.
  Use this only if series were added via `reconstruct_time_series`"""
  if series_id < 0 or series_id > self._id_count:
- log_error_and_raise(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(series_id)))
+ raise log_error(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(series_id)))
  return self._series_by_id[series_id][Entry.NAME]
fameio/time.py CHANGED
@@ -1,14 +1,13 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
-
  import datetime as dt
  import math
  import re
  from enum import Enum, auto
  from typing import Union

- from fameio.logs import log_error_and_raise
+ from fameio.logs import log_error

  START_IN_REAL_TIME = "2000-01-01_00:00:00"
  DATE_FORMAT = "%Y-%m-%d_%H:%M:%S"
@@ -19,8 +18,6 @@ FAME_FIRST_DATETIME = dt.datetime.strptime(START_IN_REAL_TIME, DATE_FORMAT)
  class ConversionError(Exception):
  """Indicates that something went wrong during time stamp conversion"""

- pass
-

  class TimeUnit(Enum):
  """Time units defined in FAME"""
@@ -75,14 +72,14 @@ class FameTime:
  def convert_datetime_to_fame_time_step(datetime_string: str) -> int:
  """Converts real Datetime string to FAME time step"""
  if not FameTime.is_datetime(datetime_string):
- log_error_and_raise(ConversionError(FameTime._FORMAT_INVALID.format(datetime_string)))
+ raise log_error(ConversionError(FameTime._FORMAT_INVALID.format(datetime_string)))
  datetime = FameTime._convert_to_datetime(datetime_string)
  years_since_start_time = datetime.year - FAME_FIRST_DATETIME.year
  beginning_of_year = dt.datetime(year=datetime.year, month=1, day=1, hour=0, minute=0, second=0)
  seconds_since_beginning_of_year = int((datetime - beginning_of_year).total_seconds())
  steps_since_beginning_of_year = seconds_since_beginning_of_year * Constants.STEPS_PER_SECOND
  if steps_since_beginning_of_year > Constants.STEPS_PER_YEAR:
- log_error_and_raise(ConversionError(FameTime._INVALID_TOO_LARGE.format(datetime_string)))
+ raise log_error(ConversionError(FameTime._INVALID_TOO_LARGE.format(datetime_string)))
  year_offset = years_since_start_time * Constants.STEPS_PER_YEAR
  return year_offset + steps_since_beginning_of_year

@@ -91,8 +88,8 @@ class FameTime:
  """Converts given `datetime_string` to real-world datetime"""
  try:
  return dt.datetime.strptime(datetime_string, DATE_FORMAT)
- except ValueError:
- log_error_and_raise(ConversionError(FameTime._INVALID_TIMESTAMP.format(datetime_string)))
+ except ValueError as e:
+ raise log_error(ConversionError(FameTime._INVALID_TIMESTAMP.format(datetime_string))) from e

  @staticmethod
  def convert_fame_time_step_to_datetime(fame_time_steps: int, date_format: str = DATE_FORMAT) -> str:
@@ -108,8 +105,8 @@ class FameTime:
  datetime = beginning_of_year + dt.timedelta(seconds=seconds_in_current_year)
  try:
  return datetime.strftime(date_format)
- except ValueError:
- log_error_and_raise(ConversionError(FameTime._INVALID_DATE_FORMAT.format(date_format)))
+ except ValueError as e:
+ raise log_error(ConversionError(FameTime._INVALID_DATE_FORMAT.format(date_format))) from e

  @staticmethod
  def convert_time_span_to_fame_time_steps(value: int, unit: TimeUnit) -> int:
@@ -117,8 +114,7 @@
  steps = Constants.steps_per_unit.get(unit)
  if steps:
  return steps * value
- else:
- log_error_and_raise(ConversionError(FameTime._TIME_UNIT_UNKNOWN.format(unit)))
+ raise log_error(ConversionError(FameTime._TIME_UNIT_UNKNOWN.format(unit)))

  @staticmethod
  def is_datetime(string: str) -> bool:
@@ -132,7 +128,7 @@ class FameTime:
  """Returns `True` if given int or string `value` can be converted to a FAME time step"""
  if isinstance(value, int):
  return True
- elif isinstance(value, str):
+ if isinstance(value, str):
  return FameTime.is_datetime(value) or FameTime._is_integer(value)
  return False

@@ -143,8 +139,7 @@ class FameTime:
  int(string)
  except ValueError:
  return False
- else:
- return True
+ return True

  @staticmethod
  def convert_string_if_is_datetime(value: Union[int, str]) -> int:
@@ -155,8 +150,7 @@ class FameTime:
  """
  if FameTime.is_datetime(value):
  return int(FameTime.convert_datetime_to_fame_time_step(value))
- else:
- try:
- return int(value)
- except ValueError:
- log_error_and_raise(ConversionError(FameTime._NO_TIMESTAMP.format(value)))
+ try:
+ return int(value)
+ except ValueError as e:
+ raise log_error(ConversionError(FameTime._NO_TIMESTAMP.format(value))) from e
fameio/tools.py CHANGED
@@ -1,4 +1,4 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
  from pathlib import Path
@@ -14,8 +14,7 @@ def ensure_is_list(value: Any) -> list:
  """Returns a list: Either the provided `value` if it is a list, or a new list containing the provided value"""
  if isinstance(value, list):
  return value
- else:
- return [value]
+ return [value]


  def ensure_path_exists(path: Union[Path, str]):
{fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fameio
- Version: 3.1.0
+ Version: 3.1.1
  Summary: Tools for input preparation and output digestion of FAME models
  Home-page: https://gitlab.com/fame-framework/wiki/-/wikis/home
  License: Apache-2.0
@@ -31,13 +31,13 @@ Description-Content-Type: text/markdown

  SPDX-License-Identifier: Apache-2.0 -->

- | | |
- |---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
- | **Package** | [![PyPI version](https://badge.fury.io/py/fameio.svg)](https://badge.fury.io/py/fameio) [![PyPI license](https://img.shields.io/pypi/l/fameio.svg)](https://badge.fury.io/py/fameio) [![REUSE status](https://api.reuse.software/badge/gitlab.com/fame-framework/fame-io)](https://api.reuse.software/info/gitlab.com/fame-framework/fame-io) |
- | **Tests** | [![pipeline status](https://gitlab.com/fame-framework/fame-io/badges/main/pipeline.svg)](https://gitlab.com/fame-framework/fame-io/commits/main) [![coverage report](https://gitlab.com/fame-framework/fame-io/badges/main/coverage.svg)](https://gitlab.com/fame-framework/fame-io/-/commits/main) |
- | **Activity** | ![GitLab last commit](https://img.shields.io/gitlab/last-commit/fame-framework%2Ffame-io) ![GitLab closed issues by-label](https://img.shields.io/gitlab/issues/closed/fame-framework%2Ffame-io) |
- | **Style** | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org) |
- | **Reference** | [![JOSS](https://joss.theoj.org/papers/10.21105/joss.04958/status.svg)](https://doi.org/10.21105/joss.04958) [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.4314337.svg)](https://doi.org/10.5281/zenodo.4314337) |
+ | | |
+ |---------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+ | **Package** | [![PyPI version](https://badge.fury.io/py/fameio.svg)](https://badge.fury.io/py/fameio) [![PyPI license](https://img.shields.io/pypi/l/fameio.svg)](https://badge.fury.io/py/fameio) [![REUSE status](https://api.reuse.software/badge/gitlab.com/fame-framework/fame-io)](https://api.reuse.software/info/gitlab.com/fame-framework/fame-io) |
+ | **Tests** | [![pipeline status](https://gitlab.com/fame-framework/fame-io/badges/main/pipeline.svg)](https://gitlab.com/fame-framework/fame-io/commits/main) [![coverage report](https://gitlab.com/fame-framework/fame-io/badges/main/coverage.svg)](https://gitlab.com/fame-framework/fame-io/-/commits/main) |
+ | **Activity** | ![GitLab last commit](https://img.shields.io/gitlab/last-commit/fame-framework%2Ffame-io) ![GitLab closed issues by-label](https://img.shields.io/gitlab/issues/closed/fame-framework%2Ffame-io) |
+ | **Style** | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org) [![linting: pylint](https://img.shields.io/badge/linting-pylint-green)](https://github.com/pylint-dev/pylint) |
+ | **Reference** | [![JOSS](https://joss.theoj.org/papers/10.21105/joss.04958/status.svg)](https://doi.org/10.21105/joss.04958) [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.4314337.svg)](https://doi.org/10.5281/zenodo.4314337) |
 
  # FAME-Io
 
fameio-3.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,56 @@
+ CHANGELOG.md,sha256=XB1lrNcdL2xRQVF9PHfmp5VThdP2UbecSPFFjFfcgYI,14514
+ fameio/__init__.py,sha256=LiE7kRXW0pMIB4hTPC0T_ppGz9O0swd0Ca1-b99hOMc,229
+ fameio/cli/__init__.py,sha256=YAxIBl8azJDFaKr0iGaba94UH3Xy-KhQtxrGtwz3FpM,179
+ fameio/cli/convert_results.py,sha256=-nAAuO_CznggZnZHeGctP_uXcQsQGjTDDZmHAqlBMJQ,3438
+ fameio/cli/make_config.py,sha256=Dbn_ITlhpumc-bK9ONKZyGnyYLXsc8YIWQN9OHLBHe4,2712
+ fameio/cli/options.py,sha256=k_bVfVo8GEoqJzLLfzVpfrol51IA9AWjIGASQsATruY,1269
+ fameio/cli/parser.py,sha256=le7oxhOpplEbuS6Bz9FZ7xtDMMxZBQlKrB5PXzjIZaM,8381
+ fameio/input/__init__.py,sha256=EuG1buGAz3rBLKeIM02ovq1GxlSaZyN9ne4SI4gwiNs,502
+ fameio/input/loader/__init__.py,sha256=ShZq2zBGz3-423Fd6DLCi1wAtD2JDgFiaknJ24lGToU,2659
+ fameio/input/loader/controller.py,sha256=S8c4j0LF8z0Vjmhzbf5r4KoI82Utpu5FOWzxhv62uMk,5801
+ fameio/input/loader/loader.py,sha256=uVCUXN2FqtO058zxDBQth2rabP8BPtInMZnAcI7SX8Q,5060
+ fameio/input/metadata.py,sha256=Mq7VzP8Sy-YP_W3yKstU3a9_J4kL5TL915hSuGBKUKo,5918
+ fameio/input/resolver.py,sha256=86APkFnupWSJNAaX4RP_SOYNv5W3WUwV_I6Iptkur_Q,1917
+ fameio/input/scenario/__init__.py,sha256=Pb8O9rVOTwEo48WIgiq1kBnpovpc4D_syC6EjTFGHew,404
+ fameio/input/scenario/agent.py,sha256=zaa80Re9ztCNkFObqlb3wbsN3biw7lLKuukMGmVFqYQ,4359
+ fameio/input/scenario/attribute.py,sha256=LEmXDQ8bFcqi2nzdBhnAkJKhF5AUN_3TbfI6H3Nz3Go,9613
+ fameio/input/scenario/contract.py,sha256=_wB9FB2po1EWlM08CsKOZQgLFFl39Y5KgP6cyWjS-w4,9133
+ fameio/input/scenario/exception.py,sha256=7mEoP3CTbJvlrdU3ovwy8lpXcXi0LRC2wVJNATeli7k,1376
+ fameio/input/scenario/fameiofactory.py,sha256=_8R3q_e5lHGd75DDbgJvTX6PJNn0i6nZSnW4DyGgdOA,1558
+ fameio/input/scenario/generalproperties.py,sha256=gTSLS0bgoZYAMDYhaFTGcJxipgik9AntSPihQBIcX6k,3504
+ fameio/input/scenario/scenario.py,sha256=f9gIV_FdtS52L_nWdstwSoXlN8gL7ovxGPAaWpEzOjc,4608
+ fameio/input/scenario/stringset.py,sha256=ORW0_4M9CGtB5NQAzQhzZJliHmyW0yQjG_tzMjnRxUw,1814
+ fameio/input/schema/__init__.py,sha256=oIsJVC0hUhwJf_XIBpd1NNTR8sOWEthAy98m6uR7gKc,327
+ fameio/input/schema/agenttype.py,sha256=UoPyDDa8rShtIAGQ1dxaJIH-wc-i4D7M8jp7bPAlZ6U,5110
+ fameio/input/schema/attribute.py,sha256=V0tzA2tlDUn44DIKOxTDf6neC_MhG3v0AJIk6etX4PM,12958
+ fameio/input/schema/java_packages.py,sha256=jkwEgR3mOGky-cvSgR3pGQEJ84hXN5NZS_oyoeYt-9g,2845
+ fameio/input/schema/schema.py,sha256=-Hm7r9Hka8NDnNZLe9GCeaULcBgvvhHQhEYjxQ4VzTg,3039
+ fameio/input/validator.py,sha256=_nDdIhqdrSFvnHJhCyAqoo_4yhfjBCmWsSH-F2LUEIE,18955
+ fameio/input/writer.py,sha256=51R6Qt00JBvDDj6SVapT-dW5i9hcJN-7jWgDHbbNayk,12772
+ fameio/logs.py,sha256=HNHpgwjq_PJdSRXDHT5FlvnUpDpdh4EMVmiu_iwcAjo,4963
+ fameio/output/__init__.py,sha256=IQm0MNOXkhBexiMXBoNZDK5xHUYgazH7oXm-lc0Vm04,109
+ fameio/output/agent_type.py,sha256=a2QnKfc3Rc-yXUZyd2esl5aiiI3nKo1_yaRbRqd6NyI,4389
+ fameio/output/conversion.py,sha256=e9SHhGh2o045AFR16ZEfb-73wivaADUkQBVHHtZwWzA,3990
+ fameio/output/csv_writer.py,sha256=hU3TM_c8bltY_j_6NZInNB7fyLsNZsYsOrfVvDNYwwE,5117
+ fameio/output/data_transformer.py,sha256=nbqmnobcmfWRyqxKH2O-yOsXXREvNoD2IRCI3G5pY-I,5510
+ fameio/output/input_dao.py,sha256=V4vSlJo1oJpevWPjzOzKac3r4cAYF5Lhh68B1cVloAI,6888
+ fameio/output/output_dao.py,sha256=f7xnsjY80E6OolHlFDzWkJG2J86wxXJJ2nRRmgUnVVc,4085
+ fameio/output/reader.py,sha256=rlcukAA0zDPxo3PxrmF4BOAgMY-wigyY-O5gAa_uk_s,5135
+ fameio/output/yaml_writer.py,sha256=p0-xry5JjIaCCW8Fjd9nzTqMyTPQuWMYprQI47_wwKQ,841
+ fameio/scripts/__init__.py,sha256=Owg46sNUIkq7qLJpfa75Cuhjtl3HGkR6l1-LEaNE54A,727
+ fameio/scripts/__init__.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
+ fameio/scripts/convert_results.py,sha256=xWdxW48N_6_312_IrB3dnwEajIxSygNK303p-Nwiles,5112
+ fameio/scripts/convert_results.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
+ fameio/scripts/make_config.py,sha256=LvwXbBlaGdKC25BRlk4LJDEwvZzxzCzYyVewtyHhIMM,1351
+ fameio/scripts/make_config.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
+ fameio/series.py,sha256=TLJUsVbPIHWddt6z1-pE5mXK50KZ2hIeijcTBqzaklM,9134
+ fameio/time.py,sha256=pyzOO_LVKbQhRaH3_9_6f7ejJoZTDTf-q9IEtqaVgBY,6866
+ fameio/tools.py,sha256=0aee9qROcyZh2bQYY931yk2_3jssfmA-ZRV8pjTBTOo,1041
+ fameio-3.1.1.dist-info/entry_points.txt,sha256=jvQVfwJjZXPWQjJlhj1Dt6PTeblryTc1GxjKeK90twI,123
+ fameio-3.1.1.dist-info/LICENSE.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+ fameio-3.1.1.dist-info/LICENSES/Apache-2.0.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+ fameio-3.1.1.dist-info/LICENSES/CC-BY-4.0.txt,sha256=y9WvMYKGt0ZW8UXf9QkZB8wj1tjJrQngKR7CSXeSukE,19051
+ fameio-3.1.1.dist-info/LICENSES/CC0-1.0.txt,sha256=9Ofzc7m5lpUDN-jUGkopOcLZC3cl6brz1QhKInF60yg,7169
+ fameio-3.1.1.dist-info/METADATA,sha256=ulS2Y5wB_KCllU9GvdVBQSvQscgBSDcsQv9l2RNzdFc,38940
+ fameio-3.1.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ fameio-3.1.1.dist-info/RECORD,,
fameio-3.1.0.dist-info/RECORD DELETED
@@ -1,56 +0,0 @@
- CHANGELOG.md,sha256=2RFfQpBVhdLZ78_XZu2_B_sLe4RFb-iL_aH1PfrRPLY,14111
- fameio/__init__.py,sha256=LiE7kRXW0pMIB4hTPC0T_ppGz9O0swd0Ca1-b99hOMc,229
- fameio/cli/__init__.py,sha256=xAS0gRfzq1qepCW6PjIozRC6t3DOxzdNvHU9beFOGHU,167
- fameio/cli/convert_results.py,sha256=-nAAuO_CznggZnZHeGctP_uXcQsQGjTDDZmHAqlBMJQ,3438
- fameio/cli/make_config.py,sha256=Dbn_ITlhpumc-bK9ONKZyGnyYLXsc8YIWQN9OHLBHe4,2712
- fameio/cli/options.py,sha256=2XNwCAloiy_lC77C7LEngMvvT7rVmzASeE_JxQlpHak,1257
- fameio/cli/parser.py,sha256=fR28uzjf6MV3do4AefrXbRu2m4-x0bR-Xn-1Di1e0Lk,8391
- fameio/input/__init__.py,sha256=mDpz4hKZqpzrxtuvwK-eZl-zwHhegRbiH5ew1LpjaAg,550
- fameio/input/loader/__init__.py,sha256=ShZq2zBGz3-423Fd6DLCi1wAtD2JDgFiaknJ24lGToU,2659
- fameio/input/loader/controller.py,sha256=Ph7QvGwbJJ4MNb21nPiYJNTdjpPcRFms95Fd-evzGD0,5800
- fameio/input/loader/loader.py,sha256=uVCUXN2FqtO058zxDBQth2rabP8BPtInMZnAcI7SX8Q,5060
- fameio/input/metadata.py,sha256=l28BPnwjb62RRt-tBJ-HVmOJVO1QIRlKdMx-gjoP_mw,5957
- fameio/input/resolver.py,sha256=86APkFnupWSJNAaX4RP_SOYNv5W3WUwV_I6Iptkur_Q,1917
- fameio/input/scenario/__init__.py,sha256=azglGZTK39jpkDOmeVAhiLH9zIsUx2SZ_umQIlJiq6Q,322
- fameio/input/scenario/agent.py,sha256=Zl83x3U1f4nVv1gbCOhjQ911u-GwWXoWR5_QW0cRHtM,4377
- fameio/input/scenario/attribute.py,sha256=-u-dCzs8lUhSEc98hDot28nl6BLA7sEzAg9GzmNci4k,9635
- fameio/input/scenario/contract.py,sha256=D6iy-oVNwIqQxeZQ3rITERwIgFRrhzYHRysygn-ceDk,9046
- fameio/input/scenario/exception.py,sha256=zavHELqzpIJvb_4GhskYQyi2u5Y6LppwWQt-yGecMQY,1434
- fameio/input/scenario/fameiofactory.py,sha256=_8R3q_e5lHGd75DDbgJvTX6PJNn0i6nZSnW4DyGgdOA,1558
- fameio/input/scenario/generalproperties.py,sha256=gTSLS0bgoZYAMDYhaFTGcJxipgik9AntSPihQBIcX6k,3504
- fameio/input/scenario/scenario.py,sha256=f9gIV_FdtS52L_nWdstwSoXlN8gL7ovxGPAaWpEzOjc,4608
- fameio/input/scenario/stringset.py,sha256=ORW0_4M9CGtB5NQAzQhzZJliHmyW0yQjG_tzMjnRxUw,1814
- fameio/input/schema/__init__.py,sha256=V1m0Cx97lNeZwaQBFo-a5WMbohdsCwDfvmjtxD2vaBo,271
- fameio/input/schema/agenttype.py,sha256=NLcdIuMKjFsqhHG9zisYaHDnP7eDOctHWG1yEH9_ugg,5149
- fameio/input/schema/attribute.py,sha256=4nH-lA0Wh7pe8ZPOWXjuVkPa6Jl5cYQnxU5_e4Y54L0,11369
- fameio/input/schema/java_packages.py,sha256=jkwEgR3mOGky-cvSgR3pGQEJ84hXN5NZS_oyoeYt-9g,2845
- fameio/input/schema/schema.py,sha256=-Hm7r9Hka8NDnNZLe9GCeaULcBgvvhHQhEYjxQ4VzTg,3039
- fameio/input/validator.py,sha256=-X0aEEzKj6s5laxXhDzGcwAa0f4Ssp5waLmuriO-3u0,18359
- fameio/input/writer.py,sha256=TyFkS3KKdAaXiMq-yLDriwOcy65-P5ZeEtzOtUM1y38,12810
- fameio/logs.py,sha256=JxT4JkyKCxxB9QybGOhAcncCexmoIx6_dkGczjG-h8A,3992
- fameio/output/__init__.py,sha256=IQm0MNOXkhBexiMXBoNZDK5xHUYgazH7oXm-lc0Vm04,109
- fameio/output/agent_type.py,sha256=6niFUKOhxUdGw98NUVNtIM543hAQaCkyy7hacxlc9dU,4328
- fameio/output/conversion.py,sha256=2DQ6T2AuU0iDrKsUQ1HcM1TO_prqlENtq2n28wPKwcU,3991
- fameio/output/csv_writer.py,sha256=hU3TM_c8bltY_j_6NZInNB7fyLsNZsYsOrfVvDNYwwE,5117
- fameio/output/data_transformer.py,sha256=2fBXQ8byS3Lhsmocr2AB5B_I9NmDQrwzIViUtQeKYYg,5473
- fameio/output/input_dao.py,sha256=aTNdRM_uaVXB8X7EkWZ3eEd1IMH93Bfx8jCpFvhlBVE,6983
- fameio/output/output_dao.py,sha256=f7xnsjY80E6OolHlFDzWkJG2J86wxXJJ2nRRmgUnVVc,4085
- fameio/output/reader.py,sha256=wiTCkmETtcZR_ybb4CxnfyTtBF74bTNp2w96fFD8Qo0,5097
- fameio/output/yaml_writer.py,sha256=YIUKQB10BnDF9LghLf5bD23ryUI340lGxslB5j88_dk,805
- fameio/scripts/__init__.py,sha256=Owg46sNUIkq7qLJpfa75Cuhjtl3HGkR6l1-LEaNE54A,727
- fameio/scripts/__init__.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
- fameio/scripts/convert_results.py,sha256=Olrw4l9nGzstgdVyhJJthHCKyaTubVXSlM26729Sjmk,4173
- fameio/scripts/convert_results.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
- fameio/scripts/make_config.py,sha256=LvwXbBlaGdKC25BRlk4LJDEwvZzxzCzYyVewtyHhIMM,1351
- fameio/scripts/make_config.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
- fameio/series.py,sha256=FhORd6MmBnAzCC56Nb-REhadPd-d5BxBpRjkOJ-yEaA,9107
- fameio/time.py,sha256=iiCVpEmBSxHgKft_X-E_D-dpOT-L2Y_xN-6pVFtJhDQ,6949
- fameio/tools.py,sha256=8Ia-J-mgjf1NCXMvjLDj10hDwEKzp6jS6eq6z8W005w,1056
- fameio-3.1.0.dist-info/entry_points.txt,sha256=jvQVfwJjZXPWQjJlhj1Dt6PTeblryTc1GxjKeK90twI,123
- fameio-3.1.0.dist-info/LICENSE.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
- fameio-3.1.0.dist-info/LICENSES/Apache-2.0.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
- fameio-3.1.0.dist-info/LICENSES/CC-BY-4.0.txt,sha256=y9WvMYKGt0ZW8UXf9QkZB8wj1tjJrQngKR7CSXeSukE,19051
- fameio-3.1.0.dist-info/LICENSES/CC0-1.0.txt,sha256=9Ofzc7m5lpUDN-jUGkopOcLZC3cl6brz1QhKInF60yg,7169
- fameio-3.1.0.dist-info/METADATA,sha256=r5SZ8p7ee6a3f6wEUCWZOUdsjdF3yFZb4G_X-cQRVrk,38857
- fameio-3.1.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- fameio-3.1.0.dist-info/RECORD,,
File without changes