fameio 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +42 -22
- fameio/scripts/convert_results.py +9 -9
- fameio/scripts/make_config.py +3 -3
- fameio/source/cli/convert_results.py +11 -2
- fameio/source/cli/make_config.py +2 -2
- fameio/source/cli/options.py +0 -1
- fameio/source/cli/parser.py +3 -3
- fameio/source/loader.py +10 -10
- fameio/source/logs.py +69 -38
- fameio/source/results/conversion.py +4 -4
- fameio/source/results/csv_writer.py +4 -4
- fameio/source/results/data_transformer.py +3 -20
- fameio/source/results/input_dao.py +3 -3
- fameio/source/results/reader.py +8 -8
- fameio/source/results/yaml_writer.py +2 -2
- fameio/source/scenario/contract.py +2 -2
- fameio/source/scenario/exception.py +2 -2
- fameio/source/scenario/generalproperties.py +2 -2
- fameio/source/schema/agenttype.py +5 -5
- fameio/source/schema/attribute.py +4 -4
- fameio/source/schema/java_packages.py +69 -0
- fameio/source/schema/schema.py +23 -7
- fameio/source/series.py +5 -3
- fameio/source/validator.py +5 -5
- fameio/source/writer.py +23 -12
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/METADATA +43 -31
- fameio-2.1.0.dist-info/RECORD +53 -0
- fameio-2.0.0.dist-info/RECORD +0 -52
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/LICENSE.txt +0 -0
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/WHEEL +0 -0
- {fameio-2.0.0.dist-info → fameio-2.1.0.dist-info}/entry_points.txt +0 -0
CHANGELOG.md
CHANGED
@@ -1,10 +1,30 @@
-<!-- SPDX-FileCopyrightText:
+<!-- SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 
 SPDX-License-Identifier: CC0-1.0 -->
 
 # Changelog
 
-## [2.
+## [2.1.0](https://gitlab.com/fame-framework/fame-io/-/tags/v2.1.0) - 2024-05-11
+### Changed
+- Changed format of auto-created timeseries from constant values #196 (@dlr-cjs)
+- Changed default log level to "WARNING" #191 (@dlr_fn @dlr-cjs)
+- Adapted link-formatting in Changelog !155 (@dlr-cjs)
+
+### Added
+- Read java package names from Schema and write to input.pb #198 (@dlr-cjs)
+
+### Fixed
+- Fix docstrings in CLI `handle_args()` #190 (@dlr-cjs @dlr_fn)
+- Fix potential duplicates in logging #191 (@dlr_fn @dlr-cjs)
+
+## [2.0.1](https://gitlab.com/fame-framework/fame-io/-/tags/v2.0.1) - 2024-04-05
+### Fix
+- Fix potential missing columns when memory-saving-mode `-m` is enabled #194 (@dlr_fn @dlr-cjs)
+
+### Remove
+- Remove convert results option `-cc MERGE` #194 (@dlr_fn @dlr-cjs)
+
+## [2.0.0](https://gitlab.com/fame-framework/fame-io/-/tags/v2.0.0) - 2024-04-03
 ### Changed
 - **Breaking**: Removed support for `python==3.8` #163 (@dlr-cjs @dlr_fn)
 - **Breaking**: Signature of `run` functions in `make_config.py` and `convert_results.py` changed: the input file is now read from the configuration dictionary #163 (@dlr-cjs @dlr_fn)
@@ -45,11 +65,11 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Fix breaking tests in Pytest 8.0 #176 (@dlr-cjs)
 - Fix PyTests for Python 3.12 #182 (@dlr_fn)
 
-## [1.8.1
+## [1.8.1](https://gitlab.com/fame-framework/fame-io/-/tags/v1.8.1) - 2023-05-04
 ### Fixed
 - Fix fail of `ConvertFameResults` when `merge-times` was not specified
 
-## [1.8.0
+## [1.8.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.8) - 2023-04-14
 ### Changed
 - Update repository to be compliant to `REUSE` standard
 - Accept custom `date_format` (default: `"%Y-%m-%d_%H:%M:%S"`) for `FameTime.convert_fame_time_step_to_datetime()`
@@ -61,7 +81,7 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Add option to merge time steps in results with `convertFameResults`
 - Add pre-commit hooks enforcing high coding standards and reducing CI runner minutes during development
 
-## [1.7.0
+## [1.7.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.7) - 2023-02-20
 ### Added
 - Support dictionaries in Schema for field `Products` in elements of `AgentTypes`
 - Support dictionaries in Schema for field `Values` in elements of `Attributes`
@@ -73,15 +93,15 @@ SPDX-License-Identifier: CC0-1.0 -->
 ### Remove
 - **Breaking**: `Products` in Schema no longer support single non-list values
 
-## [1.6.3
+## [1.6.3](https://gitlab.com/fame-framework/fame-io/-/tags/v1.6.3) - 2022-11-04
 ### Added
 - Allow parsing `Help` for `Attributes` in `schema`
 
-## [1.6.1
+## [1.6.1](https://gitlab.com/fame-framework/fame-io/-/tags/v1.6.1) - 2022-11-02
 ### Changed
 - Use existing logger if already set up to avoid duplicates when `fameio` is used as dependency in third party workflows
 
-## [1.6.0
+## [1.6.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.6) - 2022-07-08
 ### Added
 - Add option to enable memory saving mode using the flag `-m` or `--memory-saving`
 - Add options to deal with complex indexed output columns using the flag `-cc` or `--complex-column` with
@@ -94,23 +114,23 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Reduce memory profile for `convertFameResults`
 - Extract `source` scripts relevant for `convertFameResults` to be hosted in subpackage `results`
 
-## [1.5.4
+## [1.5.4](https://gitlab.com/fame-framework/fame-io/-/tags/v1.5.4) - 2022-06-01
 ### Changed
 - Limit `protobuf` dependency to `>=3.19,<4.0`
 
-## [1.5.3
+## [1.5.3](https://gitlab.com/fame-framework/fame-io/-/tags/v1.5.3) - 2022-03-18
 ### Changed
 - Harmonize interface with `famegui`
 - Return `None` on failure of `resolve_series_file_path` instead of raising a `FileNotFoundError`
 
-## [1.5.2
+## [1.5.2](https://gitlab.com/fame-framework/fame-io/-/tags/v1.5.2) - 2022-03-10
 ### Changed
 - Allow interfacing of `famegui` with `scenario` (e.g. serialization, error handling)
 - Move `scenario` validation to `validator.py`
 - Extract `path_resolver.py`
 - Increase test coverage by incorporating [AMIRIS examples](https://gitlab.com/dlr-ve/esy/amiris/examples)
 
-## [1.5.1
+## [1.5.1](https://gitlab.com/fame-framework/fame-io/-/tags/v1.5.1) - 2022-01-10
 ### Added
 - Provide documentation on installation using `pipx`
 - Add optional argument `-se`/`--singleexport` for exporting individual files for each agent
@@ -120,7 +140,7 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Refactor `scenario.py`
 - Ensure code formatting using `black`
 
-## [1.5.0
+## [1.5.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.5) - 2021-06-30
 ### Added
 - Support specifying an output folder in command line interface of `convert_results.py`
 
@@ -128,7 +148,7 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Update to latest protobuf package
 - Refactor code
 
-## [1.4.0
+## [1.4.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.4) - 2021-06-10
 ### Added
 - Enable "Default" values for Attributes - these are used in case a mandatory attribute is not specified in the Scenario
 - Allow "List" Attributes with multiple values
@@ -145,7 +165,7 @@ SPDX-License-Identifier: CC0-1.0 -->
 ### Fixed
 - Fixed minor bugs
 
-## [1.3.0
+## [1.3.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.3) - 2021-04-13
 ### Added
 - Enable `Attributes` in agents (formerly known as `Fields`) to be structured in complex tree-like data dictionaries
 - Allow contracts to support `Attributes` of type `int`, `float`, `enum` or `dict`
@@ -160,26 +180,26 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Raise critical error when trying to convert empty protobuf output file
 - Check if `product` in `contract` is valid according to `schema.yaml`
 
-## [1.2.4
+## [1.2.4](https://gitlab.com/fame-framework/fame-io/-/tags/v1.2.4) - 2021-02-26
 ### Changed
 - Move `is_compatible` function to class `AttributeType`
 
-## [1.2.3
+## [1.2.3](https://gitlab.com/fame-framework/fame-io/-/tags/v1.2.3) - 2021-02-24
 ### Fixed
 - Fix file prefix `IGNORE_` (used when loading a set of contract files with the !include argument) is now working consistently
 
-## [1.2.2
+## [1.2.2](https://gitlab.com/fame-framework/fame-io/-/tags/v1.2.2) - 2021-02-18
 ### Changed
 - **Breaking**: Rename `fieldtype` to `attributetype` in `schema.yaml`
 - Derive protobuf imports from `fameprotobuf` package
 - Improve handling of cases for keys in `scenario.yaml`
 - Improve handling of time stamp strings
 
-## [1.2.1
+## [1.2.1](https://gitlab.com/fame-framework/fame-io/-/tags/v1.2.1) - 2021-02-10
 ### Changed
 - Improve key handling for contracts which are now case-insensitive
 
-## [1.2.0
+## [1.2.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.2) - 2021-02-04
 ### Added
 - Add `!include` command to yaml loading to allow integrating additional yaml files
 
@@ -192,7 +212,7 @@ SPDX-License-Identifier: CC0-1.0 -->
 ### Fixed
 - Fix bugs
 
-## [1.1.0
+## [1.1.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.1) - 2020-12-09
 ### Added
 - Package to PyPI
 - Provide executables for calling `makeFameRunConfig` and `convertFameResults`
@@ -200,5 +220,5 @@ SPDX-License-Identifier: CC0-1.0 -->
 ### Changed
 - Improve documentation
 
-## [1.0.0
+## [1.0.0](https://gitlab.com/fame-framework/fame-io/-/tags/v1.0) - 2020-11-17
 _Initial release of `famepy`_
fameio/scripts/convert_results.py
CHANGED
@@ -6,7 +6,7 @@ from pathlib import Path
 from fameio.source.cli.convert_results import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
 from fameio.source.cli.options import Options
 from fameio.source.cli.parser import update_default_config
-from fameio.source.logs import log_and_raise_critical,
+from fameio.source.logs import log_and_raise_critical, fameio_logger, log
 from fameio.source.results.agent_type import AgentTypeLog
 from fameio.source.results.conversion import apply_time_option, apply_time_merging
 from fameio.source.results.csv_writer import CsvWriter
@@ -23,16 +23,16 @@ ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output int
 def run(config: dict = None) -> None:
 """Reads file in protobuf format for configures FILE and extracts its content to .csv file(s)"""
 config = update_default_config(config, DEFAULT_CONFIG)
-
+fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
 
 file_path = config[Options.FILE]
 output_writer = CsvWriter(config[Options.OUTPUT], Path(file_path), config[Options.SINGLE_AGENT_EXPORT])
 file_stream = open(Path(file_path), "rb")
 
 if config[Options.MEMORY_SAVING]:
-
+log().info("Memory saving mode enabled: Disable on conversion of small files for performance improvements.")
 
-
+log().info("Reading and extracting data...")
 reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
 agent_type_log = AgentTypeLog(requested_agents=config[Options.AGENT_LIST])
 data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
@@ -43,27 +43,27 @@ def run(config: dict = None) -> None:
 input_dao.store_inputs(data_storages)
 output = OutputDAO(data_storages, agent_type_log)
 for agent_name in output.get_sorted_agents_to_extract():
-
+log().debug(f"Extracting data for {agent_name}...")
 data_frames = output.get_agent_data(agent_name, data_transformer)
 apply_time_merging(data_frames, config[Options.TIME_MERGING])
 apply_time_option(data_frames, config[Options.TIME])
-
+log().debug(f"Writing data for {agent_name}...")
 output_writer.write_to_files(agent_name, data_frames)
 
 if config[Options.INPUT_RECOVERY]:
-
+log().info("Recovering inputs...")
 timeseries, scenario = input_dao.recover_inputs()
 series_writer = CsvWriter(Path(config[Options.OUTPUT], "./recovered"), Path("./"), False)
 series_writer.write_time_series_to_disk(timeseries)
 data_to_yaml_file(scenario.to_dict(), Path(config[Options.OUTPUT], "./recovered/scenario.yaml"))
 
-
+log().info("Data conversion completed.")
 except MemoryError:
 log_and_raise_critical(ERR_MEMORY_SEVERE if Options.MEMORY_SAVING else ERR_MEMORY_ERROR)
 
 file_stream.close()
 if not agent_type_log.has_any_agent_type():
-
+log().error("Provided file did not contain any output data.")
 
 
 if __name__ == "__main__":
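Since the `run` signature changed in 2.0.0 (the input file is read from the configuration dictionary, see the changelog above) and now also wires up `fameio_logger`, the converter can be driven programmatically. A minimal sketch, assuming the import path matches the file layout shown above; `results.pb` is a placeholder:

```python
from pathlib import Path

from fameio.scripts.convert_results import run  # module path assumed from the layout above
from fameio.source.cli.options import Options

# Only keys that differ from the defaults need to be passed; update_default_config()
# fills in the remaining CLI defaults (e.g. LOG_LEVEL "WARN") before logging is set up.
run({
    Options.FILE: Path("results.pb"),  # placeholder: FAME protobuf output to convert
    Options.LOG_LEVEL: "INFO",
})
```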
fameio/scripts/make_config.py
CHANGED
@@ -6,7 +6,7 @@ from fameio.source.cli.make_config import handle_args, CLI_DEFAULTS as DEFAULT_C
 from fameio.source.cli.options import Options
 from fameio.source.cli.parser import update_default_config
 from fameio.source.loader import load_yaml, check_for_yaml_file_type
-from fameio.source.logs import
+from fameio.source.logs import fameio_logger, log
 from fameio.source.scenario import Scenario
 from fameio.source.validator import SchemaValidator
 from fameio.source.writer import ProtoWriter
@@ -15,7 +15,7 @@ from fameio.source.writer import ProtoWriter
 def run(config: dict = None) -> None:
 """Executes the main workflow for the building of a FAME configuration file"""
 config = update_default_config(config, DEFAULT_CONFIG)
-
+fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
 
 file = config[Options.FILE]
 check_for_yaml_file_type(Path(file))
@@ -26,7 +26,7 @@ def run(config: dict = None) -> None:
 writer = ProtoWriter(config[Options.OUTPUT], timeseries_manager)
 writer.write_validated_scenario(scenario)
 
-
+log().info("Configuration completed.")
 
 
 if __name__ == "__main__":
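The same pattern applies to the config builder; a hedged sketch with placeholder file names, assuming the module path from the layout above:

```python
from pathlib import Path

from fameio.scripts.make_config import run  # module path assumed from the layout above
from fameio.source.cli.options import Options

run({
    Options.FILE: Path("scenario.yaml"),  # placeholder: scenario definition in YAML
    Options.OUTPUT: Path("config.pb"),    # matches the CLI default shown further below
})
```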
fameio/source/cli/convert_results.py
CHANGED
@@ -22,7 +22,7 @@ from fameio.source.cli.parser import (
 
 CLI_DEFAULTS = {
 Options.FILE: None,
-Options.LOG_LEVEL: "
+Options.LOG_LEVEL: "WARN",
 Options.LOG_FILE: None,
 Options.AGENT_LIST: None,
 Options.OUTPUT: None,
@@ -39,7 +39,16 @@ _OUTFILE_PATH_HELP = "Provide path to folder to store output .csv files"
 
 
 def handle_args(args: List[str], defaults: Optional[Dict[Options, Any]] = None) -> Dict[Options, Any]:
-"""
+"""
+Handles command line arguments and returns `run_config` for convert_results script
+
+Args:
+args: list of (command line) arguments, e.g., ['-f', 'my_file']; arg values take precedence over defaults
+defaults: optional default values used for unspecified parameters; missing defaults are replaced by CLI defaults
+
+Returns:
+final configuration compiled from (given) `defaults` and given `args`
+"""
 parser = _prepare_parser(defaults)
 parsed = parser.parse_args(args)
 return map_namespace_to_options_dict(parsed)
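The expanded `handle_args()` docstring describes how argument values and `defaults` are merged; a small sketch of exercising it with placeholder values (the `-f` example comes from the docstring, `-l`/`--log` from the parser diff further down):

```python
from fameio.source.cli.convert_results import handle_args, CLI_DEFAULTS

# Values given on the command line take precedence over `defaults`; anything left
# unspecified falls back to CLI_DEFAULTS, e.g. LOG_LEVEL "WARN".
run_config = handle_args(["-f", "my_file", "-l", "DEBUG"], defaults=CLI_DEFAULTS)
```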
fameio/source/cli/make_config.py
CHANGED
@@ -16,7 +16,7 @@ from fameio.source.cli.parser import (
 
 CLI_DEFAULTS = {
 Options.FILE: None,
-Options.LOG_LEVEL: "
+Options.LOG_LEVEL: "WARN",
 Options.LOG_FILE: None,
 Options.OUTPUT: Path("config.pb"),
 }
@@ -34,7 +34,7 @@ def handle_args(args: List[str], defaults: Optional[Dict[Options, Any]] = None)
 defaults: optional default values used for unspecified parameters; missing defaults are replaced by CLI defaults
 
 Returns:
-final configuration compiled from (given) defaults and
+final configuration compiled from (given) `defaults` and given `args`
 """
 parser = _prepare_parser(defaults)
 parsed = parser.parse_args(args)
fameio/source/cli/options.py
CHANGED
fameio/source/cli/parser.py
CHANGED
@@ -8,7 +8,7 @@ from pathlib import Path
 from typing import Optional, Dict, Any, List, Union
 
 from fameio.source.cli.options import MergingOptions, TimeOptions, ResolveOptions, Options
-from fameio.source.logs import
+from fameio.source.logs import LogLevel
 
 _ERR_NEGATIVE_INT = "Given value `{}` is not a non-negative int."
 
@@ -74,8 +74,8 @@ def add_log_level_argument(parser: ArgumentParser, default_value: str) -> None:
 "-l",
 "--log",
 default=default_value,
-choices=
-type=str.
+choices=[level.name for level in LogLevel if level not in [LogLevel.PRINT, LogLevel.WARN]],
+type=str.upper,
 help=help_text,
 )
 
fameio/source/loader.py
CHANGED
@@ -8,7 +8,7 @@ from fnmatch import fnmatch
 from typing import IO, Any, Callable
 
 import yaml
-from fameio.source.logs import log_and_raise_critical,
+from fameio.source.logs import log_and_raise_critical, log
 from fameio.source.path_resolver import PathResolver
 
 DISABLING_YAML_FILE_PREFIX = "IGNORE_"
@@ -28,7 +28,7 @@ def read_args(loader, args):
 file_string = None
 if isinstance(args, yaml.nodes.ScalarNode):
 file_string = loader.construct_scalar(args)
-
+log().debug("Found instance `ScalarNode` in {}".format(file_string))
 elif isinstance(args, yaml.nodes.SequenceNode):
 argument_list = loader.construct_sequence(args)
 if len(argument_list) not in [1, 2]:
@@ -36,7 +36,7 @@ def read_args(loader, args):
 elif len(argument_list) == 2:
 node_string = argument_list[1]
 file_string = argument_list[0]
-
+log().debug("Found instance `SequenceNode` in {}".format(file_string))
 elif isinstance(args, yaml.nodes.MappingNode):
 argument_map = loader.construct_mapping(args)
 for key, value in argument_map.items():
@@ -55,7 +55,7 @@ def read_args(loader, args):
 
 def split_nodes(node_string):
 """Returns a list of nodes created from the given `node_string`"""
-
+log().debug("Splitting given node_string `{}`".format(node_string))
 return node_string.split(":")
 
 
@@ -63,7 +63,7 @@ class FameYamlLoader(yaml.SafeLoader):
 """Custom YAML Loader for `!include` constructor"""
 
 def __init__(self, stream: IO, path_resolver=PathResolver()) -> None:
-
+log().debug("Initialize custom YAML loader")
 self._path_resolver = path_resolver
 try:
 self._root_path = os.path.split(stream.name)[0]
@@ -99,12 +99,12 @@ def resolve_imported_path(loader: FameYamlLoader, included_path: str):
 cleaned_file_list = []
 for file in file_list:
 if fnmatch(file, ignore_filter):
-
+log().debug("Ignoring file {} due to prefix {}".format(file, DISABLING_YAML_FILE_PREFIX))
 else:
 cleaned_file_list.append(file)
 if not cleaned_file_list:
 log_and_raise_critical("Failed to find any file matching the `!include` directive `{}`".format(included_path))
-
+log().debug("Collected file(s) `{}` from given included path `{}`".format(cleaned_file_list, included_path))
 return cleaned_file_list
 
 
@@ -117,7 +117,7 @@ def read_data_from_file(file, node_address, path_resolver: PathResolver):
 data = data[node]
 except KeyError:
 log_and_raise_critical("'!include_node [{}, {}]': Cannot find '{}'.".format(file, node_address, node))
-
+log().debug("Searched file `{}` for node `{}`".format(file, node_address))
 return data
 
 
@@ -156,7 +156,7 @@ def construct_include(loader: FameYamlLoader, args: yaml.Node) -> Any:
 with open(file_name, "r") as open_file:
 data = read_data_from_file(open_file, nodes, loader.path_resolver)
 joined_data = join_data(data, joined_data)
-
+log().debug("Joined all files `{}` to joined data `{}`".format(files, joined_data))
 return joined_data
 
 
@@ -165,7 +165,7 @@ FameYamlLoader.add_constructor("!include", construct_include)
 
 def load_yaml(yaml_file_path: Path, path_resolver=PathResolver()):
 """Loads the yaml file from given `yaml_file_path` and returns its content"""
-
+log().info("Loading yaml from {}".format(yaml_file_path))
 with open(yaml_file_path, "r") as configfile:
 data = yaml.load(configfile, make_yaml_loader_builder(path_resolver))
 return data
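The added debug statements trace how `!include` directives are resolved (globbing, the `IGNORE_` prefix filter, and node lookups). A hedged usage sketch; the YAML layout in the comment is purely illustrative:

```python
from pathlib import Path

from fameio.source.loader import load_yaml

# scenario.yaml (illustrative content only):
#   Schema: !include "schema.yaml"
#   Contracts: !include "contracts/*.yaml"
# Files whose names start with "IGNORE_" are skipped while the !include glob is resolved.
scenario_data = load_yaml(Path("scenario.yaml"))
```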
fameio/source/logs.py
CHANGED
@@ -1,72 +1,102 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 
-import logging as
+import logging as pylog
+from enum import Enum
 from pathlib import Path
 from typing import NoReturn, Optional, List
 
-LOG_LEVELS = {
-"critical": log.CRITICAL,
-"error": log.ERROR,
-"warn": log.WARNING,
-"warning": log.WARNING,
-"info": log.INFO,
-"debug": log.DEBUG,
-}
 
-
+class LogLevel(Enum):
+"""Levels for Logging"""
+
+PRINT = 100
+CRITICAL = pylog.CRITICAL
+ERROR = pylog.ERROR
+WARN = pylog.WARNING
+WARNING = pylog.WARNING
+INFO = pylog.INFO
+DEBUG = pylog.DEBUG
+
+
+_loggers: List[pylog.Logger] = []
+_handlers: List[pylog.Handler] = []
 
 _FORMAT_NORMAL = "%(asctime)s — %(levelname)s — %(message)s" # noqa
 _FORMAT_DETAILLED = "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s" # noqa
 _TIME_FORMAT = "%H:%M:%S"
 
-
-_WARN_NOT_INITIALIZED = "Logger not initialised
+_INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: {}"
+_WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log level `WARNING`"
 
 LOGGER_NAME = "fameio"
+DEFAULT_LOG_LEVEL = LogLevel.WARNING
 
 
-def
-"""Returns already set up FAME-Io's logger or - if not set up - a new logger with
+def log() -> pylog.Logger:
+"""Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`"""
 if not _loggers:
-
-
+fameio_logger(DEFAULT_LOG_LEVEL.name)
+pylog.warning(_WARN_NOT_INITIALIZED)
 return _loggers[0]
 
 
 def log_and_raise_critical(message: str) -> NoReturn:
 """Raises a critical error and logs with given `error_message`"""
-
+log().critical(message)
 raise Exception(message)
 
 
 def log_error_and_raise(exception: Exception) -> NoReturn:
 """Raises the specified `exception` and logs an error with the same `message`"""
-
+log().error(str(exception))
 raise exception
 
 
-def
-"""
-
-
-
-
-
-
-
-
+def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
+"""
+Ensures a logger for fameio is present and uses the specified options
+
+Args:
+log_level_name: one of Python's official logging level names, e.g. "INFO"
+file_name: if present, logs are also written to the specified file path
+"""
+log_level = LogLevel[log_level_name.upper()]
+logger = _get_logger(log_level)
+
+formatter = _get_formatter(log_level)
+_add_handler(logger, pylog.StreamHandler(), formatter)
+if file_name:
+_add_handler(logger, pylog.FileHandler(file_name, mode="w"), formatter)
+
+if _loggers:
+pylog.info(_INFO_UPDATING_LOG_LEVEL.format(log_level_name))
+_loggers[0] = logger
 else:
-
+_loggers.append(logger)
+
+
+def _get_logger(level: LogLevel) -> pylog.Logger:
+"""
+Returns fameio logger with given log level without any handler and, not propagating to parent
 
+Args:
+level: integer representing the log level
 
-
-
-
+Returns:
+logger for fameio with specified level
+"""
+logger = pylog.getLogger(LOGGER_NAME)
+logger.setLevel(level.value)
+logger.propagate = False
+for handler in _handlers:
+logger.removeHandler(handler)
+_handlers.clear()
+return logger
 
 
-def _get_formatter(level:
+def _get_formatter(level: LogLevel) -> pylog.Formatter:
 """
 Returns a log formatter depending on the given log `level`
 Args:
@@ -74,10 +104,11 @@ def _get_formatter(level: int) -> log.Formatter:
 Returns:
 new log formatter
 """
-return
+return pylog.Formatter(_FORMAT_DETAILLED if level is LogLevel.DEBUG else _FORMAT_NORMAL, _TIME_FORMAT)
 
 
-def _add_handler(handler:
-"""Adds given `handler`
+def _add_handler(logger: pylog.Logger, handler: pylog.Handler, formatter: pylog.Formatter) -> None:
+"""Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list"""
 handler.setFormatter(formatter)
-
+_handlers.append(handler)
+logger.addHandler(handler)
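The rewritten module replaces the former `LOG_LEVELS` dict with a `LogLevel` enum and keeps the configured logger and its handlers in module-level lists, so repeated set-up replaces handlers instead of duplicating them (the fix referenced by #191). A minimal sketch of the intended call sequence; the log file name is a placeholder:

```python
from pathlib import Path

from fameio.source.logs import fameio_logger, log

# Configure (or re-configure) the "fameio" logger once per run; file_name optionally
# writes the log to disk in addition to the stream handler.
fameio_logger(log_level_name="DEBUG", file_name=Path("fameio.log"))

log().info("conversion started")
log().debug("details visible only at DEBUG level")
```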
fameio/source/results/conversion.py
CHANGED
@@ -9,7 +9,7 @@ import pandas as pd
 
 from fameio.source import FameTime
 from fameio.source.cli.options import TimeOptions, MergingOptions
-from fameio.source.logs import log_error_and_raise,
+from fameio.source.logs import log_error_and_raise, log
 from fameio.source.time import ConversionException
 
 _ERR_UNIMPLEMENTED = "Time conversion mode '{}' not implemented."
@@ -27,7 +27,7 @@ def apply_time_merging(data: Dict[Optional[str], pd.DataFrame], config: Optional
 Nothing - data is modified inplace
 """
 if config:
-
+log().debug(f"Grouping TimeSteps...")
 offset = config[MergingOptions.STEPS_BEFORE]
 period = config[MergingOptions.STEPS_AFTER] + config[MergingOptions.STEPS_BEFORE] + 1
 first_positive_focal_point = config[MergingOptions.FOCAL_POINT] % period
@@ -67,7 +67,7 @@ def apply_time_option(data: Dict[Optional[str], pd.DataFrame], mode: TimeOptions
 Nothing - data is modified inplace
 """
 if mode == TimeOptions.INT:
-
+log().debug("No time conversion...")
 elif mode == TimeOptions.UTC:
 _convert_time_index(data, "%Y-%m-%d %H:%M:%S")
 elif mode == TimeOptions.FAME:
@@ -88,7 +88,7 @@ def _convert_time_index(data: Dict[Optional[str], pd.DataFrame], datetime_format
 Returns:
 Nothing - data is modified inplace
 """
-
+log().debug(f"Converting TimeStep to format '{datetime_format}'...")
 for _, df in data.items():
 index_columns = df.index.names
 df.reset_index(inplace=True)
fameio/source/results/csv_writer.py
CHANGED
@@ -7,7 +7,7 @@ from typing import Dict, Union
 
 import pandas as pd
 
-from fameio.source.logs import
+from fameio.source.logs import log
 from fameio.source.results.data_transformer import INDEX
 from fameio.source.series import TimeSeriesManager
 from fameio.source.tools import ensure_path_exists
@@ -30,15 +30,15 @@ class CsvWriter:
 """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`"""
 if config_output:
 output_folder_name = config_output
-
+log().info(CsvWriter._INFO_USING_PATH.format(config_output))
 else:
 output_folder_name = input_file_path.stem
-
+log().info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
 return Path(output_folder_name)
 
 def _create_output_folder(self) -> None:
 """Creates output folder if not yet present"""
-
+log().debug("Creating output folder if required...")
 if not self._output_folder.is_dir():
 self._output_folder.mkdir(parents=True)
 