flixopt-1.0.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

flixOpt/config.py ADDED
@@ -0,0 +1,258 @@
+ import logging
+ import os
+ import types
+ from dataclasses import dataclass, fields, is_dataclass
+ from typing import Annotated, Literal, Optional
+
+ import yaml
+ from rich.console import Console
+ from rich.logging import RichHandler
+
+ logger = logging.getLogger('flixOpt')
+
+
+ def merge_configs(defaults: dict, overrides: dict) -> dict:
+     """
+     Merge the default configuration with user-provided overrides.
+
+     :param defaults: Default configuration dictionary.
+     :param overrides: User configuration dictionary.
+     :return: Merged configuration dictionary.
+     """
+     for key, value in overrides.items():
+         if isinstance(value, dict) and key in defaults and isinstance(defaults[key], dict):
+             # Recursively merge nested dictionaries
+             defaults[key] = merge_configs(defaults[key], value)
+         else:
+             # Override the default value
+             defaults[key] = value
+     return defaults
+
+
+ def dataclass_from_dict_with_validation(cls, data: dict):
+     """
+     Recursively initialize a dataclass from a dictionary.
+     """
+     if not is_dataclass(cls):
+         raise TypeError(f'{cls} must be a dataclass')
+
+     # Build kwargs for the dataclass constructor
+     kwargs = {}
+     for field in fields(cls):
+         field_name = field.name
+         field_type = field.type
+         field_value = data.get(field_name)
+
+         # If the field type is a dataclass and the value is a dict, recursively initialize
+         if is_dataclass(field_type) and isinstance(field_value, dict):
+             kwargs[field_name] = dataclass_from_dict_with_validation(field_type, field_value)
+         else:
+             kwargs[field_name] = field_value  # Pass as-is if no special handling is needed
+
+     return cls(**kwargs)
+
+
+ @dataclass()
+ class ValidatedConfig:
+     def __setattr__(self, name, value):
+         if field := self.__dataclass_fields__.get(name):
+             if metadata := getattr(field.type, '__metadata__', None):
+                 assert metadata[0](value), f'Invalid value passed to {name!r}: {value=}'
+         super().__setattr__(name, value)
+
+
+ @dataclass
+ class LoggingConfig(ValidatedConfig):
+     level: Annotated[
+         Literal['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+         lambda level: level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+     ]
+     file: Annotated[str, lambda file: isinstance(file, str)]
+     rich: Annotated[bool, lambda rich: isinstance(rich, bool)]
+
+
+ @dataclass
+ class ModelingConfig(ValidatedConfig):
+     BIG: Annotated[int, lambda x: isinstance(x, int)]
+     EPSILON: Annotated[float, lambda x: isinstance(x, float)]
+     BIG_BINARY_BOUND: Annotated[int, lambda x: isinstance(x, int)]
+
+
+ @dataclass
+ class ConfigSchema(ValidatedConfig):
+     config_name: Annotated[str, lambda x: isinstance(x, str)]
+     logging: LoggingConfig
+     modeling: ModelingConfig
+
+
+ class CONFIG:
+     """
+     A configuration class that stores global configuration values as class attributes.
+     """
+
+     config_name: Optional[str] = None
+     modeling: Optional[ModelingConfig] = None
+     logging: Optional[LoggingConfig] = None
+
+     @classmethod
+     def load_config(cls, user_config_file: Optional[str] = None):
+         """
+         Initialize the configuration from the defaults or a user-specified file.
+         """
+         # Default config file
+         default_config_path = os.path.join(os.path.dirname(__file__), 'config.yaml')
+
+         if user_config_file is None:
+             with open(default_config_path, 'r') as file:
+                 new_config = yaml.safe_load(file)
+         elif not os.path.exists(user_config_file):
+             raise FileNotFoundError(f'Config file not found: {user_config_file}')
+         else:
+             with open(user_config_file, 'r') as user_file:
+                 new_config = yaml.safe_load(user_file)
+
+         # Convert the loaded config to ConfigSchema
+         config_data = dataclass_from_dict_with_validation(ConfigSchema, new_config)
+
+         # Store the configuration in the class as class attributes
+         cls.logging = config_data.logging
+         cls.modeling = config_data.modeling
+         cls.config_name = config_data.config_name
+
+         setup_logging(default_level=cls.logging.level, log_file=cls.logging.file, use_rich_handler=cls.logging.rich)
+
+     @classmethod
+     def to_dict(cls):
+         """
+         Convert the configuration class into a dictionary for JSON serialization.
+         Handles dataclasses and simple types like str, int, etc.
+         """
+         config_dict = {}
+         for attribute, value in cls.__dict__.items():
+             # Only consider plain attributes (not methods, etc.)
+             if (
+                 not attribute.startswith('_')
+                 and not isinstance(value, (types.FunctionType, types.MethodType))
+                 and not isinstance(value, classmethod)
+             ):
+                 if is_dataclass(value):
+                     config_dict[attribute] = value.__dict__
+                 else:  # Assuming only basic types here!
+                     config_dict[attribute] = value
+
+         return config_dict
+
+
+ class MultilineFormater(logging.Formatter):
+     def format(self, record):
+         message_lines = record.getMessage().split('\n')
+
+         # Prepare the log prefix (timestamp + log level)
+         timestamp = self.formatTime(record, self.datefmt)
+         log_level = record.levelname.ljust(8)  # Align log levels for consistency
+         log_prefix = f'{timestamp} | {log_level} |'
+
+         # Format all lines
+         first_line = [f'{log_prefix} {message_lines[0]}']
+         if len(message_lines) > 1:
+             lines = first_line + [f'{log_prefix} {line}' for line in message_lines[1:]]
+         else:
+             lines = first_line
+
+         return '\n'.join(lines)
+
+
+ class ColoredMultilineFormater(MultilineFormater):
+     # ANSI escape codes for colors
+     COLORS = {
+         'DEBUG': '\033[32m',  # Green
+         'INFO': '\033[34m',  # Blue
+         'WARNING': '\033[33m',  # Yellow
+         'ERROR': '\033[31m',  # Red
+         'CRITICAL': '\033[1m\033[31m',  # Bold Red
+     }
+     RESET = '\033[0m'
+
+     def format(self, record):
+         lines = super().format(record).splitlines()
+         log_color = self.COLORS.get(record.levelname, self.RESET)
+
+         # Create a formatted message for each line separately
+         formatted_lines = []
+         for line in lines:
+             formatted_lines.append(f'{log_color}{line}{self.RESET}')
+
+         return '\n'.join(formatted_lines)
+
+
+ def _get_logging_handler(log_file: Optional[str] = None, use_rich_handler: bool = False) -> logging.Handler:
+     """Returns a logging handler for the given log file."""
+     if use_rich_handler and log_file is None:
+         # RichHandler for console output
+         console = Console(width=120)
+         rich_handler = RichHandler(
+             console=console,
+             rich_tracebacks=True,
+             omit_repeated_times=True,
+             show_path=False,
+             log_time_format='%Y-%m-%d %H:%M:%S',
+         )
+         rich_handler.setFormatter(logging.Formatter('%(message)s'))  # Simplified formatting
+
+         return rich_handler
+     elif log_file is None:
+         # Regular StreamHandler with custom formatting enabled
+         stream_handler = logging.StreamHandler()
+         stream_handler.setFormatter(
+             ColoredMultilineFormater(
+                 fmt='%(message)s',
+                 datefmt='%Y-%m-%d %H:%M:%S',
+             )
+         )
+         return stream_handler
+     else:
+         # FileHandler for file output
+         file_handler = logging.FileHandler(log_file)
+         file_handler.setFormatter(
+             MultilineFormater(
+                 fmt='%(message)s',
+                 datefmt='%Y-%m-%d %H:%M:%S',
+             )
+         )
+         return file_handler
+
+
+ def setup_logging(
+     default_level: Literal['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] = 'INFO',
+     log_file: Optional[str] = 'flixOpt.log',
+     use_rich_handler: bool = False,
+ ):
+     """Set up the logging configuration."""
+     logger = logging.getLogger('flixOpt')  # Use a specific logger name for the package
+     logger.setLevel(get_logging_level_by_name(default_level))
+     # Clear existing handlers
+     if logger.hasHandlers():
+         logger.handlers.clear()
+
+     logger.addHandler(_get_logging_handler(use_rich_handler=use_rich_handler))
+     if log_file is not None:
+         logger.addHandler(_get_logging_handler(log_file, use_rich_handler=False))
+
+     return logger
+
+
+ def get_logging_level_by_name(level_name: Literal['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']) -> int:
+     possible_logging_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
+     if level_name.upper() not in possible_logging_levels:
+         raise ValueError(f'Invalid logging level {level_name}')
+     return getattr(logging, level_name.upper())
+
+
+ def change_logging_level(level_name: Literal['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']):
+     logger = logging.getLogger('flixOpt')
+     logging_level = get_logging_level_by_name(level_name)
+     logger.setLevel(logging_level)
+     for handler in logger.handlers:
+         handler.setLevel(logging_level)
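
For orientation, a minimal usage sketch (not part of the package diff; the dictionaries and values are invented for illustration). It shows the deep merge and the assignment-time validation that ValidatedConfig.__setattr__ performs via the Annotated metadata lambdas:

from flixOpt.config import LoggingConfig, dataclass_from_dict_with_validation, merge_configs

defaults = {'logging': {'level': 'INFO', 'file': 'flixOpt.log', 'rich': False}}
overrides = {'logging': {'level': 'DEBUG'}}

# merge_configs mutates and returns `defaults`, recursing into nested dicts
merged = merge_configs(defaults, overrides)
assert merged['logging'] == {'level': 'DEBUG', 'file': 'flixOpt.log', 'rich': False}

# Each field assignment runs its Annotated lambda, so bad values fail fast
config = dataclass_from_dict_with_validation(LoggingConfig, merged['logging'])
print(config.level)  # DEBUG
# LoggingConfig(level='VERBOSE', file='flixOpt.log', rich=False)  # would raise AssertionError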
flixOpt/config.yaml ADDED
@@ -0,0 +1,10 @@
+ # Default configuration of flixOpt
+ config_name: flixOpt  # Name of the config. This has no effect on the configuration itself.
+ logging:
+   level: INFO
+   file: flixOpt.log
+   rich: false  # Format logging output with rich. Only advisable when using a proper terminal.
+ modeling:
+   BIG: 10000000  # YAML 1.1 does not parse 1e notation (e.g. 1e7) as a number
+   EPSILON: 0.00001
+   BIG_BINARY_BOUND: 100000
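
A hypothetical override file (the name my_config.yaml is invented) and how it would be loaded. Note that CONFIG.load_config reads the user file wholesale rather than merging it over these defaults, so an override file must supply every key:

# my_config.yaml (hypothetical)
config_name: my_run
logging:
  level: DEBUG
  file: my_run.log
  rich: false
modeling:
  BIG: 10000000
  EPSILON: 0.00001
  BIG_BINARY_BOUND: 100000

from flixOpt.config import CONFIG

CONFIG.load_config('my_config.yaml')  # omit the argument to load the bundled defaults
print(CONFIG.modeling.EPSILON)  # 1e-05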
flixOpt/core.py ADDED
@@ -0,0 +1,182 @@
+ """
+ This module contains the core functionality of the flixOpt framework.
+ It provides datatypes, logging functionality, and some functions to transform data structures.
+ """
+
+ import inspect
+ import logging
+ from typing import Any, Dict, List, Optional, Union
+
+ import numpy as np
+
+ from . import utils
+
+ logger = logging.getLogger('flixOpt')
+
+ Skalar = Union[int, float]  # Datatype
+ Numeric = Union[int, float, np.ndarray]  # Datatype
+
+
+ class TimeSeriesData:
+     # TODO: Move to Interface.py
+     def __init__(self, data: Numeric, agg_group: Optional[str] = None, agg_weight: Optional[float] = None):
+         """
+         Time series class for passing time series data together with special characteristics
+         of the time series, e.g. to define weights needed in calculation_type 'aggregated'.
+         EXAMPLE solar:
+         You have several solar time series. These should not be overweighted
+         compared to the remaining time series (e.g. heat load, price)!
+         fixed_relative_profile_solar1 = TimeSeriesData(sol_array_1, agg_group='solar')
+         fixed_relative_profile_solar2 = TimeSeriesData(sol_array_2, agg_group='solar')
+         fixed_relative_profile_solar3 = TimeSeriesData(sol_array_3, agg_group='solar')
+         --> these 3 series of the same group share one weight, i.e. each is internally assigned weight = 1/3
+         (instead of the standard weight = 1)
+
+         Parameters
+         ----------
+         data : Union[int, float, np.ndarray]
+             The time series data, which can be a scalar, array, or numpy array.
+         agg_group : str, optional
+             The group this TimeSeriesData is a part of. agg_weight is split between members of a group. Default is None.
+         agg_weight : float, optional
+             The weight for calculation_type 'aggregated', should be between 0 and 1. Default is None.
+
+         Raises
+         ------
+         Exception
+             If both agg_group and agg_weight are set, an exception is raised.
+         """
+         self.data = data
+         self.agg_group = agg_group
+         self.agg_weight = agg_weight
+         if (agg_group is not None) and (agg_weight is not None):
+             raise Exception('Either <agg_group> or explicit <agg_weight> can be used. Not both!')
+         self.label: Optional[str] = None
+
+     def __repr__(self):
+         # Get the constructor arguments and their current values
+         init_signature = inspect.signature(self.__init__)
+         init_args = init_signature.parameters
+
+         # Create a dictionary with argument names and their values
+         args_str = ', '.join(f'{name}={repr(getattr(self, name, None))}' for name in init_args if name != 'self')
+         return f'{self.__class__.__name__}({args_str})'
+
+     def __str__(self):
+         return str(self.data)
+
+
+ Numeric_TS = Union[
+     Skalar, np.ndarray, TimeSeriesData
+ ]  # TODO: This is not really correct throughout the codebase. Sometimes it's used for TimeSeries as well?
+
+
+ class TimeSeries:
+     """
+     Class for data that applies to time series, stored as a vector (np.ndarray) or scalar.
+
+     This class represents a vector or scalar value that makes the handling of time series easier.
+     It supports various operations such as activation of specific time indices, setting explicit active data, and
+     aggregation weight management.
+
+     Attributes
+     ----------
+     label : str
+         The label for the time series.
+     data : Optional[Numeric]
+         The actual data for the time series. Can be None.
+     aggregated_data : Optional[Numeric]
+         aggregated_data to use instead of data if provided.
+     active_indices : Optional[np.ndarray]
+         Indices of the time steps to activate.
+     aggregation_weight : float
+         Weight for the aggregation method, between 0 and 1, normally 1.
+     aggregation_group : str
+         Group for calculating the aggregation weight for the aggregation method.
+     """
+
+     def __init__(self, label: str, data: Optional[Numeric_TS]):
+         self.label: str = label
+         if isinstance(data, TimeSeriesData):
+             self.data = self.make_scalar_if_possible(data.data)
+             self.aggregation_weight, self.aggregation_group = data.agg_weight, data.agg_group
+             data.label = self.label  # Connecting User_time_series to real Time_series
+         else:
+             self.data = self.make_scalar_if_possible(data)
+             self.aggregation_weight, self.aggregation_group = None, None
+
+         self.active_indices: Optional[Union[range, List[int]]] = None
+         self.aggregated_data: Optional[Numeric] = None
+
+     def activate_indices(self, indices: Optional[Union[range, List[int]]], aggregated_data: Optional[Numeric] = None):
+         self.active_indices = indices
+
+         if aggregated_data is not None:
+             assert len(aggregated_data) == len(self.active_indices) or len(aggregated_data) == 1, (
+                 f'The aggregated_data has the wrong length for TimeSeries {self.label}. '
+                 f'Length should be: {len(self.active_indices)} or 1, but is {len(aggregated_data)}'
+             )
+             self.aggregated_data = self.make_scalar_if_possible(aggregated_data)
+
+     def clear_indices_and_aggregated_data(self):
+         self.active_indices = None
+         self.aggregated_data = None
+
+     @property
+     def active_data(self) -> Numeric:
+         if self.aggregated_data is not None:  # Aggregated data is always active, if present
+             return self.aggregated_data
+
+         indices_not_applicable = np.isscalar(self.data) or (self.data is None) or (self.active_indices is None)
+         if indices_not_applicable:
+             return self.data
+         else:
+             return self.data[self.active_indices]
+
+     @property
+     def active_data_vector(self) -> np.ndarray:
+         # Always returns the active data as a vector.
+         return utils.as_vector(self.active_data, len(self.active_indices))
+
+     @property
+     def is_scalar(self) -> bool:
+         return np.isscalar(self.data)
+
+     @property
+     def is_array(self) -> bool:
+         return not self.is_scalar and self.data is not None
+
+     def __repr__(self):
+         # Retrieve all attributes and their values
+         attrs = vars(self)
+         # Format each attribute as 'key=value'
+         attrs_str = ', '.join(f'{key}={value!r}' for key, value in attrs.items())
+         # Format the output as 'ClassName(attr1=value1, attr2=value2, ...)'
+         return f'{self.__class__.__name__}({attrs_str})'
+
+     def __str__(self):
+         return str(self.active_data)
+
+     @staticmethod
+     def make_scalar_if_possible(data: Optional[Numeric]) -> Optional[Numeric]:
+         """
+         Convert an array to a scalar if all values are equal, or return the array as-is.
+         Returns None if the passed data is None.
+
+         Parameters
+         ----------
+         data : Numeric, None
+             The data to process.
+
+         Returns
+         -------
+         Numeric
+             A scalar if all values in the array are equal, otherwise the array itself.
+             None, if the passed value is None.
+         """
+         # TODO: Should this really return None values?
+         if np.isscalar(data) or data is None:
+             return data
+         data = np.array(data)
+         if np.all(data == data[0]):
+             return data[0]
+         return data
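
A minimal sketch of the TimeSeries lifecycle (not part of the diff; the labels and data below are invented for illustration):

import numpy as np
from flixOpt.core import TimeSeries, TimeSeriesData

# Equal values are collapsed to a scalar by make_scalar_if_possible
heat_load = TimeSeries('heat_load', np.array([5.0, 5.0, 5.0, 5.0]))
print(heat_load.is_scalar)  # True

# Wrapping data in TimeSeriesData carries the aggregation metadata along
price = TimeSeries('price', TimeSeriesData(np.array([30.0, 45.0, 80.0, 45.0]), agg_group='market'))
print(price.aggregation_group)  # market

price.activate_indices([0, 1])
print(price.active_data)  # [30. 45.]

# Aggregated data, once set, takes precedence over the raw data
price.activate_indices([0, 1], aggregated_data=np.array([35.0, 40.0]))
print(price.active_data)  # [35. 40.]

price.clear_indices_and_aggregated_data()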