flixopt 1.0.12__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (73)
  1. docs/examples/00-Minimal Example.md +5 -0
  2. docs/examples/01-Basic Example.md +5 -0
  3. docs/examples/02-Complex Example.md +10 -0
  4. docs/examples/03-Calculation Modes.md +5 -0
  5. docs/examples/index.md +5 -0
  6. docs/faq/contribute.md +49 -0
  7. docs/faq/index.md +3 -0
  8. docs/images/architecture_flixOpt-pre2.0.0.png +0 -0
  9. docs/images/architecture_flixOpt.png +0 -0
  10. docs/images/flixopt-icon.svg +1 -0
  11. docs/javascripts/mathjax.js +18 -0
  12. docs/release-notes/_template.txt +32 -0
  13. docs/release-notes/index.md +7 -0
  14. docs/release-notes/v2.0.0.md +93 -0
  15. docs/release-notes/v2.0.1.md +12 -0
  16. docs/user-guide/Mathematical Notation/Bus.md +33 -0
  17. docs/user-guide/Mathematical Notation/Effects, Penalty & Objective.md +132 -0
  18. docs/user-guide/Mathematical Notation/Flow.md +26 -0
  19. docs/user-guide/Mathematical Notation/LinearConverter.md +21 -0
  20. docs/user-guide/Mathematical Notation/Piecewise.md +49 -0
  21. docs/user-guide/Mathematical Notation/Storage.md +44 -0
  22. docs/user-guide/Mathematical Notation/index.md +22 -0
  23. docs/user-guide/Mathematical Notation/others.md +3 -0
  24. docs/user-guide/index.md +124 -0
  25. {flixOpt → flixopt}/__init__.py +5 -2
  26. {flixOpt → flixopt}/aggregation.py +113 -140
  27. flixopt/calculation.py +455 -0
  28. {flixOpt → flixopt}/commons.py +7 -4
  29. flixopt/components.py +630 -0
  30. {flixOpt → flixopt}/config.py +9 -8
  31. {flixOpt → flixopt}/config.yaml +3 -3
  32. flixopt/core.py +970 -0
  33. flixopt/effects.py +386 -0
  34. flixopt/elements.py +534 -0
  35. flixopt/features.py +1042 -0
  36. flixopt/flow_system.py +409 -0
  37. flixopt/interface.py +265 -0
  38. flixopt/io.py +308 -0
  39. flixopt/linear_converters.py +331 -0
  40. flixopt/plotting.py +1340 -0
  41. flixopt/results.py +898 -0
  42. flixopt/solvers.py +77 -0
  43. flixopt/structure.py +630 -0
  44. flixopt/utils.py +62 -0
  45. flixopt-2.0.1.dist-info/METADATA +145 -0
  46. flixopt-2.0.1.dist-info/RECORD +57 -0
  47. {flixopt-1.0.12.dist-info → flixopt-2.0.1.dist-info}/WHEEL +1 -1
  48. flixopt-2.0.1.dist-info/top_level.txt +6 -0
  49. pics/architecture_flixOpt-pre2.0.0.png +0 -0
  50. pics/architecture_flixOpt.png +0 -0
  51. pics/flixopt-icon.svg +1 -0
  52. pics/pics.pptx +0 -0
  53. scripts/gen_ref_pages.py +54 -0
  54. site/release-notes/_template.txt +32 -0
  55. flixOpt/calculation.py +0 -629
  56. flixOpt/components.py +0 -614
  57. flixOpt/core.py +0 -182
  58. flixOpt/effects.py +0 -410
  59. flixOpt/elements.py +0 -489
  60. flixOpt/features.py +0 -942
  61. flixOpt/flow_system.py +0 -351
  62. flixOpt/interface.py +0 -203
  63. flixOpt/linear_converters.py +0 -325
  64. flixOpt/math_modeling.py +0 -1145
  65. flixOpt/plotting.py +0 -712
  66. flixOpt/results.py +0 -563
  67. flixOpt/solvers.py +0 -21
  68. flixOpt/structure.py +0 -733
  69. flixOpt/utils.py +0 -134
  70. flixopt-1.0.12.dist-info/METADATA +0 -174
  71. flixopt-1.0.12.dist-info/RECORD +0 -29
  72. flixopt-1.0.12.dist-info/top_level.txt +0 -3
  73. {flixopt-1.0.12.dist-info → flixopt-2.0.1.dist-info/licenses}/LICENSE +0 -0
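
Note on the rename visible in the list above (items 25-44 add flixopt/* while items 55-69 remove flixOpt/*): the top-level package name changes from flixOpt to flixopt, so downstream imports need updating. A minimal sketch of that change, assuming the rest of the public API is imported the same way as before:

    # flixopt 1.0.12 (old capitalised package name)
    # import flixOpt as fx

    # flixopt 2.0.1 (new lowercase package name)
    import flixopt as fx
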
flixopt/io.py ADDED
@@ -0,0 +1,308 @@
+import importlib.util
+import json
+import logging
+import pathlib
+import re
+from dataclasses import dataclass
+from typing import Dict, Literal, Optional, Tuple, Union
+
+import linopy
+import xarray as xr
+import yaml
+
+from .core import TimeSeries
+
+logger = logging.getLogger('flixopt')
+
+
+def replace_timeseries(obj, mode: Literal['name', 'stats', 'data'] = 'name'):
+    """
+    Recursively replace TimeSeries objects in nested dicts and lists.
+
+    Series whose values are all equal are replaced by that scalar. Otherwise, depending on
+    `mode`, a series is replaced by its name prefixed with '::::', by its stats, or kept as-is.
+    """
+    if isinstance(obj, dict):
+        return {k: replace_timeseries(v, mode) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [replace_timeseries(v, mode) for v in obj]
+    elif isinstance(obj, TimeSeries):  # Adjust this based on the actual class
+        if obj.all_equal:
+            return obj.active_data.values[0].item()
+        elif mode == 'name':
+            return f'::::{obj.name}'
+        elif mode == 'stats':
+            return obj.stats
+        elif mode == 'data':
+            return obj
+        else:
+            raise ValueError(f'Invalid mode {mode}')
+    else:
+        return obj
+
+
+def insert_dataarray(obj, ds: xr.Dataset):
+    """Recursively replaces strings prefixed with '::::' by the corresponding DataArray from the dataset."""
+    if isinstance(obj, dict):
+        return {k: insert_dataarray(v, ds) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [insert_dataarray(v, ds) for v in obj]
+    elif isinstance(obj, str) and obj.startswith('::::'):
+        da = ds[obj[4:]]
+        if da.isel(time=-1).isnull():
+            return da.isel(time=slice(0, -1))
+        return da
+    else:
+        return obj
+
+
+def remove_none_and_empty(obj):
+    """Recursively removes None values and empty dicts and lists from a dictionary or list."""
+
+    if isinstance(obj, dict):
+        return {
+            k: remove_none_and_empty(v)
+            for k, v in obj.items()
+            if not (v is None or (isinstance(v, (list, dict)) and not v))
+        }
+
+    elif isinstance(obj, list):
+        return [remove_none_and_empty(v) for v in obj if not (v is None or (isinstance(v, (list, dict)) and not v))]
+
+    else:
+        return obj
+
+
+def _save_to_yaml(data, output_file='formatted_output.yaml'):
+    """
+    Save dictionary data to YAML with proper multi-line string formatting.
+    Handles complex string patterns including backticks, special characters,
+    and various newline formats.
+
+    Args:
+        data (dict): Dictionary containing string data
+        output_file (str): Path to output YAML file
+    """
+    # Process strings to normalize all newlines and handle special patterns
+    processed_data = _process_complex_strings(data)
+
+    # Define a custom representer for strings
+    def represent_str(dumper, data):
+        # Use literal block style (|) for any string with newlines
+        if '\n' in data:
+            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
+
+        # Use quoted style for strings with special characters to ensure proper parsing
+        elif any(char in data for char in ':`{}[]#,&*!|>%@'):
+            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='"')
+
+        # Use plain style for simple strings
+        return dumper.represent_scalar('tag:yaml.org,2002:str', data)
+
+    # Add the string representer to SafeDumper
+    yaml.add_representer(str, represent_str, Dumper=yaml.SafeDumper)
+
+    # Write to file with settings that ensure proper formatting
+    with open(output_file, 'w', encoding='utf-8') as file:
+        yaml.dump(
+            processed_data,
+            file,
+            Dumper=yaml.SafeDumper,
+            sort_keys=False,  # Preserve dictionary order
+            default_flow_style=False,  # Use block style for mappings
+            width=float('inf'),  # Don't wrap long lines
+            allow_unicode=True,  # Support Unicode characters
+        )
+
+
+def _process_complex_strings(data):
+    """
+    Process dictionary data recursively with comprehensive string normalization.
+    Handles various types of strings and special formatting.
+
+    Args:
+        data: The data to process (dict, list, str, or other)
+
+    Returns:
+        Processed data with normalized strings
+    """
+    if isinstance(data, dict):
+        return {k: _process_complex_strings(v) for k, v in data.items()}
+    elif isinstance(data, list):
+        return [_process_complex_strings(item) for item in data]
+    elif isinstance(data, str):
+        # Step 1: Normalize line endings to \n
+        normalized = data.replace('\r\n', '\n').replace('\r', '\n')
+
+        # Step 2: Handle escaped newlines with robust regex
+        normalized = re.sub(r'(?<!\\)\\n', '\n', normalized)
+
+        # Step 3: Handle unnecessary double backslashes
+        normalized = re.sub(r'\\\\(n)', r'\\\1', normalized)
+
+        # Step 4: Ensure proper formatting of "[time: N]:\n---------"
+        normalized = re.sub(r'(\[time: \d+\]):\s*\\?n', r'\1:\n', normalized)
+
+        # Step 5: Ensure "Constraint `...`" patterns are properly formatted
+        normalized = re.sub(r'Constraint `([^`]+)`\\?n', r'Constraint `\1`\n', normalized)
+
+        return normalized
+    else:
+        return data
+
+
+def document_linopy_model(model: linopy.Model, path: Optional[pathlib.Path] = None) -> Dict[str, str]:
+    """
+    Convert all model variables and constraints to a structured string representation.
+    This can take multiple seconds for large models.
+    The output can be saved to a yaml file with readable formatting applied.
+
+    Args:
+        model (linopy.Model): The linopy model to document.
+        path (pathlib.Path, optional): Path to save the document. Defaults to None.
+    """
+    documentation = {
+        'objective': model.objective.__repr__(),
+        'termination_condition': model.termination_condition,
+        'status': model.status,
+        'nvars': model.nvars,
+        'nvarsbin': model.binaries.nvars if len(model.binaries) > 0 else 0,  # Temporary, waiting for linopy to fix
+        'nvarscont': model.continuous.nvars if len(model.continuous) > 0 else 0,  # Temporary, waiting for linopy to fix
+        'ncons': model.ncons,
+        'variables': {variable_name: variable.__repr__() for variable_name, variable in model.variables.items()},
+        'constraints': {
+            constraint_name: constraint.__repr__() for constraint_name, constraint in model.constraints.items()
+        },
+        'binaries': list(model.binaries),
+        'integers': list(model.integers),
+        'continuous': list(model.continuous),
+        'infeasible_constraints': '',
+    }
+
+    if model.status == 'warning':
+        logger.critical(f'The model has a warning status {model.status=}. Trying to extract infeasibilities')
+        try:
+            import io
+            from contextlib import redirect_stdout
+
+            f = io.StringIO()
+
+            # Redirect stdout to our buffer
+            with redirect_stdout(f):
+                model.print_infeasibilities()
+
+            documentation['infeasible_constraints'] = f.getvalue()
+        except NotImplementedError:
+            logger.critical(
+                'Infeasible constraints could not be retrieved. This functionality is only available with gurobi'
+            )
+            documentation['infeasible_constraints'] = 'Not possible to retrieve infeasible constraints'
+
+    if path is not None:
+        if path.suffix not in ['.yaml', '.yml']:
+            raise ValueError(f'Invalid file extension for path {path}. Only .yaml and .yml are supported')
+        _save_to_yaml(documentation, path)
+
+    return documentation
+
+
+def save_dataset_to_netcdf(
+    ds: xr.Dataset,
+    path: Union[str, pathlib.Path],
+    compression: int = 0,
+) -> None:
+    """
+    Save a dataset to a netcdf file. Store the attrs as a json string in the 'attrs' attribute.
+
+    Args:
+        ds: Dataset to save.
+        path: Path to save the dataset to.
+        compression: Compression level for the dataset (0-9). 0 means no compression. 5 is a good default.
+
+    Raises:
+        ValueError: If the path has an invalid file extension.
+    """
+    path = pathlib.Path(path)  # Accept plain strings as well
+    if path.suffix not in ['.nc', '.nc4']:
+        raise ValueError(f'Invalid file extension for path {path}. Only .nc and .nc4 are supported')
+
+    apply_encoding = False
+    if compression != 0:
+        if importlib.util.find_spec('netCDF4') is not None:
+            apply_encoding = True
+        else:
+            logger.warning(
+                'Dataset was exported without compression due to missing dependency "netcdf4". '
+                'Install netcdf4 via `pip install netcdf4`.'
+            )
+    ds = ds.copy(deep=True)
+    ds.attrs = {'attrs': json.dumps(ds.attrs)}
+    ds.to_netcdf(
+        path,
+        encoding=None
+        if not apply_encoding
+        else {data_var: {'zlib': True, 'complevel': compression} for data_var in ds.data_vars},
+    )
+
+
+def load_dataset_from_netcdf(path: Union[str, pathlib.Path]) -> xr.Dataset:
+    """
+    Load a dataset from a netcdf file. Load the attrs from the 'attrs' attribute.
+
+    Args:
+        path: Path to load the dataset from.
+
+    Returns:
+        Dataset: Loaded dataset.
+    """
+    ds = xr.load_dataset(path)
+    ds.attrs = json.loads(ds.attrs['attrs'])
+    return ds
+
+
+@dataclass
+class CalculationResultsPaths:
+    """Container for all paths related to saving CalculationResults."""
+
+    folder: pathlib.Path
+    name: str
+
+    def __post_init__(self):
+        """Initialize all path attributes."""
+        self._update_paths()
+
+    def _update_paths(self):
+        """Update all path attributes based on current folder and name."""
+        self.linopy_model = self.folder / f'{self.name}--linopy_model.nc4'
+        self.solution = self.folder / f'{self.name}--solution.nc4'
+        self.summary = self.folder / f'{self.name}--summary.yaml'
+        self.network = self.folder / f'{self.name}--network.json'
+        self.flow_system = self.folder / f'{self.name}--flow_system.nc4'
+        self.model_documentation = self.folder / f'{self.name}--model_documentation.yaml'
+
+    def all_paths(self) -> Dict[str, pathlib.Path]:
+        """Return a dictionary of all paths."""
+        return {
+            'linopy_model': self.linopy_model,
+            'solution': self.solution,
+            'summary': self.summary,
+            'network': self.network,
+            'flow_system': self.flow_system,
+            'model_documentation': self.model_documentation,
+        }
+
+    def create_folders(self, parents: bool = False) -> None:
+        """Ensure the folder exists.
+
+        Args:
+            parents: Whether to create the parent folders if they do not exist.
+        """
+        if not self.folder.exists():
+            try:
+                self.folder.mkdir(parents=parents)
+            except FileNotFoundError as e:
+                raise FileNotFoundError(
+                    f'Folder {self.folder} and its parent do not exist. Please create them first.'
+                ) from e
+
+    def update(self, new_name: Optional[str] = None, new_folder: Optional[pathlib.Path] = None) -> None:
+        """Update name and/or folder and refresh all paths."""
+        if new_name is not None:
+            self.name = new_name
+        if new_folder is not None:
+            if not new_folder.is_dir() or not new_folder.exists():
+                raise FileNotFoundError(f'Folder {new_folder} does not exist or is not a directory.')
+            self.folder = new_folder
+        self._update_paths()
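
For orientation, here is a small usage sketch (not part of the diff) exercising the new netCDF helpers added above. The file name, dataset contents, and compression level are illustrative; it assumes xarray, numpy, and pandas plus a netCDF backend are installed.

    import pathlib

    import numpy as np
    import pandas as pd
    import xarray as xr

    from flixopt.io import load_dataset_from_netcdf, save_dataset_to_netcdf

    # A toy dataset with metadata in attrs (values are made up).
    time = pd.date_range('2024-01-01', periods=24, freq='h')
    ds = xr.Dataset(
        {'heat_demand': ('time', np.random.rand(24))},
        coords={'time': time},
        attrs={'unit': 'MW', 'scenario': 'base'},
    )

    path = pathlib.Path('demand.nc4')
    save_dataset_to_netcdf(ds, path, compression=5)  # attrs are serialized into one JSON string
    restored = load_dataset_from_netcdf(path)        # ... and decoded back into a dict on load
    assert restored.attrs == ds.attrs
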
flixopt/linear_converters.py ADDED
@@ -0,0 +1,331 @@
+"""
+This module contains high-level classes to easily model a FlowSystem.
+"""
+
+import logging
+from typing import Dict, Optional
+
+import numpy as np
+
+from .components import LinearConverter
+from .core import NumericDataTS, TimeSeriesData
+from .elements import Flow
+from .interface import OnOffParameters
+from .structure import register_class_for_io
+
+logger = logging.getLogger('flixopt')
+
+
+@register_class_for_io
+class Boiler(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        eta: NumericDataTS,
+        Q_fu: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            eta: Thermal efficiency.
+            Q_fu: Fuel input-flow.
+            Q_th: Thermal output-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+        super().__init__(
+            label,
+            inputs=[Q_fu],
+            outputs=[Q_th],
+            conversion_factors=[{Q_fu.label: eta, Q_th.label: 1}],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+        self.Q_fu = Q_fu
+        self.Q_th = Q_th
+
+    @property
+    def eta(self):
+        return self.conversion_factors[0][self.Q_fu.label]
+
+    @eta.setter
+    def eta(self, value):
+        check_bounds(value, 'eta', self.label_full, 0, 1)
+        self.conversion_factors[0][self.Q_fu.label] = value
+
+
+@register_class_for_io
+class Power2Heat(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        eta: NumericDataTS,
+        P_el: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            eta: Thermal efficiency.
+            P_el: Electric input-flow.
+            Q_th: Thermal output-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+        super().__init__(
+            label,
+            inputs=[P_el],
+            outputs=[Q_th],
+            conversion_factors=[{P_el.label: eta, Q_th.label: 1}],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+
+        self.P_el = P_el
+        self.Q_th = Q_th
+
+    @property
+    def eta(self):
+        return self.conversion_factors[0][self.P_el.label]
+
+    @eta.setter
+    def eta(self, value):
+        check_bounds(value, 'eta', self.label_full, 0, 1)
+        self.conversion_factors[0][self.P_el.label] = value
+
+
+@register_class_for_io
+class HeatPump(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        COP: NumericDataTS,
+        P_el: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            COP: Coefficient of performance.
+            P_el: Electricity input-flow.
+            Q_th: Thermal output-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+        super().__init__(
+            label,
+            inputs=[P_el],
+            outputs=[Q_th],
+            conversion_factors=[{P_el.label: COP, Q_th.label: 1}],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+        self.P_el = P_el
+        self.Q_th = Q_th
+        self.COP = COP
+
+    @property
+    def COP(self):  # noqa: N802
+        return self.conversion_factors[0][self.P_el.label]
+
+    @COP.setter
+    def COP(self, value):  # noqa: N802
+        check_bounds(value, 'COP', self.label_full, 1, 20)
+        self.conversion_factors[0][self.P_el.label] = value
+
+
+@register_class_for_io
+class CoolingTower(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        specific_electricity_demand: NumericDataTS,
+        P_el: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            specific_electricity_demand: Auxiliary electricity demand per unit of cooling power, e.g. 0.02 (2 %).
+            P_el: Electricity input-flow.
+            Q_th: Thermal input-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+        super().__init__(
+            label,
+            inputs=[P_el, Q_th],
+            outputs=[],
+            conversion_factors=[{P_el.label: 1, Q_th.label: -specific_electricity_demand}],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+
+        self.P_el = P_el
+        self.Q_th = Q_th
+
+        check_bounds(specific_electricity_demand, 'specific_electricity_demand', self.label_full, 0, 1)
+
+    @property
+    def specific_electricity_demand(self):
+        return -self.conversion_factors[0][self.Q_th.label]
+
+    @specific_electricity_demand.setter
+    def specific_electricity_demand(self, value):
+        check_bounds(value, 'specific_electricity_demand', self.label_full, 0, 1)
+        self.conversion_factors[0][self.Q_th.label] = -value
+
+
+@register_class_for_io
+class CHP(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        eta_th: NumericDataTS,
+        eta_el: NumericDataTS,
+        Q_fu: Flow,
+        P_el: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            eta_th: Thermal efficiency.
+            eta_el: Electrical efficiency.
+            Q_fu: Fuel input-flow.
+            P_el: Electricity output-flow.
+            Q_th: Heat output-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+        heat = {Q_fu.label: eta_th, Q_th.label: 1}
+        electricity = {Q_fu.label: eta_el, P_el.label: 1}
+
+        super().__init__(
+            label,
+            inputs=[Q_fu],
+            outputs=[Q_th, P_el],
+            conversion_factors=[heat, electricity],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+
+        self.Q_fu = Q_fu
+        self.P_el = P_el
+        self.Q_th = Q_th
+
+        check_bounds(eta_el + eta_th, 'eta_th+eta_el', self.label_full, 0, 1)
+
+    @property
+    def eta_th(self):
+        return self.conversion_factors[0][self.Q_fu.label]
+
+    @eta_th.setter
+    def eta_th(self, value):
+        check_bounds(value, 'eta_th', self.label_full, 0, 1)
+        self.conversion_factors[0][self.Q_fu.label] = value
+
+    @property
+    def eta_el(self):
+        return self.conversion_factors[1][self.Q_fu.label]
+
+    @eta_el.setter
+    def eta_el(self, value):
+        check_bounds(value, 'eta_el', self.label_full, 0, 1)
+        self.conversion_factors[1][self.Q_fu.label] = value
+
+
+@register_class_for_io
+class HeatPumpWithSource(LinearConverter):
+    def __init__(
+        self,
+        label: str,
+        COP: NumericDataTS,
+        P_el: Flow,
+        Q_ab: Flow,
+        Q_th: Flow,
+        on_off_parameters: Optional[OnOffParameters] = None,
+        meta_data: Optional[Dict] = None,
+    ):
+        """
+        Args:
+            label: The label of the Element. Used to identify it in the FlowSystem.
+            COP: Coefficient of performance.
+            P_el: Electricity input-flow.
+            Q_ab: Heat source input-flow.
+            Q_th: Thermal output-flow.
+            on_off_parameters: Parameters defining the on/off behavior of the component.
+            meta_data: Used to store more information about the Element. Not used internally, but saved in the results. Only use python native types.
+        """
+
+        # super:
+        electricity = {P_el.label: COP, Q_th.label: 1}
+        heat_source = {Q_ab.label: COP / (COP - 1), Q_th.label: 1}
+
+        super().__init__(
+            label,
+            inputs=[P_el, Q_ab],
+            outputs=[Q_th],
+            conversion_factors=[electricity, heat_source],
+            on_off_parameters=on_off_parameters,
+            meta_data=meta_data,
+        )
+        self.P_el = P_el
+        self.Q_ab = Q_ab
+        self.Q_th = Q_th
+
+    @property
+    def COP(self):  # noqa: N802
+        return self.conversion_factors[0][self.P_el.label]
+
+    @COP.setter
+    def COP(self, value):  # noqa: N802
+        check_bounds(value, 'COP', self.label_full, 1, 20)
+        self.conversion_factors[0][self.P_el.label] = value
+        self.conversion_factors[1][self.Q_ab.label] = value / (value - 1)
+
+
+def check_bounds(
+    value: NumericDataTS,
+    parameter_label: str,
+    element_label: str,
+    lower_bound: NumericDataTS,
+    upper_bound: NumericDataTS,
+) -> None:
+    """
+    Check whether the value lies strictly within the bounds (the bounds are exclusive).
+    If not, log a warning.
+
+    Args:
+        value: The value to check.
+        parameter_label: The label of the parameter.
+        element_label: The label of the element.
+        lower_bound: The lower bound.
+        upper_bound: The upper bound.
+    """
+    if isinstance(value, TimeSeriesData):
+        value = value.data
+    if isinstance(lower_bound, TimeSeriesData):
+        lower_bound = lower_bound.data
+    if isinstance(upper_bound, TimeSeriesData):
+        upper_bound = upper_bound.data
+    if not np.all(value > lower_bound):
+        logger.warning(
+            f"'{element_label}.{parameter_label}' is equal to or below the common lower bound {lower_bound}."
+            f' {parameter_label}.min={np.min(value)}; {parameter_label}={value}'
+        )
+    if not np.all(value < upper_bound):
+        logger.warning(
+            f"'{element_label}.{parameter_label}' exceeds or matches the common upper bound {upper_bound}."
+            f' {parameter_label}.max={np.max(value)}; {parameter_label}={value}'
+        )
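
Finally, a short sketch (again not part of the diff) of the module-level check_bounds helper that all of the converter setters above call. It only logs a warning and never raises, so out-of-range data still enters the model; the numbers below are invented for illustration.

    import logging

    import numpy as np

    from flixopt.linear_converters import check_bounds

    logging.basicConfig(level=logging.WARNING)

    # An efficiency profile that is plausible except for one hour above 1.0.
    eta = np.array([0.85, 0.90, 1.05, 0.88])

    # Bounds are exclusive: values equal to a bound also trigger the warning.
    check_bounds(eta, 'eta', 'Boiler01', lower_bound=0, upper_bound=1)
    # Logs: 'Boiler01.eta' exceeds or matches the common upper bound 1. eta.max=1.05; eta=[...]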