flixopt 2.1.6-py3-none-any.whl → 2.1.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (45)
  1. docs/examples/00-Minimal Example.md +1 -1
  2. docs/examples/01-Basic Example.md +1 -1
  3. docs/examples/02-Complex Example.md +1 -1
  4. docs/examples/index.md +1 -1
  5. docs/faq/contribute.md +26 -14
  6. docs/faq/index.md +1 -1
  7. docs/javascripts/mathjax.js +1 -1
  8. docs/user-guide/Mathematical Notation/Bus.md +1 -1
  9. docs/user-guide/Mathematical Notation/Effects, Penalty & Objective.md +21 -21
  10. docs/user-guide/Mathematical Notation/Flow.md +3 -3
  11. docs/user-guide/Mathematical Notation/InvestParameters.md +3 -0
  12. docs/user-guide/Mathematical Notation/LinearConverter.md +5 -5
  13. docs/user-guide/Mathematical Notation/OnOffParameters.md +3 -0
  14. docs/user-guide/Mathematical Notation/Piecewise.md +1 -1
  15. docs/user-guide/Mathematical Notation/Storage.md +2 -2
  16. docs/user-guide/Mathematical Notation/index.md +1 -1
  17. docs/user-guide/Mathematical Notation/others.md +1 -1
  18. docs/user-guide/index.md +2 -2
  19. flixopt/__init__.py +4 -0
  20. flixopt/aggregation.py +33 -32
  21. flixopt/calculation.py +161 -65
  22. flixopt/components.py +687 -154
  23. flixopt/config.py +17 -8
  24. flixopt/core.py +69 -60
  25. flixopt/effects.py +146 -64
  26. flixopt/elements.py +297 -110
  27. flixopt/features.py +78 -71
  28. flixopt/flow_system.py +72 -50
  29. flixopt/interface.py +952 -113
  30. flixopt/io.py +15 -10
  31. flixopt/linear_converters.py +373 -81
  32. flixopt/network_app.py +445 -266
  33. flixopt/plotting.py +215 -87
  34. flixopt/results.py +382 -209
  35. flixopt/solvers.py +25 -21
  36. flixopt/structure.py +41 -39
  37. flixopt/utils.py +10 -7
  38. {flixopt-2.1.6.dist-info → flixopt-2.1.8.dist-info}/METADATA +64 -53
  39. flixopt-2.1.8.dist-info/RECORD +56 -0
  40. scripts/extract_release_notes.py +5 -5
  41. scripts/gen_ref_pages.py +1 -1
  42. flixopt-2.1.6.dist-info/RECORD +0 -54
  43. {flixopt-2.1.6.dist-info → flixopt-2.1.8.dist-info}/WHEEL +0 -0
  44. {flixopt-2.1.6.dist-info → flixopt-2.1.8.dist-info}/licenses/LICENSE +0 -0
  45. {flixopt-2.1.6.dist-info → flixopt-2.1.8.dist-info}/top_level.txt +0 -0
flixopt/results.py CHANGED
@@ -1,11 +1,12 @@
+ from __future__ import annotations
+
  import datetime
  import json
  import logging
  import pathlib
- from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Literal

  import linopy
- import matplotlib.pyplot as plt
  import numpy as np
  import pandas as pd
  import plotly
@@ -17,6 +18,7 @@ from . import plotting
  from .core import TimeSeriesCollection

  if TYPE_CHECKING:
+ import matplotlib.pyplot as plt
  import pyvis

  from .calculation import Calculation, SegmentedCalculation
@@ -26,55 +28,95 @@ logger = logging.getLogger('flixopt')


  class CalculationResults:
- """Results container for Calculation results.
-
- This class is used to collect the results of a Calculation.
- It provides access to component, bus, and effect
- results, and includes methods for filtering, plotting, and saving results.
-
- The recommended way to create instances is through the class methods
- `from_file()` or `from_calculation()`, rather than direct initialization.
+ """Comprehensive container for optimization calculation results and analysis tools.
+
+ This class provides unified access to all optimization results including flow rates,
+ component states, bus balances, and system effects. It offers powerful analysis
+ capabilities through filtering, plotting, and export functionality, making it
+ the primary interface for post-processing optimization results.
+
+ Key Features:
+ **Unified Access**: Single interface to all solution variables and constraints
+ **Element Results**: Direct access to component, bus, and effect-specific results
+ **Visualization**: Built-in plotting methods for heatmaps, time series, and networks
+ **Persistence**: Save/load functionality with compression for large datasets
+ **Analysis Tools**: Filtering, aggregation, and statistical analysis methods
+
+ Result Organization:
+ - **Components**: Equipment-specific results (flows, states, constraints)
+ - **Buses**: Network node balances and energy flows
+ - **Effects**: System-wide impacts (costs, emissions, resource consumption)
+ - **Solution**: Raw optimization variables and their values
+ - **Metadata**: Calculation parameters, timing, and system configuration

  Attributes:
- solution (xr.Dataset): Dataset containing optimization results.
- flow_system (xr.Dataset): Dataset containing the flow system.
- summary (Dict): Information about the calculation.
- name (str): Name identifier for the calculation.
- model (linopy.Model): The optimization model (if available).
- folder (pathlib.Path): Path to the results directory.
- components (Dict[str, ComponentResults]): Results for each component.
- buses (Dict[str, BusResults]): Results for each bus.
- effects (Dict[str, EffectResults]): Results for each effect.
- timesteps_extra (pd.DatetimeIndex): The extended timesteps.
- hours_per_timestep (xr.DataArray): Duration of each timestep in hours.
-
- Example:
- Load results from saved files:
-
- >>> results = CalculationResults.from_file('results_dir', 'optimization_run_1')
- >>> element_result = results['Boiler']
- >>> results.plot_heatmap('Boiler(Q_th)|flow_rate')
- >>> results.to_file(compression=5)
- >>> results.to_file(folder='new_results_dir', compression=5) # Save the results to a new folder
+ solution: Dataset containing all optimization variable solutions
+ flow_system: Dataset with complete system configuration and parameters. Restore the used FlowSystem for further analysis.
+ summary: Calculation metadata including solver status, timing, and statistics
+ name: Unique identifier for this calculation
+ model: Original linopy optimization model (if available)
+ folder: Directory path for result storage and loading
+ components: Dictionary mapping component labels to ComponentResults objects
+ buses: Dictionary mapping bus labels to BusResults objects
+ effects: Dictionary mapping effect names to EffectResults objects
+ timesteps_extra: Extended time index including boundary conditions
+ hours_per_timestep: Duration of each timestep for proper energy calculations
+
+ Examples:
+ Load and analyze saved results:
+
+ ```python
+ # Load results from file
+ results = CalculationResults.from_file('results', 'annual_optimization')
+
+ # Access specific component results
+ boiler_results = results['Boiler_01']
+ heat_pump_results = results['HeatPump_02']
+
+ # Plot component flow rates
+ results.plot_heatmap('Boiler_01(Natural_Gas)|flow_rate')
+ results['Boiler_01'].plot_node_balance()
+
+ # Access raw solution dataarrays
+ electricity_flows = results.solution[['Generator_01(Grid)|flow_rate', 'HeatPump_02(Grid)|flow_rate']]
+
+ # Filter and analyze results
+ peak_demand_hours = results.filter_solution(variable_dims='time')
+ costs_solution = results.effects['cost'].solution
+ ```
+
+ Advanced filtering and aggregation:
+
+ ```python
+ # Filter by variable type
+ scalar_results = results.filter_solution(variable_dims='scalar')
+ time_series = results.filter_solution(variable_dims='time')
+
+ # Custom data analysis leveraging xarray
+ peak_power = results.solution['Generator_01(Grid)|flow_rate'].max()
+ avg_efficiency = (
+ results.solution['HeatPump(Heat)|flow_rate'] / results.solution['HeatPump(Electricity)|flow_rate']
+ ).mean()
+ ```
+
+ Design Patterns:
+ **Factory Methods**: Use `from_file()` and `from_calculation()` for creation or access directly from `Calculation.results`
+ **Dictionary Access**: Use `results[element_label]` for element-specific results
+ **Lazy Loading**: Results objects created on-demand for memory efficiency
+ **Unified Interface**: Consistent API across different result types
+
  """

  @classmethod
- def from_file(cls, folder: Union[str, pathlib.Path], name: str):
- """Create CalculationResults instance by loading from saved files.
-
- This method loads the calculation results from previously saved files,
- including the solution, flow system, model (if available), and metadata.
+ def from_file(cls, folder: str | pathlib.Path, name: str) -> CalculationResults:
+ """Load CalculationResults from saved files.

  Args:
- folder: Path to the directory containing the saved files.
- name: Base name of the saved files (without file extensions).
+ folder: Directory containing saved files.
+ name: Base name of saved files (without extensions).

  Returns:
- CalculationResults: A new instance containing the loaded data.
-
- Raises:
- FileNotFoundError: If required files cannot be found.
- ValueError: If files exist but cannot be properly loaded.
+ CalculationResults: Loaded instance.
  """
  folder = pathlib.Path(folder)
  paths = fx_io.CalculationResultsPaths(folder, name)
@@ -87,7 +129,7 @@ class CalculationResults:
  except Exception as e:
  logger.critical(f'Could not load the linopy model "{name}" from file ("{paths.linopy_model}"): {e}')

- with open(paths.summary, 'r', encoding='utf-8') as f:
+ with open(paths.summary, encoding='utf-8') as f:
  summary = yaml.load(f, Loader=yaml.FullLoader)

  return cls(
@@ -100,21 +142,14 @@ class CalculationResults:
  )

  @classmethod
- def from_calculation(cls, calculation: 'Calculation'):
- """Create CalculationResults directly from a Calculation object.
-
- This method extracts the solution, flow system, and other relevant
- information directly from an existing Calculation object.
+ def from_calculation(cls, calculation: Calculation) -> CalculationResults:
+ """Create CalculationResults from a Calculation object.

  Args:
- calculation: A Calculation object containing a solved model.
+ calculation: Calculation object with solved model.

  Returns:
- CalculationResults: A new instance containing the results from
- the provided calculation.
-
- Raises:
- AttributeError: If the calculation doesn't have required attributes.
+ CalculationResults: New instance with extracted results.
  """
  return cls(
  solution=calculation.model.solution,
@@ -130,18 +165,20 @@ class CalculationResults:
  solution: xr.Dataset,
  flow_system: xr.Dataset,
  name: str,
- summary: Dict,
- folder: Optional[pathlib.Path] = None,
- model: Optional[linopy.Model] = None,
+ summary: dict,
+ folder: pathlib.Path | None = None,
+ model: linopy.Model | None = None,
  ):
- """
+ """Initialize CalculationResults with optimization data.
+ Usually, this class is instantiated by the Calculation class, or by loading from file.
+
  Args:
- solution: The solution of the optimization.
- flow_system: The flow_system that was used to create the calculation as a datatset.
- name: The name of the calculation.
- summary: Information about the calculation,
- folder: The folder where the results are saved.
- model: The linopy model that was used to solve the calculation.
+ solution: Optimization solution dataset.
+ flow_system: Flow system configuration dataset.
+ name: Calculation name.
+ summary: Calculation metadata.
+ folder: Results storage folder.
+ model: Linopy optimization model.
  """
  self.solution = solution
  self.flow_system = flow_system
@@ -162,7 +199,7 @@ class CalculationResults:
  self.timesteps_extra = self.solution.indexes['time']
  self.hours_per_timestep = TimeSeriesCollection.calculate_hours_per_timestep(self.timesteps_extra)

- def __getitem__(self, key: str) -> Union['ComponentResults', 'BusResults', 'EffectResults']:
+ def __getitem__(self, key: str) -> ComponentResults | BusResults | EffectResults:
  if key in self.components:
  return self.components[key]
  if key in self.buses:
@@ -172,39 +209,40 @@ class CalculationResults:
  raise KeyError(f'No element with label {key} found.')

  @property
- def storages(self) -> List['ComponentResults']:
- """All storages in the results."""
+ def storages(self) -> list[ComponentResults]:
+ """Get all storage components in the results."""
  return [comp for comp in self.components.values() if comp.is_storage]

  @property
  def objective(self) -> float:
- """The objective result of the optimization."""
+ """Get optimization objective value."""
  return self.summary['Main Results']['Objective']

  @property
  def variables(self) -> linopy.Variables:
- """The variables of the optimization. Only available if the linopy.Model is available."""
+ """Get optimization variables (requires linopy model)."""
  if self.model is None:
  raise ValueError('The linopy model is not available.')
  return self.model.variables

  @property
  def constraints(self) -> linopy.Constraints:
- """The constraints of the optimization. Only available if the linopy.Model is available."""
+ """Get optimization constraints (requires linopy model)."""
  if self.model is None:
  raise ValueError('The linopy model is not available.')
  return self.model.constraints

  def filter_solution(
- self, variable_dims: Optional[Literal['scalar', 'time']] = None, element: Optional[str] = None
+ self, variable_dims: Literal['scalar', 'time'] | None = None, element: str | None = None
  ) -> xr.Dataset:
- """
- Filter the solution to a specific variable dimension and element.
- If no element is specified, all elements are included.
+ """Filter solution by variable dimension and/or element.

  Args:
- variable_dims: The dimension of the variables to filter for.
- element: The element to filter for.
+ variable_dims: Variable dimension to filter ('scalar' or 'time').
+ element: Element label to filter.
+
+ Returns:
+ xr.Dataset: Filtered solution dataset.
  """
  if element is not None:
  return filter_dataset(self[element].solution, variable_dims)
@@ -216,10 +254,10 @@ class CalculationResults:
  heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] = 'D',
  heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] = 'h',
  color_map: str = 'portland',
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  engine: plotting.PlottingEngine = 'plotly',
- ) -> Union[plotly.graph_objs.Figure, Tuple[plt.Figure, plt.Axes]]:
+ ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]:
  return plot_heatmap(
  dataarray=self.solution[variable_name],
  name=variable_name,
@@ -234,16 +272,22 @@

  def plot_network(
  self,
- controls: Union[
- bool,
- List[
+ controls: (
+ bool
+ | list[
  Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
- ],
- ] = True,
- path: Optional[pathlib.Path] = None,
+ ]
+ ) = True,
+ path: pathlib.Path | None = None,
  show: bool = False,
- ) -> 'pyvis.network.Network':
- """See flixopt.flow_system.FlowSystem.plot_network"""
+ ) -> pyvis.network.Network | None:
+ """Plot interactive network visualization of the system.
+
+ Args:
+ controls: Enable/disable interactive controls.
+ path: Save path for network HTML.
+ show: Whether to display the plot.
+ """
  try:
  from .flow_system import FlowSystem

@@ -257,21 +301,20 @@

  def to_file(
  self,
- folder: Optional[Union[str, pathlib.Path]] = None,
- name: Optional[str] = None,
+ folder: str | pathlib.Path | None = None,
+ name: str | None = None,
  compression: int = 5,
  document_model: bool = True,
  save_linopy_model: bool = False,
  ):
- """
- Save the results to a file
+ """Save results to files.
+
  Args:
- folder: The folder where the results should be saved. Defaults to the folder of the calculation.
- name: The name of the results file. If not provided, Defaults to the name of the calculation.
- compression: The compression level to use when saving the solution file (0-9). 0 means no compression.
- document_model: Wether to document the mathematical formulations in the model.
- save_linopy_model: Wether to save the model to file. If True, the (linopy) model is saved as a .nc4 file.
- The model file size is rougly 100 times larger than the solution file.
+ folder: Save folder (defaults to calculation folder).
+ name: File name (defaults to calculation name).
+ compression: Compression level 0-9.
+ document_model: Whether to document model formulations as yaml.
+ save_linopy_model: Whether to save linopy model file.
  """
  folder = self.folder if folder is None else pathlib.Path(folder)
  name = self.name if name is None else name
@@ -308,11 +351,11 @@ class CalculationResults:

  class _ElementResults:
  @classmethod
- def from_json(cls, calculation_results, json_data: Dict) -> '_ElementResults':
+ def from_json(cls, calculation_results, json_data: dict) -> _ElementResults:
  return cls(calculation_results, json_data['label'], json_data['variables'], json_data['constraints'])

  def __init__(
- self, calculation_results: CalculationResults, label: str, variables: List[str], constraints: List[str]
+ self, calculation_results: CalculationResults, label: str, variables: list[str], constraints: list[str]
  ):
  self._calculation_results = calculation_results
  self.label = label
@@ -323,11 +366,10 @@ class _ElementResults:

  @property
  def variables(self) -> linopy.Variables:
- """
- Returns the variables of the element.
+ """Get element variables (requires linopy model).

  Raises:
- ValueError: If the linopy model is not availlable.
+ ValueError: If linopy model is unavailable.
  """
  if self._calculation_results.model is None:
  raise ValueError('The linopy model is not available.')
@@ -335,29 +377,30 @@ class _ElementResults:

  @property
  def constraints(self) -> linopy.Constraints:
- """
- Returns the variables of the element.
+ """Get element constraints (requires linopy model).

  Raises:
- ValueError: If the linopy model is not availlable.
+ ValueError: If linopy model is unavailable.
  """
  if self._calculation_results.model is None:
  raise ValueError('The linopy model is not available.')
  return self._calculation_results.model.constraints[self._constraint_names]

- def filter_solution(self, variable_dims: Optional[Literal['scalar', 'time']] = None) -> xr.Dataset:
- """
- Filter the solution of the element by dimension.
+ def filter_solution(self, variable_dims: Literal['scalar', 'time'] | None = None) -> xr.Dataset:
+ """Filter element solution by dimension.

  Args:
- variable_dims: The dimension of the variables to filter for.
+ variable_dims: Variable dimension to filter.
+
+ Returns:
+ xr.Dataset: Filtered solution dataset.
  """
  return filter_dataset(self.solution, variable_dims)


  class _NodeResults(_ElementResults):
  @classmethod
- def from_json(cls, calculation_results, json_data: Dict) -> '_NodeResults':
+ def from_json(cls, calculation_results, json_data: dict) -> _NodeResults:
  return cls(
  calculation_results,
  json_data['label'],
@@ -371,10 +414,10 @@ class _NodeResults(_ElementResults):
  self,
  calculation_results: CalculationResults,
  label: str,
- variables: List[str],
- constraints: List[str],
- inputs: List[str],
- outputs: List[str],
+ variables: list[str],
+ constraints: list[str],
+ inputs: list[str],
+ outputs: list[str],
  ):
  super().__init__(calculation_results, label, variables, constraints)
  self.inputs = inputs
@@ -382,17 +425,21 @@ class _NodeResults(_ElementResults):

  def plot_node_balance(
  self,
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  colors: plotting.ColorType = 'viridis',
  engine: plotting.PlottingEngine = 'plotly',
- ) -> Union[plotly.graph_objs.Figure, Tuple[plt.Figure, plt.Axes]]:
- """
- Plots the node balance of the Component or Bus.
+ ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]:
+ """Plot node balance flows.
+
  Args:
- save: Whether to save the plot or not. If a path is provided, the plot will be saved at that location.
- show: Whether to show the plot or not.
- engine: The engine to use for plotting. Can be either 'plotly' or 'matplotlib'.
+ save: Whether to save plot (path or boolean).
+ show: Whether to display plot.
+ colors: Color scheme. Also see plotly.
+ engine: Plotting engine ('plotly' or 'matplotlib').
+
+ Returns:
+ Figure object.
  """
  if engine == 'plotly':
  figure_like = plotting.with_plotly(
@@ -427,20 +474,19 @@ class _NodeResults(_ElementResults):
  lower_percentage_group: float = 5,
  colors: plotting.ColorType = 'viridis',
  text_info: str = 'percent+label+value',
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  engine: plotting.PlottingEngine = 'plotly',
- ) -> plotly.graph_objects.Figure:
- """
- Plots a pie chart of the flow hours of the inputs and outputs of buses or components.
+ ) -> plotly.graph_objs.Figure | tuple[plt.Figure, list[plt.Axes]]:
+ """Plot pie chart of flow hours distribution.

  Args:
- colors: a colorscale or a list of colors to use for the plot
- lower_percentage_group: The percentage of flow_hours that is grouped in "Others" (0...100)
- text_info: What information to display on the pie plot
- save: Whether to save the figure.
- show: Whether to show the figure.
- engine: Plotting engine to use. Only 'plotly' is implemented atm.
+ lower_percentage_group: Percentage threshold for "Others" grouping.
+ colors: Color scheme. Also see plotly.
+ text_info: Information to display on pie slices.
+ save: Whether to save plot.
+ show: Whether to display plot.
+ engine: Plotting engine ('plotly' or 'matplotlib').
  """
  inputs = (
  sanitize_dataset(
@@ -501,7 +547,7 @@ class _NodeResults(_ElementResults):
  self,
  negate_inputs: bool = True,
  negate_outputs: bool = False,
- threshold: Optional[float] = 1e-5,
+ threshold: float | None = 1e-5,
  with_last_timestep: bool = False,
  ) -> xr.Dataset:
  return sanitize_dataset(
@@ -521,11 +567,11 @@ class _NodeResults(_ElementResults):


  class BusResults(_NodeResults):
- """Results for a Bus"""
+ """Results container for energy/material balance nodes in the system."""


  class ComponentResults(_NodeResults):
- """Results for a Component"""
+ """Results container for individual system components with specialized analysis tools."""

  @property
  def is_storage(self) -> bool:
@@ -537,28 +583,31 @@ class ComponentResults(_NodeResults):

  @property
  def charge_state(self) -> xr.DataArray:
- """Get the solution of the charge state of the Storage."""
+ """Get storage charge state solution."""
  if not self.is_storage:
  raise ValueError(f'Cant get charge_state. "{self.label}" is not a storage')
  return self.solution[self._charge_state]

  def plot_charge_state(
  self,
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  colors: plotting.ColorType = 'viridis',
  engine: plotting.PlottingEngine = 'plotly',
  ) -> plotly.graph_objs.Figure:
- """
- Plots the charge state of a Storage.
+ """Plot storage charge state over time, combined with the node balance.
+
  Args:
- save: Whether to save the plot or not. If a path is provided, the plot will be saved at that location.
- show: Whether to show the plot or not.
- colors: The c
- engine: Plotting engine to use. Only 'plotly' is implemented atm.
+ save: Whether to save plot.
+ show: Whether to display plot.
+ colors: Color scheme. Also see plotly.
+ engine: Plotting engine (only 'plotly' supported).
+
+ Returns:
+ plotly.graph_objs.Figure: Charge state plot.

  Raises:
- ValueError: If the Component is not a Storage.
+ ValueError: If component is not a storage.
  """
  if engine != 'plotly':
  raise NotImplementedError(
@@ -594,17 +643,20 @@ class ComponentResults(_NodeResults):
  )

  def node_balance_with_charge_state(
- self, negate_inputs: bool = True, negate_outputs: bool = False, threshold: Optional[float] = 1e-5
+ self, negate_inputs: bool = True, negate_outputs: bool = False, threshold: float | None = 1e-5
  ) -> xr.Dataset:
- """
- Returns a dataset with the node balance of the Storage including its charge state.
+ """Get storage node balance including charge state.
+
  Args:
- negate_inputs: Whether to negate the inputs of the Storage.
- negate_outputs: Whether to negate the outputs of the Storage.
- threshold: The threshold for small values.
+ negate_inputs: Whether to negate input flows.
+ negate_outputs: Whether to negate output flows.
+ threshold: Threshold for small values.
+
+ Returns:
+ xr.Dataset: Node balance with charge state.

  Raises:
- ValueError: If the Component is not a Storage.
+ ValueError: If component is not a storage.
  """
  if not self.is_storage:
  raise ValueError(f'Cant get charge_state. "{self.label}" is not a storage')
@@ -629,17 +681,115 @@ class EffectResults(_ElementResults):
  """Results for an Effect"""

  def get_shares_from(self, element: str):
- """Get the shares from an Element (without subelements) to the Effect"""
+ """Get effect shares from specific element.
+
+ Args:
+ element: Element label to get shares from.
+
+ Returns:
+ xr.Dataset: Element shares to this effect.
+ """
  return self.solution[[name for name in self._variable_names if name.startswith(f'{element}->')]]


  class SegmentedCalculationResults:
- """
- Class to store the results of a SegmentedCalculation.
+ """Results container for segmented optimization calculations with temporal decomposition.
+
+ This class manages results from SegmentedCalculation runs where large optimization
+ problems are solved by dividing the time horizon into smaller, overlapping segments.
+ It provides unified access to results across all segments while maintaining the
+ ability to analyze individual segment behavior.
+
+ Key Features:
+ **Unified Time Series**: Automatically assembles results from all segments into
+ continuous time series, removing overlaps and boundary effects
+ **Segment Analysis**: Access individual segment results for debugging and validation
+ **Consistency Checks**: Verify solution continuity at segment boundaries
+ **Memory Efficiency**: Handles large datasets that exceed single-segment memory limits
+
+ Temporal Handling:
+ The class manages the complex task of combining overlapping segment solutions
+ into coherent time series, ensuring proper treatment of:
+ - Storage state continuity between segments
+ - Flow rate transitions at segment boundaries
+ - Aggregated results over the full time horizon
+
+ Examples:
+ Load and analyze segmented results:
+
+ ```python
+ # Load segmented calculation results
+ results = SegmentedCalculationResults.from_file('results', 'annual_segmented')
+
+ # Access unified results across all segments
+ full_timeline = results.all_timesteps
+ total_segments = len(results.segment_results)
+
+ # Analyze individual segments
+ for i, segment in enumerate(results.segment_results):
+ print(f'Segment {i + 1}: {len(segment.solution.time)} timesteps')
+ segment_costs = segment.effects['cost'].total_value
+
+ # Check solution continuity at boundaries
+ segment_boundaries = results.get_boundary_analysis()
+ max_discontinuity = segment_boundaries['max_storage_jump']
+ ```
+
+ Create from segmented calculation:
+
+ ```python
+ # After running segmented calculation
+ segmented_calc = SegmentedCalculation(
+ name='annual_system',
+ flow_system=system,
+ timesteps_per_segment=730, # Monthly segments
+ overlap_timesteps=48, # 2-day overlap
+ )
+ segmented_calc.do_modeling_and_solve(solver='gurobi')
+
+ # Extract unified results
+ results = SegmentedCalculationResults.from_calculation(segmented_calc)
+
+ # Save combined results
+ results.to_file(compression=5)
+ ```
+
+ Performance analysis across segments:
+
+ ```python
+ # Compare segment solve times
+ solve_times = [seg.summary['durations']['solving'] for seg in results.segment_results]
+ avg_solve_time = sum(solve_times) / len(solve_times)
+
+ # Verify solution quality consistency
+ segment_objectives = [seg.summary['objective_value'] for seg in results.segment_results]
+
+ # Storage continuity analysis
+ if 'Battery' in results.segment_results[0].components:
+ storage_continuity = results.check_storage_continuity('Battery')
+ ```
+
+ Design Considerations:
+ **Boundary Effects**: Monitor solution quality at segment interfaces where
+ foresight is limited compared to full-horizon optimization.
+
+ **Memory Management**: Individual segment results are maintained for detailed
+ analysis while providing unified access for system-wide metrics.
+
+ **Validation Tools**: Built-in methods to verify temporal consistency and
+ identify potential issues from segmentation approach.
+
+ Common Use Cases:
+ - **Large-Scale Analysis**: Annual or multi-year optimization results
+ - **Memory-Constrained Systems**: Results from systems exceeding hardware limits
+ - **Segment Validation**: Verifying segmentation approach effectiveness
+ - **Performance Monitoring**: Comparing segmented vs. full-horizon solutions
+ - **Debugging**: Identifying issues specific to temporal decomposition
+
  """

  @classmethod
- def from_calculation(cls, calculation: 'SegmentedCalculation'):
+ def from_calculation(cls, calculation: SegmentedCalculation):
  return cls(
  [calc.results for calc in calculation.sub_calculations],
  all_timesteps=calculation.all_timesteps,
@@ -650,13 +800,20 @@ class SegmentedCalculationResults:
  )

  @classmethod
- def from_file(cls, folder: Union[str, pathlib.Path], name: str):
- """Create SegmentedCalculationResults directly from file"""
+ def from_file(cls, folder: str | pathlib.Path, name: str):
+ """Load SegmentedCalculationResults from saved files.
+
+ Args:
+ folder: Directory containing saved files.
+ name: Base name of saved files.
+
+ Returns:
+ SegmentedCalculationResults: Loaded instance.
+ """
  folder = pathlib.Path(folder)
  path = folder / name
- nc_file = path.with_suffix('.nc4')
- logger.info(f'loading calculation "{name}" from file ("{nc_file}")')
- with open(path.with_suffix('.json'), 'r', encoding='utf-8') as f:
+ logger.info(f'loading calculation "{name}" from file ("{path.with_suffix(".nc4")}")')
+ with open(path.with_suffix('.json'), encoding='utf-8') as f:
  meta_data = json.load(f)
  return cls(
  [CalculationResults.from_file(folder, name) for name in meta_data['sub_calculations']],
@@ -671,12 +828,12 @@ class SegmentedCalculationResults:

  def __init__(
  self,
- segment_results: List[CalculationResults],
+ segment_results: list[CalculationResults],
  all_timesteps: pd.DatetimeIndex,
  timesteps_per_segment: int,
  overlap_timesteps: int,
  name: str,
- folder: Optional[pathlib.Path] = None,
+ folder: pathlib.Path | None = None,
  ):
  self.segment_results = segment_results
  self.all_timesteps = all_timesteps
@@ -687,7 +844,7 @@ class SegmentedCalculationResults:
  self.hours_per_timestep = TimeSeriesCollection.calculate_hours_per_timestep(self.all_timesteps)

  @property
- def meta_data(self) -> Dict[str, Union[int, List[str]]]:
+ def meta_data(self) -> dict[str, int | list[str]]:
  return {
  'all_timesteps': [datetime.datetime.isoformat(date) for date in self.all_timesteps],
  'timesteps_per_segment': self.timesteps_per_segment,
@@ -696,11 +853,18 @@ class SegmentedCalculationResults:
  }

  @property
- def segment_names(self) -> List[str]:
+ def segment_names(self) -> list[str]:
  return [segment.name for segment in self.segment_results]

  def solution_without_overlap(self, variable_name: str) -> xr.DataArray:
- """Returns the solution of a variable without overlapping timesteps"""
+ """Get variable solution removing segment overlaps.
+
+ Args:
+ variable_name: Name of variable to extract.
+
+ Returns:
+ xr.DataArray: Continuous solution without overlaps.
+ """
  dataarrays = [
  result.solution[variable_name].isel(time=slice(None, self.timesteps_per_segment))
  for result in self.segment_results[:-1]
@@ -713,21 +877,23 @@ class SegmentedCalculationResults:
  heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] = 'D',
  heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] = 'h',
  color_map: str = 'portland',
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  engine: plotting.PlottingEngine = 'plotly',
- ) -> Union[plotly.graph_objs.Figure, Tuple[plt.Figure, plt.Axes]]:
- """
- Plots a heatmap of the solution of a variable.
+ ) -> plotly.graph_objs.Figure | tuple[plt.Figure, plt.Axes]:
+ """Plot heatmap of variable solution across segments.

  Args:
- variable_name: The name of the variable to plot.
- heatmap_timeframes: The timeframes to use for the heatmap.
- heatmap_timesteps_per_frame: The timesteps per frame to use for the heatmap.
- color_map: The color map to use for the heatmap.
- save: Whether to save the plot or not. If a path is provided, the plot will be saved at that location.
- show: Whether to show the plot or not.
- engine: The engine to use for plotting. Can be either 'plotly' or 'matplotlib'.
+ variable_name: Variable to plot.
+ heatmap_timeframes: Time aggregation level.
+ heatmap_timesteps_per_frame: Timesteps per frame.
+ color_map: Color scheme. Also see plotly.
+ save: Whether to save plot.
+ show: Whether to display plot.
+ engine: Plotting engine.
+
+ Returns:
+ Figure object.
  """
  return plot_heatmap(
  dataarray=self.solution_without_overlap(variable_name),
@@ -741,10 +907,14 @@ class SegmentedCalculationResults:
  engine=engine,
  )

- def to_file(
- self, folder: Optional[Union[str, pathlib.Path]] = None, name: Optional[str] = None, compression: int = 5
- ):
- """Save the results to a file"""
+ def to_file(self, folder: str | pathlib.Path | None = None, name: str | None = None, compression: int = 5):
+ """Save segmented results to files.
+
+ Args:
+ folder: Save folder (defaults to instance folder).
+ name: File name (defaults to instance name).
+ compression: Compression level 0-9.
+ """
  folder = self.folder if folder is None else pathlib.Path(folder)
  name = self.name if name is None else name
  path = folder / name
@@ -770,23 +940,25 @@ def plot_heatmap(
  heatmap_timeframes: Literal['YS', 'MS', 'W', 'D', 'h', '15min', 'min'] = 'D',
  heatmap_timesteps_per_frame: Literal['W', 'D', 'h', '15min', 'min'] = 'h',
  color_map: str = 'portland',
- save: Union[bool, pathlib.Path] = False,
+ save: bool | pathlib.Path = False,
  show: bool = True,
  engine: plotting.PlottingEngine = 'plotly',
  ):
- """
- Plots a heatmap of the solution of a variable.
+ """Plot heatmap of time series data.

  Args:
- dataarray: The dataarray to plot.
- name: The name of the variable to plot.
- folder: The folder to save the plot to.
- heatmap_timeframes: The timeframes to use for the heatmap.
- heatmap_timesteps_per_frame: The timesteps per frame to use for the heatmap.
- color_map: The color map to use for the heatmap.
- save: Whether to save the plot or not. If a path is provided, the plot will be saved at that location.
- show: Whether to show the plot or not.
- engine: The engine to use for plotting. Can be either 'plotly' or 'matplotlib'.
+ dataarray: Data to plot.
+ name: Variable name for title.
+ folder: Save folder.
+ heatmap_timeframes: Time aggregation level.
+ heatmap_timesteps_per_frame: Timesteps per frame.
+ color_map: Color scheme. Also see plotly.
+ save: Whether to save plot.
+ show: Whether to display plot.
+ engine: Plotting engine.
+
+ Returns:
+ Figure object.
  """
  heatmap_data = plotting.heat_map_data_from_df(
  dataarray.to_dataframe(name), heatmap_timeframes, heatmap_timesteps_per_frame, 'ffill'
@@ -819,25 +991,24 @@

  def sanitize_dataset(
  ds: xr.Dataset,
- timesteps: Optional[pd.DatetimeIndex] = None,
- threshold: Optional[float] = 1e-5,
- negate: Optional[List[str]] = None,
+ timesteps: pd.DatetimeIndex | None = None,
+ threshold: float | None = 1e-5,
+ negate: list[str] | None = None,
  drop_small_vars: bool = True,
  zero_small_values: bool = False,
  ) -> xr.Dataset:
- """
- Sanitizes a dataset by handling small values (dropping or zeroing) and optionally reindexing the time axis.
+ """Clean dataset by handling small values and reindexing time.

  Args:
- ds: The dataset to sanitize.
- timesteps: The timesteps to reindex the dataset to. If None, the original timesteps are kept.
- threshold: The threshold for small values processing. If None, no processing is done.
- negate: The variables to negate. If None, no variables are negated.
- drop_small_vars: If True, drops variables where all values are below threshold.
- zero_small_values: If True, sets values below threshold to zero.
+ ds: Dataset to sanitize.
+ timesteps: Time index for reindexing (optional).
+ threshold: Threshold for small values processing.
+ negate: Variables to negate.
+ drop_small_vars: Whether to drop variables below threshold.
+ zero_small_values: Whether to zero values below threshold.

  Returns:
- xr.Dataset: The sanitized dataset.
+ xr.Dataset: Sanitized dataset.
  """
  # Create a copy to avoid modifying the original
  ds = ds.copy()
@@ -854,7 +1025,7 @@ def sanitize_dataset(

  # Option 1: Drop variables where all values are below threshold
  if drop_small_vars:
- vars_to_drop = [var for var in ds.data_vars if (ds_no_nan_abs[var] <= threshold).all()]
+ vars_to_drop = [var for var in ds.data_vars if (ds_no_nan_abs[var] <= threshold).all().item()]
  ds = ds.drop_vars(vars_to_drop)

  # Option 2: Set small values to zero
@@ -863,7 +1034,7 @@ def sanitize_dataset(
  # Create a boolean mask of values below threshold
  mask = ds_no_nan_abs[var] <= threshold
  # Only proceed if there are values to zero out
- if mask.any():
+ if bool(mask.any().item()):
  # Create a copy to ensure we don't modify data with views
  ds[var] = ds[var].copy()
  # Set values below threshold to zero
@@ -878,14 +1049,16 @@

  def filter_dataset(
  ds: xr.Dataset,
- variable_dims: Optional[Literal['scalar', 'time']] = None,
+ variable_dims: Literal['scalar', 'time'] | None = None,
  ) -> xr.Dataset:
- """
- Filters a dataset by its dimensions.
+ """Filter dataset by variable dimensions.

  Args:
- ds: The dataset to filter.
- variable_dims: The dimension of the variables to filter for.
+ ds: Dataset to filter.
+ variable_dims: Variable dimension to filter ('scalar' or 'time').
+
+ Returns:
+ xr.Dataset: Filtered dataset.
  """
  if variable_dims is None:
  return ds