flixopt 2.2.0b0__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the supported public registries. It is provided for informational purposes only.

Potentially problematic release: this version of flixopt has been flagged as potentially problematic (see the advisory for details).

Files changed (63)
  1. flixopt/__init__.py +35 -1
  2. flixopt/aggregation.py +60 -81
  3. flixopt/calculation.py +381 -196
  4. flixopt/components.py +1022 -359
  5. flixopt/config.py +553 -191
  6. flixopt/core.py +475 -1315
  7. flixopt/effects.py +477 -214
  8. flixopt/elements.py +591 -344
  9. flixopt/features.py +403 -957
  10. flixopt/flow_system.py +781 -293
  11. flixopt/interface.py +1159 -189
  12. flixopt/io.py +50 -55
  13. flixopt/linear_converters.py +384 -92
  14. flixopt/modeling.py +759 -0
  15. flixopt/network_app.py +789 -0
  16. flixopt/plotting.py +273 -135
  17. flixopt/results.py +639 -383
  18. flixopt/solvers.py +25 -21
  19. flixopt/structure.py +928 -442
  20. flixopt/utils.py +34 -5
  21. flixopt-3.0.0.dist-info/METADATA +209 -0
  22. flixopt-3.0.0.dist-info/RECORD +26 -0
  23. {flixopt-2.2.0b0.dist-info → flixopt-3.0.0.dist-info}/WHEEL +1 -1
  24. flixopt-3.0.0.dist-info/top_level.txt +1 -0
  25. docs/examples/00-Minimal Example.md +0 -5
  26. docs/examples/01-Basic Example.md +0 -5
  27. docs/examples/02-Complex Example.md +0 -10
  28. docs/examples/03-Calculation Modes.md +0 -5
  29. docs/examples/index.md +0 -5
  30. docs/faq/contribute.md +0 -49
  31. docs/faq/index.md +0 -3
  32. docs/images/architecture_flixOpt-pre2.0.0.png +0 -0
  33. docs/images/architecture_flixOpt.png +0 -0
  34. docs/images/flixopt-icon.svg +0 -1
  35. docs/javascripts/mathjax.js +0 -18
  36. docs/release-notes/_template.txt +0 -32
  37. docs/release-notes/index.md +0 -7
  38. docs/release-notes/v2.0.0.md +0 -93
  39. docs/release-notes/v2.0.1.md +0 -12
  40. docs/release-notes/v2.1.0.md +0 -31
  41. docs/release-notes/v2.2.0.md +0 -55
  42. docs/user-guide/Mathematical Notation/Bus.md +0 -33
  43. docs/user-guide/Mathematical Notation/Effects, Penalty & Objective.md +0 -132
  44. docs/user-guide/Mathematical Notation/Flow.md +0 -26
  45. docs/user-guide/Mathematical Notation/Investment.md +0 -115
  46. docs/user-guide/Mathematical Notation/LinearConverter.md +0 -21
  47. docs/user-guide/Mathematical Notation/Piecewise.md +0 -49
  48. docs/user-guide/Mathematical Notation/Storage.md +0 -44
  49. docs/user-guide/Mathematical Notation/index.md +0 -22
  50. docs/user-guide/Mathematical Notation/others.md +0 -3
  51. docs/user-guide/index.md +0 -124
  52. flixopt/config.yaml +0 -10
  53. flixopt-2.2.0b0.dist-info/METADATA +0 -146
  54. flixopt-2.2.0b0.dist-info/RECORD +0 -59
  55. flixopt-2.2.0b0.dist-info/top_level.txt +0 -5
  56. pics/architecture_flixOpt-pre2.0.0.png +0 -0
  57. pics/architecture_flixOpt.png +0 -0
  58. pics/flixOpt_plotting.jpg +0 -0
  59. pics/flixopt-icon.svg +0 -1
  60. pics/pics.pptx +0 -0
  61. scripts/gen_ref_pages.py +0 -54
  62. tests/ressources/Zeitreihen2020.csv +0 -35137
  63. {flixopt-2.2.0b0.dist-info → flixopt-3.0.0.dist-info}/licenses/LICENSE +0 -0
flixopt/flow_system.py CHANGED
@@ -2,143 +2,440 @@
2
2
  This module contains the FlowSystem class, which is used to collect instances of many other classes by the end User.
3
3
  """
4
4
 
5
+ from __future__ import annotations
6
+
5
7
  import json
6
8
  import logging
7
- import pathlib
8
9
  import warnings
9
- from io import StringIO
10
- from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Union
10
+ from typing import TYPE_CHECKING, Any, Literal, Optional
11
11
 
12
12
  import numpy as np
13
13
  import pandas as pd
14
14
  import xarray as xr
15
- from rich.console import Console
16
- from rich.pretty import Pretty
17
15
 
18
- from . import io as fx_io
19
- from .core import Scalar, ScenarioData, TimeSeries, TimeSeriesCollection, TimeSeriesData, TimestepData
16
+ from .core import (
17
+ ConversionError,
18
+ DataConverter,
19
+ FlowSystemDimensions,
20
+ PeriodicData,
21
+ PeriodicDataUser,
22
+ TemporalData,
23
+ TemporalDataUser,
24
+ TimeSeriesData,
25
+ )
20
26
  from .effects import (
21
27
  Effect,
22
28
  EffectCollection,
23
- EffectTimeSeries,
24
- EffectValuesDict,
25
- EffectValuesUserScenario,
26
- EffectValuesUserTimestep,
29
+ PeriodicEffects,
30
+ PeriodicEffectsUser,
31
+ TemporalEffects,
32
+ TemporalEffectsUser,
27
33
  )
28
34
  from .elements import Bus, Component, Flow
29
- from .structure import CLASS_REGISTRY, Element, SystemModel
35
+ from .structure import Element, FlowSystemModel, Interface
30
36
 
31
37
  if TYPE_CHECKING:
38
+ import pathlib
39
+ from collections.abc import Collection
40
+
32
41
  import pyvis
33
42
 
34
43
  logger = logging.getLogger('flixopt')
35
44
 
36
45
 
37
- class FlowSystem:
46
+ class FlowSystem(Interface):
38
47
  """
39
- A FlowSystem organizes the high level Elements (Components & Effects).
48
+ A FlowSystem organizes the high level Elements (Components, Buses & Effects).
49
+
50
+ This is the main container class that users work with to build and manage their System.
51
+
52
+ Args:
53
+ timesteps: The timesteps of the model.
54
+ periods: The periods of the model.
55
+ scenarios: The scenarios of the model.
56
+ hours_of_last_timestep: The duration of the last timestep. Uses the last time interval if not specified.
57
+ hours_of_previous_timesteps: The duration of previous timesteps.
58
+ If None, the first time increment of time_series is used.
59
+ This is needed to calculate previous durations (for example consecutive_on_hours).
60
+ If you use an array, take care that it is long enough to cover all previous values!
61
+ weights: The weights of each period and scenario. If None, all scenarios have the same weight (normalized to 1).
62
+ It is recommended to normalize the weights so that they sum to 1.
63
+ scenario_independent_sizes: Controls whether investment sizes are equalized across scenarios.
64
+ - True: All sizes are shared/equalized across scenarios
65
+ - False: All sizes are optimized separately per scenario
66
+ - list[str]: Only specified components (by label_full) are equalized across scenarios
67
+ scenario_independent_flow_rates: Controls whether flow rates are equalized across scenarios.
68
+ - True: All flow rates are shared/equalized across scenarios
69
+ - False: All flow rates are optimized separately per scenario
70
+ - list[str]: Only specified flows (by label_full) are equalized across scenarios
71
+
72
+ Notes:
73
+ - Creates an empty registry for components and buses, an empty EffectCollection, and a placeholder for a FlowSystemModel.
74
+ - The instance starts disconnected (self._connected_and_transformed == False) and will be
75
+ connected_and_transformed automatically when trying to solve a calculation.
40
76
  """
41
77
 
42
78
  def __init__(
43
79
  self,
44
80
  timesteps: pd.DatetimeIndex,
45
- scenarios: Optional[pd.Index] = None,
46
- hours_of_last_timestep: Optional[float] = None,
47
- hours_of_previous_timesteps: Optional[Union[int, float, np.ndarray]] = None,
48
- scenario_weights: Optional[ScenarioData] = None,
81
+ periods: pd.Index | None = None,
82
+ scenarios: pd.Index | None = None,
83
+ hours_of_last_timestep: float | None = None,
84
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
85
+ weights: PeriodicDataUser | None = None,
86
+ scenario_independent_sizes: bool | list[str] = True,
87
+ scenario_independent_flow_rates: bool | list[str] = False,
49
88
  ):
89
+ self.timesteps = self._validate_timesteps(timesteps)
90
+ self.timesteps_extra = self._create_timesteps_with_extra(self.timesteps, hours_of_last_timestep)
91
+ self.hours_of_previous_timesteps = self._calculate_hours_of_previous_timesteps(
92
+ self.timesteps, hours_of_previous_timesteps
93
+ )
94
+
95
+ self.periods = None if periods is None else self._validate_periods(periods)
96
+ self.scenarios = None if scenarios is None else self._validate_scenarios(scenarios)
97
+
98
+ self.weights = weights
99
+
100
+ hours_per_timestep = self.calculate_hours_per_timestep(self.timesteps_extra)
101
+
102
+ self.hours_of_last_timestep = hours_per_timestep[-1].item()
103
+
104
+ self.hours_per_timestep = self.fit_to_model_coords('hours_per_timestep', hours_per_timestep)
105
+
106
+ # Element collections
107
+ self.components: dict[str, Component] = {}
108
+ self.buses: dict[str, Bus] = {}
109
+ self.effects: EffectCollection = EffectCollection()
110
+ self.model: FlowSystemModel | None = None
111
+
112
+ self._connected_and_transformed = False
113
+ self._used_in_calculation = False
114
+
115
+ self._network_app = None
116
+
117
+ # Use properties to validate and store scenario dimension settings
118
+ self.scenario_independent_sizes = scenario_independent_sizes
119
+ self.scenario_independent_flow_rates = scenario_independent_flow_rates
120
+
121
+ @staticmethod
122
+ def _validate_timesteps(timesteps: pd.DatetimeIndex) -> pd.DatetimeIndex:
123
+ """Validate timesteps format and rename if needed."""
124
+ if not isinstance(timesteps, pd.DatetimeIndex):
125
+ raise TypeError('timesteps must be a pandas DatetimeIndex')
126
+ if len(timesteps) < 2:
127
+ raise ValueError('timesteps must contain at least 2 timestamps')
128
+ if timesteps.name != 'time':
129
+ timesteps.name = 'time'
130
+ if not timesteps.is_monotonic_increasing:
131
+ raise ValueError('timesteps must be sorted')
132
+ return timesteps
133
+
134
+ @staticmethod
135
+ def _validate_scenarios(scenarios: pd.Index) -> pd.Index:
50
136
  """
137
+ Validate and prepare scenario index.
138
+
51
139
  Args:
52
- timesteps: The timesteps of the model.
53
- scenarios: The scenarios of the model.
54
- hours_of_last_timestep: The duration of the last time step. Uses the last time interval if not specified
55
- hours_of_previous_timesteps: The duration of previous timesteps.
56
- If None, the first time increment of time_series is used.
57
- This is needed to calculate previous durations (for example consecutive_on_hours).
58
- If you use an array, take care that its long enough to cover all previous values!
59
- scenario_weights: The weights of the scenarios. If None, all scenarios have the same weight. All weights are normalized to 1.
60
- """
61
- self.time_series_collection = TimeSeriesCollection(
62
- timesteps=timesteps,
63
- scenarios=scenarios,
64
- hours_of_last_timestep=hours_of_last_timestep,
65
- hours_of_previous_timesteps=hours_of_previous_timesteps,
66
- )
67
- self.scenario_weights = self.create_time_series(
68
- 'scenario_weights', scenario_weights, has_time_dim=False, has_scenario_dim=True
140
+ scenarios: The scenario index to validate
141
+ """
142
+ if not isinstance(scenarios, pd.Index) or len(scenarios) == 0:
143
+ raise ConversionError('Scenarios must be a non-empty Index')
144
+
145
+ if scenarios.name != 'scenario':
146
+ scenarios = scenarios.rename('scenario')
147
+
148
+ return scenarios
149
+
150
+ @staticmethod
151
+ def _validate_periods(periods: pd.Index) -> pd.Index:
152
+ """
153
+ Validate and prepare period index.
154
+
155
+ Args:
156
+ periods: The period index to validate
157
+ """
158
+ if not isinstance(periods, pd.Index) or len(periods) == 0:
159
+ raise ConversionError(f'Periods must be a non-empty Index. Got {periods}')
160
+
161
+ if not (
162
+ periods.dtype.kind == 'i' # integer dtype
163
+ and periods.is_monotonic_increasing # rising
164
+ and periods.is_unique
165
+ ):
166
+ raise ConversionError(f'Periods must be a monotonically increasing and unique Index. Got {periods}')
167
+
168
+ if periods.name != 'period':
169
+ periods = periods.rename('period')
170
+
171
+ return periods
172
+
173
+ @staticmethod
174
+ def _create_timesteps_with_extra(
175
+ timesteps: pd.DatetimeIndex, hours_of_last_timestep: float | None
176
+ ) -> pd.DatetimeIndex:
177
+ """Create timesteps with an extra step at the end."""
178
+ if hours_of_last_timestep is None:
179
+ hours_of_last_timestep = (timesteps[-1] - timesteps[-2]) / pd.Timedelta(hours=1)
180
+
181
+ last_date = pd.DatetimeIndex([timesteps[-1] + pd.Timedelta(hours=hours_of_last_timestep)], name='time')
182
+ return pd.DatetimeIndex(timesteps.append(last_date), name='time')
183
+
184
+ @staticmethod
185
+ def calculate_hours_per_timestep(timesteps_extra: pd.DatetimeIndex) -> xr.DataArray:
186
+ """Calculate duration of each timestep as a 1D DataArray."""
187
+ hours_per_step = np.diff(timesteps_extra) / pd.Timedelta(hours=1)
188
+ return xr.DataArray(
189
+ hours_per_step, coords={'time': timesteps_extra[:-1]}, dims='time', name='hours_per_timestep'
69
190
  )
70
191
 
71
- # defaults:
72
- self.components: Dict[str, Component] = {}
73
- self.buses: Dict[str, Bus] = {}
74
- self.effects: EffectCollection = EffectCollection()
75
- self.model: Optional[SystemModel] = None
192
+ @staticmethod
193
+ def _calculate_hours_of_previous_timesteps(
194
+ timesteps: pd.DatetimeIndex, hours_of_previous_timesteps: float | np.ndarray | None
195
+ ) -> float | np.ndarray:
196
+ """Calculate duration of regular timesteps."""
197
+ if hours_of_previous_timesteps is not None:
198
+ return hours_of_previous_timesteps
199
+ # Calculate from the first interval
200
+ first_interval = timesteps[1] - timesteps[0]
201
+ return first_interval.total_seconds() / 3600 # Convert to hours
202
+
203
+ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
204
+ """
205
+ Override Interface method to handle FlowSystem-specific serialization.
206
+ Combines custom FlowSystem logic with Interface pattern for nested objects.
207
+
208
+ Returns:
209
+ Tuple of (reference_structure, extracted_arrays_dict)
210
+ """
211
+ # Start with Interface base functionality for constructor parameters
212
+ reference_structure, all_extracted_arrays = super()._create_reference_structure()
213
+
214
+ # Remove timesteps, as it's directly stored in dataset index
215
+ reference_structure.pop('timesteps', None)
216
+
217
+ # Extract from components
218
+ components_structure = {}
219
+ for comp_label, component in self.components.items():
220
+ comp_structure, comp_arrays = component._create_reference_structure()
221
+ all_extracted_arrays.update(comp_arrays)
222
+ components_structure[comp_label] = comp_structure
223
+ reference_structure['components'] = components_structure
224
+
225
+ # Extract from buses
226
+ buses_structure = {}
227
+ for bus_label, bus in self.buses.items():
228
+ bus_structure, bus_arrays = bus._create_reference_structure()
229
+ all_extracted_arrays.update(bus_arrays)
230
+ buses_structure[bus_label] = bus_structure
231
+ reference_structure['buses'] = buses_structure
232
+
233
+ # Extract from effects
234
+ effects_structure = {}
235
+ for effect in self.effects:
236
+ effect_structure, effect_arrays = effect._create_reference_structure()
237
+ all_extracted_arrays.update(effect_arrays)
238
+ effects_structure[effect.label] = effect_structure
239
+ reference_structure['effects'] = effects_structure
240
+
241
+ return reference_structure, all_extracted_arrays
242
+
243
+ def to_dataset(self) -> xr.Dataset:
244
+ """
245
+ Convert the FlowSystem to an xarray Dataset.
246
+ Ensures FlowSystem is connected before serialization.
247
+
248
+ Returns:
249
+ xr.Dataset: Dataset containing all DataArrays with structure in attributes
250
+ """
251
+ if not self.connected_and_transformed:
252
+ logger.warning('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
253
+ self.connect_and_transform()
76
254
 
77
- self._connected = False
255
+ return super().to_dataset()
78
256
 
79
257
  @classmethod
80
- def from_dataset(cls, ds: xr.Dataset):
81
- timesteps_extra = pd.DatetimeIndex(ds.attrs['timesteps_extra'], name='time')
82
- hours_of_last_timestep = TimeSeriesCollection.calculate_hours_per_timestep(timesteps_extra).isel(time=-1).item()
83
-
84
- scenarios = pd.Index(ds.attrs['scenarios'], name='scenario') if ds.attrs.get('scenarios') is not None else None
85
- scenario_weights = fx_io.insert_dataarray(ds.attrs['scenario_weights'], ds)
86
-
87
- flow_system = FlowSystem(
88
- timesteps=timesteps_extra[:-1],
89
- hours_of_last_timestep=hours_of_last_timestep,
90
- hours_of_previous_timesteps=ds.attrs['hours_of_previous_timesteps'],
91
- scenarios=scenarios,
92
- scenario_weights=scenario_weights,
93
- )
258
+ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
259
+ """
260
+ Create a FlowSystem from an xarray Dataset.
261
+ Handles FlowSystem-specific reconstruction logic.
262
+
263
+ Args:
264
+ ds: Dataset containing the FlowSystem data
94
265
 
95
- structure = fx_io.insert_dataarray({key: ds.attrs[key] for key in ['components', 'buses', 'effects']}, ds)
96
- flow_system.add_elements(
97
- *[Bus.from_dict(bus) for bus in structure['buses'].values()]
98
- + [Effect.from_dict(effect) for effect in structure['effects'].values()]
99
- + [CLASS_REGISTRY[comp['__class__']].from_dict(comp) for comp in structure['components'].values()]
266
+ Returns:
267
+ FlowSystem instance
268
+ """
269
+ # Get the reference structure from attrs
270
+ reference_structure = dict(ds.attrs)
271
+
272
+ # Create arrays dictionary from dataset variables
273
+ arrays_dict = {name: array for name, array in ds.data_vars.items()}
274
+
275
+ # Create FlowSystem instance with constructor parameters
276
+ flow_system = cls(
277
+ timesteps=ds.indexes['time'],
278
+ periods=ds.indexes.get('period'),
279
+ scenarios=ds.indexes.get('scenario'),
280
+ weights=cls._resolve_dataarray_reference(reference_structure['weights'], arrays_dict)
281
+ if 'weights' in reference_structure
282
+ else None,
283
+ hours_of_last_timestep=reference_structure.get('hours_of_last_timestep'),
284
+ hours_of_previous_timesteps=reference_structure.get('hours_of_previous_timesteps'),
285
+ scenario_independent_sizes=reference_structure.get('scenario_independent_sizes', True),
286
+ scenario_independent_flow_rates=reference_structure.get('scenario_independent_flow_rates', False),
100
287
  )
288
+
289
+ # Restore components
290
+ components_structure = reference_structure.get('components', {})
291
+ for comp_label, comp_data in components_structure.items():
292
+ component = cls._resolve_reference_structure(comp_data, arrays_dict)
293
+ if not isinstance(component, Component):
294
+ logger.critical(f'Restoring component {comp_label} failed.')
295
+ flow_system._add_components(component)
296
+
297
+ # Restore buses
298
+ buses_structure = reference_structure.get('buses', {})
299
+ for bus_label, bus_data in buses_structure.items():
300
+ bus = cls._resolve_reference_structure(bus_data, arrays_dict)
301
+ if not isinstance(bus, Bus):
302
+ logger.critical(f'Restoring bus {bus_label} failed.')
303
+ flow_system._add_buses(bus)
304
+
305
+ # Restore effects
306
+ effects_structure = reference_structure.get('effects', {})
307
+ for effect_label, effect_data in effects_structure.items():
308
+ effect = cls._resolve_reference_structure(effect_data, arrays_dict)
309
+ if not isinstance(effect, Effect):
310
+ logger.critical(f'Restoring effect {effect_label} failed.')
311
+ flow_system._add_effects(effect)
312
+
101
313
  return flow_system
102
314
 
103
- @classmethod
104
- def from_dict(cls, data: Dict) -> 'FlowSystem':
315
+ def to_netcdf(self, path: str | pathlib.Path, compression: int = 0):
105
316
  """
106
- Load a FlowSystem from a dictionary.
317
+ Save the FlowSystem to a NetCDF file.
318
+ Ensures FlowSystem is connected before saving.
107
319
 
108
320
  Args:
109
- data: Dictionary containing the FlowSystem data.
110
- """
111
- timesteps_extra = pd.DatetimeIndex(data['timesteps_extra'], name='time')
112
- hours_of_last_timestep = TimeSeriesCollection.calculate_hours_per_timestep(timesteps_extra).isel(time=-1).item()
113
- scenarios = pd.Index(data['scenarios'], name='scenario') if data.get('scenarios') is not None else None
114
- scenario_weights = data.get('scenario_weights').selected_data if data.get('scenario_weights') is not None else None
115
-
116
- flow_system = FlowSystem(
117
- timesteps=timesteps_extra[:-1],
118
- hours_of_last_timestep=hours_of_last_timestep,
119
- hours_of_previous_timesteps=data['hours_of_previous_timesteps'],
120
- scenarios=scenarios,
121
- scenario_weights=scenario_weights,
122
- )
321
+ path: The path to the netCDF file.
322
+ compression: The compression level to use when saving the file.
323
+ """
324
+ if not self.connected_and_transformed:
325
+ logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
326
+ self.connect_and_transform()
123
327
 
124
- flow_system.add_elements(*[Bus.from_dict(bus) for bus in data['buses'].values()])
328
+ super().to_netcdf(path, compression)
329
+ logger.info(f'Saved FlowSystem to {path}')
125
330
 
126
- flow_system.add_elements(*[Effect.from_dict(effect) for effect in data['effects'].values()])
331
+ def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
332
+ """
333
+ Get FlowSystem structure.
334
+ Ensures FlowSystem is connected before getting structure.
127
335
 
128
- flow_system.add_elements(
129
- *[CLASS_REGISTRY[comp['__class__']].from_dict(comp) for comp in data['components'].values()]
130
- )
336
+ Args:
337
+ clean: If True, remove None and empty dicts and lists.
338
+ stats: If True, replace DataArray references with statistics
339
+ """
340
+ if not self.connected_and_transformed:
341
+ logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
342
+ self.connect_and_transform()
131
343
 
132
- flow_system.transform_data()
344
+ return super().get_structure(clean, stats)
133
345
 
134
- return flow_system
346
+ def to_json(self, path: str | pathlib.Path):
347
+ """
348
+ Save the flow system to a JSON file.
349
+ Ensures FlowSystem is connected before saving.
135
350
 
136
- @classmethod
137
- def from_netcdf(cls, path: Union[str, pathlib.Path]):
351
+ Args:
352
+ path: The path to the JSON file.
138
353
  """
139
- Load a FlowSystem from a netcdf file
354
+ if not self.connected_and_transformed:
355
+ logger.warning(
356
+ 'FlowSystem needs to be connected and transformed before saving to JSON. Calling connect_and_transform() now.'
357
+ )
358
+ self.connect_and_transform()
359
+
360
+ super().to_json(path)
361
+
362
+ def fit_to_model_coords(
363
+ self,
364
+ name: str,
365
+ data: TemporalDataUser | PeriodicDataUser | None,
366
+ dims: Collection[FlowSystemDimensions] | None = None,
367
+ ) -> TemporalData | PeriodicData | None:
140
368
  """
141
- return cls.from_dataset(fx_io.load_dataset_from_netcdf(path))
369
+ Fit data to model coordinate system (currently time, but extensible).
370
+
371
+ Args:
372
+ name: Name of the data
373
+ data: Data to fit to model coordinates
374
+ dims: Collection of dimension names to use for fitting. If None, all dimensions are used.
375
+
376
+ Returns:
377
+ xr.DataArray aligned to model coordinate system. If data is None, returns None.
378
+ """
379
+ if data is None:
380
+ return None
381
+
382
+ coords = self.coords
383
+
384
+ if dims is not None:
385
+ coords = {k: coords[k] for k in dims if k in coords}
386
+
387
+ # Convert the data to a DataArray aligned with the selected coords
388
+ if isinstance(data, TimeSeriesData):
389
+ try:
390
+ data.name = name # Set name of previous object!
391
+ return data.fit_to_coords(coords)
392
+ except ConversionError as e:
393
+ raise ConversionError(
394
+ f'Could not convert time series data "{name}" to DataArray:\n{data}\nOriginal Error: {e}'
395
+ ) from e
396
+
397
+ try:
398
+ return DataConverter.to_dataarray(data, coords=coords).rename(name)
399
+ except ConversionError as e:
400
+ raise ConversionError(f'Could not convert data "{name}" to DataArray:\n{data}\nOriginal Error: {e}') from e
401
+
402
+ def fit_effects_to_model_coords(
403
+ self,
404
+ label_prefix: str | None,
405
+ effect_values: TemporalEffectsUser | PeriodicEffectsUser | None,
406
+ label_suffix: str | None = None,
407
+ dims: Collection[FlowSystemDimensions] | None = None,
408
+ delimiter: str = '|',
409
+ ) -> TemporalEffects | PeriodicEffects | None:
410
+ """
411
+ Transform EffectValues from the user to Internal Datatypes aligned with model coordinates.
412
+ """
413
+ if effect_values is None:
414
+ return None
415
+
416
+ effect_values_dict = self.effects.create_effect_values_dict(effect_values)
417
+
418
+ return {
419
+ effect: self.fit_to_model_coords(
420
+ str(delimiter).join(filter(None, [label_prefix, effect, label_suffix])),
421
+ value,
422
+ dims=dims,
423
+ )
424
+ for effect, value in effect_values_dict.items()
425
+ }
426
+
427
+ def connect_and_transform(self):
428
+ """Transform data for all elements using the new simplified approach."""
429
+ if self.connected_and_transformed:
430
+ logger.debug('FlowSystem already connected and transformed')
431
+ return
432
+
433
+ self.weights = self.fit_to_model_coords('weights', self.weights, dims=['period', 'scenario'])
434
+
435
+ self._connect_network()
436
+ for element in list(self.components.values()) + list(self.effects.effects.values()) + list(self.buses.values()):
437
+ element.transform_data(self)
438
+ self._connected_and_transformed = True
142
439
 
143
440
  def add_elements(self, *elements: Element) -> None:
144
441
  """
@@ -148,12 +445,12 @@ class FlowSystem:
148
445
  *elements: children of Element like Boiler, HeatPump, Bus,...
149
446
  modeling Elements
150
447
  """
151
- if self._connected:
448
+ if self.connected_and_transformed:
152
449
  warnings.warn(
153
450
  'You are adding elements to an already connected FlowSystem. This is not recommended (But it works).',
154
451
  stacklevel=2,
155
452
  )
156
- self._connected = False
453
+ self._connected_and_transformed = False
157
454
  for new_element in list(elements):
158
455
  if isinstance(new_element, Component):
159
456
  self._add_components(new_element)
@@ -166,77 +463,29 @@ class FlowSystem:
166
463
  f'Tried to add incompatible object to FlowSystem: {type(new_element)=}: {new_element=} '
167
464
  )
168
465
 
169
- def to_json(self, path: Union[str, pathlib.Path]):
170
- """
171
- Saves the flow system to a json file.
172
- This not meant to be reloaded and recreate the object,
173
- but rather used to document or compare the flow_system to others.
174
-
175
- Args:
176
- path: The path to the json file.
177
- """
178
- with open(path, 'w', encoding='utf-8') as f:
179
- json.dump(self.as_dict('stats'), f, indent=4, ensure_ascii=False)
180
-
181
- def as_dict(self, data_mode: Literal['data', 'name', 'stats'] = 'data') -> Dict:
182
- """Convert the object to a dictionary representation."""
183
- data = {
184
- 'components': {
185
- comp.label: comp.to_dict()
186
- for comp in sorted(self.components.values(), key=lambda component: component.label.upper())
187
- },
188
- 'buses': {
189
- bus.label: bus.to_dict() for bus in sorted(self.buses.values(), key=lambda bus: bus.label.upper())
190
- },
191
- 'effects': {
192
- effect.label: effect.to_dict()
193
- for effect in sorted(self.effects, key=lambda effect: effect.label.upper())
194
- },
195
- 'timesteps_extra': [date.isoformat() for date in self.time_series_collection.timesteps_extra],
196
- 'hours_of_previous_timesteps': self.time_series_collection.hours_of_previous_timesteps,
197
- 'scenarios': self.time_series_collection.scenarios.tolist() if self.time_series_collection.scenarios is not None else None,
198
- 'scenario_weights': self.scenario_weights,
199
- }
200
- if data_mode == 'data':
201
- return fx_io.replace_timeseries(data, 'data')
202
- elif data_mode == 'stats':
203
- return fx_io.remove_none_and_empty(fx_io.replace_timeseries(data, data_mode))
204
- return fx_io.replace_timeseries(data, data_mode)
205
-
206
- def as_dataset(self, constants_in_dataset: bool = False) -> xr.Dataset:
466
+ def create_model(self, normalize_weights: bool = True) -> FlowSystemModel:
207
467
  """
208
- Convert the FlowSystem to a xarray Dataset.
468
+ Create a linopy model from the FlowSystem.
209
469
 
210
470
  Args:
211
- constants_in_dataset: If True, constants are included as Dataset variables.
471
+ normalize_weights: Whether to automatically normalize the weights (periods and scenarios) to sum up to 1 when solving.
212
472
  """
213
- ds = self.time_series_collection.as_dataset()
214
- ds.attrs = self.as_dict(data_mode='name')
215
- return ds
216
-
217
- def to_netcdf(self, path: Union[str, pathlib.Path], compression: int = 0, constants_in_dataset: bool = True):
218
- """
219
- Saves the FlowSystem to a netCDF file.
220
- Args:
221
- path: The path to the netCDF file.
222
- compression: The compression level to use when saving the file.
223
- constants_in_dataset: If True, constants are included as Dataset variables.
224
- """
225
- ds = self.as_dataset(constants_in_dataset=constants_in_dataset)
226
- fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
227
- logger.info(f'Saved FlowSystem to {path}')
473
+ if not self.connected_and_transformed:
474
+ raise RuntimeError(
475
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
476
+ )
477
+ self.model = FlowSystemModel(self, normalize_weights)
478
+ return self.model
228
479
 
229
480
  def plot_network(
230
481
  self,
231
- path: Union[bool, str, pathlib.Path] = 'flow_system.html',
232
- controls: Union[
233
- bool,
234
- List[
235
- Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
236
- ],
482
+ path: bool | str | pathlib.Path = 'flow_system.html',
483
+ controls: bool
484
+ | list[
485
+ Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
237
486
  ] = True,
238
487
  show: bool = False,
239
- ) -> Optional['pyvis.network.Network']:
488
+ ) -> pyvis.network.Network | None:
240
489
  """
241
490
  Visualizes the network structure of a FlowSystem using PyVis, saving it as an interactive HTML file.
242
491
 
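
This hunk drops the old `as_dict`/`as_dataset` exporters and changes `create_model` to require a connected system and to accept `normalize_weights`. A hedged usage sketch, assuming `fs` is a FlowSystem that already contains components, buses and effects:

```python
fs.connect_and_transform()                       # idempotent; replaces the old transform_data()
model = fs.create_model(normalize_weights=True)  # raises RuntimeError if not connected_and_transformed
# `model` is a FlowSystemModel (linopy-based); solving it is handled by the calculation
# classes elsewhere in the package and is not shown here.
```
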
@@ -251,7 +500,7 @@ class FlowSystem:
251
500
  show: Whether to open the visualization in the web browser.
252
501
 
253
502
  Returns:
254
- - Optional[pyvis.network.Network]: The `Network` instance representing the visualization, or `None` if `pyvis` is not installed.
503
+ - 'pyvis.network.Network' | None: The `Network` instance representing the visualization, or `None` if `pyvis` is not installed.
255
504
 
256
505
  Examples:
257
506
  >>> flow_system.plot_network()
@@ -267,9 +516,63 @@ class FlowSystem:
267
516
  node_infos, edge_infos = self.network_infos()
268
517
  return plotting.plot_network(node_infos, edge_infos, path, controls, show)
269
518
 
270
- def network_infos(self) -> Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, str]]]:
271
- if not self._connected:
519
+ def start_network_app(self):
520
+ """Visualizes the network structure of a FlowSystem using Dash, Cytoscape, and networkx.
521
+ Requires optional dependencies: dash, dash-cytoscape, dash-daq, networkx, flask, werkzeug.
522
+ """
523
+ from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR, flow_graph, shownetwork
524
+
525
+ warnings.warn(
526
+ 'The network visualization is still experimental and might change in the future.',
527
+ stacklevel=2,
528
+ category=UserWarning,
529
+ )
530
+
531
+ if not DASH_CYTOSCAPE_AVAILABLE:
532
+ raise ImportError(
533
+ f'Network visualization requires optional dependencies. '
534
+ f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
535
+ f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
536
+ f'Original error: {VISUALIZATION_ERROR}'
537
+ )
538
+
539
+ if not self._connected_and_transformed:
272
540
  self._connect_network()
541
+
542
+ if self._network_app is not None:
543
+ logger.warning('The network app is already running. Restarting it.')
544
+ self.stop_network_app()
545
+
546
+ self._network_app = shownetwork(flow_graph(self))
547
+
548
+ def stop_network_app(self):
549
+ """Stop the network visualization server."""
550
+ from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR
551
+
552
+ if not DASH_CYTOSCAPE_AVAILABLE:
553
+ raise ImportError(
554
+ f'Network visualization requires optional dependencies. '
555
+ f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
556
+ f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
557
+ f'Original error: {VISUALIZATION_ERROR}'
558
+ )
559
+
560
+ if self._network_app is None:
561
+ logger.warning("No network app is currently running. Can't stop it")
562
+ return
563
+
564
+ try:
565
+ logger.info('Stopping network visualization server...')
566
+ self._network_app.server_instance.shutdown()
567
+ logger.info('Network visualization stopped.')
568
+ except Exception as e:
569
+ logger.error(f'Failed to stop the network visualization app: {e}')
570
+ finally:
571
+ self._network_app = None
572
+
573
+ def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
574
+ if not self.connected_and_transformed:
575
+ self.connect_and_transform()
273
576
  nodes = {
274
577
  node.label_full: {
275
578
  'label': node.label,
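
For the experimental Dash/Cytoscape viewer added in this hunk, a short sketch; the extras names are taken from the error message above and `fs` is again a populated FlowSystem:

```python
# Requires the optional dependencies: pip install "flixopt[network_viz]" (or flixopt[full])
fs.start_network_app()   # connects the network if needed and serves the interactive app
# ... inspect the network in the browser, then shut the server down again:
fs.stop_network_app()    # logs a warning if no app is running
```
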
@@ -291,124 +594,6 @@ class FlowSystem:
291
594
 
292
595
  return nodes, edges
293
596
 
294
- def transform_data(self):
295
- if not self._connected:
296
- self._connect_network()
297
- self.scenario_weights = self.create_time_series(
298
- 'scenario_weights', self.scenario_weights, has_time_dim=False, has_scenario_dim=True
299
- )
300
- for element in self.all_elements.values():
301
- element.transform_data(self)
302
-
303
- def create_time_series(
304
- self,
305
- name: str,
306
- data: Optional[Union[TimestepData, TimeSeriesData, TimeSeries]],
307
- has_time_dim: bool = True,
308
- has_scenario_dim: bool = True,
309
- has_extra_timestep: bool = False,
310
- ) -> Optional[Union[Scalar, TimeSeries]]:
311
- """
312
- Tries to create a TimeSeries from TimestepData and adds it to the time_series_collection
313
- If the data already is a TimeSeries, nothing happens and the TimeSeries gets reset and returned
314
- If the data is a TimeSeriesData, it is converted to a TimeSeries, and the aggregation weights are applied.
315
- If the data is None, nothing happens.
316
-
317
- Args:
318
- name: The name of the TimeSeries
319
- data: The data to create a TimeSeries from
320
- has_time_dim: Whether the data has a time dimension
321
- has_scenario_dim: Whether the data has a scenario dimension
322
- has_extra_timestep: Whether the data has an extra timestep
323
- """
324
- if not has_time_dim and not has_scenario_dim:
325
- raise ValueError('At least one of the dimensions must be present')
326
-
327
- if data is None:
328
- return None
329
-
330
- if not has_time_dim and self.time_series_collection.scenarios is None:
331
- return data
332
-
333
- if isinstance(data, TimeSeries):
334
- data.restore_data()
335
- if data in self.time_series_collection:
336
- return data
337
- return self.time_series_collection.add_time_series(
338
- data=data.selected_data,
339
- name=name,
340
- has_time_dim=has_time_dim,
341
- has_scenario_dim=has_scenario_dim,
342
- has_extra_timestep=has_extra_timestep,
343
- )
344
- elif isinstance(data, TimeSeriesData):
345
- data.label = name
346
- return self.time_series_collection.add_time_series(
347
- data=data.data,
348
- name=name,
349
- has_time_dim=has_time_dim,
350
- has_scenario_dim=has_scenario_dim,
351
- has_extra_timestep=has_extra_timestep,
352
- aggregation_weight=data.agg_weight,
353
- aggregation_group=data.agg_group,
354
- )
355
- return self.time_series_collection.add_time_series(
356
- data=data,
357
- name=name,
358
- has_time_dim=has_time_dim,
359
- has_scenario_dim=has_scenario_dim,
360
- has_extra_timestep=has_extra_timestep,
361
- )
362
-
363
- def create_effect_time_series(
364
- self,
365
- label_prefix: Optional[str],
366
- effect_values: Union[EffectValuesUserScenario, EffectValuesUserTimestep],
367
- label_suffix: Optional[str] = None,
368
- has_time_dim: bool = True,
369
- has_scenario_dim: bool = True,
370
- ) -> Optional[Union[EffectTimeSeries, EffectValuesDict]]:
371
- """
372
- Transform EffectValues to EffectTimeSeries.
373
- Creates a TimeSeries for each key in the nested_values dictionary, using the value as the data.
374
-
375
- The resulting label of the TimeSeries is the label of the parent_element,
376
- followed by the label of the Effect in the nested_values and the label_suffix.
377
- If the key in the EffectValues is None, the alias 'Standard_Effect' is used
378
-
379
- Args:
380
- label_prefix: Prefix for the TimeSeries name
381
- effect_values: Dictionary of EffectValues
382
- label_suffix: Suffix for the TimeSeries name
383
- has_time_dim: Whether the data has a time dimension
384
- has_scenario_dim: Whether the data has a scenario dimension
385
- """
386
- if not has_time_dim and not has_scenario_dim:
387
- raise ValueError('At least one of the dimensions must be present')
388
-
389
- effect_values: Optional[EffectValuesDict] = self.effects.create_effect_values_dict(effect_values)
390
- if effect_values is None:
391
- return None
392
-
393
- if not has_time_dim and self.time_series_collection.scenarios is None:
394
- return effect_values
395
-
396
- return {
397
- effect: self.create_time_series(
398
- name='|'.join(filter(None, [label_prefix, effect, label_suffix])),
399
- data=value,
400
- has_time_dim=has_time_dim,
401
- has_scenario_dim=has_scenario_dim,
402
- )
403
- for effect, value in effect_values.items()
404
- }
405
-
406
- def create_model(self) -> SystemModel:
407
- if not self._connected:
408
- raise RuntimeError('FlowSystem is not connected. Call FlowSystem.connect() first.')
409
- self.model = SystemModel(self)
410
- return self.model
411
-
412
597
  def _check_if_element_is_unique(self, element: Element) -> None:
413
598
  """
414
599
  checks if element or label of element already exists in list
@@ -417,25 +602,25 @@ class FlowSystem:
417
602
  element: new element to check
418
603
  """
419
604
  if element in self.all_elements.values():
420
- raise ValueError(f'Element {element.label} already added to FlowSystem!')
605
+ raise ValueError(f'Element {element.label_full} already added to FlowSystem!')
421
606
  # check if name is already used:
422
607
  if element.label_full in self.all_elements:
423
- raise ValueError(f'Label of Element {element.label} already used in another element!')
608
+ raise ValueError(f'Label of Element {element.label_full} already used in another element!')
424
609
 
425
610
  def _add_effects(self, *args: Effect) -> None:
426
611
  self.effects.add_effects(*args)
427
612
 
428
613
  def _add_components(self, *components: Component) -> None:
429
614
  for new_component in list(components):
430
- logger.info(f'Registered new Component: {new_component.label}')
615
+ logger.info(f'Registered new Component: {new_component.label_full}')
431
616
  self._check_if_element_is_unique(new_component) # check if already exists:
432
- self.components[new_component.label] = new_component # Add to existing components
617
+ self.components[new_component.label_full] = new_component # Add to existing components
433
618
 
434
619
  def _add_buses(self, *buses: Bus):
435
620
  for new_bus in list(buses):
436
- logger.info(f'Registered new Bus: {new_bus.label}')
621
+ logger.info(f'Registered new Bus: {new_bus.label_full}')
437
622
  self._check_if_element_is_unique(new_bus) # check if already exists:
438
- self.buses[new_bus.label] = new_bus # Add to existing components
623
+ self.buses[new_bus.label_full] = new_bus # Add to existing components
439
624
 
440
625
  def _connect_network(self):
441
626
  """Connects the network of components and buses. Can be rerun without changes if no elements were added"""
@@ -446,14 +631,14 @@ class FlowSystem:
446
631
 
447
632
  # Add Bus if not already added (deprecated)
448
633
  if flow._bus_object is not None and flow._bus_object not in self.buses.values():
449
- self._add_buses(flow._bus_object)
450
634
  warnings.warn(
451
- f'The Bus {flow._bus_object.label} was added to the FlowSystem from {flow.label_full}.'
635
+ f'The Bus {flow._bus_object.label_full} was added to the FlowSystem from {flow.label_full}. '
452
636
  f'This is deprecated and will be removed in the future. '
453
637
  f'Please pass the Bus.label to the Flow and the Bus to the FlowSystem instead.',
454
- UserWarning,
638
+ DeprecationWarning,
455
639
  stacklevel=1,
456
640
  )
641
+ self._add_buses(flow._bus_object)
457
642
 
458
643
  # Connect Buses
459
644
  bus = self.buses.get(flow.bus)
@@ -470,23 +655,326 @@ class FlowSystem:
470
655
  f'Connected {len(self.buses)} Buses and {len(self.components)} '
471
656
  f'via {len(self.flows)} Flows inside the FlowSystem.'
472
657
  )
473
- self._connected = True
474
658
 
475
- def __repr__(self):
476
- return f'<{self.__class__.__name__} with {len(self.components)} components and {len(self.effects)} effects>'
659
+ def __repr__(self) -> str:
660
+ """Compact representation for debugging."""
661
+ status = '✓' if self.connected_and_transformed else '⚠'
662
+
663
+ # Build dimension info
664
+ dims = f'{len(self.timesteps)} timesteps [{self.timesteps[0].strftime("%Y-%m-%d")} to {self.timesteps[-1].strftime("%Y-%m-%d")}]'
665
+ if self.periods is not None:
666
+ dims += f', {len(self.periods)} periods'
667
+ if self.scenarios is not None:
668
+ dims += f', {len(self.scenarios)} scenarios'
669
+
670
+ return f'FlowSystem({dims}, {len(self.components)} Components, {len(self.buses)} Buses, {len(self.effects)} Effects, {status})'
671
+
672
+ def __str__(self) -> str:
673
+ """Structured summary for users."""
674
+
675
+ def format_elements(element_names: list, label: str, alignment: int = 12):
676
+ name_list = ', '.join(element_names[:3])
677
+ if len(element_names) > 3:
678
+ name_list += f' ... (+{len(element_names) - 3} more)'
679
+
680
+ suffix = f' ({name_list})' if element_names else ''
681
+ padding = alignment - len(label) - 1 # -1 for the colon
682
+ return f'{label}:{"":<{padding}} {len(element_names)}{suffix}'
683
+
684
+ time_period = f'Time period: {self.timesteps[0].date()} to {self.timesteps[-1].date()}'
685
+ freq_str = str(self.timesteps.freq).replace('<', '').replace('>', '') if self.timesteps.freq else 'irregular'
686
+
687
+ lines = [
688
+ f'Timesteps: {len(self.timesteps)} ({freq_str}) [{time_period}]',
689
+ ]
690
+
691
+ # Add periods if present
692
+ if self.periods is not None:
693
+ period_names = ', '.join(str(p) for p in self.periods[:3])
694
+ if len(self.periods) > 3:
695
+ period_names += f' ... (+{len(self.periods) - 3} more)'
696
+ lines.append(f'Periods: {len(self.periods)} ({period_names})')
697
+
698
+ # Add scenarios if present
699
+ if self.scenarios is not None:
700
+ scenario_names = ', '.join(str(s) for s in self.scenarios[:3])
701
+ if len(self.scenarios) > 3:
702
+ scenario_names += f' ... (+{len(self.scenarios) - 3} more)'
703
+ lines.append(f'Scenarios: {len(self.scenarios)} ({scenario_names})')
704
+
705
+ lines.extend(
706
+ [
707
+ format_elements(list(self.components.keys()), 'Components'),
708
+ format_elements(list(self.buses.keys()), 'Buses'),
709
+ format_elements(list(self.effects.effects.keys()), 'Effects'),
710
+ f'Status: {"Connected & Transformed" if self.connected_and_transformed else "Not connected"}',
711
+ ]
712
+ )
713
+ lines = ['FlowSystem:', f'{"─" * max(len(line) for line in lines)}'] + lines
477
714
 
478
- def __str__(self):
479
- with StringIO() as output_buffer:
480
- console = Console(file=output_buffer, width=1000) # Adjust width as needed
481
- console.print(Pretty(self.as_dict('stats'), expand_all=True, indent_guides=True))
482
- value = output_buffer.getvalue()
483
- return value
715
+ return '\n'.join(lines)
716
+
717
+ def __eq__(self, other: FlowSystem):
718
+ """Check if two FlowSystems are equal by comparing their dataset representations."""
719
+ if not isinstance(other, FlowSystem):
720
+ raise NotImplementedError('Comparison with other types is not implemented for class FlowSystem')
721
+
722
+ ds_me = self.to_dataset()
723
+ ds_other = other.to_dataset()
724
+
725
+ try:
726
+ xr.testing.assert_equal(ds_me, ds_other)
727
+ except AssertionError:
728
+ return False
729
+
730
+ if ds_me.attrs != ds_other.attrs:
731
+ return False
732
+
733
+ return True
734
+
735
+ def __getitem__(self, item) -> Element:
736
+ """Get element by exact label with helpful error messages."""
737
+ if item in self.all_elements:
738
+ return self.all_elements[item]
739
+
740
+ # Provide helpful error with suggestions
741
+ from difflib import get_close_matches
742
+
743
+ suggestions = get_close_matches(item, self.all_elements.keys(), n=3, cutoff=0.6)
744
+
745
+ if suggestions:
746
+ suggestion_str = ', '.join(f"'{s}'" for s in suggestions)
747
+ raise KeyError(f"Element '{item}' not found. Did you mean: {suggestion_str}?")
748
+ else:
749
+ raise KeyError(f"Element '{item}' not found in FlowSystem")
750
+
751
+ def __contains__(self, item: str) -> bool:
752
+ """Check if element exists in the FlowSystem."""
753
+ return item in self.all_elements
754
+
755
+ def __iter__(self):
756
+ """Iterate over element labels."""
757
+ return iter(self.all_elements.keys())
484
758
 
485
759
  @property
486
- def flows(self) -> Dict[str, Flow]:
760
+ def flows(self) -> dict[str, Flow]:
487
761
  set_of_flows = {flow for comp in self.components.values() for flow in comp.inputs + comp.outputs}
488
762
  return {flow.label_full: flow for flow in set_of_flows}
489
763
 
490
764
  @property
491
- def all_elements(self) -> Dict[str, Element]:
765
+ def all_elements(self) -> dict[str, Element]:
492
766
  return {**self.components, **self.effects.effects, **self.flows, **self.buses}
767
+
768
+ @property
769
+ def coords(self) -> dict[FlowSystemDimensions, pd.Index]:
770
+ active_coords = {'time': self.timesteps}
771
+ if self.periods is not None:
772
+ active_coords['period'] = self.periods
773
+ if self.scenarios is not None:
774
+ active_coords['scenario'] = self.scenarios
775
+ return active_coords
776
+
777
+ @property
778
+ def used_in_calculation(self) -> bool:
779
+ return self._used_in_calculation
780
+
781
+ def _validate_scenario_parameter(self, value: bool | list[str], param_name: str, element_type: str) -> None:
782
+ """
783
+ Validate scenario parameter value.
784
+
785
+ Args:
786
+ value: The value to validate
787
+ param_name: Name of the parameter (for error messages)
788
+ element_type: Type of elements expected in list (e.g., 'component label_full', 'flow label_full')
789
+
790
+ Raises:
791
+ TypeError: If value is not bool or list[str]
792
+ ValueError: If list contains non-string elements
793
+ """
794
+ if isinstance(value, bool):
795
+ return # Valid
796
+ elif isinstance(value, list):
797
+ if not all(isinstance(item, str) for item in value):
798
+ raise ValueError(f'{param_name} list must contain only strings ({element_type} values)')
799
+ else:
800
+ raise TypeError(f'{param_name} must be bool or list[str], got {type(value).__name__}')
801
+
802
+ @property
803
+ def scenario_independent_sizes(self) -> bool | list[str]:
804
+ """
805
+ Controls whether investment sizes are equalized across scenarios.
806
+
807
+ Returns:
808
+ bool or list[str]: Configuration for scenario-independent sizing
809
+ """
810
+ return self._scenario_independent_sizes
811
+
812
+ @scenario_independent_sizes.setter
813
+ def scenario_independent_sizes(self, value: bool | list[str]) -> None:
814
+ """
815
+ Set whether investment sizes should be equalized across scenarios.
816
+
817
+ Args:
818
+ value: True (all equalized), False (all vary), or list of component label_full strings to equalize
819
+
820
+ Raises:
821
+ TypeError: If value is not bool or list[str]
822
+ ValueError: If list contains non-string elements
823
+ """
824
+ self._validate_scenario_parameter(value, 'scenario_independent_sizes', 'Element.label_full')
825
+ self._scenario_independent_sizes = value
826
+
827
+ @property
828
+ def scenario_independent_flow_rates(self) -> bool | list[str]:
829
+ """
830
+ Controls whether flow rates are equalized across scenarios.
831
+
832
+ Returns:
833
+ bool or list[str]: Configuration for scenario-independent flow rates
834
+ """
835
+ return self._scenario_independent_flow_rates
836
+
837
+ @scenario_independent_flow_rates.setter
838
+ def scenario_independent_flow_rates(self, value: bool | list[str]) -> None:
839
+ """
840
+ Set whether flow rates should be equalized across scenarios.
841
+
842
+ Args:
843
+ value: True (all equalized), False (all vary), or list of flow label_full strings to equalize
844
+
845
+ Raises:
846
+ TypeError: If value is not bool or list[str]
847
+ ValueError: If list contains non-string elements
848
+ """
849
+ self._validate_scenario_parameter(value, 'scenario_independent_flow_rates', 'Flow.label_full')
850
+ self._scenario_independent_flow_rates = value
851
+
852
+ def sel(
853
+ self,
854
+ time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None,
855
+ period: int | slice | list[int] | pd.Index | None = None,
856
+ scenario: str | slice | list[str] | pd.Index | None = None,
857
+ ) -> FlowSystem:
858
+ """
859
+ Select a subset of the FlowSystem by coordinate labels (time, period, scenario).
860
+
861
+ Args:
862
+ time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15', or list of times)
863
+ period: Period selection (e.g., slice(2023, 2024), or list of periods)
864
+ scenario: Scenario selection (e.g., slice('scenario1', 'scenario2'), or list of scenarios)
865
+
866
+ Returns:
867
+ FlowSystem: New FlowSystem with selected data
868
+ """
869
+ if not self.connected_and_transformed:
870
+ self.connect_and_transform()
871
+
872
+ ds = self.to_dataset()
873
+
874
+ # Build indexers dict from non-None parameters
875
+ indexers = {}
876
+ if time is not None:
877
+ indexers['time'] = time
878
+ if period is not None:
879
+ indexers['period'] = period
880
+ if scenario is not None:
881
+ indexers['scenario'] = scenario
882
+
883
+ if not indexers:
884
+ return self.copy() # Return a copy when no selection
885
+
886
+ selected_dataset = ds.sel(**indexers)
887
+ return self.__class__.from_dataset(selected_dataset)
888
+
889
+ def isel(
890
+ self,
891
+ time: int | slice | list[int] | None = None,
892
+ period: int | slice | list[int] | None = None,
893
+ scenario: int | slice | list[int] | None = None,
894
+ ) -> FlowSystem:
895
+ """
896
+ Select a subset of the flowsystem by integer indices.
897
+
898
+ Args:
899
+ time: Time selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
900
+ period: Period selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
901
+ scenario: Scenario selection by integer index (e.g., slice(0, 3), 50, or [0, 5, 10])
902
+
903
+ Returns:
904
+ FlowSystem: New FlowSystem with selected data
905
+ """
906
+ if not self.connected_and_transformed:
907
+ self.connect_and_transform()
908
+
909
+ ds = self.to_dataset()
910
+
911
+ # Build indexers dict from non-None parameters
912
+ indexers = {}
913
+ if time is not None:
914
+ indexers['time'] = time
915
+ if period is not None:
916
+ indexers['period'] = period
917
+ if scenario is not None:
918
+ indexers['scenario'] = scenario
919
+
920
+ if not indexers:
921
+ return self.copy() # Return a copy when no selection
922
+
923
+ selected_dataset = ds.isel(**indexers)
924
+ return self.__class__.from_dataset(selected_dataset)
925
+
926
+ def resample(
927
+ self,
928
+ time: str,
929
+ method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean',
930
+ **kwargs: Any,
931
+ ) -> FlowSystem:
932
+ """
933
+ Create a resampled FlowSystem by resampling data along the time dimension (like xr.Dataset.resample()).
934
+ Only resamples data variables that have a time dimension.
935
+
936
+ Args:
937
+ time: Resampling frequency (e.g., '3h', '2D', '1M')
938
+ method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min'
939
+ **kwargs: Additional arguments passed to xarray.resample()
940
+
941
+ Returns:
942
+ FlowSystem: New FlowSystem with resampled data
943
+ """
944
+ if not self.connected_and_transformed:
945
+ self.connect_and_transform()
946
+
947
+ dataset = self.to_dataset()
948
+
949
+ # Separate variables with and without time dimension
950
+ time_vars = {}
951
+ non_time_vars = {}
952
+
953
+ for var_name, var in dataset.data_vars.items():
954
+ if 'time' in var.dims:
955
+ time_vars[var_name] = var
956
+ else:
957
+ non_time_vars[var_name] = var
958
+
959
+ # Only resample variables that have time dimension
960
+ time_dataset = dataset[list(time_vars.keys())]
961
+ resampler = time_dataset.resample(time=time, **kwargs)
962
+
963
+ if hasattr(resampler, method):
964
+ resampled_time_data = getattr(resampler, method)()
965
+ else:
966
+ available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count']
967
+ raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}')
968
+
969
+ # Combine resampled time variables with non-time variables
970
+ if non_time_vars:
971
+ non_time_dataset = dataset[list(non_time_vars.keys())]
972
+ resampled_dataset = xr.merge([resampled_time_data, non_time_dataset])
973
+ else:
974
+ resampled_dataset = resampled_time_data
975
+
976
+ return self.__class__.from_dataset(resampled_dataset)
977
+
978
+ @property
979
+ def connected_and_transformed(self) -> bool:
980
+ return self._connected_and_transformed
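
The new container protocol (`__getitem__`, `__contains__`, `__iter__`) shown in the last hunk lets a FlowSystem be queried like a mapping from `label_full` to element. A sketch with hypothetical labels:

```python
if 'Boiler' in fs:             # membership test over all elements
    boiler = fs['Boiler']      # exact lookup; a near-miss raises KeyError with suggestions
for label in fs:               # iterates over element labels
    print(label)
```
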
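Likewise, `sel`, `isel` and `resample` round-trip through `to_dataset()`/`from_dataset()` and return a new FlowSystem. A hedged sketch with illustrative dates and frequencies:

```python
first_week = fs.isel(time=slice(0, 168))                  # integer-position selection
january = fs.sel(time=slice('2030-01-01', '2030-01-31'))  # label-based selection
base_only = fs.sel(scenario=['base'])                     # list selection keeps the dimension
coarse = fs.resample(time='3h', method='mean')            # only time-indexed variables are resampled
```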