flixopt 3.0.1__py3-none-any.whl → 6.0.0rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. flixopt/__init__.py +57 -49
  2. flixopt/carrier.py +159 -0
  3. flixopt/clustering/__init__.py +51 -0
  4. flixopt/clustering/base.py +1746 -0
  5. flixopt/clustering/intercluster_helpers.py +201 -0
  6. flixopt/color_processing.py +372 -0
  7. flixopt/comparison.py +819 -0
  8. flixopt/components.py +848 -270
  9. flixopt/config.py +853 -496
  10. flixopt/core.py +111 -98
  11. flixopt/effects.py +294 -284
  12. flixopt/elements.py +484 -223
  13. flixopt/features.py +220 -118
  14. flixopt/flow_system.py +2026 -389
  15. flixopt/interface.py +504 -286
  16. flixopt/io.py +1718 -55
  17. flixopt/linear_converters.py +291 -230
  18. flixopt/modeling.py +304 -181
  19. flixopt/network_app.py +2 -1
  20. flixopt/optimization.py +788 -0
  21. flixopt/optimize_accessor.py +373 -0
  22. flixopt/plot_result.py +143 -0
  23. flixopt/plotting.py +1177 -1034
  24. flixopt/results.py +1331 -372
  25. flixopt/solvers.py +12 -4
  26. flixopt/statistics_accessor.py +2412 -0
  27. flixopt/stats_accessor.py +75 -0
  28. flixopt/structure.py +954 -120
  29. flixopt/topology_accessor.py +676 -0
  30. flixopt/transform_accessor.py +2277 -0
  31. flixopt/types.py +120 -0
  32. flixopt-6.0.0rc7.dist-info/METADATA +290 -0
  33. flixopt-6.0.0rc7.dist-info/RECORD +36 -0
  34. {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/WHEEL +1 -1
  35. flixopt/aggregation.py +0 -382
  36. flixopt/calculation.py +0 -672
  37. flixopt/commons.py +0 -51
  38. flixopt/utils.py +0 -86
  39. flixopt-3.0.1.dist-info/METADATA +0 -209
  40. flixopt-3.0.1.dist-info/RECORD +0 -26
  41. {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/licenses/LICENSE +0 -0
  42. {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/top_level.txt +0 -0
flixopt/flow_system.py CHANGED
@@ -6,60 +6,83 @@ from __future__ import annotations
6
6
 
7
7
  import json
8
8
  import logging
9
+ import pathlib
9
10
  import warnings
10
- from typing import TYPE_CHECKING, Any, Literal, Optional
11
+ from itertools import chain
12
+ from typing import TYPE_CHECKING, Any, Literal
11
13
 
12
14
  import numpy as np
13
15
  import pandas as pd
14
16
  import xarray as xr
15
17
 
18
+ from . import io as fx_io
19
+ from .components import Storage
20
+ from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
16
21
  from .core import (
17
22
  ConversionError,
18
23
  DataConverter,
19
24
  FlowSystemDimensions,
20
- PeriodicData,
21
- PeriodicDataUser,
22
- TemporalData,
23
- TemporalDataUser,
24
25
  TimeSeriesData,
25
26
  )
26
- from .effects import (
27
- Effect,
28
- EffectCollection,
29
- PeriodicEffects,
30
- PeriodicEffectsUser,
31
- TemporalEffects,
32
- TemporalEffectsUser,
33
- )
27
+ from .effects import Effect, EffectCollection
34
28
  from .elements import Bus, Component, Flow
35
- from .structure import Element, FlowSystemModel, Interface
29
+ from .optimize_accessor import OptimizeAccessor
30
+ from .statistics_accessor import StatisticsAccessor
31
+ from .structure import (
32
+ CompositeContainerMixin,
33
+ Element,
34
+ ElementContainer,
35
+ FlowSystemModel,
36
+ Interface,
37
+ VariableCategory,
38
+ )
39
+ from .topology_accessor import TopologyAccessor
40
+ from .transform_accessor import TransformAccessor
36
41
 
37
42
  if TYPE_CHECKING:
38
- import pathlib
39
43
  from collections.abc import Collection
40
44
 
41
45
  import pyvis
42
46
 
47
+ from .clustering import Clustering
48
+ from .solvers import _Solver
49
+ from .types import Effect_TPS, Numeric_S, Numeric_TPS, NumericOrBool
50
+
51
+ from .carrier import Carrier, CarrierContainer
52
+
53
+ # Register clustering classes for IO (deferred to avoid circular imports)
54
+ from .clustering.base import _register_clustering_classes
55
+
56
+ _register_clustering_classes()
57
+
43
58
  logger = logging.getLogger('flixopt')
44
59
 
45
60
 
46
- class FlowSystem(Interface):
61
+ class FlowSystem(Interface, CompositeContainerMixin[Element]):
47
62
  """
48
- A FlowSystem organizes the high level Elements (Components, Buses & Effects).
63
+ A FlowSystem organizes the high level Elements (Components, Buses, Effects & Flows).
49
64
 
50
- This is the main container class that users work with to build and manage their System.
65
+ This is the main container class that users work with to build and manage their energy or material flow system.
66
+ FlowSystem provides both direct container access (via .components, .buses, .effects, .flows) and a unified
67
+ dict-like interface for accessing any element by label across all container types.
51
68
 
52
69
  Args:
53
70
  timesteps: The timesteps of the model.
54
71
  periods: The periods of the model.
55
72
  scenarios: The scenarios of the model.
56
- hours_of_last_timestep: The duration of the last time step. Uses the last time interval if not specified
57
- hours_of_previous_timesteps: The duration of previous timesteps.
58
- If None, the first time increment of time_series is used.
59
- This is needed to calculate previous durations (for example consecutive_on_hours).
60
- If you use an array, take care that its long enough to cover all previous values!
61
- weights: The weights of each period and scenario. If None, all scenarios have the same weight (normalized to 1).
62
- Its recommended to normalize the weights to sum up to 1.
73
+ hours_of_last_timestep: Duration of the last timestep. If None, computed from the last time interval.
74
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the first time interval.
75
+ Can be a scalar (all previous timesteps have same duration) or array (different durations).
76
+ Used to calculate previous values (e.g., uptime and downtime).
77
+ weight_of_last_period: Weight/duration of the last period. If None, computed from the last period interval.
78
+ Used for calculating sums over periods in multi-period models.
79
+ scenario_weights: The weights of each scenario. If None, all scenarios have the same weight (normalized to 1).
80
+ Period weights are always computed internally from the period index (like timestep_duration for time).
81
+ The final `weights` array (accessible via `flow_system.model.objective_weights`) is computed as period_weights × normalized_scenario_weights, with normalization applied to the scenario weights by default.
82
+ cluster_weight: Weight for each cluster.
83
+ If None (default), all clusters have weight 1.0. Used by cluster() to specify
84
+ how many original timesteps each cluster represents. Multiply with timestep_duration
85
+ for proper time aggregation in clustered models.
63
86
  scenario_independent_sizes: Controls whether investment sizes are equalized across scenarios.
64
87
  - True: All sizes are shared/equalized across scenarios
65
88
  - False: All sizes are optimized separately per scenario
@@ -69,64 +92,215 @@ class FlowSystem(Interface):
69
92
  - False: All flow rates are optimized separately per scenario
70
93
  - list[str]: Only specified flows (by label_full) are equalized across scenarios
71
94
 
95
+ Examples:
96
+ Creating a FlowSystem and accessing elements:
97
+
98
+ >>> import flixopt as fx
99
+ >>> import pandas as pd
100
+ >>> timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
101
+ >>> flow_system = fx.FlowSystem(timesteps)
102
+ >>>
103
+ >>> # Add elements to the system
104
+ >>> boiler = fx.Component('Boiler', inputs=[heat_flow], status_parameters=...)
105
+ >>> heat_bus = fx.Bus('Heat', imbalance_penalty_per_flow_hour=1e4)
106
+ >>> costs = fx.Effect('costs', is_objective=True, is_standard=True)
107
+ >>> flow_system.add_elements(boiler, heat_bus, costs)
108
+
109
+ Unified dict-like access (recommended for most cases):
110
+
111
+ >>> # Access any element by label, regardless of type
112
+ >>> boiler = flow_system['Boiler'] # Returns Component
113
+ >>> heat_bus = flow_system['Heat'] # Returns Bus
114
+ >>> costs = flow_system['costs'] # Returns Effect
115
+ >>>
116
+ >>> # Check if element exists
117
+ >>> if 'Boiler' in flow_system:
118
+ ... print('Boiler found in system')
119
+ >>>
120
+ >>> # Iterate over all elements
121
+ >>> for label in flow_system.keys():
122
+ ... element = flow_system[label]
123
+ ... print(f'{label}: {type(element).__name__}')
124
+ >>>
125
+ >>> # Get all element labels and objects
126
+ >>> all_labels = list(flow_system.keys())
127
+ >>> all_elements = list(flow_system.values())
128
+ >>> for label, element in flow_system.items():
129
+ ... print(f'{label}: {element}')
130
+
131
+ Direct container access for type-specific operations:
132
+
133
+ >>> # Access specific container when you need type filtering
134
+ >>> for component in flow_system.components.values():
135
+ ... print(f'{component.label}: {len(component.inputs)} inputs')
136
+ >>>
137
+ >>> # Access buses directly
138
+ >>> for bus in flow_system.buses.values():
139
+ ... print(f'{bus.label}')
140
+ >>>
141
+ >>> # Flows are automatically collected from all components
142
+
143
+ Power user pattern - Efficient chaining without conversion overhead:
144
+
145
+ >>> # Instead of chaining (causes multiple conversions):
146
+ >>> result = flow_system.sel(time='2020-01').resample('2h') # Slow
147
+ >>>
148
+ >>> # Use dataset methods directly (single conversion):
149
+ >>> ds = flow_system.to_dataset()
150
+ >>> ds = FlowSystem._dataset_sel(ds, time='2020-01')
151
+ >>> ds = flow_system._dataset_resample(ds, freq='2h', method='mean')
152
+ >>> result = FlowSystem.from_dataset(ds) # Fast!
153
+ >>>
154
+ >>> # Available dataset methods:
155
+ >>> # - FlowSystem._dataset_sel(dataset, time=..., period=..., scenario=...)
156
+ >>> # - FlowSystem._dataset_isel(dataset, time=..., period=..., scenario=...)
157
+ >>> # - flow_system._dataset_resample(dataset, freq=..., method=..., **kwargs)
158
+ >>> for flow in flow_system.flows.values():
159
+ ... print(f'{flow.label_full}: {flow.size}')
160
+ >>>
161
+ >>> # Access effects
162
+ >>> for effect in flow_system.effects.values():
163
+ ... print(f'{effect.label}')
164
+
72
165
  Notes:
166
+ - The dict-like interface (`flow_system['element']`) searches across all containers
167
+ (components, buses, effects, flows) to find the element with the matching label.
168
+ - Element labels must be unique across all container types. Attempting to add
169
+ elements with duplicate labels will raise an error, ensuring each label maps to exactly one element.
170
+ - Direct container access (`.components`, `.buses`, `.effects`, `.flows`) is useful
171
+ when you need type-specific filtering or operations.
172
+ - The `.flows` container is automatically populated from all component inputs and outputs.
73
173
  - Creates an empty registry for components and buses, an empty EffectCollection, and a placeholder for a SystemModel.
74
174
  - The instance starts disconnected (self._connected_and_transformed == False) and will be
75
- connected_and_transformed automatically when trying to solve a calculation.
175
+ connected_and_transformed automatically when trying to optimize.
76
176
  """
77
177
 
178
+ model: FlowSystemModel | None
179
+
78
180
  def __init__(
79
181
  self,
80
- timesteps: pd.DatetimeIndex,
182
+ timesteps: pd.DatetimeIndex | pd.RangeIndex,
81
183
  periods: pd.Index | None = None,
82
184
  scenarios: pd.Index | None = None,
83
- hours_of_last_timestep: float | None = None,
185
+ clusters: pd.Index | None = None,
186
+ hours_of_last_timestep: int | float | None = None,
84
187
  hours_of_previous_timesteps: int | float | np.ndarray | None = None,
85
- weights: PeriodicDataUser | None = None,
188
+ weight_of_last_period: int | float | None = None,
189
+ scenario_weights: Numeric_S | None = None,
190
+ cluster_weight: Numeric_TPS | None = None,
86
191
  scenario_independent_sizes: bool | list[str] = True,
87
192
  scenario_independent_flow_rates: bool | list[str] = False,
193
+ name: str | None = None,
194
+ timestep_duration: xr.DataArray | None = None,
88
195
  ):
89
196
  self.timesteps = self._validate_timesteps(timesteps)
90
- self.timesteps_extra = self._create_timesteps_with_extra(self.timesteps, hours_of_last_timestep)
91
- self.hours_of_previous_timesteps = self._calculate_hours_of_previous_timesteps(
92
- self.timesteps, hours_of_previous_timesteps
93
- )
197
+
198
+ # Compute all time-related metadata using shared helper
199
+ (
200
+ self.timesteps_extra,
201
+ self.hours_of_last_timestep,
202
+ self.hours_of_previous_timesteps,
203
+ computed_timestep_duration,
204
+ ) = self._compute_time_metadata(self.timesteps, hours_of_last_timestep, hours_of_previous_timesteps)
94
205
 
95
206
  self.periods = None if periods is None else self._validate_periods(periods)
96
207
  self.scenarios = None if scenarios is None else self._validate_scenarios(scenarios)
208
+ self.clusters = clusters # Cluster dimension for clustered FlowSystems
209
+
210
+ # Use provided timestep_duration if given (for segmented systems), otherwise use computed value
211
+ # For RangeIndex (segmented systems), computed_timestep_duration is None
212
+ if timestep_duration is not None:
213
+ self.timestep_duration = self.fit_to_model_coords('timestep_duration', timestep_duration)
214
+ elif computed_timestep_duration is not None:
215
+ self.timestep_duration = self.fit_to_model_coords('timestep_duration', computed_timestep_duration)
216
+ else:
217
+ # RangeIndex (segmented systems) requires explicit timestep_duration
218
+ if isinstance(self.timesteps, pd.RangeIndex):
219
+ raise ValueError(
220
+ 'timestep_duration is required when using RangeIndex timesteps (segmented systems). '
221
+ 'Provide timestep_duration explicitly or use DatetimeIndex timesteps.'
222
+ )
223
+ self.timestep_duration = None
224
+
225
+ # Cluster weight for cluster() optimization (default 1.0)
226
+ # Represents how many original timesteps each cluster represents
227
+ # May have period/scenario dimensions if cluster() was used with those
228
+ self.cluster_weight: xr.DataArray | None = (
229
+ self.fit_to_model_coords(
230
+ 'cluster_weight',
231
+ cluster_weight,
232
+ )
233
+ if cluster_weight is not None
234
+ else None
235
+ )
97
236
 
98
- self.weights = weights
99
-
100
- hours_per_timestep = self.calculate_hours_per_timestep(self.timesteps_extra)
237
+ self.scenario_weights = scenario_weights # Use setter
101
238
 
102
- self.hours_of_last_timestep = hours_per_timestep[-1].item()
239
+ # Compute all period-related metadata using shared helper
240
+ (self.periods_extra, self.weight_of_last_period, weight_per_period) = self._compute_period_metadata(
241
+ self.periods, weight_of_last_period
242
+ )
103
243
 
104
- self.hours_per_timestep = self.fit_to_model_coords('hours_per_timestep', hours_per_timestep)
244
+ self.period_weights: xr.DataArray | None = weight_per_period
105
245
 
106
246
  # Element collections
107
- self.components: dict[str, Component] = {}
108
- self.buses: dict[str, Bus] = {}
109
- self.effects: EffectCollection = EffectCollection()
247
+ self.components: ElementContainer[Component] = ElementContainer(
248
+ element_type_name='components', truncate_repr=10
249
+ )
250
+ self.buses: ElementContainer[Bus] = ElementContainer(element_type_name='buses', truncate_repr=10)
251
+ self.effects: EffectCollection = EffectCollection(truncate_repr=10)
110
252
  self.model: FlowSystemModel | None = None
111
253
 
112
254
  self._connected_and_transformed = False
113
- self._used_in_calculation = False
255
+ self._used_in_optimization = False
114
256
 
115
257
  self._network_app = None
258
+ self._flows_cache: ElementContainer[Flow] | None = None
259
+ self._storages_cache: ElementContainer[Storage] | None = None
260
+
261
+ # Solution dataset - populated after optimization or loaded from file
262
+ self._solution: xr.Dataset | None = None
263
+
264
+ # Variable categories for segment expansion handling
265
+ # Populated when model is built, used by transform.expand()
266
+ self._variable_categories: dict[str, VariableCategory] = {}
267
+
268
+ # Aggregation info - populated by transform.cluster()
269
+ self.clustering: Clustering | None = None
270
+
271
+ # Statistics accessor cache - lazily initialized, invalidated on new solution
272
+ self._statistics: StatisticsAccessor | None = None
273
+
274
+ # Topology accessor cache - lazily initialized, invalidated on structure change
275
+ self._topology: TopologyAccessor | None = None
276
+
277
+ # Carrier container - local carriers override CONFIG.Carriers
278
+ self._carriers: CarrierContainer = CarrierContainer()
279
+
280
+ # Cached flow→carrier mapping (built lazily after connect_and_transform)
281
+ self._flow_carriers: dict[str, str] | None = None
116
282
 
117
283
  # Use properties to validate and store scenario dimension settings
118
284
  self.scenario_independent_sizes = scenario_independent_sizes
119
285
  self.scenario_independent_flow_rates = scenario_independent_flow_rates
120
286
 
287
+ # Optional name for identification (derived from filename on load)
288
+ self.name = name
289
+
121
290
  @staticmethod
122
- def _validate_timesteps(timesteps: pd.DatetimeIndex) -> pd.DatetimeIndex:
123
- """Validate timesteps format and rename if needed."""
124
- if not isinstance(timesteps, pd.DatetimeIndex):
125
- raise TypeError('timesteps must be a pandas DatetimeIndex')
291
+ def _validate_timesteps(
292
+ timesteps: pd.DatetimeIndex | pd.RangeIndex,
293
+ ) -> pd.DatetimeIndex | pd.RangeIndex:
294
+ """Validate timesteps format and rename if needed.
295
+
296
+ Accepts either DatetimeIndex (standard) or RangeIndex (for segmented systems).
297
+ """
298
+ if not isinstance(timesteps, (pd.DatetimeIndex, pd.RangeIndex)):
299
+ raise TypeError('timesteps must be a pandas DatetimeIndex or RangeIndex')
126
300
  if len(timesteps) < 2:
127
301
  raise ValueError('timesteps must contain at least 2 timestamps')
128
302
  if timesteps.name != 'time':
129
- timesteps.name = 'time'
303
+ timesteps = timesteps.rename('time')
130
304
  if not timesteps.is_monotonic_increasing:
131
305
  raise ValueError('timesteps must be sorted')
132
306
  return timesteps
@@ -172,9 +346,18 @@ class FlowSystem(Interface):
172
346
 
173
347
  @staticmethod
174
348
  def _create_timesteps_with_extra(
175
- timesteps: pd.DatetimeIndex, hours_of_last_timestep: float | None
176
- ) -> pd.DatetimeIndex:
177
- """Create timesteps with an extra step at the end."""
349
+ timesteps: pd.DatetimeIndex | pd.RangeIndex, hours_of_last_timestep: float | None
350
+ ) -> pd.DatetimeIndex | pd.RangeIndex:
351
+ """Create timesteps with an extra step at the end.
352
+
353
+ For DatetimeIndex, adds an extra timestep using hours_of_last_timestep.
354
+ For RangeIndex (segmented systems), simply appends the next integer.
355
+ """
356
+ if isinstance(timesteps, pd.RangeIndex):
357
+ # For RangeIndex, preserve start and step, extend by one step
358
+ new_stop = timesteps.stop + timesteps.step
359
+ return pd.RangeIndex(start=timesteps.start, stop=new_stop, step=timesteps.step, name='time')
360
+
178
361
  if hours_of_last_timestep is None:
179
362
  hours_of_last_timestep = (timesteps[-1] - timesteps[-2]) / pd.Timedelta(hours=1)
180
363
 
@@ -182,24 +365,292 @@ class FlowSystem(Interface):
182
365
  return pd.DatetimeIndex(timesteps.append(last_date), name='time')
183
366
 
184
367
  @staticmethod
185
- def calculate_hours_per_timestep(timesteps_extra: pd.DatetimeIndex) -> xr.DataArray:
186
- """Calculate duration of each timestep as a 1D DataArray."""
368
+ def calculate_timestep_duration(
369
+ timesteps_extra: pd.DatetimeIndex | pd.RangeIndex,
370
+ ) -> xr.DataArray | None:
371
+ """Calculate duration of each timestep in hours as a 1D DataArray.
372
+
373
+ For RangeIndex (segmented systems), returns None since duration cannot be
374
+ computed from the index. Use timestep_duration parameter instead.
375
+ """
376
+ if isinstance(timesteps_extra, pd.RangeIndex):
377
+ # Cannot compute duration from RangeIndex - must be provided externally
378
+ return None
379
+
187
380
  hours_per_step = np.diff(timesteps_extra) / pd.Timedelta(hours=1)
188
381
  return xr.DataArray(
189
- hours_per_step, coords={'time': timesteps_extra[:-1]}, dims='time', name='hours_per_timestep'
382
+ hours_per_step, coords={'time': timesteps_extra[:-1]}, dims='time', name='timestep_duration'
190
383
  )
191
384
 
192
385
  @staticmethod
193
386
  def _calculate_hours_of_previous_timesteps(
194
- timesteps: pd.DatetimeIndex, hours_of_previous_timesteps: float | np.ndarray | None
195
- ) -> float | np.ndarray:
196
- """Calculate duration of regular timesteps."""
387
+ timesteps: pd.DatetimeIndex | pd.RangeIndex, hours_of_previous_timesteps: float | np.ndarray | None
388
+ ) -> float | np.ndarray | None:
389
+ """Calculate duration of regular timesteps.
390
+
391
+ For RangeIndex (segmented systems), returns None if not provided.
392
+ """
197
393
  if hours_of_previous_timesteps is not None:
198
394
  return hours_of_previous_timesteps
395
+ if isinstance(timesteps, pd.RangeIndex):
396
+ # Cannot compute from RangeIndex
397
+ return None
199
398
  # Calculate from the first interval
200
399
  first_interval = timesteps[1] - timesteps[0]
201
400
  return first_interval.total_seconds() / 3600 # Convert to hours
202
401
 
402
+ @staticmethod
403
+ def _create_periods_with_extra(periods: pd.Index, weight_of_last_period: int | float | None) -> pd.Index:
404
+ """Create periods with an extra period at the end.
405
+
406
+ Args:
407
+ periods: The period index (must be monotonically increasing integers)
408
+ weight_of_last_period: Weight of the last period. If None, computed from the period index.
409
+
410
+ Returns:
411
+ Period index with an extra period appended at the end
412
+ """
413
+ if weight_of_last_period is None:
414
+ if len(periods) < 2:
415
+ raise ValueError(
416
+ 'FlowSystem: weight_of_last_period must be provided explicitly when only one period is defined.'
417
+ )
418
+ # Calculate weight from difference between last two periods
419
+ weight_of_last_period = int(periods[-1]) - int(periods[-2])
420
+
421
+ # Create the extra period value
422
+ last_period_value = int(periods[-1]) + weight_of_last_period
423
+ periods_extra = periods.append(pd.Index([last_period_value], name='period'))
424
+ return periods_extra
425
+
426
+ @staticmethod
427
+ def calculate_weight_per_period(periods_extra: pd.Index) -> xr.DataArray:
428
+ """Calculate weight of each period from period index differences.
429
+
430
+ Args:
431
+ periods_extra: Period index with an extra period at the end
432
+
433
+ Returns:
434
+ DataArray with weights for each period (1D, 'period' dimension)
435
+ """
436
+ weights = np.diff(periods_extra.to_numpy().astype(int))
437
+ return xr.DataArray(weights, coords={'period': periods_extra[:-1]}, dims='period', name='weight_per_period')
438
+
439
+ @classmethod
440
+ def _compute_time_metadata(
441
+ cls,
442
+ timesteps: pd.DatetimeIndex | pd.RangeIndex,
443
+ hours_of_last_timestep: int | float | None = None,
444
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
445
+ ) -> tuple[
446
+ pd.DatetimeIndex | pd.RangeIndex,
447
+ float | None,
448
+ float | np.ndarray | None,
449
+ xr.DataArray | None,
450
+ ]:
451
+ """
452
+ Compute all time-related metadata from timesteps.
453
+
454
+ This is the single source of truth for time metadata computation, used by both
455
+ __init__ and dataset operations (sel/isel/resample) to ensure consistency.
456
+
457
+ For RangeIndex (segmented systems), timestep_duration cannot be calculated from
458
+ the index and must be provided externally after FlowSystem creation.
459
+
460
+ Args:
461
+ timesteps: The time index to compute metadata from (DatetimeIndex or RangeIndex)
462
+ hours_of_last_timestep: Duration of the last timestep. If None, computed from the time index.
463
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the time index.
464
+ Can be a scalar or array.
465
+
466
+ Returns:
467
+ Tuple of (timesteps_extra, hours_of_last_timestep, hours_of_previous_timesteps, timestep_duration)
468
+ For RangeIndex, hours_of_last_timestep and timestep_duration may be None.
469
+ """
470
+ # Create timesteps with extra step at the end
471
+ timesteps_extra = cls._create_timesteps_with_extra(timesteps, hours_of_last_timestep)
472
+
473
+ # Calculate timestep duration (returns None for RangeIndex)
474
+ timestep_duration = cls.calculate_timestep_duration(timesteps_extra)
475
+
476
+ # Extract hours_of_last_timestep if not provided
477
+ if hours_of_last_timestep is None and timestep_duration is not None:
478
+ hours_of_last_timestep = timestep_duration.isel(time=-1).item()
479
+
480
+ # Compute hours_of_previous_timesteps (handles both None and provided cases)
481
+ hours_of_previous_timesteps = cls._calculate_hours_of_previous_timesteps(timesteps, hours_of_previous_timesteps)
482
+
483
+ return timesteps_extra, hours_of_last_timestep, hours_of_previous_timesteps, timestep_duration
484
+
485
+ @classmethod
486
+ def _compute_period_metadata(
487
+ cls, periods: pd.Index | None, weight_of_last_period: int | float | None = None
488
+ ) -> tuple[pd.Index | None, int | float | None, xr.DataArray | None]:
489
+ """
490
+ Compute all period-related metadata from periods.
491
+
492
+ This is the single source of truth for period metadata computation, used by both
493
+ __init__ and dataset operations to ensure consistency.
494
+
495
+ Args:
496
+ periods: The period index to compute metadata from (or None if no periods)
497
+ weight_of_last_period: Weight of the last period. If None, computed from the period index.
498
+
499
+ Returns:
500
+ Tuple of (periods_extra, weight_of_last_period, weight_per_period)
501
+ All return None if periods is None
502
+ """
503
+ if periods is None:
504
+ return None, None, None
505
+
506
+ # Create periods with extra period at the end
507
+ periods_extra = cls._create_periods_with_extra(periods, weight_of_last_period)
508
+
509
+ # Calculate weight per period
510
+ weight_per_period = cls.calculate_weight_per_period(periods_extra)
511
+
512
+ # Extract weight_of_last_period if not provided
513
+ if weight_of_last_period is None:
514
+ weight_of_last_period = weight_per_period.isel(period=-1).item()
515
+
516
+ return periods_extra, weight_of_last_period, weight_per_period
517
+
518
+ @classmethod
519
+ def _update_time_metadata(
520
+ cls,
521
+ dataset: xr.Dataset,
522
+ hours_of_last_timestep: int | float | None = None,
523
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
524
+ ) -> xr.Dataset:
525
+ """
526
+ Update time-related attributes and data variables in dataset based on its time index.
527
+
528
+ Recomputes hours_of_last_timestep, hours_of_previous_timesteps, and timestep_duration
529
+ from the dataset's time index when these parameters are None. This ensures time metadata
530
+ stays synchronized with the actual timesteps after operations like resampling or selection.
531
+
532
+ Args:
533
+ dataset: Dataset to update (will be modified in place)
534
+ hours_of_last_timestep: Duration of the last timestep. If None, computed from the time index.
535
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the time index.
536
+ Can be a scalar or array.
537
+
538
+ Returns:
539
+ The same dataset with updated time-related attributes and data variables
540
+ """
541
+ new_time_index = dataset.indexes.get('time')
542
+ if new_time_index is not None and len(new_time_index) >= 2:
543
+ # Use shared helper to compute all time metadata
544
+ _, hours_of_last_timestep, hours_of_previous_timesteps, timestep_duration = cls._compute_time_metadata(
545
+ new_time_index, hours_of_last_timestep, hours_of_previous_timesteps
546
+ )
547
+
548
+ # Update timestep_duration DataArray if it exists in the dataset and new value is computed
549
+ # This prevents stale data after resampling operations
550
+ # Skip for RangeIndex (segmented systems) where timestep_duration is None
551
+ if 'timestep_duration' in dataset.data_vars and timestep_duration is not None:
552
+ dataset['timestep_duration'] = timestep_duration
553
+
554
+ # Update time-related attributes only when new values are provided/computed
555
+ # This preserves existing metadata instead of overwriting with None
556
+ if hours_of_last_timestep is not None:
557
+ dataset.attrs['hours_of_last_timestep'] = hours_of_last_timestep
558
+ if hours_of_previous_timesteps is not None:
559
+ dataset.attrs['hours_of_previous_timesteps'] = hours_of_previous_timesteps
560
+
561
+ return dataset
562
+
563
+ @classmethod
564
+ def _update_period_metadata(
565
+ cls,
566
+ dataset: xr.Dataset,
567
+ weight_of_last_period: int | float | None = None,
568
+ ) -> xr.Dataset:
569
+ """
570
+ Update period-related attributes and data variables in dataset based on its period index.
571
+
572
+ Recomputes weight_of_last_period and period_weights from the dataset's
573
+ period index. This ensures period metadata stays synchronized with the actual
574
+ periods after operations like selection.
575
+
576
+ When the period dimension is dropped (single value selected), this method
577
+ removes the scalar coordinate, period_weights DataArray, and cleans up attributes.
578
+
579
+ This is analogous to _update_time_metadata() for time-related metadata.
580
+
581
+ Args:
582
+ dataset: Dataset to update (will be modified in place)
583
+ weight_of_last_period: Weight of the last period. If None, reused from dataset attrs
584
+ (essential for single-period subsets where it cannot be inferred from intervals).
585
+
586
+ Returns:
587
+ The same dataset with updated period-related attributes and data variables
588
+ """
589
+ new_period_index = dataset.indexes.get('period')
590
+
591
+ if new_period_index is None:
592
+ # Period dimension was dropped (single value selected)
593
+ if 'period' in dataset.coords:
594
+ dataset = dataset.drop_vars('period')
595
+ dataset = dataset.drop_vars(['period_weights'], errors='ignore')
596
+ dataset.attrs.pop('weight_of_last_period', None)
597
+ return dataset
598
+
599
+ if len(new_period_index) >= 1:
600
+ # Reuse stored weight_of_last_period when not explicitly overridden.
601
+ # This is essential for single-period subsets where it cannot be inferred from intervals.
602
+ if weight_of_last_period is None:
603
+ weight_of_last_period = dataset.attrs.get('weight_of_last_period')
604
+
605
+ # Use shared helper to compute all period metadata
606
+ _, weight_of_last_period, period_weights = cls._compute_period_metadata(
607
+ new_period_index, weight_of_last_period
608
+ )
609
+
610
+ # Update period_weights DataArray if it exists in the dataset
611
+ if 'period_weights' in dataset.data_vars:
612
+ dataset['period_weights'] = period_weights
613
+
614
+ # Update period-related attributes only when new values are provided/computed
615
+ if weight_of_last_period is not None:
616
+ dataset.attrs['weight_of_last_period'] = weight_of_last_period
617
+
618
+ return dataset
619
+
620
+ @classmethod
621
+ def _update_scenario_metadata(cls, dataset: xr.Dataset) -> xr.Dataset:
622
+ """
623
+ Update scenario-related attributes and data variables in dataset based on its scenario index.
624
+
625
+ Recomputes or removes scenario weights. This ensures scenario metadata stays synchronized with the actual
626
+ scenarios after operations like selection.
627
+
628
+ When the scenario dimension is dropped (single value selected), this method
629
+ removes the scalar coordinate, scenario_weights DataArray, and cleans up attributes.
630
+
631
+ This is analogous to _update_period_metadata() for time-related metadata.
632
+
633
+ Args:
634
+ dataset: Dataset to update (will be modified in place)
635
+
636
+ Returns:
637
+ The same dataset with updated scenario-related attributes and data variables
638
+ """
639
+ new_scenario_index = dataset.indexes.get('scenario')
640
+
641
+ if new_scenario_index is None:
642
+ # Scenario dimension was dropped (single value selected)
643
+ if 'scenario' in dataset.coords:
644
+ dataset = dataset.drop_vars('scenario')
645
+ dataset = dataset.drop_vars(['scenario_weights'], errors='ignore')
646
+ dataset.attrs.pop('scenario_weights', None)
647
+ return dataset
648
+
649
+ if len(new_scenario_index) <= 1:
650
+ dataset.attrs.pop('scenario_weights', None)
651
+
652
+ return dataset
653
+
203
654
  def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
204
655
  """
205
656
  Override Interface method to handle FlowSystem-specific serialization.
@@ -213,6 +664,11 @@ class FlowSystem(Interface):
213
664
 
214
665
  # Remove timesteps, as it's directly stored in dataset index
215
666
  reference_structure.pop('timesteps', None)
667
+ # For DatetimeIndex, timestep_duration can be computed from timesteps_extra on load
668
+ # For RangeIndex (segmented systems), it must be saved as it cannot be computed
669
+ if isinstance(self.timesteps, pd.DatetimeIndex):
670
+ reference_structure.pop('timestep_duration', None)
671
+ all_extracted_arrays.pop('timestep_duration', None)
216
672
 
217
673
  # Extract from components
218
674
  components_structure = {}
@@ -232,7 +688,7 @@ class FlowSystem(Interface):
232
688
 
233
689
  # Extract from effects
234
690
  effects_structure = {}
235
- for effect in self.effects:
691
+ for effect in self.effects.values():
236
692
  effect_structure, effect_arrays = effect._create_reference_structure()
237
693
  all_extracted_arrays.update(effect_arrays)
238
694
  effects_structure[effect.label] = effect_structure
@@ -240,93 +696,318 @@ class FlowSystem(Interface):
240
696
 
241
697
  return reference_structure, all_extracted_arrays
242
698
 
243
- def to_dataset(self) -> xr.Dataset:
699
+ def to_dataset(self, include_solution: bool = True, include_original_data: bool = True) -> xr.Dataset:
244
700
  """
245
701
  Convert the FlowSystem to an xarray Dataset.
246
702
  Ensures FlowSystem is connected before serialization.
247
703
 
704
+ Data is stored in minimal form (scalars stay scalar, 1D arrays stay 1D) without
705
+ broadcasting to full model dimensions. This provides significant memory savings
706
+ for multi-period and multi-scenario models.
707
+
708
+ If a solution is present and `include_solution=True`, it will be included
709
+ in the dataset with variable names prefixed by 'solution|' to avoid conflicts
710
+ with FlowSystem configuration variables. Solution time coordinates are renamed
711
+ to 'solution_time' to preserve them independently of the FlowSystem's time coordinates.
712
+
713
+ Args:
714
+ include_solution: Whether to include the optimization solution in the dataset.
715
+ Defaults to True. Set to False to get only the FlowSystem structure
716
+ without solution data (useful for copying or saving templates).
717
+ include_original_data: Whether to include clustering.original_data in the dataset.
718
+ Defaults to True. Set to False for smaller files (~38% reduction) when
719
+ clustering.plot.compare() isn't needed after loading. The core workflow
720
+ (optimize → expand) works without original_data.
721
+
248
722
  Returns:
249
723
  xr.Dataset: Dataset containing all DataArrays with structure in attributes
724
+
725
+ See Also:
726
+ from_dataset: Create FlowSystem from dataset
727
+ to_netcdf: Save to NetCDF file
250
728
  """
251
729
  if not self.connected_and_transformed:
252
- logger.warning('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
730
+ logger.info('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
253
731
  self.connect_and_transform()
254
732
 
255
- return super().to_dataset()
733
+ # Get base dataset from parent class
734
+ base_ds = super().to_dataset()
735
+
736
+ # Add FlowSystem-specific data (solution, clustering, metadata)
737
+ return fx_io.flow_system_to_dataset(self, base_ds, include_solution, include_original_data)
256
738
 
257
739
  @classmethod
258
740
  def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
259
741
  """
260
742
  Create a FlowSystem from an xarray Dataset.
261
- Handles FlowSystem-specific reconstruction logic.
743
+
744
+ If the dataset contains solution data (variables prefixed with 'solution|'),
745
+ the solution will be restored to the FlowSystem. Solution time coordinates
746
+ are renamed back from 'solution_time' to 'time'.
747
+
748
+ Supports clustered datasets with (cluster, time) dimensions. When detected,
749
+ creates a synthetic DatetimeIndex for compatibility and stores the clustered
750
+ data structure for later use.
262
751
 
263
752
  Args:
264
753
  ds: Dataset containing the FlowSystem data
265
754
 
266
755
  Returns:
267
756
  FlowSystem instance
268
- """
269
- # Get the reference structure from attrs
270
- reference_structure = dict(ds.attrs)
271
-
272
- # Create arrays dictionary from dataset variables
273
- arrays_dict = {name: array for name, array in ds.data_vars.items()}
274
-
275
- # Create FlowSystem instance with constructor parameters
276
- flow_system = cls(
277
- timesteps=ds.indexes['time'],
278
- periods=ds.indexes.get('period'),
279
- scenarios=ds.indexes.get('scenario'),
280
- weights=cls._resolve_dataarray_reference(reference_structure['weights'], arrays_dict)
281
- if 'weights' in reference_structure
282
- else None,
283
- hours_of_last_timestep=reference_structure.get('hours_of_last_timestep'),
284
- hours_of_previous_timesteps=reference_structure.get('hours_of_previous_timesteps'),
285
- scenario_independent_sizes=reference_structure.get('scenario_independent_sizes', True),
286
- scenario_independent_flow_rates=reference_structure.get('scenario_independent_flow_rates', False),
287
- )
288
-
289
- # Restore components
290
- components_structure = reference_structure.get('components', {})
291
- for comp_label, comp_data in components_structure.items():
292
- component = cls._resolve_reference_structure(comp_data, arrays_dict)
293
- if not isinstance(component, Component):
294
- logger.critical(f'Restoring component {comp_label} failed.')
295
- flow_system._add_components(component)
296
-
297
- # Restore buses
298
- buses_structure = reference_structure.get('buses', {})
299
- for bus_label, bus_data in buses_structure.items():
300
- bus = cls._resolve_reference_structure(bus_data, arrays_dict)
301
- if not isinstance(bus, Bus):
302
- logger.critical(f'Restoring bus {bus_label} failed.')
303
- flow_system._add_buses(bus)
304
-
305
- # Restore effects
306
- effects_structure = reference_structure.get('effects', {})
307
- for effect_label, effect_data in effects_structure.items():
308
- effect = cls._resolve_reference_structure(effect_data, arrays_dict)
309
- if not isinstance(effect, Effect):
310
- logger.critical(f'Restoring effect {effect_label} failed.')
311
- flow_system._add_effects(effect)
312
757
 
313
- return flow_system
758
+ See Also:
759
+ to_dataset: Convert FlowSystem to dataset
760
+ from_netcdf: Load from NetCDF file
761
+ """
762
+ return fx_io.restore_flow_system_from_dataset(ds)
314
763
 
315
- def to_netcdf(self, path: str | pathlib.Path, compression: int = 0):
764
+ def to_netcdf(
765
+ self,
766
+ path: str | pathlib.Path,
767
+ compression: int = 5,
768
+ overwrite: bool = False,
769
+ include_original_data: bool = True,
770
+ ):
316
771
  """
317
772
  Save the FlowSystem to a NetCDF file.
318
773
  Ensures FlowSystem is connected before saving.
319
774
 
775
+ The FlowSystem's name is automatically set from the filename
776
+ (without extension) when saving.
777
+
320
778
  Args:
321
- path: The path to the netCDF file.
322
- compression: The compression level to use when saving the file.
779
+ path: The path to the netCDF file. Parent directories are created if they don't exist.
780
+ compression: The compression level to use when saving the file (0-9).
781
+ overwrite: If True, overwrite existing file. If False, raise error if file exists.
782
+ include_original_data: Whether to include clustering.original_data in the file.
783
+ Defaults to True. Set to False for smaller files (~38% reduction) when
784
+ clustering.plot.compare() isn't needed after loading.
785
+
786
+ Raises:
787
+ FileExistsError: If overwrite=False and file already exists.
323
788
  """
324
789
  if not self.connected_and_transformed:
325
790
  logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
326
791
  self.connect_and_transform()
327
792
 
328
- super().to_netcdf(path, compression)
329
- logger.info(f'Saved FlowSystem to {path}')
793
+ path = pathlib.Path(path)
794
+
795
+ if not overwrite and path.exists():
796
+ raise FileExistsError(f'File already exists: {path}. Use overwrite=True to overwrite existing file.')
797
+
798
+ path.parent.mkdir(parents=True, exist_ok=True)
799
+
800
+ # Set name from filename (without extension)
801
+ self.name = path.stem
802
+
803
+ try:
804
+ ds = self.to_dataset(include_original_data=include_original_data)
805
+ fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
806
+ logger.info(f'Saved FlowSystem to {path}')
807
+ except Exception as e:
808
+ raise OSError(f'Failed to save FlowSystem to NetCDF file {path}: {e}') from e
809
+
810
+ @classmethod
811
+ def from_netcdf(cls, path: str | pathlib.Path) -> FlowSystem:
812
+ """
813
+ Load a FlowSystem from a NetCDF file.
814
+
815
+ The FlowSystem's name is automatically derived from the filename
816
+ (without extension), overriding any name that may have been stored.
817
+
818
+ Args:
819
+ path: Path to the NetCDF file
820
+
821
+ Returns:
822
+ FlowSystem instance with name set from filename
823
+ """
824
+ path = pathlib.Path(path)
825
+ flow_system = super().from_netcdf(path)
826
+ # Derive name from filename (without extension)
827
+ flow_system.name = path.stem
828
+ return flow_system
829
+
830
+ @classmethod
831
+ def from_old_results(cls, folder: str | pathlib.Path, name: str) -> FlowSystem:
832
+ """
833
+ Load a FlowSystem from old-format Results files (pre-v5 API).
834
+
835
+ This method loads results saved with the deprecated Results API
836
+ (which used multiple files: ``*--flow_system.nc4``, ``*--solution.nc4``)
837
+ and converts them to a FlowSystem with the solution attached.
838
+
839
+ The method performs the following:
840
+
841
+ - Loads the old multi-file format
842
+ - Renames deprecated parameters in the FlowSystem structure
843
+ (e.g., ``on_off_parameters`` → ``status_parameters``)
844
+ - Attaches the solution data to the FlowSystem
845
+
846
+ Args:
847
+ folder: Directory containing the saved result files
848
+ name: Base name of the saved files (without extensions)
849
+
850
+ Returns:
851
+ FlowSystem instance with solution attached
852
+
853
+ Warning:
854
+ This is a best-effort migration for accessing old results:
855
+
856
+ - **Solution variable names are NOT renamed** - only basic variables
857
+ work (flow rates, sizes, charge states, effect totals)
858
+ - Advanced variable access may require using the original names
859
+ - Summary metadata (solver info, timing) is not loaded
860
+
861
+ For full compatibility, re-run optimizations with the new API.
862
+
863
+ Examples:
864
+ ```python
865
+ # Load old results
866
+ fs = FlowSystem.from_old_results('results_folder', 'my_optimization')
867
+
868
+ # Access basic solution data
869
+ fs.solution['Boiler(Q_th)|flow_rate'].plot()
870
+
871
+ # Save in new single-file format
872
+ fs.to_netcdf('my_optimization.nc')
873
+ ```
874
+
875
+ Deprecated:
876
+ This method will be removed in v6.
877
+ """
878
+ warnings.warn(
879
+ f'from_old_results() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
880
+ 'This utility is only for migrating results from flixopt versions before v5.',
881
+ DeprecationWarning,
882
+ stacklevel=2,
883
+ )
884
+ from flixopt.io import load_dataset_from_netcdf
885
+
886
+ folder = pathlib.Path(folder)
887
+ flow_system_path = folder / f'{name}--flow_system.nc4'
888
+ solution_path = folder / f'{name}--solution.nc4'
889
+
890
+ # Load FlowSystem using from_old_dataset (suppress its deprecation warning)
891
+ with warnings.catch_warnings():
892
+ warnings.simplefilter('ignore', DeprecationWarning)
893
+ flow_system = cls.from_old_dataset(flow_system_path)
894
+ flow_system.name = name
895
+
896
+ # Attach solution (convert attrs from dicts to JSON strings for consistency)
897
+ solution = load_dataset_from_netcdf(solution_path)
898
+ for key in ['Components', 'Buses', 'Effects', 'Flows']:
899
+ if key in solution.attrs and isinstance(solution.attrs[key], dict):
900
+ solution.attrs[key] = json.dumps(solution.attrs[key])
901
+ flow_system.solution = solution
902
+
903
+ return flow_system
904
+
905
+ @classmethod
906
+ def from_old_dataset(cls, path: str | pathlib.Path) -> FlowSystem:
907
+ """
908
+ Load a FlowSystem from an old-format dataset file (pre-v5 API).
909
+
910
+ This method loads a FlowSystem saved with older versions of flixopt
911
+ (the ``*--flow_system.nc4`` file) and converts parameter names to the
912
+ current API. Unlike :meth:`from_old_results`, this does not require
913
+ a solution file and returns a FlowSystem without solution data.
914
+
915
+ The method performs the following:
916
+
917
+ - Loads the old netCDF format
918
+ - Renames deprecated parameters in the FlowSystem structure
919
+ (e.g., ``on_off_parameters`` → ``status_parameters``)
920
+
921
+ Args:
922
+ path: Path to the old-format FlowSystem file (typically ``*--flow_system.nc4``)
923
+
924
+ Returns:
925
+ FlowSystem instance without solution
926
+
927
+ Warning:
928
+ This is a best-effort migration for loading old FlowSystem definitions.
929
+ For full compatibility, consider re-saving with the new API after loading.
930
+
931
+ Examples:
932
+ ```python
933
+ # Load old FlowSystem file
934
+ fs = FlowSystem.from_old_dataset('results/my_run--flow_system.nc4')
935
+
936
+ # Modify and optimize with current API
937
+ fs.optimize(solver)
938
+
939
+ # Save in new single-file format
940
+ fs.to_netcdf('my_run.nc')
941
+ ```
942
+
943
+ Deprecated:
944
+ This method will be removed in v6.
945
+ """
946
+ warnings.warn(
947
+ f'from_old_dataset() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
948
+ 'This utility is only for migrating FlowSystems from flixopt versions before v5.',
949
+ DeprecationWarning,
950
+ stacklevel=2,
951
+ )
952
+ from flixopt.io import convert_old_dataset, load_dataset_from_netcdf
953
+
954
+ path = pathlib.Path(path)
955
+
956
+ # Load dataset
957
+ flow_system_data = load_dataset_from_netcdf(path)
958
+
959
+ # Convert to new parameter names and reduce constant dimensions
960
+ flow_system_data = convert_old_dataset(flow_system_data)
961
+
962
+ # Reconstruct FlowSystem
963
+ flow_system = cls.from_dataset(flow_system_data)
964
+ flow_system.name = path.stem.replace('--flow_system', '')
965
+
966
+ # Set previous_flow_rate=0 for flows of components with status_parameters
967
+ # In v4 API, previous_flow_rate=None defaulted to previous_status=0 (off)
968
+ # Now previous_flow_rate=None means relaxed (no constraint at t=0)
969
+ for comp in flow_system.components.values():
970
+ if getattr(comp, 'status_parameters', None) is not None:
971
+ for flow in comp.inputs + comp.outputs:
972
+ if flow.previous_flow_rate is None:
973
+ flow.previous_flow_rate = 0
974
+
975
+ return flow_system
976
+
977
+ def copy(self) -> FlowSystem:
978
+ """Create a copy of the FlowSystem without optimization state.
979
+
980
+ Creates a new FlowSystem with copies of all elements, but without:
981
+ - The solution dataset
982
+ - The optimization model
983
+ - Element submodels and variable/constraint names
984
+
985
+ This is useful for creating variations of a FlowSystem for different
986
+ optimization scenarios without affecting the original.
987
+
988
+ Returns:
989
+ A new FlowSystem instance that can be modified and optimized independently.
990
+
991
+ Examples:
992
+ >>> original = FlowSystem(timesteps)
993
+ >>> original.add_elements(boiler, bus)
994
+ >>> original.optimize(solver) # Original now has solution
995
+ >>>
996
+ >>> # Create a copy to try different parameters
997
+ >>> variant = original.copy() # No solution, can be modified
998
+ >>> variant.add_elements(new_component)
999
+ >>> variant.optimize(solver)
1000
+ """
1001
+ ds = self.to_dataset(include_solution=False)
1002
+ return FlowSystem.from_dataset(ds.copy(deep=True))
1003
+
1004
+ def __copy__(self):
1005
+ """Support for copy.copy()."""
1006
+ return self.copy()
1007
+
1008
+ def __deepcopy__(self, memo):
1009
+ """Support for copy.deepcopy()."""
1010
+ return self.copy()
330
1011
 
331
1012
  def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
332
1013
  """
@@ -362,15 +1043,15 @@ class FlowSystem(Interface):
362
1043
  def fit_to_model_coords(
363
1044
  self,
364
1045
  name: str,
365
- data: TemporalDataUser | PeriodicDataUser | None,
1046
+ data: NumericOrBool | None,
366
1047
  dims: Collection[FlowSystemDimensions] | None = None,
367
- ) -> TemporalData | PeriodicData | None:
1048
+ ) -> xr.DataArray | None:
368
1049
  """
369
1050
  Fit data to model coordinate system (currently time, but extensible).
370
1051
 
371
1052
  Args:
372
1053
  name: Name of the data
373
- data: Data to fit to model coordinates
1054
+ data: Data to fit to model coordinates (accepts any dimensionality including scalars)
374
1055
  dims: Collection of dimension names to use for fitting. If None, all dimensions are used.
375
1056
 
376
1057
  Returns:
@@ -379,7 +1060,7 @@ class FlowSystem(Interface):
379
1060
  if data is None:
380
1061
  return None
381
1062
 
382
- coords = self.coords
1063
+ coords = self.indexes
383
1064
 
384
1065
  if dims is not None:
385
1066
  coords = {k: coords[k] for k in dims if k in coords}
@@ -402,11 +1083,11 @@ class FlowSystem(Interface):
402
1083
  def fit_effects_to_model_coords(
403
1084
  self,
404
1085
  label_prefix: str | None,
405
- effect_values: TemporalEffectsUser | PeriodicEffectsUser | None,
1086
+ effect_values: Effect_TPS | Numeric_TPS | None,
406
1087
  label_suffix: str | None = None,
407
1088
  dims: Collection[FlowSystemDimensions] | None = None,
408
1089
  delimiter: str = '|',
409
- ) -> TemporalEffects | PeriodicEffects | None:
1090
+ ) -> Effect_TPS | None:
410
1091
  """
411
1092
  Transform EffectValues from the user to Internal Datatypes aligned with model coordinates.
412
1093
  """
@@ -425,18 +1106,87 @@ class FlowSystem(Interface):
425
1106
  }
426
1107
 
427
1108
  def connect_and_transform(self):
428
- """Transform data for all elements using the new simplified approach."""
1109
+ """Connect the network and transform all element data to model coordinates.
1110
+
1111
+ This method performs the following steps:
1112
+
1113
+ 1. Connects flows to buses (establishing the network topology)
1114
+ 2. Registers any missing carriers from CONFIG defaults
1115
+ 3. Assigns colors to elements without explicit colors
1116
+ 4. Transforms all element data to xarray DataArrays aligned with
1117
+ FlowSystem coordinates (time, period, scenario)
1118
+ 5. Validates system integrity
1119
+
1120
+ This is called automatically by :meth:`build_model` and :meth:`optimize`.
1121
+
1122
+ Warning:
1123
+ After this method runs, element attributes (e.g., ``flow.size``,
1124
+ ``flow.relative_minimum``) contain transformed xarray DataArrays,
1125
+ not the original input values. If you modify element attributes after
1126
+ transformation, call :meth:`invalidate` to ensure the changes take
1127
+ effect on the next optimization.
1128
+
1129
+ Note:
1130
+ This method is idempotent within a single model lifecycle - calling
1131
+ it multiple times has no effect once ``connected_and_transformed``
1132
+ is True. Use :meth:`invalidate` to reset this flag.
1133
+ """
429
1134
  if self.connected_and_transformed:
430
1135
  logger.debug('FlowSystem already connected and transformed')
431
1136
  return
432
1137
 
433
- self.weights = self.fit_to_model_coords('weights', self.weights, dims=['period', 'scenario'])
434
-
435
1138
  self._connect_network()
436
- for element in list(self.components.values()) + list(self.effects.effects.values()) + list(self.buses.values()):
437
- element.transform_data(self)
1139
+ self._register_missing_carriers()
1140
+ self._assign_element_colors()
1141
+
1142
+ for element in chain(self.components.values(), self.effects.values(), self.buses.values()):
1143
+ element.transform_data()
1144
+
1145
+ # Validate cross-element references immediately after transformation
1146
+ self._validate_system_integrity()
1147
+
438
1148
  self._connected_and_transformed = True
439
1149
 
1150
+ def _register_missing_carriers(self) -> None:
1151
+ """Auto-register carriers from CONFIG for buses that reference unregistered carriers."""
1152
+ for bus in self.buses.values():
1153
+ if not bus.carrier:
1154
+ continue
1155
+ carrier_key = bus.carrier.lower()
1156
+ if carrier_key not in self._carriers:
1157
+ # Try to get from CONFIG defaults (try original case first, then lowercase)
1158
+ default_carrier = getattr(CONFIG.Carriers, bus.carrier, None) or getattr(
1159
+ CONFIG.Carriers, carrier_key, None
1160
+ )
1161
+ if default_carrier is not None:
1162
+ self._carriers[carrier_key] = default_carrier
1163
+ logger.debug(f"Auto-registered carrier '{carrier_key}' from CONFIG")
1164
+
1165
+ def _assign_element_colors(self) -> None:
1166
+ """Auto-assign colors to elements that don't have explicit colors set.
1167
+
1168
+ Components and buses without explicit colors are assigned colors from the
1169
+ default qualitative colorscale. This ensures zero-config color support
1170
+ while still allowing users to override with explicit colors.
1171
+ """
1172
+ from .color_processing import process_colors
1173
+
1174
+ # Collect elements without colors (components only - buses use carrier colors)
1175
+ # Use label_full for consistent keying with ElementContainer
1176
+ elements_without_colors = [comp.label_full for comp in self.components.values() if comp.color is None]
1177
+
1178
+ if not elements_without_colors:
1179
+ return
1180
+
1181
+ # Generate colors from the default colorscale
1182
+ colorscale = CONFIG.Plotting.default_qualitative_colorscale
1183
+ color_mapping = process_colors(colorscale, elements_without_colors)
1184
+
1185
+ # Assign colors to elements
1186
+ for label_full, color in color_mapping.items():
1187
+ self.components[label_full].color = color
1188
+ logger.debug(f"Auto-assigned color '{color}' to component '{label_full}'")
1189
+
440
1190
  def add_elements(self, *elements: Element) -> None:
441
1191
  """
442
1192
  Add Components(Storages, Boilers, Heatpumps, ...), Buses or Effects to the FlowSystem
@@ -444,155 +1194,645 @@ class FlowSystem(Interface):
444
1194
  Args:
445
1195
  *elements: childs of Element like Boiler, HeatPump, Bus,...
446
1196
  modeling Elements
1197
+
1198
+ Raises:
1199
+ RuntimeError: If the FlowSystem is locked (has a solution).
1200
+ Call `reset()` to unlock it first.
447
1201
  """
448
- if self.connected_and_transformed:
1202
+ if self.is_locked:
1203
+ raise RuntimeError(
1204
+ 'Cannot add elements to a FlowSystem that has a solution. '
1205
+ 'Call `reset()` first to clear the solution and allow modifications.'
1206
+ )
1207
+
1208
+ if self.model is not None:
449
1209
  warnings.warn(
450
- 'You are adding elements to an already connected FlowSystem. This is not recommended (But it works).',
1210
+ 'Adding elements to a FlowSystem with an existing model. The model will be invalidated.',
451
1211
  stacklevel=2,
452
1212
  )
453
- self._connected_and_transformed = False
1213
+ # Always invalidate when adding elements to ensure new elements get transformed
1214
+ if self.model is not None or self._connected_and_transformed:
1215
+ self._invalidate_model()
1216
+
454
1217
  for new_element in list(elements):
1218
+ # Validate element type first
1219
+ if not isinstance(new_element, (Component, Effect, Bus)):
1220
+ raise TypeError(
1221
+ f'Tried to add incompatible object to FlowSystem: {type(new_element)=}: {new_element=} '
1222
+ )
1223
+
1224
+ # Common validations for all element types (before any state changes)
1225
+ self._check_if_element_already_assigned(new_element)
1226
+ self._check_if_element_is_unique(new_element)
1227
+
1228
+ # Dispatch to type-specific handlers
455
1229
  if isinstance(new_element, Component):
456
1230
  self._add_components(new_element)
457
1231
  elif isinstance(new_element, Effect):
458
1232
  self._add_effects(new_element)
459
1233
  elif isinstance(new_element, Bus):
460
1234
  self._add_buses(new_element)
461
- else:
462
- raise TypeError(
463
- f'Tried to add incompatible object to FlowSystem: {type(new_element)=}: {new_element=} '
464
- )
465
1235
 
466
- def create_model(self, normalize_weights: bool = True) -> FlowSystemModel:
467
- """
468
- Create a linopy model from the FlowSystem.
1236
+ # Log registration
1237
+ element_type = type(new_element).__name__
1238
+ logger.info(f'Registered new {element_type}: {new_element.label_full}')
469
1239
 
470
- Args:
471
- normalize_weights: Whether to automatically normalize the weights (periods and scenarios) to sum up to 1 when solving.
1240
+ def add_carriers(self, *carriers: Carrier) -> None:
1241
+ """Register a custom carrier for this FlowSystem.
1242
+
1243
+ Custom carriers registered on the FlowSystem take precedence over
1244
+ CONFIG.Carriers defaults when resolving colors and units for buses.
1245
+
1246
+ Args:
1247
+ carriers: Carrier objects defining the carrier properties.
1248
+
1249
+ Raises:
1250
+ RuntimeError: If the FlowSystem is locked (has a solution).
1251
+ Call `reset()` to unlock it first.
1252
+
1253
+ Examples:
1254
+ ```python
1255
+ import flixopt as fx
1256
+
1257
+ fs = fx.FlowSystem(timesteps)
1258
+
1259
+ # Define and register custom carriers
1260
+ biogas = fx.Carrier('biogas', '#228B22', 'kW', 'Biogas fuel')
1261
+ fs.add_carriers(biogas)
1262
+
1263
+ # Now buses can reference this carrier by name
1264
+ bus = fx.Bus('BioGasNetwork', carrier='biogas')
1265
+ fs.add_elements(bus)
1266
+
1267
+ # The carrier color will be used in plots automatically
1268
+ ```
1269
+ """
1270
+ if self.is_locked:
1271
+ raise RuntimeError(
1272
+ 'Cannot add carriers to a FlowSystem that has a solution. '
1273
+ 'Call `reset()` first to clear the solution and allow modifications.'
1274
+ )
1275
+
1276
+ if self.model is not None:
1277
+ warnings.warn(
1278
+ 'Adding carriers to a FlowSystem with an existing model. The model will be invalidated.',
1279
+ stacklevel=2,
1280
+ )
1281
+ # Always invalidate when adding carriers to ensure proper re-transformation
1282
+ if self.model is not None or self._connected_and_transformed:
1283
+ self._invalidate_model()
1284
+
1285
+ for carrier in list(carriers):
1286
+ if not isinstance(carrier, Carrier):
1287
+ raise TypeError(f'Expected Carrier object, got {type(carrier)}')
1288
+ self._carriers.add(carrier)
1289
+ logger.debug(f'Adding carrier {carrier} to FlowSystem')
1290
+
1291
+ def get_carrier(self, label: str) -> Carrier | None:
1292
+ """Get the carrier for a bus or flow.
1293
+
1294
+ Args:
1295
+ label: Bus label (e.g., 'Fernwärme') or flow label (e.g., 'Boiler(Q_th)').
1296
+
1297
+ Returns:
1298
+ Carrier or None if not found.
1299
+
1300
+ Note:
1301
+ To access a carrier directly by name, use ``flow_system.carriers['electricity']``.
1302
+
1303
+ Raises:
1304
+ RuntimeError: If FlowSystem is not connected_and_transformed.
1305
+ """
1306
+ if not self.connected_and_transformed:
1307
+ raise RuntimeError(
1308
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
1309
+ )
1310
+
1311
+ # Try as bus label
1312
+ bus = self.buses.get(label)
1313
+ if bus and bus.carrier:
1314
+ return self._carriers.get(bus.carrier.lower())
1315
+
1316
+ # Try as flow label
1317
+ flow = self.flows.get(label)
1318
+ if flow and flow.bus:
1319
+ bus = self.buses.get(flow.bus)
1320
+ if bus and bus.carrier:
1321
+ return self._carriers.get(bus.carrier.lower())
1322
+
1323
+ return None
1324
+
1325
+ @property
1326
+ def carriers(self) -> CarrierContainer:
1327
+ """Carriers registered on this FlowSystem."""
1328
+ return self._carriers
1329
+
1330
+ @property
1331
+ def flow_carriers(self) -> dict[str, str]:
1332
+ """Cached mapping of flow labels to carrier names.
1333
+
1334
+ Returns:
1335
+ Dict mapping flow label to carrier name (lowercase).
1336
+ Flows without a carrier are not included.
1337
+
1338
+ Raises:
1339
+ RuntimeError: If FlowSystem is not connected_and_transformed.
472
1340
  """
473
1341
  if not self.connected_and_transformed:
474
1342
  raise RuntimeError(
475
1343
  'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
476
1344
  )
477
- self.model = FlowSystemModel(self, normalize_weights)
1345
+
1346
+ if self._flow_carriers is None:
1347
+ self._flow_carriers = {}
1348
+ for flow_label, flow in self.flows.items():
1349
+ bus = self.buses.get(flow.bus)
1350
+ if bus and bus.carrier:
1351
+ self._flow_carriers[flow_label] = bus.carrier.lower()
1352
+
1353
+ return self._flow_carriers
1354
+
1355
+ def create_model(self, normalize_weights: bool | None = None) -> FlowSystemModel:
1356
+ """
1357
+ Create a linopy model from the FlowSystem.
1358
+
1359
+ Args:
1360
+ normalize_weights: Deprecated. Scenario weights are now always normalized in FlowSystem.
1361
+ """
1362
+ if normalize_weights is not None:
1363
+ warnings.warn(
1364
+ f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
1365
+ 'Scenario weights are now always normalized when set on FlowSystem.\n',
1366
+ DeprecationWarning,
1367
+ stacklevel=2,
1368
+ )
1369
+ if not self.connected_and_transformed:
1370
+ raise RuntimeError(
1371
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
1372
+ )
1373
+ # System integrity was already validated in connect_and_transform()
1374
+ self.model = FlowSystemModel(self)
478
1375
  return self.model
479
1376
 
480
- def plot_network(
481
- self,
482
- path: bool | str | pathlib.Path = 'flow_system.html',
483
- controls: bool
484
- | list[
485
- Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
486
- ] = True,
487
- show: bool = False,
488
- ) -> pyvis.network.Network | None:
1377
+ def build_model(self, normalize_weights: bool | None = None) -> FlowSystem:
489
1378
  """
490
- Visualizes the network structure of a FlowSystem using PyVis, saving it as an interactive HTML file.
1379
+ Build the optimization model for this FlowSystem.
1380
+
1381
+ This method prepares the FlowSystem for optimization by:
1382
+ 1. Connecting and transforming all elements (if not already done)
1383
+ 2. Creating the FlowSystemModel with all variables and constraints
1384
+ 3. Adding clustering constraints (if this is a clustered FlowSystem)
1385
+ 4. Adding typical periods modeling (if this is a reduced FlowSystem)
1386
+
1387
+ After calling this method, `self.model` will be available for inspection
1388
+ before solving.
491
1389
 
492
1390
  Args:
493
- path: Path to save the HTML visualization.
494
- - `False`: Visualization is created but not saved.
495
- - `str` or `Path`: Specifies file path (default: 'flow_system.html').
496
- controls: UI controls to add to the visualization.
497
- - `True`: Enables all available controls.
498
- - `List`: Specify controls, e.g., ['nodes', 'layout'].
499
- - Options: 'nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer'.
500
- show: Whether to open the visualization in the web browser.
1391
+ normalize_weights: Deprecated. Scenario weights are now always normalized in FlowSystem.
501
1392
 
502
1393
  Returns:
503
- - 'pyvis.network.Network' | None: The `Network` instance representing the visualization, or `None` if `pyvis` is not installed.
1394
+ Self, for method chaining.
504
1395
 
505
1396
  Examples:
506
- >>> flow_system.plot_network()
507
- >>> flow_system.plot_network(show=False)
508
- >>> flow_system.plot_network(path='output/custom_network.html', controls=['nodes', 'layout'])
1397
+ >>> flow_system.build_model()
1398
+ >>> print(flow_system.model.variables) # Inspect variables before solving
1399
+ >>> flow_system.solve(solver)
1400
+ """
1401
+ if normalize_weights is not None:
1402
+ warnings.warn(
1403
+ f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
1404
+ 'Scenario weights are now always normalized when set on FlowSystem.\n',
1405
+ DeprecationWarning,
1406
+ stacklevel=2,
1407
+ )
1408
+ self.connect_and_transform()
1409
+ self.create_model()
1410
+
1411
+ self.model.do_modeling()
509
1412
 
510
- Notes:
511
- - This function requires `pyvis`. If not installed, the function prints a warning and returns `None`.
512
- - Nodes are styled based on type (e.g., circles for buses, boxes for components) and annotated with node information.
1413
+ return self
1414
+
1415
+ def solve(self, solver: _Solver) -> FlowSystem:
513
1416
  """
514
- from . import plotting
1417
+ Solve the optimization model and populate the solution.
1418
+
1419
+ This method solves the previously built model using the specified solver.
1420
+ After solving, `self.solution` will contain the optimization results,
1421
+ and each element's `.solution` property will provide access to its
1422
+ specific variables.
1423
+
1424
+ Args:
1425
+ solver: The solver to use (e.g., HighsSolver, GurobiSolver).
1426
+
1427
+ Returns:
1428
+ Self, for method chaining.
515
1429
 
516
- node_infos, edge_infos = self.network_infos()
517
- return plotting.plot_network(node_infos, edge_infos, path, controls, show)
1430
+ Raises:
1431
+ RuntimeError: If the model has not been built yet (call build_model first).
1432
+ RuntimeError: If the model is infeasible.
518
1433
 
519
- def start_network_app(self):
520
- """Visualizes the network structure of a FlowSystem using Dash, Cytoscape, and networkx.
521
- Requires optional dependencies: dash, dash-cytoscape, dash-daq, networkx, flask, werkzeug.
1434
+ Examples:
1435
+ >>> flow_system.build_model()
1436
+ >>> flow_system.solve(HighsSolver())
1437
+ >>> print(flow_system.solution)
522
1438
  """
523
- from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR, flow_graph, shownetwork
1439
+ if self.model is None:
1440
+ raise RuntimeError('Model has not been built. Call build_model() first.')
524
1441
 
525
- warnings.warn(
526
- 'The network visualization is still experimental and might change in the future.',
527
- stacklevel=2,
528
- category=UserWarning,
1442
+ self.model.solve(
1443
+ solver_name=solver.name,
1444
+ progress=CONFIG.Solving.log_to_console,
1445
+ **solver.options,
529
1446
  )
530
1447
 
531
- if not DASH_CYTOSCAPE_AVAILABLE:
532
- raise ImportError(
533
- f'Network visualization requires optional dependencies. '
534
- f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
535
- f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
536
- f'Original error: {VISUALIZATION_ERROR}'
537
- )
1448
+ if self.model.termination_condition in ('infeasible', 'infeasible_or_unbounded'):
1449
+ if CONFIG.Solving.compute_infeasibilities:
1450
+ import io
1451
+ from contextlib import redirect_stdout
1452
+
1453
+ f = io.StringIO()
1454
+
1455
+ # Redirect stdout to our buffer
1456
+ with redirect_stdout(f):
1457
+ self.model.print_infeasibilities()
1458
+
1459
+ infeasibilities = f.getvalue()
1460
+ logger.error('Successfully extracted infeasibilities: \n%s', infeasibilities)
1461
+ raise RuntimeError(f'Model was infeasible. Status: {self.model.status}. Check your constraints and bounds.')
1462
+
1463
+ # Store solution on FlowSystem for direct Element access
1464
+ self.solution = self.model.solution
1465
+
1466
+ # Copy variable categories for segment expansion handling
1467
+ self._variable_categories = self.model.variable_categories.copy()
1468
+
1469
+ logger.info(f'Optimization solved successfully. Objective: {self.model.objective.value:.4f}')
538
1470
 
539
- if not self._connected_and_transformed:
540
- self._connect_network()
1471
+ return self
541
1472
 
542
- if self._network_app is not None:
543
- logger.warning('The network app is already running. Restarting it.')
544
- self.stop_network_app()
1473
+ @property
1474
+ def solution(self) -> xr.Dataset | None:
1475
+ """
1476
+ Access the optimization solution as an xarray Dataset.
545
1477
 
546
- self._network_app = shownetwork(flow_graph(self))
1478
+ The solution is indexed by ``timesteps_extra`` (the original timesteps plus
1479
+ one additional timestep at the end). Variables that do not have data for the
1480
+ extra timestep (most variables except storage charge states) will contain
1481
+ NaN values at the final timestep.
547
1482
 
548
- def stop_network_app(self):
549
- """Stop the network visualization server."""
550
- from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR
1483
+ Returns:
1484
+ xr.Dataset: The solution dataset with all optimization variable results,
1485
+ or None if the model hasn't been solved yet.
551
1486
 
552
- if not DASH_CYTOSCAPE_AVAILABLE:
553
- raise ImportError(
554
- f'Network visualization requires optional dependencies. '
555
- f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
556
- f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
557
- f'Original error: {VISUALIZATION_ERROR}'
1487
+ Example:
1488
+ >>> flow_system.optimize(solver)
1489
+ >>> flow_system.solution.isel(time=slice(None, -1)) # Exclude trailing NaN (and final charge states)
1490
+ """
1491
+ return self._solution
1492
+
1493
+ @solution.setter
1494
+ def solution(self, value: xr.Dataset | None) -> None:
1495
+ """Set the solution dataset and invalidate statistics cache."""
1496
+ self._solution = value
1497
+ self._statistics = None # Invalidate cached statistics
1498
+
1499
+ @property
1500
+ def variable_categories(self) -> dict[str, VariableCategory]:
1501
+ """Variable categories for filtering and segment expansion.
1502
+
1503
+ Returns:
1504
+ Dict mapping variable names to their VariableCategory.
1505
+ """
1506
+ return self._variable_categories
1507
+
1508
+ def get_variables_by_category(self, *categories: VariableCategory, from_solution: bool = True) -> list[str]:
1509
+ """Get variable names matching any of the specified categories.
1510
+
1511
+ Args:
1512
+ *categories: One or more VariableCategory values to filter by.
1513
+ from_solution: If True, only return variables present in solution.
1514
+ If False, return all registered variables matching categories.
1515
+
1516
+ Returns:
1517
+ List of variable names matching any of the specified categories.
1518
+
1519
+ Example:
1520
+ >>> fs.get_variables_by_category(VariableCategory.FLOW_RATE)
1521
+ ['Boiler(Q_th)|flow_rate', 'CHP(Q_th)|flow_rate', ...]
1522
+ >>> fs.get_variables_by_category(VariableCategory.SIZE, VariableCategory.INVESTED)
1523
+ ['Boiler(Q_th)|size', 'Boiler(Q_th)|invested', ...]
1524
+ """
1525
+ category_set = set(categories)
1526
+
1527
+ if self._variable_categories:
1528
+ # Use registered categories
1529
+ matching = [name for name, cat in self._variable_categories.items() if cat in category_set]
1530
+ elif self._solution is not None:
1531
+ # Fallback for old files without categories: match by suffix pattern
1532
+ # Category values match the variable suffix (e.g., FLOW_RATE.value = 'flow_rate')
1533
+ matching = []
1534
+ for cat in category_set:
1535
+ # Handle new sub-categories that map to old |size suffix
1536
+ if cat == VariableCategory.FLOW_SIZE:
1537
+ flow_labels = set(self.flows.keys())
1538
+ matching.extend(
1539
+ v
1540
+ for v in self._solution.data_vars
1541
+ if v.endswith('|size') and v.rsplit('|', 1)[0] in flow_labels
1542
+ )
1543
+ elif cat == VariableCategory.STORAGE_SIZE:
1544
+ storage_labels = set(self.storages.keys())
1545
+ matching.extend(
1546
+ v
1547
+ for v in self._solution.data_vars
1548
+ if v.endswith('|size') and v.rsplit('|', 1)[0] in storage_labels
1549
+ )
1550
+ else:
1551
+ # Standard suffix matching
1552
+ suffix = f'|{cat.value}'
1553
+ matching.extend(v for v in self._solution.data_vars if v.endswith(suffix))
1554
+ else:
1555
+ matching = []
1556
+
1557
+ if from_solution and self._solution is not None:
1558
+ solution_vars = set(self._solution.data_vars)
1559
+ matching = [v for v in matching if v in solution_vars]
1560
+ return matching
1561
+
1562
+ @property
1563
+ def is_locked(self) -> bool:
1564
+ """Check if the FlowSystem is locked (has a solution).
1565
+
1566
+ A locked FlowSystem cannot be modified. Use `reset()` to unlock it.
1567
+ """
1568
+ return self._solution is not None
1569
+
1570
+ def _invalidate_model(self) -> None:
1571
+ """Invalidate the model and element submodels when structure changes.
1572
+
1573
+ This clears the model, resets the ``connected_and_transformed`` flag,
1574
+ clears all element submodels and variable/constraint names, and invalidates
1575
+ the topology accessor cache.
1576
+
1577
+ Called internally by :meth:`add_elements`, :meth:`add_carriers`,
1578
+ :meth:`reset`, and :meth:`invalidate`.
1579
+
1580
+ See Also:
1581
+ :meth:`invalidate`: Public method for manual invalidation.
1582
+ :meth:`reset`: Clears solution and invalidates (for locked FlowSystems).
1583
+ """
1584
+ self.model = None
1585
+ self._connected_and_transformed = False
1586
+ self._topology = None # Invalidate topology accessor (and its cached colors)
1587
+ self._flow_carriers = None # Invalidate flow-to-carrier mapping
1588
+ self._variable_categories.clear() # Clear stale categories for segment expansion
1589
+ for element in self.values():
1590
+ element.submodel = None
1591
+ element._variable_names = []
1592
+ element._constraint_names = []
1593
+
1594
+ def reset(self) -> FlowSystem:
1595
+ """Clear optimization state to allow modifications.
1596
+
1597
+ This method unlocks the FlowSystem by clearing:
1598
+ - The solution dataset
1599
+ - The optimization model
1600
+ - All element submodels and variable/constraint names
1601
+ - The connected_and_transformed flag
1602
+
1603
+ After calling reset(), the FlowSystem can be modified again
1604
+ (e.g., adding elements or carriers).
1605
+
1606
+ Returns:
1607
+ Self, for method chaining.
1608
+
1609
+ Examples:
1610
+ >>> flow_system.optimize(solver) # FlowSystem is now locked
1611
+ >>> flow_system.add_elements(new_bus) # Raises RuntimeError
1612
+ >>> flow_system.reset() # Unlock the FlowSystem
1613
+ >>> flow_system.add_elements(new_bus) # Now works
1614
+ """
1615
+ self.solution = None # Also clears _statistics via setter
1616
+ self._invalidate_model()
1617
+ return self
1618
+
1619
+ def invalidate(self) -> FlowSystem:
1620
+ """Invalidate the model to allow re-transformation after modifying elements.
1621
+
1622
+ Call this after modifying existing element attributes (e.g., ``flow.size``,
1623
+ ``flow.relative_minimum``) to ensure changes take effect on the next
1624
+ optimization. The next call to :meth:`optimize` or :meth:`build_model`
1625
+ will re-run :meth:`connect_and_transform`.
1626
+
1627
+ Note:
1628
+ Adding new elements via :meth:`add_elements` automatically invalidates
1629
+ the model. This method is only needed when modifying attributes of
1630
+ elements that are already part of the FlowSystem.
1631
+
1632
+ Returns:
1633
+ Self, for method chaining.
1634
+
1635
+ Raises:
1636
+ RuntimeError: If the FlowSystem has a solution. Call :meth:`reset`
1637
+ first to clear the solution.
1638
+
1639
+ Examples:
1640
+ Modify a flow's size and re-optimize:
1641
+
1642
+ >>> flow_system.optimize(solver)
1643
+ >>> flow_system.reset() # Clear solution first
1644
+ >>> flow_system.components['Boiler'].inputs[0].size = 200
1645
+ >>> flow_system.invalidate()
1646
+ >>> flow_system.optimize(solver) # Re-runs connect_and_transform
1647
+
1648
+ Modify before first optimization:
1649
+
1650
+ >>> flow_system.connect_and_transform()
1651
+ >>> # Oops, need to change something
1652
+ >>> flow_system.components['Boiler'].inputs[0].size = 200
1653
+ >>> flow_system.invalidate()
1654
+ >>> flow_system.optimize(solver) # Changes take effect
1655
+ """
1656
+ if self.is_locked:
1657
+ raise RuntimeError(
1658
+ 'Cannot invalidate a FlowSystem with a solution. Call `reset()` first to clear the solution.'
558
1659
  )
1660
+ self._invalidate_model()
1661
+ return self
559
1662
 
560
- if self._network_app is None:
561
- logger.warning("No network app is currently running. Can't stop it")
562
- return
1663
+ @property
1664
+ def optimize(self) -> OptimizeAccessor:
1665
+ """
1666
+ Access optimization methods for this FlowSystem.
563
1667
 
564
- try:
565
- logger.info('Stopping network visualization server...')
566
- self._network_app.server_instance.shutdown()
567
- logger.info('Network visualization stopped.')
568
- except Exception as e:
569
- logger.error(f'Failed to stop the network visualization app: {e}')
570
- finally:
571
- self._network_app = None
1668
+ This property returns an OptimizeAccessor that can be called directly
1669
+ for standard optimization, or used to access specialized optimization modes.
572
1670
 
573
- def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
574
- if not self.connected_and_transformed:
575
- self.connect_and_transform()
576
- nodes = {
577
- node.label_full: {
578
- 'label': node.label,
579
- 'class': 'Bus' if isinstance(node, Bus) else 'Component',
580
- 'infos': node.__str__(),
581
- }
582
- for node in list(self.components.values()) + list(self.buses.values())
583
- }
1671
+ Returns:
1672
+ An OptimizeAccessor instance.
584
1673
 
585
- edges = {
586
- flow.label_full: {
587
- 'label': flow.label,
588
- 'start': flow.bus if flow.is_input_in_component else flow.component,
589
- 'end': flow.component if flow.is_input_in_component else flow.bus,
590
- 'infos': flow.__str__(),
591
- }
592
- for flow in self.flows.values()
593
- }
1674
+ Examples:
1675
+ Standard optimization (call directly):
1676
+
1677
+ >>> flow_system.optimize(HighsSolver())
1678
+ >>> print(flow_system.solution['Boiler(Q_th)|flow_rate'])
1679
+
1680
+ Access element solutions directly:
1681
+
1682
+ >>> flow_system.optimize(solver)
1683
+ >>> boiler = flow_system.components['Boiler']
1684
+ >>> print(boiler.solution)
1685
+
1686
+ Future specialized modes:
1687
+
1688
+ >>> flow_system.optimize.clustered(solver, aggregation=params)
1689
+ >>> flow_system.optimize.mga(solver, alternatives=5)
1690
+ """
1691
+ return OptimizeAccessor(self)
1692
+
1693
+ @property
1694
+ def transform(self) -> TransformAccessor:
1695
+ """
1696
+ Access transformation methods for this FlowSystem.
1697
+
1698
+ This property returns a TransformAccessor that provides methods to create
1699
+ transformed versions of this FlowSystem (e.g., clustered for time aggregation).
1700
+
1701
+ Returns:
1702
+ A TransformAccessor instance.
1703
+
1704
+ Examples:
1705
+ Clustered optimization:
1706
+
1707
+ >>> params = ClusteringParameters(hours_per_period=24, nr_of_periods=8)
1708
+ >>> clustered_fs = flow_system.transform.cluster(params)
1709
+ >>> clustered_fs.optimize(solver)
1710
+ >>> print(clustered_fs.solution)
1711
+ """
1712
+ return TransformAccessor(self)
1713
+
1714
+ @property
1715
+ def statistics(self) -> StatisticsAccessor:
1716
+ """
1717
+ Access statistics and plotting methods for optimization results.
1718
+
1719
+ This property returns a StatisticsAccessor that provides methods to analyze
1720
+ and visualize optimization results stored in this FlowSystem's solution.
1721
+
1722
+ Note:
1723
+ The FlowSystem must have a solution (from optimize() or solve()) before
1724
+ most statistics methods can be used.
1725
+
1726
+ Returns:
1727
+ A cached StatisticsAccessor instance.
1728
+
1729
+ Examples:
1730
+ After optimization:
1731
+
1732
+ >>> flow_system.optimize(solver)
1733
+ >>> flow_system.statistics.plot.balance('ElectricityBus')
1734
+ >>> flow_system.statistics.plot.heatmap('Boiler|on')
1735
+ >>> ds = flow_system.statistics.flow_rates # Get data for analysis
1736
+ """
1737
+ if self._statistics is None:
1738
+ self._statistics = StatisticsAccessor(self)
1739
+ return self._statistics
594
1740
 
595
- return nodes, edges
1741
+ @property
1742
+ def topology(self) -> TopologyAccessor:
1743
+ """
1744
+ Access network topology inspection and visualization methods.
1745
+
1746
+ This property returns a cached TopologyAccessor that provides methods to inspect
1747
+ the network structure and visualize it. The accessor is invalidated when the
1748
+ FlowSystem structure changes (via reset() or invalidate()).
1749
+
1750
+ Returns:
1751
+ A cached TopologyAccessor instance.
1752
+
1753
+ Examples:
1754
+ Visualize the network:
1755
+
1756
+ >>> flow_system.topology.plot()
1757
+ >>> flow_system.topology.plot(path='my_network.html', show=True)
1758
+
1759
+ Interactive visualization:
1760
+
1761
+ >>> flow_system.topology.start_app()
1762
+ >>> # ... interact with the visualization ...
1763
+ >>> flow_system.topology.stop_app()
1764
+
1765
+ Get network structure info:
1766
+
1767
+ >>> nodes, edges = flow_system.topology.infos()
1768
+ """
1769
+ if self._topology is None:
1770
+ self._topology = TopologyAccessor(self)
1771
+ return self._topology
1772
+
1773
+ def plot_network(
1774
+ self,
1775
+ path: bool | str | pathlib.Path = 'flow_system.html',
1776
+ controls: bool
1777
+ | list[
1778
+ Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
1779
+ ] = True,
1780
+ show: bool | None = None,
1781
+ ) -> pyvis.network.Network | None:
1782
+ """
1783
+ Deprecated: Use `flow_system.topology.plot()` instead.
1784
+
1785
+ Visualizes the network structure of a FlowSystem using PyVis.
1786
+ """
1787
+ warnings.warn(
1788
+ f'plot_network() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
1789
+ 'Use flow_system.topology.plot() instead.',
1790
+ DeprecationWarning,
1791
+ stacklevel=2,
1792
+ )
1793
+ return self.topology.plot_legacy(path=path, controls=controls, show=show)
1794
+
1795
+ def start_network_app(self) -> None:
1796
+ """
1797
+ Deprecated: Use `flow_system.topology.start_app()` instead.
1798
+
1799
+ Visualizes the network structure using Dash and Cytoscape.
1800
+ """
1801
+ warnings.warn(
1802
+ f'start_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
1803
+ 'Use flow_system.topology.start_app() instead.',
1804
+ DeprecationWarning,
1805
+ stacklevel=2,
1806
+ )
1807
+ self.topology.start_app()
1808
+
1809
+ def stop_network_app(self) -> None:
1810
+ """
1811
+ Deprecated: Use `flow_system.topology.stop_app()` instead.
1812
+
1813
+ Stop the network visualization server.
1814
+ """
1815
+ warnings.warn(
1816
+ f'stop_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
1817
+ 'Use flow_system.topology.stop_app() instead.',
1818
+ DeprecationWarning,
1819
+ stacklevel=2,
1820
+ )
1821
+ self.topology.stop_app()
1822
+
1823
+ def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
1824
+ """
1825
+ Deprecated: Use `flow_system.topology.infos()` instead.
1826
+
1827
+ Get network topology information as dictionaries.
1828
+ """
1829
+ warnings.warn(
1830
+ f'network_infos() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
1831
+ 'Use flow_system.topology.infos() instead.',
1832
+ DeprecationWarning,
1833
+ stacklevel=2,
1834
+ )
1835
+ return self.topology.infos()
596
1836
 
597
1837
  def _check_if_element_is_unique(self, element: Element) -> None:
598
1838
  """
@@ -601,26 +1841,73 @@ class FlowSystem(Interface):
601
1841
  Args:
602
1842
  element: new element to check
603
1843
  """
604
- if element in self.all_elements.values():
605
- raise ValueError(f'Element {element.label_full} already added to FlowSystem!')
606
1844
  # check if name is already used:
607
- if element.label_full in self.all_elements:
1845
+ if element.label_full in self:
608
1846
  raise ValueError(f'Label of Element {element.label_full} already used in another element!')
609
1847
 
1848
+ def _check_if_element_already_assigned(self, element: Element) -> None:
1849
+ """
1850
+ Check if element already belongs to another FlowSystem.
1851
+
1852
+ Args:
1853
+ element: Element to check
1854
+
1855
+ Raises:
1856
+ ValueError: If element is already assigned to a different FlowSystem
1857
+ """
1858
+ if element._flow_system is not None and element._flow_system is not self:
1859
+ raise ValueError(
1860
+ f'Element "{element.label_full}" is already assigned to another FlowSystem. '
1861
+ f'Each element can only belong to one FlowSystem at a time. '
1862
+ f'To use this element in multiple systems, create a copy: '
1863
+ f'flow_system.add_elements(element.copy())'
1864
+ )
1865
+
1866
+ def _validate_system_integrity(self) -> None:
1867
+ """
1868
+ Validate cross-element references to ensure system consistency.
1869
+
1870
+ This performs system-level validation that requires knowledge of multiple elements:
1871
+ - Validates that all Flow.bus references point to existing buses
1872
+ - Can be extended for other cross-element validations
1873
+
1874
+ Should be called after connect_and_transform and before create_model.
1875
+
1876
+ Raises:
1877
+ ValueError: If any cross-element reference is invalid
1878
+ """
1879
+ # Validate bus references in flows
1880
+ for flow in self.flows.values():
1881
+ if flow.bus not in self.buses:
1882
+ available_buses = list(self.buses.keys())
1883
+ raise ValueError(
1884
+ f'Flow "{flow.label_full}" references bus "{flow.bus}" which does not exist in FlowSystem. '
1885
+ f'Available buses: {available_buses}. '
1886
+ f'Did you forget to add the bus using flow_system.add_elements(Bus("{flow.bus}"))?'
1887
+ )
1888
+
610
1889
  def _add_effects(self, *args: Effect) -> None:
1890
+ for effect in args:
1891
+ effect.link_to_flow_system(self) # Link element to FlowSystem
611
1892
  self.effects.add_effects(*args)
612
1893
 
613
1894
  def _add_components(self, *components: Component) -> None:
614
1895
  for new_component in list(components):
615
- logger.info(f'Registered new Component: {new_component.label_full}')
616
- self._check_if_element_is_unique(new_component) # check if already exists:
617
- self.components[new_component.label_full] = new_component # Add to existing components
1896
+ new_component.link_to_flow_system(self) # Link element to FlowSystem
1897
+ self.components.add(new_component) # Add to existing components
1898
+ # Invalidate cache once after all additions
1899
+ if components:
1900
+ self._flows_cache = None
1901
+ self._storages_cache = None
618
1902
 
619
1903
  def _add_buses(self, *buses: Bus):
620
1904
  for new_bus in list(buses):
621
- logger.info(f'Registered new Bus: {new_bus.label_full}')
622
- self._check_if_element_is_unique(new_bus) # check if already exists:
623
- self.buses[new_bus.label_full] = new_bus # Add to existing components
1905
+ new_bus.link_to_flow_system(self) # Link element to FlowSystem
1906
+ self.buses.add(new_bus) # Add to existing buses
1907
+ # Invalidate cache once after all additions
1908
+ if buses:
1909
+ self._flows_cache = None
1910
+ self._storages_cache = None
624
1911
 
625
1912
  def _connect_network(self):
626
1913
  """Connects the network of components and buses. Can be rerun without changes if no elements were added"""
@@ -629,17 +1916,6 @@ class FlowSystem(Interface):
629
1916
  flow.component = component.label_full
630
1917
  flow.is_input_in_component = True if flow in component.inputs else False
631
1918
 
632
- # Add Bus if not already added (deprecated)
633
- if flow._bus_object is not None and flow._bus_object not in self.buses.values():
634
- warnings.warn(
635
- f'The Bus {flow._bus_object.label_full} was added to the FlowSystem from {flow.label_full}.'
636
- f'This is deprecated and will be removed in the future. '
637
- f'Please pass the Bus.label to the Flow and the Bus to the FlowSystem instead.',
638
- DeprecationWarning,
639
- stacklevel=1,
640
- )
641
- self._add_buses(flow._bus_object)
642
-
643
1919
  # Connect Buses
644
1920
  bus = self.buses.get(flow.bus)
645
1921
  if bus is None:
@@ -651,68 +1927,58 @@ class FlowSystem(Interface):
651
1927
  bus.outputs.append(flow)
652
1928
  elif not flow.is_input_in_component and flow not in bus.inputs:
653
1929
  bus.inputs.append(flow)
1930
+
1931
+ # Count flows manually to avoid triggering cache rebuild
1932
+ flow_count = sum(len(c.inputs) + len(c.outputs) for c in self.components.values())
654
1933
  logger.debug(
655
1934
  f'Connected {len(self.buses)} Buses and {len(self.components)} '
656
- f'via {len(self.flows)} Flows inside the FlowSystem.'
1935
+ f'via {flow_count} Flows inside the FlowSystem.'
657
1936
  )
658
1937
 
659
1938
  def __repr__(self) -> str:
660
- """Compact representation for debugging."""
661
- status = '' if self.connected_and_transformed else ''
662
-
663
- # Build dimension info
664
- dims = f'{len(self.timesteps)} timesteps [{self.timesteps[0].strftime("%Y-%m-%d")} to {self.timesteps[-1].strftime("%Y-%m-%d")}]'
665
- if self.periods is not None:
666
- dims += f', {len(self.periods)} periods'
667
- if self.scenarios is not None:
668
- dims += f', {len(self.scenarios)} scenarios'
669
-
670
- return f'FlowSystem({dims}, {len(self.components)} Components, {len(self.buses)} Buses, {len(self.effects)} Effects, {status})'
671
-
672
- def __str__(self) -> str:
673
- """Structured summary for users."""
674
-
675
- def format_elements(element_names: list, label: str, alignment: int = 12):
676
- name_list = ', '.join(element_names[:3])
677
- if len(element_names) > 3:
678
- name_list += f' ... (+{len(element_names) - 3} more)'
1939
+ """Return a detailed string representation showing all containers."""
1940
+ r = fx_io.format_title_with_underline('FlowSystem', '=')
679
1941
 
680
- suffix = f' ({name_list})' if element_names else ''
681
- padding = alignment - len(label) - 1 # -1 for the colon
682
- return f'{label}:{"":<{padding}} {len(element_names)}{suffix}'
683
-
684
- time_period = f'Time period: {self.timesteps[0].date()} to {self.timesteps[-1].date()}'
685
- freq_str = str(self.timesteps.freq).replace('<', '').replace('>', '') if self.timesteps.freq else 'irregular'
1942
+ # Timestep info - handle both DatetimeIndex and RangeIndex (segmented)
1943
+ if self.is_segmented:
1944
+ r += f'Timesteps: {len(self.timesteps)} segments (segmented)\n'
1945
+ else:
1946
+ time_period = f'{self.timesteps[0].date()} to {self.timesteps[-1].date()}'
1947
+ freq_str = (
1948
+ str(self.timesteps.freq).replace('<', '').replace('>', '') if self.timesteps.freq else 'irregular'
1949
+ )
1950
+ r += f'Timesteps: {len(self.timesteps)} ({freq_str}) [{time_period}]\n'
686
1951
 
687
- lines = [
688
- f'Timesteps: {len(self.timesteps)} ({freq_str}) [{time_period}]',
689
- ]
1952
+ # Add clusters if present
1953
+ if self.clusters is not None:
1954
+ r += f'Clusters: {len(self.clusters)}\n'
690
1955
 
691
1956
  # Add periods if present
692
1957
  if self.periods is not None:
693
1958
  period_names = ', '.join(str(p) for p in self.periods[:3])
694
1959
  if len(self.periods) > 3:
695
1960
  period_names += f' ... (+{len(self.periods) - 3} more)'
696
- lines.append(f'Periods: {len(self.periods)} ({period_names})')
1961
+ r += f'Periods: {len(self.periods)} ({period_names})\n'
1962
+ else:
1963
+ r += 'Periods: None\n'
697
1964
 
698
1965
  # Add scenarios if present
699
1966
  if self.scenarios is not None:
700
1967
  scenario_names = ', '.join(str(s) for s in self.scenarios[:3])
701
1968
  if len(self.scenarios) > 3:
702
1969
  scenario_names += f' ... (+{len(self.scenarios) - 3} more)'
703
- lines.append(f'Scenarios: {len(self.scenarios)} ({scenario_names})')
704
-
705
- lines.extend(
706
- [
707
- format_elements(list(self.components.keys()), 'Components'),
708
- format_elements(list(self.buses.keys()), 'Buses'),
709
- format_elements(list(self.effects.effects.keys()), 'Effects'),
710
- f'Status: {"Connected & Transformed" if self.connected_and_transformed else "Not connected"}',
711
- ]
712
- )
713
- lines = ['FlowSystem:', f'{"─" * max(len(line) for line in lines)}'] + lines
1970
+ r += f'Scenarios: {len(self.scenarios)} ({scenario_names})\n'
1971
+ else:
1972
+ r += 'Scenarios: None\n'
714
1973
 
715
- return '\n'.join(lines)
1974
+ # Add status
1975
+ status = '✓' if self.connected_and_transformed else '⚠'
1976
+ r += f'Status: {status}\n'
1977
+
1978
+ # Add grouped container view
1979
+ r += '\n' + self._format_grouped_containers()
1980
+
1981
+ return r
716
1982
 
717
1983
  def __eq__(self, other: FlowSystem):
718
1984
  """Check if two FlowSystems are equal by comparing their dataset representations."""
@@ -732,51 +1998,284 @@ class FlowSystem(Interface):
732
1998
 
733
1999
  return True
734
2000
 
735
- def __getitem__(self, item) -> Element:
736
- """Get element by exact label with helpful error messages."""
737
- if item in self.all_elements:
738
- return self.all_elements[item]
2001
+ def _get_container_groups(self) -> dict[str, ElementContainer]:
2002
+ """Return ordered container groups for CompositeContainerMixin."""
2003
+ return {
2004
+ 'Components': self.components,
2005
+ 'Buses': self.buses,
2006
+ 'Effects': self.effects,
2007
+ 'Flows': self.flows,
2008
+ }
739
2009
 
740
- # Provide helpful error with suggestions
741
- from difflib import get_close_matches
2010
+ @property
2011
+ def flows(self) -> ElementContainer[Flow]:
2012
+ if self._flows_cache is None:
2013
+ flows = [f for c in self.components.values() for f in c.inputs + c.outputs]
2014
+ # Deduplicate by id and sort for reproducibility
2015
+ flows = sorted({id(f): f for f in flows}.values(), key=lambda f: f.label_full.lower())
2016
+ self._flows_cache = ElementContainer(flows, element_type_name='flows', truncate_repr=10)
2017
+ return self._flows_cache
742
2018
 
743
- suggestions = get_close_matches(item, self.all_elements.keys(), n=3, cutoff=0.6)
2019
+ @property
2020
+ def storages(self) -> ElementContainer[Storage]:
2021
+ """All storage components as an ElementContainer.
744
2022
 
745
- if suggestions:
746
- suggestion_str = ', '.join(f"'{s}'" for s in suggestions)
747
- raise KeyError(f"Element '{item}' not found. Did you mean: {suggestion_str}?")
748
- else:
749
- raise KeyError(f"Element '{item}' not found in FlowSystem")
2023
+ Returns:
2024
+ ElementContainer containing all Storage components in the FlowSystem,
2025
+ sorted by label for reproducibility.
2026
+ """
2027
+ if self._storages_cache is None:
2028
+ storages = [c for c in self.components.values() if isinstance(c, Storage)]
2029
+ storages = sorted(storages, key=lambda s: s.label_full.lower())
2030
+ self._storages_cache = ElementContainer(storages, element_type_name='storages', truncate_repr=10)
2031
+ return self._storages_cache
2032
+
2033
+ @property
2034
+ def dims(self) -> list[str]:
2035
+ """Active dimension names.
2036
+
2037
+ Returns:
2038
+ List of active dimension names in order.
750
2039
 
751
- def __contains__(self, item: str) -> bool:
752
- """Check if element exists in the FlowSystem."""
753
- return item in self.all_elements
2040
+ Example:
2041
+ >>> fs.dims
2042
+ ['time'] # simple case
2043
+ >>> fs_clustered.dims
2044
+ ['cluster', 'time', 'period', 'scenario'] # full case
2045
+ """
2046
+ result = []
2047
+ if self.clusters is not None:
2048
+ result.append('cluster')
2049
+ result.append('time')
2050
+ if self.periods is not None:
2051
+ result.append('period')
2052
+ if self.scenarios is not None:
2053
+ result.append('scenario')
2054
+ return result
2055
+
2056
+ @property
2057
+ def indexes(self) -> dict[str, pd.Index]:
2058
+ """Indexes for active dimensions.
2059
+
2060
+ Returns:
2061
+ Dict mapping dimension names to pandas Index objects.
754
2062
 
755
- def __iter__(self):
756
- """Iterate over element labels."""
757
- return iter(self.all_elements.keys())
2063
+ Example:
2064
+ >>> fs.indexes['time']
2065
+ DatetimeIndex(['2024-01-01', ...], dtype='datetime64[ns]', name='time')
2066
+ """
2067
+ result: dict[str, pd.Index] = {}
2068
+ if self.clusters is not None:
2069
+ result['cluster'] = self.clusters
2070
+ result['time'] = self.timesteps
2071
+ if self.periods is not None:
2072
+ result['period'] = self.periods
2073
+ if self.scenarios is not None:
2074
+ result['scenario'] = self.scenarios
2075
+ return result
758
2076
 
759
2077
  @property
760
- def flows(self) -> dict[str, Flow]:
761
- set_of_flows = {flow for comp in self.components.values() for flow in comp.inputs + comp.outputs}
762
- return {flow.label_full: flow for flow in set_of_flows}
2078
+ def temporal_dims(self) -> list[str]:
2079
+ """Temporal dimensions for summing over time.
2080
+
2081
+ Returns ['time', 'cluster'] for clustered systems, ['time'] otherwise.
2082
+ """
2083
+ if self.clusters is not None:
2084
+ return ['time', 'cluster']
2085
+ return ['time']
763
2086
 
764
2087
  @property
765
- def all_elements(self) -> dict[str, Element]:
766
- return {**self.components, **self.effects.effects, **self.flows, **self.buses}
2088
+ def temporal_weight(self) -> xr.DataArray:
2089
+ """Combined temporal weight (timestep_duration × cluster_weight).
2090
+
2091
+ Use for converting rates to totals before summing.
2092
+ Note: cluster_weight is used even without a clusters dimension.
2093
+ """
2094
+ # Use cluster_weight directly if set, otherwise check weights dict, fallback to 1.0
2095
+ cluster_weight = self.weights.get('cluster', self.cluster_weight if self.cluster_weight is not None else 1.0)
2096
+ return self.weights['time'] * cluster_weight
767
2097
 
768
2098
  @property
769
2099
  def coords(self) -> dict[FlowSystemDimensions, pd.Index]:
770
- active_coords = {'time': self.timesteps}
2100
+ """Active coordinates for variable creation.
2101
+
2102
+ .. deprecated::
2103
+ Use :attr:`indexes` instead.
2104
+
2105
+ Returns a dict of dimension names to coordinate arrays. When clustered,
2106
+ includes 'cluster' dimension before 'time'.
2107
+
2108
+ Returns:
2109
+ Dict mapping dimension names to coordinate arrays.
2110
+ """
2111
+ warnings.warn(
2112
+ f'FlowSystem.coords is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
2113
+ 'Use FlowSystem.indexes instead.',
2114
+ DeprecationWarning,
2115
+ stacklevel=2,
2116
+ )
2117
+ return self.indexes
2118
+
2119
+ @property
2120
+ def _use_true_cluster_dims(self) -> bool:
2121
+ """Check if true (cluster, time) dimensions should be used."""
2122
+ return self.clusters is not None
2123
+
2124
+ @property
2125
+ def _cluster_n_clusters(self) -> int | None:
2126
+ """Get number of clusters."""
2127
+ return len(self.clusters) if self.clusters is not None else None
2128
+
2129
+ @property
2130
+ def _cluster_timesteps_per_cluster(self) -> int | None:
2131
+ """Get timesteps per cluster (same as len(timesteps) for clustered systems)."""
2132
+ return len(self.timesteps) if self.clusters is not None else None
2133
+
2134
+ @property
2135
+ def _cluster_time_coords(self) -> pd.DatetimeIndex | pd.RangeIndex | None:
2136
+ """Get time coordinates for clustered system (same as timesteps)."""
2137
+ return self.timesteps if self.clusters is not None else None
2138
+
2139
+ @property
2140
+ def is_segmented(self) -> bool:
2141
+ """Check if this FlowSystem uses segmented time (RangeIndex instead of DatetimeIndex).
2142
+
2143
+ Segmented systems have variable timestep durations stored in timestep_duration,
2144
+ and use a RangeIndex for time coordinates instead of DatetimeIndex.
2145
+ """
2146
+ return isinstance(self.timesteps, pd.RangeIndex)
2147
+
2148
+ @property
2149
+ def n_timesteps(self) -> int:
2150
+ """Number of timesteps (within each cluster if clustered)."""
2151
+ if self.is_clustered:
2152
+ return self.clustering.timesteps_per_cluster
2153
+ return len(self.timesteps)
2154
+
2155
+ @property
2156
+ def used_in_calculation(self) -> bool:
2157
+ return self._used_in_optimization
2158
+
2159
+ @property
2160
+ def scenario_weights(self) -> xr.DataArray | None:
2161
+ """
2162
+ Weights for each scenario.
2163
+
2164
+ Returns:
2165
+ xr.DataArray: Scenario weights with 'scenario' dimension
2166
+ """
2167
+ return self._scenario_weights
2168
+
2169
+ @scenario_weights.setter
2170
+ def scenario_weights(self, value: Numeric_S | None) -> None:
2171
+ """
2172
+ Set scenario weights (always normalized to sum to 1).
2173
+
2174
+ Args:
2175
+ value: Scenario weights to set (will be converted to DataArray with 'scenario' dimension
2176
+ and normalized to sum to 1), or None to clear weights.
2177
+
2178
+ Raises:
2179
+ ValueError: If value is not None and no scenarios are defined in the FlowSystem.
2180
+ ValueError: If weights sum to zero (cannot normalize).
2181
+ """
2182
+ if value is None:
2183
+ self._scenario_weights = None
2184
+ return
2185
+
2186
+ if self.scenarios is None:
2187
+ raise ValueError(
2188
+ 'FlowSystem.scenario_weights cannot be set when no scenarios are defined. '
2189
+ 'Either define scenarios in FlowSystem(scenarios=...) or set scenario_weights to None.'
2190
+ )
2191
+
2192
+ weights = self.fit_to_model_coords('scenario_weights', value, dims=['scenario'])
2193
+
2194
+ # Normalize to sum to 1
2195
+ norm = weights.sum('scenario')
2196
+ if np.isclose(norm, 0.0).any().item():
2197
+ # Provide detailed error for multi-dimensional weights
2198
+ if norm.ndim > 0:
2199
+ zero_locations = np.argwhere(np.isclose(norm.values, 0.0))
2200
+ coords_info = ', '.join(
2201
+ f'{dim}={norm.coords[dim].values[idx]}'
2202
+ for idx, dim in zip(zero_locations[0], norm.dims, strict=False)
2203
+ )
2204
+ raise ValueError(
2205
+ f'scenario_weights sum to 0 at {coords_info}; cannot normalize. '
2206
+ f'Ensure all scenario weight combinations sum to a positive value.'
2207
+ )
2208
+ raise ValueError('scenario_weights sum to 0; cannot normalize.')
2209
+ self._scenario_weights = weights / norm
2210
+
2211
+ def _unit_weight(self, dim: str) -> xr.DataArray:
2212
+ """Create a unit weight DataArray (all 1.0) for a dimension."""
2213
+ index = self.indexes[dim]
2214
+ return xr.DataArray(
2215
+ np.ones(len(index), dtype=float),
2216
+ coords={dim: index},
2217
+ dims=[dim],
2218
+ name=f'{dim}_weight',
2219
+ )
2220
+
2221
+ @property
2222
+ def weights(self) -> dict[str, xr.DataArray]:
2223
+ """Weights for active dimensions (unit weights if not explicitly set).
2224
+
2225
+ Returns:
2226
+ Dict mapping dimension names to weight DataArrays.
2227
+ Keys match :attr:`dims` and :attr:`indexes`.
2228
+
2229
+ Example:
2230
+ >>> fs.weights['time'] # timestep durations
2231
+ >>> fs.weights['cluster'] # cluster weights (unit if not set)
2232
+ """
2233
+ result: dict[str, xr.DataArray] = {'time': self.timestep_duration}
2234
+ if self.clusters is not None:
2235
+ result['cluster'] = self.cluster_weight if self.cluster_weight is not None else self._unit_weight('cluster')
771
2236
  if self.periods is not None:
772
- active_coords['period'] = self.periods
2237
+ result['period'] = self.period_weights if self.period_weights is not None else self._unit_weight('period')
773
2238
  if self.scenarios is not None:
774
- active_coords['scenario'] = self.scenarios
775
- return active_coords
2239
+ result['scenario'] = (
2240
+ self.scenario_weights if self.scenario_weights is not None else self._unit_weight('scenario')
2241
+ )
2242
+ return result
2243
+
2244
+ def sum_temporal(self, data: xr.DataArray) -> xr.DataArray:
2245
+ """Sum data over temporal dimensions with full temporal weighting.
2246
+
2247
+ Applies both timestep_duration and cluster_weight, then sums over temporal dimensions.
2248
+ Use this to convert rates to totals (e.g., flow_rate → total_energy).
2249
+
2250
+ Args:
2251
+ data: Data with time dimension (and optionally cluster).
2252
+ Typically a rate (e.g., flow_rate in MW, status as 0/1).
2253
+
2254
+ Returns:
2255
+ Data summed over temporal dims with full temporal weighting applied.
2256
+
2257
+ Example:
2258
+ >>> total_energy = fs.sum_temporal(flow_rate) # MW → MWh total
2259
+ >>> active_hours = fs.sum_temporal(status) # count → hours
2260
+ """
2261
+ return (data * self.temporal_weight).sum(self.temporal_dims)
776
2262
 
777
2263
  @property
778
- def used_in_calculation(self) -> bool:
779
- return self._used_in_calculation
2264
+ def is_clustered(self) -> bool:
2265
+ """Check if this FlowSystem uses time series clustering.
2266
+
2267
+ Returns:
2268
+ True if the FlowSystem was created with transform.cluster(),
2269
+ False otherwise.
2270
+
2271
+ Example:
2272
+ >>> fs_clustered = flow_system.transform.cluster(n_clusters=8, cluster_duration='1D')
2273
+ >>> fs_clustered.is_clustered
2274
+ True
2275
+ >>> flow_system.is_clustered
2276
+ False
2277
+ """
2278
+ return getattr(self, 'clustering', None) is not None
780
2279
 
781
2280
  def _validate_scenario_parameter(self, value: bool | list[str], param_name: str, element_type: str) -> None:
782
2281
  """
@@ -849,6 +2348,61 @@ class FlowSystem(Interface):
849
2348
  self._validate_scenario_parameter(value, 'scenario_independent_flow_rates', 'Flow.label_full')
850
2349
  self._scenario_independent_flow_rates = value
851
2350
 
2351
+ @classmethod
2352
+ def _dataset_sel(
2353
+ cls,
2354
+ dataset: xr.Dataset,
2355
+ time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None,
2356
+ period: int | slice | list[int] | pd.Index | None = None,
2357
+ scenario: str | slice | list[str] | pd.Index | None = None,
2358
+ hours_of_last_timestep: int | float | None = None,
2359
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
2360
+ ) -> xr.Dataset:
2361
+ """
2362
+ Select subset of dataset by label (for power users to avoid conversion overhead).
2363
+
2364
+ This method operates directly on xarray Datasets, allowing power users to chain
2365
+ operations efficiently without repeated FlowSystem conversions:
2366
+
2367
+ Example:
2368
+ # Power user pattern (single conversion):
2369
+ >>> ds = flow_system.to_dataset()
2370
+ >>> ds = FlowSystem._dataset_sel(ds, time='2020-01')
2371
+ >>> ds = FlowSystem._dataset_resample(ds, freq='2h', method='mean')
2372
+ >>> result = FlowSystem.from_dataset(ds)
2373
+
2374
+ # vs. simple pattern (multiple conversions):
2375
+ >>> result = flow_system.sel(time='2020-01').resample('2h')
2376
+
2377
+ Args:
2378
+ dataset: xarray Dataset from FlowSystem.to_dataset()
2379
+ time: Time selection (e.g., '2020-01', slice('2020-01-01', '2020-06-30'))
2380
+ period: Period selection (e.g., 2020, slice(2020, 2022))
2381
+ scenario: Scenario selection (e.g., 'Base Case', ['Base Case', 'High Demand'])
2382
+ hours_of_last_timestep: Duration of the last timestep. If None, computed from the selected time index.
2383
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the selected time index.
2384
+ Can be a scalar or array.
2385
+
2386
+ Returns:
2387
+ xr.Dataset: Selected dataset
2388
+ """
2389
+ warnings.warn(
2390
+ f'\n_dataset_sel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2391
+ 'Use TransformAccessor._dataset_sel() instead.',
2392
+ DeprecationWarning,
2393
+ stacklevel=2,
2394
+ )
2395
+ from .transform_accessor import TransformAccessor
2396
+
2397
+ return TransformAccessor._dataset_sel(
2398
+ dataset,
2399
+ time=time,
2400
+ period=period,
2401
+ scenario=scenario,
2402
+ hours_of_last_timestep=hours_of_last_timestep,
2403
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
2404
+ )
2405
+
852
2406
  def sel(
853
2407
  self,
854
2408
  time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None,
@@ -856,35 +2410,70 @@ class FlowSystem(Interface):
856
2410
  scenario: str | slice | list[str] | pd.Index | None = None,
857
2411
  ) -> FlowSystem:
858
2412
  """
859
- Select a subset of the flowsystem by the time coordinate.
2413
+ Select a subset of the flowsystem by label.
2414
+
2415
+ .. deprecated::
2416
+ Use ``flow_system.transform.sel()`` instead. Will be removed in v6.0.0.
860
2417
 
861
2418
  Args:
862
- time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15', or list of times)
2419
+ time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15')
863
2420
  period: Period selection (e.g., slice(2023, 2024), or list of periods)
864
- scenario: Scenario selection (e.g., slice('scenario1', 'scenario2'), or list of scenarios)
2421
+ scenario: Scenario selection (e.g., 'scenario1', or list of scenarios)
865
2422
 
866
2423
  Returns:
867
- FlowSystem: New FlowSystem with selected data
2424
+ FlowSystem: New FlowSystem with selected data (no solution).
868
2425
  """
869
- if not self.connected_and_transformed:
870
- self.connect_and_transform()
2426
+ warnings.warn(
2427
+ f'\nsel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2428
+ 'Use flow_system.transform.sel() instead.',
2429
+ DeprecationWarning,
2430
+ stacklevel=2,
2431
+ )
2432
+ return self.transform.sel(time=time, period=period, scenario=scenario)
871
2433
 
872
- ds = self.to_dataset()
2434
+ @classmethod
2435
+ def _dataset_isel(
2436
+ cls,
2437
+ dataset: xr.Dataset,
2438
+ time: int | slice | list[int] | None = None,
2439
+ period: int | slice | list[int] | None = None,
2440
+ scenario: int | slice | list[int] | None = None,
2441
+ hours_of_last_timestep: int | float | None = None,
2442
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
2443
+ ) -> xr.Dataset:
2444
+ """
2445
+ Select subset of dataset by integer index (for power users to avoid conversion overhead).
873
2446
 
874
- # Build indexers dict from non-None parameters
875
- indexers = {}
876
- if time is not None:
877
- indexers['time'] = time
878
- if period is not None:
879
- indexers['period'] = period
880
- if scenario is not None:
881
- indexers['scenario'] = scenario
2447
+ See _dataset_sel() for usage pattern.
882
2448
 
883
- if not indexers:
884
- return self.copy() # Return a copy when no selection
2449
+ Args:
2450
+ dataset: xarray Dataset from FlowSystem.to_dataset()
2451
+ time: Time selection by index (e.g., slice(0, 100), [0, 5, 10])
2452
+ period: Period selection by index
2453
+ scenario: Scenario selection by index
2454
+ hours_of_last_timestep: Duration of the last timestep. If None, computed from the selected time index.
2455
+ hours_of_previous_timesteps: Duration of previous timesteps. If None, computed from the selected time index.
2456
+ Can be a scalar or array.
885
2457
 
886
- selected_dataset = ds.sel(**indexers)
887
- return self.__class__.from_dataset(selected_dataset)
2458
+ Returns:
2459
+ xr.Dataset: Selected dataset
2460
+ """
2461
+ warnings.warn(
2462
+ f'\n_dataset_isel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2463
+ 'Use TransformAccessor._dataset_isel() instead.',
2464
+ DeprecationWarning,
2465
+ stacklevel=2,
2466
+ )
2467
+ from .transform_accessor import TransformAccessor
2468
+
2469
+ return TransformAccessor._dataset_isel(
2470
+ dataset,
2471
+ time=time,
2472
+ period=period,
2473
+ scenario=scenario,
2474
+ hours_of_last_timestep=hours_of_last_timestep,
2475
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
2476
+ )
888
2477
 
889
2478
  def isel(
890
2479
  self,
@@ -895,85 +2484,133 @@ class FlowSystem(Interface):
895
2484
  """
896
2485
  Select a subset of the flowsystem by integer indices.
897
2486
 
2487
+ .. deprecated::
2488
+ Use ``flow_system.transform.isel()`` instead. Will be removed in v6.0.0.
2489
+
898
2490
  Args:
899
2491
  time: Time selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
900
- period: Period selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
901
- scenario: Scenario selection by integer index (e.g., slice(0, 3), 50, or [0, 5, 10])
2492
+ period: Period selection by integer index
2493
+ scenario: Scenario selection by integer index
902
2494
 
903
2495
  Returns:
904
- FlowSystem: New FlowSystem with selected data
2496
+ FlowSystem: New FlowSystem with selected data (no solution).
905
2497
  """
906
- if not self.connected_and_transformed:
907
- self.connect_and_transform()
2498
+ warnings.warn(
2499
+ f'\nisel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2500
+ 'Use flow_system.transform.isel() instead.',
2501
+ DeprecationWarning,
2502
+ stacklevel=2,
2503
+ )
2504
+ return self.transform.isel(time=time, period=period, scenario=scenario)
908
2505
 
909
- ds = self.to_dataset()
2506
+ @classmethod
2507
+ def _dataset_resample(
2508
+ cls,
2509
+ dataset: xr.Dataset,
2510
+ freq: str,
2511
+ method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean',
2512
+ hours_of_last_timestep: int | float | None = None,
2513
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
2514
+ **kwargs: Any,
2515
+ ) -> xr.Dataset:
2516
+ """
2517
+ Resample dataset along time dimension (for power users to avoid conversion overhead).
2518
+ Preserves only the attrs of the Dataset.
910
2519
 
911
- # Build indexers dict from non-None parameters
912
- indexers = {}
913
- if time is not None:
914
- indexers['time'] = time
915
- if period is not None:
916
- indexers['period'] = period
917
- if scenario is not None:
918
- indexers['scenario'] = scenario
2520
+ Uses optimized _resample_by_dimension_groups() to avoid broadcasting issues.
2521
+ See _dataset_sel() for usage pattern.
919
2522
 
920
- if not indexers:
921
- return self.copy() # Return a copy when no selection
2523
+ Args:
2524
+ dataset: xarray Dataset from FlowSystem.to_dataset()
2525
+ freq: Resampling frequency (e.g., '2h', '1D', '1M')
2526
+ method: Resampling method (e.g., 'mean', 'sum', 'first')
2527
+ hours_of_last_timestep: Duration of the last timestep after resampling. If None, computed from the last time interval.
2528
+ hours_of_previous_timesteps: Duration of previous timesteps after resampling. If None, computed from the first time interval.
2529
+ Can be a scalar or array.
2530
+ **kwargs: Additional arguments passed to xarray.resample()
922
2531
 
923
- selected_dataset = ds.isel(**indexers)
924
- return self.__class__.from_dataset(selected_dataset)
2532
+ Returns:
2533
+ xr.Dataset: Resampled dataset
2534
+ """
2535
+ warnings.warn(
2536
+ f'\n_dataset_resample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2537
+ 'Use TransformAccessor._dataset_resample() instead.',
2538
+ DeprecationWarning,
2539
+ stacklevel=2,
2540
+ )
2541
+ from .transform_accessor import TransformAccessor
2542
+
2543
+ return TransformAccessor._dataset_resample(
2544
+ dataset,
2545
+ freq=freq,
2546
+ method=method,
2547
+ hours_of_last_timestep=hours_of_last_timestep,
2548
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
2549
+ **kwargs,
2550
+ )
2551
+
2552
+ @classmethod
2553
+ def _resample_by_dimension_groups(
2554
+ cls,
2555
+ time_dataset: xr.Dataset,
2556
+ time: str,
2557
+ method: str,
2558
+ **kwargs: Any,
2559
+ ) -> xr.Dataset:
2560
+ """
2561
+ Resample variables grouped by their dimension structure to avoid broadcasting.
2562
+
2563
+ .. deprecated::
2564
+ Use ``TransformAccessor._resample_by_dimension_groups()`` instead.
2565
+ Will be removed in v6.0.0.
2566
+ """
2567
+ warnings.warn(
2568
+ f'\n_resample_by_dimension_groups() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2569
+ 'Use TransformAccessor._resample_by_dimension_groups() instead.',
2570
+ DeprecationWarning,
2571
+ stacklevel=2,
2572
+ )
2573
+ from .transform_accessor import TransformAccessor
2574
+
2575
+ return TransformAccessor._resample_by_dimension_groups(time_dataset, time, method, **kwargs)
925
2576
 
926
2577
  def resample(
927
2578
  self,
928
2579
  time: str,
929
2580
  method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean',
2581
+ hours_of_last_timestep: int | float | None = None,
2582
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
930
2583
  **kwargs: Any,
931
2584
  ) -> FlowSystem:
932
2585
  """
933
- Create a resampled FlowSystem by resampling data along the time dimension (like xr.Dataset.resample()).
934
- Only resamples data variables that have a time dimension.
2586
+ Create a resampled FlowSystem by resampling data along the time dimension.
2587
+
2588
+ .. deprecated::
2589
+ Use ``flow_system.transform.resample()`` instead. Will be removed in v6.0.0.
935
2590
 
936
2591
  Args:
937
2592
  time: Resampling frequency (e.g., '3h', '2D', '1M')
938
2593
  method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min'
2594
+ hours_of_last_timestep: Duration of the last timestep after resampling.
2595
+ hours_of_previous_timesteps: Duration of previous timesteps after resampling.
939
2596
  **kwargs: Additional arguments passed to xarray.resample()
940
2597
 
941
2598
  Returns:
942
- FlowSystem: New FlowSystem with resampled data
2599
+ FlowSystem: New resampled FlowSystem (no solution).
943
2600
  """
944
- if not self.connected_and_transformed:
945
- self.connect_and_transform()
946
-
947
- dataset = self.to_dataset()
948
-
949
- # Separate variables with and without time dimension
950
- time_vars = {}
951
- non_time_vars = {}
952
-
953
- for var_name, var in dataset.data_vars.items():
954
- if 'time' in var.dims:
955
- time_vars[var_name] = var
956
- else:
957
- non_time_vars[var_name] = var
958
-
959
- # Only resample variables that have time dimension
960
- time_dataset = dataset[list(time_vars.keys())]
961
- resampler = time_dataset.resample(time=time, **kwargs)
962
-
963
- if hasattr(resampler, method):
964
- resampled_time_data = getattr(resampler, method)()
965
- else:
966
- available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count']
967
- raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}')
968
-
969
- # Combine resampled time variables with non-time variables
970
- if non_time_vars:
971
- non_time_dataset = dataset[list(non_time_vars.keys())]
972
- resampled_dataset = xr.merge([resampled_time_data, non_time_dataset])
973
- else:
974
- resampled_dataset = resampled_time_data
975
-
976
- return self.__class__.from_dataset(resampled_dataset)
2601
+ warnings.warn(
2602
+ f'\nresample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
2603
+ 'Use flow_system.transform.resample() instead.',
2604
+ DeprecationWarning,
2605
+ stacklevel=2,
2606
+ )
2607
+ return self.transform.resample(
2608
+ time=time,
2609
+ method=method,
2610
+ hours_of_last_timestep=hours_of_last_timestep,
2611
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
2612
+ **kwargs,
2613
+ )
977
2614
 
978
2615
  @property
979
2616
  def connected_and_transformed(self) -> bool: