flixopt 3.0.1__py3-none-any.whl → 6.0.0rc7__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- flixopt/__init__.py +57 -49
- flixopt/carrier.py +159 -0
- flixopt/clustering/__init__.py +51 -0
- flixopt/clustering/base.py +1746 -0
- flixopt/clustering/intercluster_helpers.py +201 -0
- flixopt/color_processing.py +372 -0
- flixopt/comparison.py +819 -0
- flixopt/components.py +848 -270
- flixopt/config.py +853 -496
- flixopt/core.py +111 -98
- flixopt/effects.py +294 -284
- flixopt/elements.py +484 -223
- flixopt/features.py +220 -118
- flixopt/flow_system.py +2026 -389
- flixopt/interface.py +504 -286
- flixopt/io.py +1718 -55
- flixopt/linear_converters.py +291 -230
- flixopt/modeling.py +304 -181
- flixopt/network_app.py +2 -1
- flixopt/optimization.py +788 -0
- flixopt/optimize_accessor.py +373 -0
- flixopt/plot_result.py +143 -0
- flixopt/plotting.py +1177 -1034
- flixopt/results.py +1331 -372
- flixopt/solvers.py +12 -4
- flixopt/statistics_accessor.py +2412 -0
- flixopt/stats_accessor.py +75 -0
- flixopt/structure.py +954 -120
- flixopt/topology_accessor.py +676 -0
- flixopt/transform_accessor.py +2277 -0
- flixopt/types.py +120 -0
- flixopt-6.0.0rc7.dist-info/METADATA +290 -0
- flixopt-6.0.0rc7.dist-info/RECORD +36 -0
- {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/WHEEL +1 -1
- flixopt/aggregation.py +0 -382
- flixopt/calculation.py +0 -672
- flixopt/commons.py +0 -51
- flixopt/utils.py +0 -86
- flixopt-3.0.1.dist-info/METADATA +0 -209
- flixopt-3.0.1.dist-info/RECORD +0 -26
- {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/licenses/LICENSE +0 -0
- {flixopt-3.0.1.dist-info → flixopt-6.0.0rc7.dist-info}/top_level.txt +0 -0
flixopt/core.py
CHANGED
```diff
@@ -4,26 +4,18 @@ It provides Datatypes, logging functionality, and some functions to transform da
 """
 
 import logging
-import warnings
 from itertools import permutations
-from typing import Any, Literal
+from typing import Any, Literal
 
 import numpy as np
 import pandas as pd
 import xarray as xr
 
-
-
-Scalar = int | float
-"""A single number, either integer or float."""
-
-PeriodicDataUser = int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray
-"""User data which has no time dimension. Internally converted to a Scalar or an xr.DataArray without a time dimension."""
+from .types import NumericOrBool
 
-
-"""Internally used datatypes for periodic data."""
+logger = logging.getLogger('flixopt')
 
-FlowSystemDimensions = Literal['time', 'period', 'scenario']
+FlowSystemDimensions = Literal['time', 'cluster', 'period', 'scenario']
 """Possible dimensions of a FlowSystem."""
 
 
```
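The type aliases deleted above (Scalar, PeriodicDataUser, and friends) move into the new flixopt/types.py module, from which core.py now imports NumericOrBool. The real definition lives in flixopt/types.py and is not part of this hunk; judging by the inline union that NumericOrBool replaces in DataConverter.to_dataarray further down, it plausibly reads like this sketch:

```python
# Hypothetical sketch of the alias in flixopt/types.py - inferred from the
# union it replaces in DataConverter.to_dataarray (see the hunk below).
import numpy as np
import pandas as pd
import xarray as xr

NumericOrBool = (
    int
    | float
    | bool
    | np.integer
    | np.floating
    | np.bool_
    | np.ndarray
    | pd.Series
    | pd.DataFrame
    | xr.DataArray
)
```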
```diff
@@ -40,46 +32,38 @@ class ConversionError(Exception):
 
 
 class TimeSeriesData(xr.DataArray):
-    """Minimal TimeSeriesData that inherits from xr.DataArray with aggregation metadata."""
+    """Minimal TimeSeriesData that inherits from xr.DataArray with clustering metadata."""
 
     __slots__ = () # No additional instance attributes - everything goes in attrs
 
     def __init__(
         self,
         *args: Any,
-        aggregation_group: str | None = None,
-        aggregation_weight: float | None = None,
-        agg_group: str | None = None,
-        agg_weight: float | None = None,
+        clustering_group: str | None = None,
+        clustering_weight: float | None = None,
         **kwargs: Any,
     ):
         """
         Args:
             *args: Arguments passed to DataArray
-
-
-
-
+            clustering_group: Clustering group name. Use this when multiple time series should share the same
+                clustering weight (1/n where n is the number of series in the group). Mutually exclusive with clustering_weight.
+            clustering_weight: Clustering weight (0-1). Use this to assign a specific weight to a single time series.
+                Mutually exclusive with clustering_group.
             **kwargs: Additional arguments passed to DataArray
         """
-        if agg_group is not None:
-            warnings.warn('agg_group is deprecated, use aggregation_group instead', DeprecationWarning, stacklevel=2)
-            aggregation_group = agg_group
-        if agg_weight is not None:
-            warnings.warn('agg_weight is deprecated, use aggregation_weight instead', DeprecationWarning, stacklevel=2)
-            aggregation_weight = agg_weight
 
-        if (aggregation_group is not None) and (aggregation_weight is not None):
-            raise ValueError('Use either aggregation_group or aggregation_weight, not both')
+        if (clustering_group is not None) and (clustering_weight is not None):
+            raise ValueError('Use either clustering_group or clustering_weight, not both')
 
         # Let xarray handle all the initialization complexity
         super().__init__(*args, **kwargs)
 
         # Add our metadata to attrs after initialization
-        if aggregation_group is not None:
-            self.attrs['aggregation_group'] = aggregation_group
-        if aggregation_weight is not None:
-            self.attrs['aggregation_weight'] = aggregation_weight
+        if clustering_group is not None:
+            self.attrs['clustering_group'] = clustering_group
+        if clustering_weight is not None:
+            self.attrs['clustering_weight'] = clustering_weight
 
         # Always mark as TimeSeriesData
         self.attrs['__timeseries_data__'] = True
```
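A minimal usage sketch of the renamed metadata arguments (series names and values are invented; the error message is taken verbatim from the hunk above):

```python
import numpy as np
import pandas as pd
from flixopt.core import TimeSeriesData

time = pd.date_range('2024-01-01', periods=4, freq='h')

# A group: each member series later gets weight 1/n, with n the group size
demand = TimeSeriesData(np.array([10.0, 12.0, 9.0, 11.0]), coords={'time': time},
                        clustering_group='demands')
assert demand.clustering_group == 'demands'

# Alternatively, an explicit per-series weight between 0 and 1
price = TimeSeriesData(np.array([30.0, 28.0, 35.0, 31.0]), coords={'time': time},
                       clustering_weight=0.5)

# But never both at once:
try:
    TimeSeriesData(np.zeros(4), coords={'time': time},
                   clustering_group='demands', clustering_weight=0.5)
except ValueError as err:
    print(err)  # 'Use either clustering_group or clustering_weight, not both'
```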
```diff
@@ -96,33 +80,33 @@ class TimeSeriesData(xr.DataArray):
         da = DataConverter.to_dataarray(self.data, coords=coords)
         return self.__class__(
             da,
-            aggregation_group=self.aggregation_group,
-            aggregation_weight=self.aggregation_weight,
+            clustering_group=self.clustering_group,
+            clustering_weight=self.clustering_weight,
             name=name if name is not None else self.name,
         )
 
     @property
-    def aggregation_group(self) -> str | None:
-        return self.attrs.get('aggregation_group')
+    def clustering_group(self) -> str | None:
+        return self.attrs.get('clustering_group')
 
     @property
-    def aggregation_weight(self) -> float | None:
-        return self.attrs.get('aggregation_weight')
+    def clustering_weight(self) -> float | None:
+        return self.attrs.get('clustering_weight')
 
     @classmethod
     def from_dataarray(
-        cls, da: xr.DataArray, aggregation_group: str | None = None, aggregation_weight: float | None = None
+        cls,
+        da: xr.DataArray,
+        clustering_group: str | None = None,
+        clustering_weight: float | None = None,
     ):
         """Create TimeSeriesData from DataArray, extracting metadata from attrs."""
-
-        final_aggregation_group = (
-            aggregation_group if aggregation_group is not None else da.attrs.get('aggregation_group')
-        )
-        final_aggregation_weight = (
-            aggregation_weight if aggregation_weight is not None else da.attrs.get('aggregation_weight')
+        final_clustering_group = clustering_group if clustering_group is not None else da.attrs.get('clustering_group')
+        final_clustering_weight = (
+            clustering_weight if clustering_weight is not None else da.attrs.get('clustering_weight')
         )
 
-        return cls(da, aggregation_group=final_aggregation_group, aggregation_weight=final_aggregation_weight)
+        return cls(da, clustering_group=final_clustering_group, clustering_weight=final_clustering_weight)
 
     @classmethod
     def is_timeseries_data(cls, obj) -> bool:
```
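Since both fields are stored in attrs, they survive a round-trip through a plain xr.DataArray, and from_dataarray restores them, with explicit arguments taking precedence over stored attrs. A short sketch with invented values:

```python
import numpy as np
import pandas as pd
import xarray as xr
from flixopt.core import TimeSeriesData

time = pd.date_range('2024-01-01', periods=3, freq='h')
ts = TimeSeriesData(np.ones(3), coords={'time': time}, clustering_weight=0.25)

plain = xr.DataArray(ts)  # a plain DataArray still carries the attrs
restored = TimeSeriesData.from_dataarray(plain)
assert restored.clustering_weight == 0.25

# Explicit arguments win over what is stored in attrs
overridden = TimeSeriesData.from_dataarray(plain, clustering_weight=0.5)
assert overridden.clustering_weight == 0.5
```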
```diff
@@ -130,34 +114,15 @@ class TimeSeriesData(xr.DataArray):
         return isinstance(obj, xr.DataArray) and obj.attrs.get('__timeseries_data__', False)
 
     def __repr__(self):
-        aggregation_info = []
-        if self.aggregation_group:
-            aggregation_info.append(f"aggregation_group='{self.aggregation_group}'")
-        if self.aggregation_weight is not None:
-            aggregation_info.append(f'aggregation_weight={self.aggregation_weight}')
+        clustering_info = []
+        if self.clustering_group:
+            clustering_info.append(f"clustering_group='{self.clustering_group}'")
+        if self.clustering_weight is not None:
+            clustering_info.append(f'clustering_weight={self.clustering_weight}')
 
-        info_str = f'TimeSeriesData({", ".join(aggregation_info)})' if aggregation_info else 'TimeSeriesData'
+        info_str = f'TimeSeriesData({", ".join(clustering_info)})' if clustering_info else 'TimeSeriesData'
         return f'{info_str}\n{super().__repr__()}'
 
-    @property
-    def agg_group(self):
-        warnings.warn('agg_group is deprecated, use aggregation_group instead', DeprecationWarning, stacklevel=2)
-        return self.aggregation_group
-
-    @property
-    def agg_weight(self):
-        warnings.warn('agg_weight is deprecated, use aggregation_weight instead', DeprecationWarning, stacklevel=2)
-        return self.aggregation_weight
-
-
-TemporalDataUser = (
-    int | float | np.integer | np.floating | np.ndarray | pd.Series | pd.DataFrame | xr.DataArray | TimeSeriesData
-)
-"""User data which might have a time dimension. Internally converted to an xr.DataArray with time dimension."""
-
-TemporalData = xr.DataArray | TimeSeriesData
-"""Internally used datatypes for temporal data (data with a time dimension)."""
-
 
 class DataConverter:
     """
```
```diff
@@ -395,26 +360,66 @@ class DataConverter:
         )
 
         # Create target template for broadcasting
-
-        target_template = xr.DataArray(np.empty(target_shape), coords=target_coords, dims=target_dims)
+        target_template = xr.DataArray(coords=target_coords, dims=target_dims)
 
         # Perform broadcasting and ensure proper dimension ordering
         broadcasted = source_data.broadcast_like(target_template)
         return broadcasted.transpose(*target_dims)
 
+    @staticmethod
+    def _validate_dataarray_dims(
+        data: xr.DataArray, target_coords: dict[str, pd.Index], target_dims: tuple[str, ...]
+    ) -> xr.DataArray:
+        """
+        Validate that DataArray dims are a subset of target dims (without broadcasting).
+
+        This method validates compatibility without expanding to full dimensions,
+        allowing data to remain in compact form. Broadcasting happens later at
+        the linopy interface (FlowSystemModel.add_variables).
+
+        Also reduces constant dimensions and transposes data to canonical dimension
+        order (matching target_dims order).
+
+        Args:
+            data: DataArray to validate
+            target_coords: Target coordinates {dim_name: coordinate_index}
+            target_dims: Target dimension names in canonical order
+
+        Returns:
+            DataArray with validated dims, reduced constants, transposed to canonical order
+
+        Raises:
+            ConversionError: If data has dimensions not in target_dims,
+                or coordinate values don't match
+        """
+        # Validate: all data dimensions must exist in target
+        extra_dims = set(data.dims) - set(target_dims)
+        if extra_dims:
+            raise ConversionError(f'Data has dimensions {extra_dims} not in target dimensions {target_dims}')
+
+        # Validate: coordinate compatibility for overlapping dimensions
+        for dim in data.dims:
+            if dim in data.coords and dim in target_coords:
+                data_coords = data.coords[dim]
+                target_coords_for_dim = target_coords[dim]
+
+                if not np.array_equal(data_coords.values, target_coords_for_dim.values):
+                    raise ConversionError(
+                        f'Coordinate mismatch for dimension "{dim}". Data and target coordinates have different values.'
+                    )
+
+        # Transpose to canonical dimension order (subset of target_dims that data has)
+        if data.dims:
+            canonical_order = tuple(d for d in target_dims if d in data.dims)
+            if data.dims != canonical_order:
+                data = data.transpose(*canonical_order)
+
+        return data
+
     @classmethod
     def to_dataarray(
         cls,
-        data: int
-        | float
-        | bool
-        | np.integer
-        | np.floating
-        | np.bool_
-        | np.ndarray
-        | pd.Series
-        | pd.DataFrame
-        | xr.DataArray,
+        data: NumericOrBool,
         coords: dict[str, pd.Index] | None = None,
     ) -> xr.DataArray:
         """
```
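The net effect of the validate-instead-of-broadcast path, sketched below. Assumptions: DataConverter and ConversionError are importable from flixopt.core as shown in this diff, and 'scenario' is accepted by the (unshown) target-coordinate validation; the compact result keeps only its own dims:

```python
import numpy as np
import pandas as pd
import xarray as xr
from flixopt.core import ConversionError, DataConverter

time = pd.date_range('2024-01-01', periods=3, freq='h')
scenario = pd.Index(['base', 'high'], name='scenario')
target = {'time': time, 'scenario': scenario}

# Time-only data passes validation but keeps its compact 1-D shape;
# expansion to the full target shape is deferred to FlowSystemModel.add_variables.
compact = DataConverter.to_dataarray(
    xr.DataArray(np.array([1.0, 2.0, 3.0]), coords={'time': time}), coords=target
)
print(compact.dims)  # ('time',)

# A dimension outside the target set is rejected outright.
stray = xr.DataArray(np.zeros(2), coords={'region': pd.Index(['a', 'b'], name='region')})
try:
    DataConverter.to_dataarray(stray, coords=target)
except ConversionError as err:
    print(err)
```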
```diff
@@ -525,8 +530,9 @@ class DataConverter:
                 f'Unsupported data type: {type(data).__name__}. Supported types: {", ".join(supported_types)}'
             )
 
-        #
-
+        # Validate dims are compatible (no broadcasting - data stays compact)
+        # Broadcasting happens at FlowSystemModel.add_variables() via _ensure_coords
+        return cls._validate_dataarray_dims(intermediate, validated_coords, target_dims)
 
     @staticmethod
     def _validate_and_prepare_target_coordinates(
```
```diff
@@ -567,7 +573,9 @@ class DataConverter:
             coord_index = coord_index.rename(dim_name)
 
         # Special validation for time dimensions (common pattern)
-        if dim_name == 'time' and not isinstance(coord_index, pd.DatetimeIndex):
+        # Allow integer indices when 'cluster' dimension is present (clustered mode)
+        has_cluster_dim = 'cluster' in coords
+        if dim_name == 'time' and not isinstance(coord_index, pd.DatetimeIndex) and not has_cluster_dim:
             raise ConversionError(
                 f'Dimension named "time" should use DatetimeIndex for proper '
                 f'time-series functionality, got {type(coord_index).__name__}'
```
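In clustered mode the time axis inside each cluster is a plain position index, which this relaxation now accepts. A sketch under the same assumptions as the previous example:

```python
import numpy as np
import pandas as pd
import xarray as xr
from flixopt.core import DataConverter

# Integer 'time' positions are accepted because 'cluster' is among the coords ...
clustered = {
    'cluster': pd.Index([0, 1], name='cluster'),
    'time': pd.RangeIndex(3, name='time'),  # position within a cluster, not timestamps
}
profile = xr.DataArray(np.array([1.0, 2.0, 3.0]), coords={'time': clustered['time']})
da = DataConverter.to_dataarray(profile, coords=clustered)

# ... while the same integer index without a 'cluster' dimension raises ConversionError.
```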
```diff
@@ -606,28 +614,39 @@ def get_dataarray_stats(arr: xr.DataArray) -> dict:
     return stats
 
 
-def drop_constant_arrays(ds: xr.Dataset, dim: str = 'time', drop_arrays_without_dim: bool = True) -> xr.Dataset:
+def drop_constant_arrays(
+    ds: xr.Dataset, dim: str = 'time', drop_arrays_without_dim: bool = True, atol: float = 1e-10
+) -> xr.Dataset:
     """Drop variables with constant values along a dimension.
 
     Args:
         ds: Input dataset to filter.
         dim: Dimension along which to check for constant values.
         drop_arrays_without_dim: If True, also drop variables that don't have the specified dimension.
+        atol: Absolute tolerance for considering values as constant (based on max - min).
 
     Returns:
         Dataset with constant variables removed.
     """
     drop_vars = []
+    # Use ds.variables for faster access (avoids _construct_dataarray overhead)
+    variables = ds.variables
 
-    for name, da in ds.data_vars.items():
+    for name in ds.data_vars:
+        var = variables[name]
         # Skip variables without the dimension
-        if dim not in da.dims:
+        if dim not in var.dims:
             if drop_arrays_without_dim:
                 drop_vars.append(name)
             continue
 
-        # Check if variable is constant along the dimension
-
+        # Check if variable is constant along the dimension using numpy (ptp < atol)
+        axis = var.dims.index(dim)
+        data = var.values
+        # Use numpy operations directly for speed
+        with np.errstate(invalid='ignore'): # Ignore NaN warnings
+            ptp = np.nanmax(data, axis=axis) - np.nanmin(data, axis=axis)
+        if np.all(ptp < atol):
             drop_vars.append(name)
 
     if drop_vars:
```
```diff
@@ -637,9 +656,3 @@ def drop_constant_arrays(ds: xr.Dataset, dim: str = 'time', drop_arrays_without_
         )
 
     return ds.drop_vars(drop_vars)
-
-
-# Backward compatibility aliases
-# TODO: Needed?
-NonTemporalDataUser = PeriodicDataUser
-NonTemporalData = PeriodicData
```