flixopt 2.2.0rc2__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (58)
  1. flixopt/__init__.py +33 -4
  2. flixopt/aggregation.py +60 -80
  3. flixopt/calculation.py +395 -178
  4. flixopt/commons.py +1 -10
  5. flixopt/components.py +939 -448
  6. flixopt/config.py +553 -191
  7. flixopt/core.py +513 -846
  8. flixopt/effects.py +644 -178
  9. flixopt/elements.py +610 -355
  10. flixopt/features.py +394 -966
  11. flixopt/flow_system.py +736 -219
  12. flixopt/interface.py +1104 -302
  13. flixopt/io.py +103 -79
  14. flixopt/linear_converters.py +387 -95
  15. flixopt/modeling.py +759 -0
  16. flixopt/network_app.py +73 -39
  17. flixopt/plotting.py +294 -138
  18. flixopt/results.py +1253 -299
  19. flixopt/solvers.py +25 -21
  20. flixopt/structure.py +938 -396
  21. flixopt/utils.py +38 -12
  22. flixopt-3.0.0.dist-info/METADATA +209 -0
  23. flixopt-3.0.0.dist-info/RECORD +26 -0
  24. flixopt-3.0.0.dist-info/top_level.txt +1 -0
  25. docs/examples/00-Minimal Example.md +0 -5
  26. docs/examples/01-Basic Example.md +0 -5
  27. docs/examples/02-Complex Example.md +0 -10
  28. docs/examples/03-Calculation Modes.md +0 -5
  29. docs/examples/index.md +0 -5
  30. docs/faq/contribute.md +0 -61
  31. docs/faq/index.md +0 -3
  32. docs/images/architecture_flixOpt-pre2.0.0.png +0 -0
  33. docs/images/architecture_flixOpt.png +0 -0
  34. docs/images/flixopt-icon.svg +0 -1
  35. docs/javascripts/mathjax.js +0 -18
  36. docs/user-guide/Mathematical Notation/Bus.md +0 -33
  37. docs/user-guide/Mathematical Notation/Effects, Penalty & Objective.md +0 -132
  38. docs/user-guide/Mathematical Notation/Flow.md +0 -26
  39. docs/user-guide/Mathematical Notation/LinearConverter.md +0 -21
  40. docs/user-guide/Mathematical Notation/Piecewise.md +0 -49
  41. docs/user-guide/Mathematical Notation/Storage.md +0 -44
  42. docs/user-guide/Mathematical Notation/index.md +0 -22
  43. docs/user-guide/Mathematical Notation/others.md +0 -3
  44. docs/user-guide/index.md +0 -124
  45. flixopt/config.yaml +0 -10
  46. flixopt-2.2.0rc2.dist-info/METADATA +0 -167
  47. flixopt-2.2.0rc2.dist-info/RECORD +0 -54
  48. flixopt-2.2.0rc2.dist-info/top_level.txt +0 -5
  49. pics/architecture_flixOpt-pre2.0.0.png +0 -0
  50. pics/architecture_flixOpt.png +0 -0
  51. pics/flixOpt_plotting.jpg +0 -0
  52. pics/flixopt-icon.svg +0 -1
  53. pics/pics.pptx +0 -0
  54. scripts/extract_release_notes.py +0 -45
  55. scripts/gen_ref_pages.py +0 -54
  56. tests/ressources/Zeitreihen2020.csv +0 -35137
  57. {flixopt-2.2.0rc2.dist-info → flixopt-3.0.0.dist-info}/WHEEL +0 -0
  58. {flixopt-2.2.0rc2.dist-info → flixopt-3.0.0.dist-info}/licenses/LICENSE +0 -0
flixopt/structure.py CHANGED
@@ -3,13 +3,18 @@ This module contains the core structure of the flixopt framework.
 These classes are not directly used by the end user, but are used by other modules.
 """
 
+from __future__ import annotations
+
 import inspect
 import json
 import logging
-import pathlib
-from datetime import datetime
+from dataclasses import dataclass
 from io import StringIO
-from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Tuple, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Literal,
+)
 
 import linopy
 import numpy as np
@@ -18,10 +23,13 @@ import xarray as xr
 from rich.console import Console
 from rich.pretty import Pretty
 
-from .config import CONFIG
-from .core import NumericData, Scalar, TimeSeries, TimeSeriesCollection, TimeSeriesData
+from . import io as fx_io
+from .core import TimeSeriesData, get_dataarray_stats
 
 if TYPE_CHECKING:  # for type checking and preventing circular imports
+    import pathlib
+    from collections.abc import Collection, ItemsView, Iterator
+
     from .effects import EffectCollectionModel
     from .flow_system import FlowSystem
 
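The import reshuffle above leans on postponed evaluation of annotations: once `from __future__ import annotations` is active, names used only in type hints (here `pathlib` and the `collections.abc` types) can live under `TYPE_CHECKING` with no runtime import and no circular-import risk. A minimal illustration of the pattern (generic Python, not code from the package):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:  # evaluated by type checkers only, skipped at runtime
        import pathlib


    def save(path: str | pathlib.Path) -> None:  # annotation stays an unevaluated string
        ...
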
@@ -43,221 +51,812 @@ def register_class_for_io(cls):
     return cls
 
 
-class SystemModel(linopy.Model):
+class SubmodelsMixin:
+    """Mixin that provides submodel functionality for both FlowSystemModel and Submodel."""
+
+    submodels: Submodels
+
+    @property
+    def all_submodels(self) -> list[Submodel]:
+        """Get all submodels including nested ones recursively."""
+        direct_submodels = list(self.submodels.values())
+
+        # Recursively collect nested sub-models
+        nested_submodels = []
+        for submodel in direct_submodels:
+            nested_submodels.extend(submodel.all_submodels)
+
+        return direct_submodels + nested_submodels
+
+    def add_submodels(self, submodel: Submodel, short_name: str = None) -> Submodel:
+        """Register a sub-model with the model"""
+        if short_name is None:
+            short_name = submodel.__class__.__name__
+        if short_name in self.submodels:
+            raise ValueError(f'Short name "{short_name}" already assigned to model')
+        self.submodels.add(submodel, name=short_name)
+
+        return submodel
+
+
+class FlowSystemModel(linopy.Model, SubmodelsMixin):
     """
-    The SystemModel is the linopy Model that is used to create the mathematical model of the flow_system.
+    The FlowSystemModel is the linopy Model that is used to create the mathematical model of the flow_system.
     It is used to create and store the variables and constraints for the flow_system.
+
+    Args:
+        flow_system: The flow_system that is used to create the model.
+        normalize_weights: Whether to automatically normalize the weights to sum up to 1 when solving.
     """
 
-    def __init__(self, flow_system: 'FlowSystem'):
-        """
-        Args:
-            flow_system: The flow_system that is used to create the model.
-        """
+    def __init__(self, flow_system: FlowSystem, normalize_weights: bool):
         super().__init__(force_dim_names=True)
         self.flow_system = flow_system
-        self.time_series_collection = flow_system.time_series_collection
-        self.effects: Optional[EffectCollectionModel] = None
+        self.normalize_weights = normalize_weights
+        self.effects: EffectCollectionModel | None = None
+        self.submodels: Submodels = Submodels({})
 
     def do_modeling(self):
         self.effects = self.flow_system.effects.create_model(self)
-        self.effects.do_modeling()
-        component_models = [component.create_model(self) for component in self.flow_system.components.values()]
-        bus_models = [bus.create_model(self) for bus in self.flow_system.buses.values()]
-        for component_model in component_models:
-            component_model.do_modeling()
-        for bus_model in bus_models:  # Buses after Components, because FlowModels are created in ComponentModels
-            bus_model.do_modeling()
+        for component in self.flow_system.components.values():
+            component.create_model(self)
+        for bus in self.flow_system.buses.values():
+            bus.create_model(self)
+
+        # Add scenario equality constraints after all elements are modeled
+        self._add_scenario_equality_constraints()
+
+    def _add_scenario_equality_for_parameter_type(
+        self,
+        parameter_type: Literal['flow_rate', 'size'],
+        config: bool | list[str],
+    ):
+        """Add scenario equality constraints for a specific parameter type.
+
+        Args:
+            parameter_type: The type of parameter ('flow_rate' or 'size')
+            config: Configuration value (True = equalize all, False = equalize none, list = equalize these)
+        """
+        if config is False:
+            return  # All vary per scenario, no constraints needed
+
+        suffix = f'|{parameter_type}'
+        if config is True:
+            # All should be scenario-independent
+            vars_to_constrain = [var for var in self.variables if var.endswith(suffix)]
+        else:
+            # Only those in the list should be scenario-independent
+            all_vars = [var for var in self.variables if var.endswith(suffix)]
+            to_equalize = {f'{element}{suffix}' for element in config}
+            vars_to_constrain = [var for var in all_vars if var in to_equalize]
+
+        # Validate that all specified variables exist
+        missing_vars = [v for v in vars_to_constrain if v not in self.variables]
+        if missing_vars:
+            param_name = 'scenario_independent_sizes' if parameter_type == 'size' else 'scenario_independent_flow_rates'
+            raise ValueError(f'{param_name} contains invalid labels: {missing_vars}')
+
+        logger.debug(f'Adding scenario equality constraints for {len(vars_to_constrain)} {parameter_type} variables')
+        for var in vars_to_constrain:
+            self.add_constraints(
+                self.variables[var].isel(scenario=0) == self.variables[var].isel(scenario=slice(1, None)),
+                name=f'{var}|scenario_independent',
+            )
+
+    def _add_scenario_equality_constraints(self):
+        """Add equality constraints to equalize variables across scenarios based on FlowSystem configuration."""
+        # Only proceed if we have scenarios
+        if self.flow_system.scenarios is None or len(self.flow_system.scenarios) <= 1:
+            return
+
+        self._add_scenario_equality_for_parameter_type('flow_rate', self.flow_system.scenario_independent_flow_rates)
+        self._add_scenario_equality_for_parameter_type('size', self.flow_system.scenario_independent_sizes)
 
     @property
     def solution(self):
         solution = super().solution
+        solution['objective'] = self.objective.value
         solution.attrs = {
             'Components': {
-                comp.label_full: comp.model.results_structure()
+                comp.label_full: comp.submodel.results_structure()
                 for comp in sorted(
                     self.flow_system.components.values(), key=lambda component: component.label_full.upper()
                 )
             },
             'Buses': {
-                bus.label_full: bus.model.results_structure()
+                bus.label_full: bus.submodel.results_structure()
                 for bus in sorted(self.flow_system.buses.values(), key=lambda bus: bus.label_full.upper())
             },
             'Effects': {
-                effect.label_full: effect.model.results_structure()
+                effect.label_full: effect.submodel.results_structure()
                 for effect in sorted(self.flow_system.effects, key=lambda effect: effect.label_full.upper())
             },
+            'Flows': {
+                flow.label_full: flow.submodel.results_structure()
+                for flow in sorted(self.flow_system.flows.values(), key=lambda flow: flow.label_full.upper())
+            },
         }
-        return solution.reindex(time=self.time_series_collection.timesteps_extra)
+        return solution.reindex(time=self.flow_system.timesteps_extra)
 
     @property
     def hours_per_step(self):
-        return self.time_series_collection.hours_per_timestep
+        return self.flow_system.hours_per_timestep
 
     @property
     def hours_of_previous_timesteps(self):
-        return self.time_series_collection.hours_of_previous_timesteps
+        return self.flow_system.hours_of_previous_timesteps
 
-    @property
-    def coords(self) -> Tuple[pd.DatetimeIndex]:
-        return (self.time_series_collection.timesteps,)
+    def get_coords(
+        self,
+        dims: Collection[str] | None = None,
+        extra_timestep: bool = False,
+    ) -> xr.Coordinates | None:
+        """
+        Returns the coordinates of the model
+
+        Args:
+            dims: The dimensions to include in the coordinates. If None, includes all dimensions
+            extra_timestep: If True, uses extra timesteps instead of regular timesteps
+
+        Returns:
+            The coordinates of the model, or None if no coordinates are available
+
+        Raises:
+            ValueError: If extra_timestep=True but 'time' is not in dims
+        """
+        if extra_timestep and dims is not None and 'time' not in dims:
+            raise ValueError('extra_timestep=True requires "time" to be included in dims')
+
+        if dims is None:
+            coords = dict(self.flow_system.coords)
+        else:
+            coords = {k: v for k, v in self.flow_system.coords.items() if k in dims}
+
+        if extra_timestep and coords:
+            coords['time'] = self.flow_system.timesteps_extra
+
+        return xr.Coordinates(coords) if coords else None
 
     @property
-    def coords_extra(self) -> Tuple[pd.DatetimeIndex]:
-        return (self.time_series_collection.timesteps_extra,)
+    def weights(self) -> int | xr.DataArray:
+        """Returns the weights of the FlowSystem. Normalizes to 1 if normalize_weights is True"""
+        if self.flow_system.weights is not None:
+            weights = self.flow_system.weights
+        else:
+            weights = self.flow_system.fit_to_model_coords('weights', 1, dims=['period', 'scenario'])
+
+        if not self.normalize_weights:
+            return weights
+
+        return weights / weights.sum()
+
+    def __repr__(self) -> str:
+        """
+        Return a string representation of the FlowSystemModel, borrowed from linopy.Model.
+        """
+        # Extract content from existing representations
+        sections = {
+            f'Variables: [{len(self.variables)}]': self.variables.__repr__().split('\n', 2)[2],
+            f'Constraints: [{len(self.constraints)}]': self.constraints.__repr__().split('\n', 2)[2],
+            f'Submodels: [{len(self.submodels)}]': self.submodels.__repr__().split('\n', 2)[2],
+            'Status': self.status,
+        }
+
+        # Format sections with headers and underlines
+        formatted_sections = []
+        for section_header, section_content in sections.items():
+            formatted_sections.append(f'{section_header}\n{"-" * len(section_header)}\n{section_content}')
+
+        title = f'FlowSystemModel ({self.type})'
+        all_sections = '\n'.join(formatted_sections)
+
+        return f'{title}\n{"=" * len(title)}\n\n{all_sections}'
 
 
 class Interface:
     """
-    This class is used to collect arguments about a Model. Its the base class for all Elements and Models in flixopt.
+    Base class for all Elements and Models in flixopt that provides serialization capabilities.
+
+    This class enables automatic serialization/deserialization of objects containing xarray DataArrays
+    and nested Interface objects to/from xarray Datasets and NetCDF files. It uses introspection
+    of constructor parameters to automatically handle most serialization scenarios.
+
+    Key Features:
+    - Automatic extraction and restoration of xarray DataArrays
+    - Support for nested Interface objects
+    - NetCDF and JSON export/import
+    - Recursive handling of complex nested structures
+
+    Subclasses must implement:
+        transform_data(flow_system): Transform data to match FlowSystem dimensions
     """
 
-    def transform_data(self, flow_system: 'FlowSystem'):
-        """Transforms the data of the interface to match the FlowSystem's dimensions"""
-        raise NotImplementedError('Every Interface needs a transform_data() method')
+    def transform_data(self, flow_system: FlowSystem, name_prefix: str = '') -> None:
+        """Transform the data of the interface to match the FlowSystem's dimensions.
+
+        Args:
+            flow_system: The FlowSystem containing timing and dimensional information
+            name_prefix: The prefix to use for the names of the variables. Defaults to '', which results in no prefix.
 
-    def infos(self, use_numpy: bool = True, use_element_label: bool = False) -> Dict:
+        Raises:
+            NotImplementedError: Must be implemented by subclasses
         """
-        Generate a dictionary representation of the object's constructor arguments.
-        Excludes default values and empty dictionaries and lists.
-        Converts data to be compatible with JSON.
+        raise NotImplementedError('Every Interface subclass needs a transform_data() method')
 
-        Args:
-            use_numpy: Whether to convert NumPy arrays to lists. Defaults to True.
-                If True, numeric numpy arrays (`np.ndarray`) are preserved as-is.
-                If False, they are converted to lists.
-            use_element_label: Whether to use the element label instead of the infos of the element. Defaults to False.
-                Note that Elements used as keys in dictionaries are always converted to their labels.
+    def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
+        """
+        Convert all DataArrays to references and extract them.
+        This is the core method that both to_dict() and to_dataset() build upon.
 
         Returns:
-            A dictionary representation of the object's constructor arguments.
+            Tuple of (reference_structure, extracted_arrays_dict)
 
+        Raises:
+            ValueError: If DataArrays don't have unique names or are duplicated
         """
-        # Get the constructor arguments and their default values
-        init_params = sorted(
-            inspect.signature(self.__init__).parameters.items(),
-            key=lambda x: (x[0].lower() != 'label', x[0].lower()),  # Prioritize 'label'
-        )
-        # Build a dict of attribute=value pairs, excluding defaults
-        details = {'class': ':'.join([cls.__name__ for cls in self.__class__.__mro__])}
-        for name, param in init_params:
-            if name == 'self':
+        # Get constructor parameters using caching for performance
+        if not hasattr(self, '_cached_init_params'):
+            self._cached_init_params = list(inspect.signature(self.__init__).parameters.keys())
+
+        # Process all constructor parameters
+        reference_structure = {'__class__': self.__class__.__name__}
+        all_extracted_arrays = {}
+
+        for name in self._cached_init_params:
+            if name == 'self':  # Skip self and timesteps. Timesteps are directly stored in Datasets
+                continue
+
+            value = getattr(self, name, None)
+
+            if value is None:
                 continue
-            value, default = getattr(self, name, None), param.default
-            # Ignore default values and empty dicts and list
-            if np.all(value == default) or (isinstance(value, (dict, list)) and not value):
+            if isinstance(value, pd.Index):
+                logger.debug(f'Skipping {name=} because it is an Index')
                 continue
-            details[name] = copy_and_convert_datatypes(value, use_numpy, use_element_label)
-        return details
 
-    def to_json(self, path: Union[str, pathlib.Path]):
+            # Extract arrays and get reference structure
+            processed_value, extracted_arrays = self._extract_dataarrays_recursive(value, name)
+
+            # Check for array name conflicts
+            conflicts = set(all_extracted_arrays.keys()) & set(extracted_arrays.keys())
+            if conflicts:
+                raise ValueError(
+                    f'DataArray name conflicts detected: {conflicts}. '
+                    f'Each DataArray must have a unique name for serialization.'
+                )
+
+            # Add extracted arrays to the collection
+            all_extracted_arrays.update(extracted_arrays)
+
+            # Only store in structure if it's not None/empty after processing
+            if processed_value is not None and not self._is_empty_container(processed_value):
+                reference_structure[name] = processed_value
+
+        return reference_structure, all_extracted_arrays
+
+    @staticmethod
+    def _is_empty_container(obj) -> bool:
+        """Check if object is an empty container (dict, list, tuple, set)."""
+        return isinstance(obj, (dict, list, tuple, set)) and len(obj) == 0
+
+    def _extract_dataarrays_recursive(self, obj, context_name: str = '') -> tuple[Any, dict[str, xr.DataArray]]:
         """
-        Saves the element to a json file.
-        This not meant to be reloaded and recreate the object, but rather used to document or compare the object.
+        Recursively extract DataArrays from nested structures.
 
         Args:
-            path: The path to the json file.
-        """
-        data = get_compact_representation(self.infos(use_numpy=True, use_element_label=True))
-        with open(path, 'w', encoding='utf-8') as f:
-            json.dump(data, f, indent=4, ensure_ascii=False)
+            obj: Object to process
+            context_name: Name context for better error messages
 
-    def to_dict(self) -> Dict:
-        """Convert the object to a dictionary representation."""
-        data = {'__class__': self.__class__.__name__}
+        Returns:
+            Tuple of (processed_object_with_references, extracted_arrays_dict)
 
-        # Get the constructor parameters
-        init_params = inspect.signature(self.__init__).parameters
+        Raises:
+            ValueError: If DataArrays don't have unique names
+        """
+        extracted_arrays = {}
+
+        # Handle DataArrays directly - use their unique name
+        if isinstance(obj, xr.DataArray):
+            if not obj.name:
+                raise ValueError(
+                    f'DataArrays must have a unique name for serialization. '
+                    f'Unnamed DataArray found in {context_name}. Please set array.name = "unique_name"'
+                )
 
-        for name in init_params:
-            if name == 'self':
-                continue
+            array_name = str(obj.name)  # Ensure string type
+            if array_name in extracted_arrays:
+                raise ValueError(
+                    f'DataArray name "{array_name}" is duplicated in {context_name}. '
+                    f'Each DataArray must have a unique name for serialization.'
+                )
 
-            value = getattr(self, name, None)
-            data[name] = self._serialize_value(value)
-
-        return data
-
-    def _serialize_value(self, value: Any):
-        """Helper method to serialize a value based on its type."""
-        if value is None:
-            return None
-        elif isinstance(value, Interface):
-            return value.to_dict()
-        elif isinstance(value, (list, tuple)):
-            return self._serialize_list(value)
-        elif isinstance(value, dict):
-            return self._serialize_dict(value)
+            extracted_arrays[array_name] = obj
+            return f':::{array_name}', extracted_arrays
+
+        # Handle Interface objects - extract their DataArrays too
+        elif isinstance(obj, Interface):
+            try:
+                interface_structure, interface_arrays = obj._create_reference_structure()
+                extracted_arrays.update(interface_arrays)
+                return interface_structure, extracted_arrays
+            except Exception as e:
+                raise ValueError(f'Failed to process nested Interface object in {context_name}: {e}') from e
+
+        # Handle sequences (lists, tuples)
+        elif isinstance(obj, (list, tuple)):
+            processed_items = []
+            for i, item in enumerate(obj):
+                item_context = f'{context_name}[{i}]' if context_name else f'item[{i}]'
+                processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
+                extracted_arrays.update(nested_arrays)
+                processed_items.append(processed_item)
+            return processed_items, extracted_arrays
+
+        # Handle dictionaries
+        elif isinstance(obj, dict):
+            processed_dict = {}
+            for key, value in obj.items():
+                key_context = f'{context_name}.{key}' if context_name else str(key)
+                processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context)
+                extracted_arrays.update(nested_arrays)
+                processed_dict[key] = processed_value
+            return processed_dict, extracted_arrays
+
+        # Handle sets (convert to list for JSON compatibility)
+        elif isinstance(obj, set):
+            processed_items = []
+            for i, item in enumerate(obj):
+                item_context = f'{context_name}.set_item[{i}]' if context_name else f'set_item[{i}]'
+                processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
+                extracted_arrays.update(nested_arrays)
+                processed_items.append(processed_item)
+            return processed_items, extracted_arrays
+
+        # For all other types, serialize to basic types
         else:
-            return value
+            return self._serialize_to_basic_types(obj), extracted_arrays
+
+    def _handle_deprecated_kwarg(
+        self,
+        kwargs: dict,
+        old_name: str,
+        new_name: str,
+        current_value: Any = None,
+        transform: callable = None,
+        check_conflict: bool = True,
+    ) -> Any:
+        """
+        Handle a deprecated keyword argument by issuing a warning and returning the appropriate value.
+
+        This centralizes the deprecation pattern used across multiple classes (Source, Sink, InvestParameters, etc.).
+
+        Args:
+            kwargs: Dictionary of keyword arguments to check and modify
+            old_name: Name of the deprecated parameter
+            new_name: Name of the replacement parameter
+            current_value: Current value of the new parameter (if already set)
+            transform: Optional callable to transform the old value before returning (e.g., lambda x: [x] to wrap in list)
+            check_conflict: Whether to check if both old and new parameters are specified (default: True).
+                Note: For parameters with non-None default values (e.g., bool parameters with default=False),
+                set check_conflict=False since we cannot distinguish between an explicit value and the default.
+
+        Returns:
+            The value to use (either from old parameter or current_value)
+
+        Raises:
+            ValueError: If both old and new parameters are specified and check_conflict is True
+
+        Example:
+            # For parameters where None is the default (conflict checking works):
+            value = self._handle_deprecated_kwarg(kwargs, 'old_param', 'new_param', current_value)
+
+            # For parameters with non-None defaults (disable conflict checking):
+            mandatory = self._handle_deprecated_kwarg(
+                kwargs, 'optional', 'mandatory', mandatory,
+                transform=lambda x: not x,
+                check_conflict=False  # Cannot detect if mandatory was explicitly passed
+            )
+        """
+        import warnings
+
+        old_value = kwargs.pop(old_name, None)
+        if old_value is not None:
+            warnings.warn(
+                f'The use of the "{old_name}" argument is deprecated. Use the "{new_name}" argument instead.',
+                DeprecationWarning,
+                stacklevel=3,  # Stack: this method -> __init__ -> caller
+            )
+            # Check for conflicts: only raise error if both were explicitly provided
+            if check_conflict and current_value is not None:
+                raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.')
+
+            # Apply transformation if provided
+            if transform is not None:
+                return transform(old_value)
+            return old_value
+
+        return current_value
 
-    def _serialize_list(self, items):
-        """Serialize a list of items."""
-        return [self._serialize_value(item) for item in items]
+    def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None:
+        """
+        Validate that no unexpected keyword arguments are present in kwargs.
+
+        This method uses inspect to get the actual function signature and filters out
+        any parameters that are not defined in the __init__ method, while also
+        handling the special case of 'kwargs' itself which can appear during deserialization.
 
-    def _serialize_dict(self, d):
-        """Serialize a dictionary of items."""
-        return {k: self._serialize_value(v) for k, v in d.items()}
+        Args:
+            kwargs: Dictionary of keyword arguments to validate
+            class_name: Optional class name for error messages. If None, uses self.__class__.__name__
+
+        Raises:
+            TypeError: If unexpected keyword arguments are found
+        """
+        if not kwargs:
+            return
+
+        import inspect
+
+        sig = inspect.signature(self.__init__)
+        known_params = set(sig.parameters.keys()) - {'self', 'kwargs'}
+        # Also filter out 'kwargs' itself which can appear during deserialization
+        extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'}
+
+        if extra_kwargs:
+            class_name = class_name or self.__class__.__name__
+            unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys())
+            raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}')
 
     @classmethod
-    def _deserialize_dict(cls, data: Dict) -> Union[Dict, 'Interface']:
-        if '__class__' in data:
-            class_name = data.pop('__class__')
-            try:
-                class_type = CLASS_REGISTRY[class_name]
-                if issubclass(class_type, Interface):
-                    # Use _deserialize_dict to process the arguments
-                    processed_data = {k: cls._deserialize_value(v) for k, v in data.items()}
-                    return class_type(**processed_data)
-                else:
-                    raise ValueError(f'Class "{class_name}" is not an Interface.')
-            except (AttributeError, KeyError) as e:
-                raise ValueError(f'Class "{class_name}" could not get reconstructed.') from e
-        else:
-            return {k: cls._deserialize_value(v) for k, v in data.items()}
+    def _resolve_dataarray_reference(
+        cls, reference: str, arrays_dict: dict[str, xr.DataArray]
+    ) -> xr.DataArray | TimeSeriesData:
+        """
+        Resolve a single DataArray reference (:::name) to actual DataArray or TimeSeriesData.
+
+        Args:
+            reference: Reference string starting with ":::"
+            arrays_dict: Dictionary of available DataArrays
+
+        Returns:
+            Resolved DataArray or TimeSeriesData object
+
+        Raises:
+            ValueError: If referenced array is not found
+        """
+        array_name = reference[3:]  # Remove ":::" prefix
+        if array_name not in arrays_dict:
+            raise ValueError(f"Referenced DataArray '{array_name}' not found in dataset")
+
+        array = arrays_dict[array_name]
+
+        # Handle null values with warning
+        if array.isnull().any():
+            logger.error(f"DataArray '{array_name}' contains null values. Dropping all-null along present dims.")
+            if 'time' in array.dims:
+                array = array.dropna(dim='time', how='all')
+
+        # Check if this should be restored as TimeSeriesData
+        if TimeSeriesData.is_timeseries_data(array):
+            return TimeSeriesData.from_dataarray(array)
+
+        return array
 
     @classmethod
-    def _deserialize_list(cls, data: List) -> List:
-        return [cls._deserialize_value(value) for value in data]
+    def _resolve_reference_structure(cls, structure, arrays_dict: dict[str, xr.DataArray]):
+        """
+        Convert reference structure back to actual objects using provided arrays.
+
+        Args:
+            structure: Structure containing references (:::name) or special type markers
+            arrays_dict: Dictionary of available DataArrays
+
+        Returns:
+            Structure with references resolved to actual DataArrays or objects
+
+        Raises:
+            ValueError: If referenced arrays are not found or class is not registered
+        """
+        # Handle DataArray references
+        if isinstance(structure, str) and structure.startswith(':::'):
+            return cls._resolve_dataarray_reference(structure, arrays_dict)
+
+        elif isinstance(structure, list):
+            resolved_list = []
+            for item in structure:
+                resolved_item = cls._resolve_reference_structure(item, arrays_dict)
+                if resolved_item is not None:  # Filter out None values from missing references
+                    resolved_list.append(resolved_item)
+            return resolved_list
+
+        elif isinstance(structure, dict):
+            if structure.get('__class__'):
+                class_name = structure['__class__']
+                if class_name not in CLASS_REGISTRY:
+                    raise ValueError(
+                        f"Class '{class_name}' not found in CLASS_REGISTRY. "
+                        f'Available classes: {list(CLASS_REGISTRY.keys())}'
+                    )
+
+                # This is a nested Interface object - restore it recursively
+                nested_class = CLASS_REGISTRY[class_name]
+                # Remove the __class__ key and process the rest
+                nested_data = {k: v for k, v in structure.items() if k != '__class__'}
+                # Resolve references in the nested data
+                resolved_nested_data = cls._resolve_reference_structure(nested_data, arrays_dict)
+
+                try:
+                    return nested_class(**resolved_nested_data)
+                except Exception as e:
+                    raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
+            else:
+                # Regular dictionary - resolve references in values
+                resolved_dict = {}
+                for key, value in structure.items():
+                    resolved_value = cls._resolve_reference_structure(value, arrays_dict)
+                    if resolved_value is not None or value is None:  # Keep None values if they were originally None
+                        resolved_dict[key] = resolved_value
+                return resolved_dict
+
+        else:
+            return structure
+
+    def _serialize_to_basic_types(self, obj):
+        """
+        Convert object to basic Python types only (no DataArrays, no custom objects).
+
+        Args:
+            obj: Object to serialize
+
+        Returns:
+            Object converted to basic Python types (str, int, float, bool, list, dict)
+        """
+        if obj is None or isinstance(obj, (str, int, float, bool)):
+            return obj
+        elif isinstance(obj, np.integer):
+            return int(obj)
+        elif isinstance(obj, np.floating):
+            return float(obj)
+        elif isinstance(obj, np.bool_):
+            return bool(obj)
+        elif isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame)):
+            return obj.tolist() if hasattr(obj, 'tolist') else list(obj)
+        elif isinstance(obj, dict):
+            return {k: self._serialize_to_basic_types(v) for k, v in obj.items()}
+        elif isinstance(obj, (list, tuple)):
+            return [self._serialize_to_basic_types(item) for item in obj]
+        elif isinstance(obj, set):
+            return [self._serialize_to_basic_types(item) for item in obj]
+        elif hasattr(obj, 'isoformat'):  # datetime objects
+            return obj.isoformat()
+        elif hasattr(obj, '__dict__'):  # Custom objects with attributes
+            logger.warning(f'Converting custom object {type(obj)} to dict representation: {obj}')
+            return {str(k): self._serialize_to_basic_types(v) for k, v in obj.__dict__.items()}
+        else:
+            # For any other object, try to convert to string as fallback
+            logger.error(f'Converting unknown type {type(obj)} to string: {obj}')
+            return str(obj)
+
+    def to_dataset(self) -> xr.Dataset:
+        """
+        Convert the object to an xarray Dataset representation.
+        All DataArrays become dataset variables, everything else goes to attrs.
+
+        Its recommended to only call this method on Interfaces with all numeric data stored as xr.DataArrays.
+        Interfaces inside a FlowSystem are automatically converted this form after connecting and transforming the FlowSystem.
+
+        Returns:
+            xr.Dataset: Dataset containing all DataArrays with basic objects only in attributes
+
+        Raises:
+            ValueError: If serialization fails due to naming conflicts or invalid data
+        """
+        try:
+            reference_structure, extracted_arrays = self._create_reference_structure()
+            # Create the dataset with extracted arrays as variables and structure as attrs
+            return xr.Dataset(extracted_arrays, attrs=reference_structure)
+        except Exception as e:
+            raise ValueError(
+                f'Failed to convert {self.__class__.__name__} to dataset. Its recommended to only call this method on '
+                f'a fully connected and transformed FlowSystem, or Interfaces inside such a FlowSystem.'
+                f'Original Error: {e}'
+            ) from e
+
+    def to_netcdf(self, path: str | pathlib.Path, compression: int = 0):
+        """
+        Save the object to a NetCDF file.
+
+        Args:
+            path: Path to save the NetCDF file
+            compression: Compression level (0-9)
+
+        Raises:
+            ValueError: If serialization fails
+            IOError: If file cannot be written
+        """
+        try:
+            ds = self.to_dataset()
+            fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
+        except Exception as e:
+            raise OSError(f'Failed to save {self.__class__.__name__} to NetCDF file {path}: {e}') from e
 
     @classmethod
-    def _deserialize_value(cls, value: Any):
-        """Helper method to deserialize a value based on its type."""
-        if value is None:
-            return None
-        elif isinstance(value, dict):
-            return cls._deserialize_dict(value)
-        elif isinstance(value, list):
-            return cls._deserialize_list(value)
-        return value
+    def from_dataset(cls, ds: xr.Dataset) -> Interface:
+        """
+        Create an instance from an xarray Dataset.
+
+        Args:
+            ds: Dataset containing the object data
+
+        Returns:
+            Interface instance
+
+        Raises:
+            ValueError: If dataset format is invalid or class mismatch
+        """
+        try:
+            # Get class name and verify it matches
+            class_name = ds.attrs.get('__class__')
+            if class_name and class_name != cls.__name__:
+                logger.warning(f"Dataset class '{class_name}' doesn't match target class '{cls.__name__}'")
+
+            # Get the reference structure from attrs
+            reference_structure = dict(ds.attrs)
+
+            # Remove the class name since it's not a constructor parameter
+            reference_structure.pop('__class__', None)
+
+            # Create arrays dictionary from dataset variables
+            arrays_dict = {name: array for name, array in ds.data_vars.items()}
+
+            # Resolve all references using the centralized method
+            resolved_params = cls._resolve_reference_structure(reference_structure, arrays_dict)
+
+            return cls(**resolved_params)
+        except Exception as e:
+            raise ValueError(f'Failed to create {cls.__name__} from dataset: {e}') from e
 
     @classmethod
-    def from_dict(cls, data: Dict) -> 'Interface':
+    def from_netcdf(cls, path: str | pathlib.Path) -> Interface:
         """
-        Create an instance from a dictionary representation.
+        Load an instance from a NetCDF file.
 
         Args:
-            data: Dictionary containing the data for the object.
+            path: Path to the NetCDF file
+
+        Returns:
+            Interface instance
+
+        Raises:
+            IOError: If file cannot be read
+            ValueError: If file format is invalid
         """
-        return cls._deserialize_dict(data)
+        try:
+            ds = fx_io.load_dataset_from_netcdf(path)
+            return cls.from_dataset(ds)
+        except Exception as e:
+            raise OSError(f'Failed to load {cls.__name__} from NetCDF file {path}: {e}') from e
 
-    def __repr__(self):
-        # Get the constructor arguments and their current values
-        init_signature = inspect.signature(self.__init__)
-        init_args = init_signature.parameters
+    def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
+        """
+        Get object structure as a dictionary.
 
-        # Create a dictionary with argument names and their values
-        args_str = ', '.join(f'{name}={repr(getattr(self, name, None))}' for name in init_args if name != 'self')
-        return f'{self.__class__.__name__}({args_str})'
+        Args:
+            clean: If True, remove None and empty dicts and lists.
+            stats: If True, replace DataArray references with statistics
+
+        Returns:
+            Dictionary representation of the object structure
+        """
+        reference_structure, extracted_arrays = self._create_reference_structure()
+
+        if stats:
+            # Replace references with statistics
+            reference_structure = self._replace_references_with_stats(reference_structure, extracted_arrays)
+
+        if clean:
+            return fx_io.remove_none_and_empty(reference_structure)
+        return reference_structure
+
+    def _replace_references_with_stats(self, structure, arrays_dict: dict[str, xr.DataArray]):
+        """Replace DataArray references with statistical summaries."""
+        if isinstance(structure, str) and structure.startswith(':::'):
+            array_name = structure[3:]
+            if array_name in arrays_dict:
+                return get_dataarray_stats(arrays_dict[array_name])
+            return structure
+
+        elif isinstance(structure, dict):
+            return {k: self._replace_references_with_stats(v, arrays_dict) for k, v in structure.items()}
+
+        elif isinstance(structure, list):
+            return [self._replace_references_with_stats(item, arrays_dict) for item in structure]
+
+        return structure
+
+    def to_json(self, path: str | pathlib.Path):
+        """
+        Save the object to a JSON file.
+        This is meant for documentation and comparison, not for reloading.
+
+        Args:
+            path: The path to the JSON file.
+
+        Raises:
+            IOError: If file cannot be written
+        """
+        try:
+            # Use the stats mode for JSON export (cleaner output)
+            data = self.get_structure(clean=True, stats=True)
+            with open(path, 'w', encoding='utf-8') as f:
+                json.dump(data, f, indent=4, ensure_ascii=False)
+        except Exception as e:
+            raise OSError(f'Failed to save {self.__class__.__name__} to JSON file {path}: {e}') from e
+
+    def __repr__(self):
+        """Return a detailed string representation for debugging."""
+        try:
+            # Get the constructor arguments and their current values
+            init_signature = inspect.signature(self.__init__)
+            init_args = init_signature.parameters
+
+            # Create a dictionary with argument names and their values, with better formatting
+            args_parts = []
+            for name in init_args:
+                if name == 'self':
+                    continue
+                value = getattr(self, name, None)
+                # Truncate long representations
+                value_repr = repr(value)
+                if len(value_repr) > 50:
+                    value_repr = value_repr[:47] + '...'
+                args_parts.append(f'{name}={value_repr}')
+
+            args_str = ', '.join(args_parts)
+            return f'{self.__class__.__name__}({args_str})'
+        except Exception:
+            # Fallback if introspection fails
+            return f'{self.__class__.__name__}(<repr_failed>)'
 
     def __str__(self):
-        return get_str_representation(self.infos(use_numpy=True, use_element_label=True))
+        """Return a user-friendly string representation."""
+        try:
+            data = self.get_structure(clean=True, stats=True)
+            with StringIO() as output_buffer:
+                console = Console(file=output_buffer, width=1000)  # Adjust width as needed
+                console.print(Pretty(data, expand_all=True, indent_guides=True))
+            return output_buffer.getvalue()
+        except Exception:
+            # Fallback if structure generation fails
+            return f'{self.__class__.__name__} instance'
+
+    def copy(self) -> Interface:
+        """
+        Create a copy of the Interface object.
+
+        Uses the existing serialization infrastructure to ensure proper copying
+        of all DataArrays and nested objects.
+
+        Returns:
+            A new instance of the same class with copied data.
+        """
+        # Convert to dataset, copy it, and convert back
+        dataset = self.to_dataset().copy(deep=True)
+        return self.__class__.from_dataset(dataset)
+
+    def __copy__(self):
+        """Support for copy.copy()."""
+        return self.copy()
+
+    def __deepcopy__(self, memo):
+        """Support for copy.deepcopy()."""
+        return self.copy()
 
 
 class Element(Interface):
     """This class is the basic Element of flixopt. Every Element has a label"""
 
-    def __init__(self, label: str, meta_data: Dict = None):
+    def __init__(self, label: str, meta_data: dict | None = None):
         """
         Args:
             label: The label of the element
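
The serialization machinery introduced above (`_create_reference_structure`, `to_dataset`, `from_dataset`) replaces the 2.x `to_dict`/`from_dict` pair: every named `xr.DataArray` becomes a dataset variable and is referenced from the attrs by a `:::name` marker. A minimal round-trip sketch under that contract; the `Demo` class and every name in it are illustrative, not part of the package:

    from __future__ import annotations

    import xarray as xr

    from flixopt.structure import Interface, register_class_for_io


    @register_class_for_io
    class Demo(Interface):
        def __init__(self, label: str, profile: xr.DataArray | None = None):
            self.label = label      # constructor args are stored as same-named attributes
            self.profile = profile  # DataArrays must carry a unique .name

        def transform_data(self, flow_system, name_prefix: str = '') -> None:
            pass  # no-op: not exercised in this sketch


    demo = Demo('demo', profile=xr.DataArray([1.0, 2.0], dims=['time'], name='Demo|profile'))
    ds = demo.to_dataset()                           # arrays -> variables, rest -> attrs
    assert ds.attrs['profile'] == ':::Demo|profile'  # reference marker in attrs
    restored = Demo.from_dataset(ds)                 # references resolved back to arrays
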
@@ -265,13 +864,14 @@ class Element(Interface):
         """
         self.label = Element._valid_label(label)
         self.meta_data = meta_data if meta_data is not None else {}
-        self.model: Optional[ElementModel] = None
+        self.submodel: ElementModel | None = None
 
     def _plausibility_checks(self) -> None:
-        """This function is used to do some basic plausibility checks for each Element during initialization"""
+        """This function is used to do some basic plausibility checks for each Element during initialization.
+        This is run after all data is transformed to the correct format/type"""
         raise NotImplementedError('Every Element needs a _plausibility_checks() method')
 
-    def create_model(self, model: SystemModel) -> 'ElementModel':
+    def create_model(self, model: FlowSystemModel) -> ElementModel:
         raise NotImplementedError('Every Element needs a create_model() method')
 
     @property
@@ -295,69 +895,105 @@ class Element(Interface):
                 f'Use any other symbol instead'
             )
         if label.endswith(' '):
-            logger.warning(f'Label "{label}" ends with a space. This will be removed.')
+            logger.error(f'Label "{label}" ends with a space. This will be removed.')
             return label.rstrip()
         return label
 
 
-class Model:
-    """Stores Variables and Constraints."""
+class Submodel(SubmodelsMixin):
+    """Stores Variables and Constraints. Its a subset of a FlowSystemModel.
+    Variables and constraints are stored in the main FlowSystemModel, and are referenced here.
+    Can have other Submodels assigned, and can be a Submodel of another Submodel.
+    """
 
-    def __init__(self, model: SystemModel, label_of_element: str, label: str = '', label_full: Optional[str] = None):
+    def __init__(self, model: FlowSystemModel, label_of_element: str, label_of_model: str | None = None):
         """
         Args:
-            model: The SystemModel that is used to create the model.
+            model: The FlowSystemModel that is used to create the model.
            label_of_element: The label of the parent (Element). Used to construct the full label of the model.
-            label: The label of the model. Used to construct the full label of the model.
-            label_full: The full label of the model. Can overwrite the full label constructed from the other labels.
+            label_of_model: The label of the model. Used as a prefix in all variables and constraints.
         """
         self._model = model
         self.label_of_element = label_of_element
-        self._label = label
-        self._label_full = label_full
-
-        self._variables_direct: List[str] = []
-        self._constraints_direct: List[str] = []
-        self.sub_models: List[Model] = []
-
-        self._variables_short: Dict[str, str] = {}
-        self._constraints_short: Dict[str, str] = {}
-        self._sub_models_short: Dict[str, str] = {}
-        logger.debug(f'Created {self.__class__.__name__} "{self.label_full}"')
-
-    def do_modeling(self):
-        raise NotImplementedError('Every Model needs a do_modeling() method')
-
-    def add(
-        self, item: Union[linopy.Variable, linopy.Constraint, 'Model'], short_name: Optional[str] = None
-    ) -> Union[linopy.Variable, linopy.Constraint, 'Model']:
-        """
-        Add a variable, constraint or sub-model to the model
-
-        Args:
-            item: The variable, constraint or sub-model to add to the model
-            short_name: The short name of the variable, constraint or sub-model. If not provided, the full name is used.
-        """
-        # TODO: Check uniquenes of short names
-        if isinstance(item, linopy.Variable):
-            self._variables_direct.append(item.name)
-            self._variables_short[item.name] = short_name or item.name
-        elif isinstance(item, linopy.Constraint):
-            self._constraints_direct.append(item.name)
-            self._constraints_short[item.name] = short_name or item.name
-        elif isinstance(item, Model):
-            self.sub_models.append(item)
-            self._sub_models_short[item.label_full] = short_name or item.label_full
-        else:
-            raise ValueError(
-                f'Item must be a linopy.Variable, linopy.Constraint or flixopt.structure.Model, got {type(item)}'
-            )
-        return item
+        self.label_of_model = label_of_model if label_of_model is not None else self.label_of_element
+
+        self._variables: dict[str, linopy.Variable] = {}  # Mapping from short name to variable
+        self._constraints: dict[str, linopy.Constraint] = {}  # Mapping from short name to constraint
+        self.submodels: Submodels = Submodels({})
+
+        logger.debug(f'Creating {self.__class__.__name__} "{self.label_full}"')
+        self._do_modeling()
+
+    def add_variables(self, short_name: str = None, **kwargs) -> linopy.Variable:
+        """Create and register a variable in one step"""
+        if kwargs.get('name') is None:
+            if short_name is None:
+                raise ValueError('Short name must be provided when no name is given')
+            kwargs['name'] = f'{self.label_of_model}|{short_name}'
+
+        variable = self._model.add_variables(**kwargs)
+        self.register_variable(variable, short_name)
+        return variable
+
+    def add_constraints(self, expression, short_name: str = None, **kwargs) -> linopy.Constraint:
+        """Create and register a constraint in one step"""
+        if kwargs.get('name') is None:
+            if short_name is None:
+                raise ValueError('Short name must be provided when no name is given')
+            kwargs['name'] = f'{self.label_of_model}|{short_name}'
+
+        constraint = self._model.add_constraints(expression, **kwargs)
+        self.register_constraint(constraint, short_name)
+        return constraint
+
+    def register_variable(self, variable: linopy.Variable, short_name: str = None) -> linopy.Variable:
+        """Register a variable with the model"""
+        if short_name is None:
+            short_name = variable.name
+        elif short_name in self._variables:
+            raise ValueError(f'Short name "{short_name}" already assigned to model variables')
+
+        self._variables[short_name] = variable
+        return variable
+
+    def register_constraint(self, constraint: linopy.Constraint, short_name: str = None) -> linopy.Constraint:
+        """Register a constraint with the model"""
+        if short_name is None:
+            short_name = constraint.name
+        elif short_name in self._constraints:
+            raise ValueError(f'Short name "{short_name}" already assigned to model constraint')
+
+        self._constraints[short_name] = constraint
+        return constraint
+
+    def __getitem__(self, key: str) -> linopy.Variable:
+        """Get a variable by its short name"""
+        if key in self._variables:
+            return self._variables[key]
+        raise KeyError(f'Variable "{key}" not found in model "{self.label_full}"')
+
+    def __contains__(self, name: str) -> bool:
+        """Check if a variable exists in the model"""
+        return name in self._variables or name in self.variables
+
+    def get(self, name: str, default=None):
+        """Get variable by short name, returning default if not found"""
+        try:
+            return self[name]
+        except KeyError:
+            return default
+
+    def get_coords(
+        self,
+        dims: Collection[str] | None = None,
+        extra_timestep: bool = False,
+    ) -> xr.Coordinates | None:
+        return self._model.get_coords(dims=dims, extra_timestep=extra_timestep)
 
     def filter_variables(
         self,
-        filter_by: Optional[Literal['binary', 'continuous', 'integer']] = None,
-        length: Literal['scalar', 'time'] = None,
+        filter_by: Literal['binary', 'continuous', 'integer'] | None = None,
+        length: Literal['scalar', 'time'] | None = None,
     ):
         if filter_by is None:
             all_variables = self.variables
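
`add_variables` and `add_constraints` above derive every linopy name from the submodel's label, `{label_of_model}|{short_name}`, while the short name stays the lookup key for `self[...]`. A hedged sketch of a subclass built on that API; `DemoSubmodel` and the bound of 100 are illustrative, not classes or values from the package:

    from flixopt.structure import Submodel


    class DemoSubmodel(Submodel):
        def _do_modeling(self):
            # Registered in the FlowSystemModel as '<label_of_model>|flow_rate',
            # retrievable here via self['flow_rate'].
            flow_rate = self.add_variables(
                short_name='flow_rate',
                lower=0,
                coords=self.get_coords(),
            )
            # Constraint name becomes '<label_of_model>|flow_rate|ub'.
            self.add_constraints(flow_rate <= 100, short_name='flow_rate|ub')
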
@@ -377,252 +1013,158 @@ class Model:
377
1013
  return all_variables[[name for name in all_variables if 'time' in all_variables[name].dims]]
378
1014
  raise ValueError(f'Invalid length "{length}", must be one of "scalar", "time" or None')
379
1015
 
380
- @property
381
- def label(self) -> str:
382
- return self._label if self._label else self.label_of_element
383
-
384
1016
  @property
385
1017
  def label_full(self) -> str:
386
- """Used to construct the names of variables and constraints"""
387
- if self._label_full:
388
- return self._label_full
389
- elif self._label:
390
- return f'{self.label_of_element}|{self.label}'
391
- return self.label_of_element
1018
+ return self.label_of_model
392
1019
 
393
1020
  @property
394
1021
  def variables_direct(self) -> linopy.Variables:
395
- return self._model.variables[self._variables_direct]
1022
+ """Variables of the model, excluding those of sub-models"""
1023
+ return self._model.variables[[var.name for var in self._variables.values()]]
396
1024
 
397
1025
  @property
398
1026
  def constraints_direct(self) -> linopy.Constraints:
399
- return self._model.constraints[self._constraints_direct]
1027
+ """Constraints of the model, excluding those of sub-models"""
1028
+ return self._model.constraints[[con.name for con in self._constraints.values()]]
400
1029
 
401
1030
  @property
402
- def _variables(self) -> List[str]:
403
- all_variables = self._variables_direct.copy()
404
- for sub_model in self.sub_models:
405
- for variable in sub_model._variables:
406
- if variable in all_variables:
407
- raise KeyError(
408
- f"Duplicate key found: '{variable}' in both {self.label_full} and {sub_model.label_full}!"
409
- )
410
- all_variables.append(variable)
411
- return all_variables
1031
+ def constraints(self) -> linopy.Constraints:
1032
+ """All constraints of the model, including those of all sub-models"""
1033
+ names = list(self.constraints_direct) + [
1034
+ constraint_name for submodel in self.submodels.values() for constraint_name in submodel.constraints
1035
+ ]
412
1036
 
413
- @property
414
- def _constraints(self) -> List[str]:
415
- all_constraints = self._constraints_direct.copy()
416
- for sub_model in self.sub_models:
417
- for constraint in sub_model._constraints:
418
- if constraint in all_constraints:
419
- raise KeyError(f"Duplicate key found: '{constraint}' in both main model and submodel!")
420
- all_constraints.append(constraint)
421
- return all_constraints
1037
+ return self._model.constraints[names]
422
1038
 
423
1039
  @property
424
1040
  def variables(self) -> linopy.Variables:
425
- return self._model.variables[self._variables]
1041
+ """All variables of the model, including those of all sub-models"""
1042
+ names = list(self.variables_direct) + [
1043
+ variable_name for submodel in self.submodels.values() for variable_name in submodel.variables
1044
+ ]
426
1045
 
427
- @property
428
- def constraints(self) -> linopy.Constraints:
429
- return self._model.constraints[self._constraints]
1046
+ return self._model.variables[names]
430
1047
 
431
- @property
432
- def all_sub_models(self) -> List['Model']:
433
- return [model for sub_model in self.sub_models for model in [sub_model] + sub_model.all_sub_models]
1048
+ def __repr__(self) -> str:
1049
+ """
1050
+ Return a string representation of the linopy model.
1051
+ """
1052
+ # Extract content from existing representations
1053
+ sections = {
1054
+ f'Variables: [{len(self.variables)}/{len(self._model.variables)}]': self.variables.__repr__().split(
1055
+ '\n', 2
1056
+ )[2],
1057
+ f'Constraints: [{len(self.constraints)}/{len(self._model.constraints)}]': self.constraints.__repr__().split(
1058
+ '\n', 2
1059
+ )[2],
1060
+ f'Submodels: [{len(self.submodels)}]': self.submodels.__repr__().split('\n', 2)[2],
1061
+ }
434
1062
 
1063
+ # Format sections with headers and underlines
1064
+ formatted_sections = []
1065
+ for section_header, section_content in sections.items():
1066
+ formatted_sections.append(f'{section_header}\n{"-" * len(section_header)}\n{section_content}')
435
1067
 
436
- class ElementModel(Model):
437
- """Stores the mathematical Variables and Constraints for Elements"""
1068
+ model_string = f'Submodel "{self.label_of_model}":'
1069
+ all_sections = '\n'.join(formatted_sections)
438
1070
 
439
- def __init__(self, model: SystemModel, element: Element):
440
- """
441
- Args:
442
- model: The SystemModel that is used to create the model.
443
- element: The element this model is created for.
444
- """
445
- super().__init__(model, label_of_element=element.label_full, label=element.label, label_full=element.label_full)
446
- self.element = element
1071
+ return f'{model_string}\n{"=" * len(model_string)}\n\n{all_sections}'
447
1072
 
448
- def results_structure(self):
449
- return {
450
- 'label': self.label,
451
- 'label_full': self.label_full,
452
- 'variables': list(self.variables),
453
- 'constraints': list(self.constraints),
454
- }
1073
+ @property
1074
+ def hours_per_step(self):
1075
+ return self._model.hours_per_step
455
1076
 
1077
+ def _do_modeling(self):
1078
+ """Called at the end of initialization. Override in subclasses to create variables and constraints."""
1079
+ pass
-def copy_and_convert_datatypes(data: Any, use_numpy: bool = True, use_element_label: bool = False) -> Any:
-    """
-    Converts values in a nested data structure into JSON-compatible types while preserving or transforming numpy arrays
-    and custom `Element` objects based on the specified options.

-    The function handles various data types and transforms them into a consistent, readable format:
-    - Primitive types (`int`, `float`, `str`, `bool`, `None`) are returned as-is.
-    - Numpy scalars are converted to their corresponding Python scalar types.
-    - Collections (`list`, `tuple`, `set`, `dict`) are recursively processed to ensure all elements are compatible.
-    - Numpy arrays are preserved or converted to lists, depending on `use_numpy`.
-    - Custom `Element` objects can be represented either by their `label` or their initialization parameters as a dictionary.
-    - Timestamps (`datetime`) are converted to ISO 8601 strings.
+@dataclass(repr=False)
+class Submodels:
+    """A simple collection for storing submodels with easy access and representation."""

-    Args:
-        data: The input data to process, which may be deeply nested and contain a mix of types.
-        use_numpy: If `True`, numeric numpy arrays (`np.ndarray`) are preserved as-is. If `False`, they are converted to lists.
-            Default is `True`.
-        use_element_label: If `True`, `Element` objects are represented by their `label`. If `False`, they are converted into a dictionary
-            based on their initialization parameters. Default is `False`.
-
-    Returns:
-        A transformed version of the input data, containing only JSON-compatible types:
-        - `int`, `float`, `str`, `bool`, `None`
-        - `list`, `dict`
-        - `np.ndarray` (if `use_numpy=True`. This is NOT JSON-compatible)
-
-    Raises:
-        TypeError: If the data cannot be converted to the specified types.
-
-    Examples:
-        >>> copy_and_convert_datatypes({'a': np.array([1, 2, 3]), 'b': Element(label='example')})
-        {'a': array([1, 2, 3]), 'b': {'class': 'Element', 'label': 'example'}}
-
-        >>> copy_and_convert_datatypes({'a': np.array([1, 2, 3]), 'b': Element(label='example')}, use_numpy=False)
-        {'a': [1, 2, 3], 'b': {'class': 'Element', 'label': 'example'}}
-
-    Notes:
-        - The function gracefully handles unexpected types by issuing a warning and returning a deep copy of the data.
-        - Empty collections (lists, dictionaries) and default parameter values in `Element` objects are omitted from the output.
-        - Numpy arrays with non-numeric data types are automatically converted to lists.
-    """
-    if isinstance(data, np.integer):  # This must be checked before checking for regular int and float!
-        return int(data)
-    elif isinstance(data, np.floating):
-        return float(data)
-
-    elif isinstance(data, (int, float, str, bool, type(None))):
-        return data
-    elif isinstance(data, datetime):
-        return data.isoformat()
-
-    elif isinstance(data, (tuple, set)):
-        return copy_and_convert_datatypes([item for item in data], use_numpy, use_element_label)
-    elif isinstance(data, dict):
-        return {
-            copy_and_convert_datatypes(key, use_numpy, use_element_label=True): copy_and_convert_datatypes(
-                value, use_numpy, use_element_label
-            )
-            for key, value in data.items()
-        }
-    elif isinstance(data, list):  # Shorten arrays/lists to be readable
-        if use_numpy and all([isinstance(value, (int, float)) for value in data]):
-            return np.array([item for item in data])
-        else:
-            return [copy_and_convert_datatypes(item, use_numpy, use_element_label) for item in data]
+    data: dict[str, Submodel]
 
-    elif isinstance(data, np.ndarray):
-        if not use_numpy:
-            return copy_and_convert_datatypes(data.tolist(), use_numpy, use_element_label)
-        elif use_numpy and np.issubdtype(data.dtype, np.number):
-            return data
-        else:
-            logger.critical(
-                f'An np.array with non-numeric content was found: {data=}.It will be converted to a list instead'
-            )
-            return copy_and_convert_datatypes(data.tolist(), use_numpy, use_element_label)
+    def __getitem__(self, name: str) -> Submodel:
+        """Get a submodel by its name."""
+        return self.data[name]

-    elif isinstance(data, TimeSeries):
-        return copy_and_convert_datatypes(data.active_data, use_numpy, use_element_label)
-    elif isinstance(data, TimeSeriesData):
-        return copy_and_convert_datatypes(data.data, use_numpy, use_element_label)
+    def __getattr__(self, name: str) -> Submodel:
+        """Get a submodel by attribute access."""
+        if name in self.data:
+            return self.data[name]
+        raise AttributeError(f"Submodels has no attribute '{name}'")

-    elif isinstance(data, Interface):
-        if use_element_label and isinstance(data, Element):
-            return data.label
-        return data.infos(use_numpy, use_element_label)
-    elif isinstance(data, xr.DataArray):
-        # TODO: This is a temporary basic work around
-        return copy_and_convert_datatypes(data.values, use_numpy, use_element_label)
-    else:
-        raise TypeError(f'copy_and_convert_datatypes() did get unexpected data of type "{type(data)}": {data=}')
+    def __len__(self) -> int:
+        return len(self.data)

+    def __iter__(self) -> Iterator[str]:
+        return iter(self.data)

-def get_compact_representation(data: Any, array_threshold: int = 50, decimals: int = 2) -> Dict:
-    """
-    Generate a compact json serializable representation of deeply nested data.
-    Numpy arrays are statistically described if they exceed a threshold and converted to lists.
+    def __contains__(self, name: str) -> bool:
+        return name in self.data
 
-    Args:
-        data (Any): The data to format and represent.
-        array_threshold (int): Maximum length of NumPy arrays to display. Longer arrays are statistically described.
-        decimals (int): Number of decimal places in which to describe the arrays.
+    def __repr__(self) -> str:
+        """Simple representation of the submodels collection."""
+        if not self.data:
+            return 'flixopt.structure.Submodels:\n----------------------------\n <empty>\n'

-    Returns:
-        Dict: A dictionary representation of the data
-    """
+        total_vars = sum(len(submodel.variables) for submodel in self.data.values())
+        total_cons = sum(len(submodel.constraints) for submodel in self.data.values())

-    def format_np_array_if_found(value: Any) -> Any:
-        """Recursively processes the data, formatting NumPy arrays."""
-        if isinstance(value, (int, float, str, bool, type(None))):
-            return value
-        elif isinstance(value, np.ndarray):
-            return describe_numpy_arrays(value)
-        elif isinstance(value, dict):
-            return {format_np_array_if_found(k): format_np_array_if_found(v) for k, v in value.items()}
-        elif isinstance(value, (list, tuple, set)):
-            return [format_np_array_if_found(v) for v in value]
-        else:
-            logger.warning(
-                f'Unexpected value found when trying to format numpy array numpy array: {type(value)=}; {value=}'
-            )
-            return value
+        title = (
+            f'flixopt.structure.Submodels ({total_vars} vars, {total_cons} constraints, {len(self.data)} submodels):'
+        )
+        underline = '-' * len(title)

-    def describe_numpy_arrays(arr: np.ndarray) -> Union[str, List]:
-        """Shortens NumPy arrays if they exceed the specified length."""
+        if not self.data:
+            return f'{title}\n{underline}\n <empty>\n'
+        sub_models_string = ''
+        for name, submodel in self.data.items():
+            type_name = submodel.__class__.__name__
+            var_count = len(submodel.variables)
+            con_count = len(submodel.constraints)
+            sub_models_string += f'\n * {name} [{type_name}] ({var_count}v/{con_count}c)'
 
-        def normalized_center_of_mass(array: Any) -> float:
-            # position in array (normalized from 0 to 1)
-            positions = np.linspace(0, 1, len(array))  # weights w_i
-            # mass center
-            if np.sum(array) == 0:
-                return np.nan
-            else:
-                return np.sum(positions * array) / np.sum(array)
-
-        if arr.size > array_threshold:  # Calculate basic statistics
-            fmt = f'.{decimals}f'
-            return (
-                f'Array (min={np.min(arr):{fmt}}, max={np.max(arr):{fmt}}, mean={np.mean(arr):{fmt}}, '
-                f'median={np.median(arr):{fmt}}, std={np.std(arr):{fmt}}, len={len(arr)}, '
-                f'center={normalized_center_of_mass(arr):{fmt}})'
-            )
-        else:
-            return np.around(arr, decimals=decimals).tolist()
+        return f'{title}\n{underline}{sub_models_string}\n'
 
-    # Process the data to handle NumPy arrays
-    formatted_data = format_np_array_if_found(copy_and_convert_datatypes(data, use_numpy=True))
+    def items(self) -> ItemsView[str, Submodel]:
+        return self.data.items()

-    return formatted_data
+    def keys(self):
+        return self.data.keys()

+    def values(self):
+        return self.data.values()

-def get_str_representation(data: Any, array_threshold: int = 50, decimals: int = 2) -> str:
-    """
-    Generate a string representation of deeply nested data using `rich.print`.
-    NumPy arrays are shortened to the specified length and converted to strings.
+    def add(self, submodel: Submodel, name: str) -> None:
+        """Add a submodel to the collection."""
+        self.data[name] = submodel

-    Args:
-        data (Any): The data to format and represent.
-        array_threshold (int): Maximum length of NumPy arrays to display. Longer arrays are statistically described.
-        decimals (int): Number of decimal places in which to describe the arrays.
+    def get(self, name: str, default=None):
+        """Get submodel by name, returning default if not found."""
+        return self.data.get(name, default)
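Taken together, the dunder methods above give `Submodels` a dict-like interface with attribute access on top. A usage sketch grounded in the class as shown (assuming it is importable from `flixopt.structure`), with plain strings standing in for real `Submodel` instances since the mapping protocol does not care about value types:

```python
# Usage sketch; the stored values are strings standing in for Submodel objects.
subs = Submodels(data={'Boiler': 'submodel_a', 'Storage': 'submodel_b'})
assert subs['Boiler'] == subs.Boiler == 'submodel_a'  # __getitem__ / __getattr__
assert 'Storage' in subs and len(subs) == 2           # __contains__ / __len__
assert list(subs) == ['Boiler', 'Storage']            # __iter__ yields names
subs.add('submodel_c', name='Heater')                 # stored under the given name
assert subs.get('missing') is None                    # dict-like default
```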
 
-    Returns:
-        str: The formatted string representation of the data.
+
+class ElementModel(Submodel):
+    """
+    Stores the mathematical Variables and Constraints for Elements.
+    ElementModels are directly registered in the main FlowSystemModel
     """

-    formatted_data = get_compact_representation(data, array_threshold, decimals)
+    def __init__(self, model: FlowSystemModel, element: Element):
+        """
+        Args:
+            model: The FlowSystemModel that is used to create the model.
+            element: The element this model is created for.
+        """
+        self.element = element
+        super().__init__(model, label_of_element=element.label_full, label_of_model=element.label_full)
+        self._model.add_submodels(self, short_name=self.label_of_model)

-    # Use Rich to format and print the data
-    with StringIO() as output_buffer:
-        console = Console(file=output_buffer, width=1000)  # Adjust width as needed
-        console.print(Pretty(formatted_data, expand_all=True, indent_guides=True))
-        return output_buffer.getvalue()
+    def results_structure(self):
+        return {
+            'label': self.label_full,
+            'variables': list(self.variables),
+            'constraints': list(self.constraints),
+        }
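Compared to 2.x, `results_structure()` drops the separate `label` / `label_full` pair and returns a single `label` carrying the full label. A hypothetical example of the returned dict (the element label and the name scheme below are invented for illustration):

```python
# Hypothetical shape of the dict returned by results_structure(); the
# element label and the 'Element|name' naming scheme are invented.
structure = {
    'label': 'Boiler',
    'variables': ['Boiler|flow_rate', 'Boiler|on'],
    'constraints': ['Boiler|flow_rate_ub'],
}
assert set(structure) == {'label', 'variables', 'constraints'}
```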