flixopt 3.1.1__py3-none-any.whl → 3.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flixopt might be problematic. Click here for more details.
- flixopt/aggregation.py +13 -4
- flixopt/calculation.py +2 -3
- flixopt/color_processing.py +261 -0
- flixopt/config.py +59 -4
- flixopt/flow_system.py +5 -3
- flixopt/interface.py +2 -1
- flixopt/io.py +239 -22
- flixopt/plotting.py +583 -789
- flixopt/results.py +445 -56
- flixopt/structure.py +1 -3
- {flixopt-3.1.1.dist-info → flixopt-3.2.0.dist-info}/METADATA +2 -2
- flixopt-3.2.0.dist-info/RECORD +26 -0
- flixopt/utils.py +0 -86
- flixopt-3.1.1.dist-info/RECORD +0 -26
- {flixopt-3.1.1.dist-info → flixopt-3.2.0.dist-info}/WHEEL +0 -0
- {flixopt-3.1.1.dist-info → flixopt-3.2.0.dist-info}/licenses/LICENSE +0 -0
- {flixopt-3.1.1.dist-info → flixopt-3.2.0.dist-info}/top_level.txt +0 -0
flixopt/io.py
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
-
import importlib.util
|
|
4
3
|
import json
|
|
5
4
|
import logging
|
|
6
5
|
import pathlib
|
|
7
6
|
import re
|
|
8
7
|
from dataclasses import dataclass
|
|
9
|
-
from typing import TYPE_CHECKING,
|
|
8
|
+
from typing import TYPE_CHECKING, Any
|
|
10
9
|
|
|
10
|
+
import numpy as np
|
|
11
11
|
import xarray as xr
|
|
12
12
|
import yaml
|
|
13
13
|
|
|
@@ -34,7 +34,235 @@ def remove_none_and_empty(obj):
|
|
|
34
34
|
return obj
|
|
35
35
|
|
|
36
36
|
|
|
37
|
-
def
|
|
37
|
+
def round_nested_floats(obj: dict | list | float | int | Any, decimals: int = 2) -> dict | list | float | int | Any:
|
|
38
|
+
"""Recursively round floating point numbers in nested data structures and convert it to python native types.
|
|
39
|
+
|
|
40
|
+
This function traverses nested data structures (dictionaries, lists) and rounds
|
|
41
|
+
any floating point numbers to the specified number of decimal places. It handles
|
|
42
|
+
various data types including NumPy arrays and xarray DataArrays by converting
|
|
43
|
+
them to lists with rounded values.
|
|
44
|
+
|
|
45
|
+
Args:
|
|
46
|
+
obj: The object to process. Can be a dict, list, float, int, numpy.ndarray,
|
|
47
|
+
xarray.DataArray, or any other type.
|
|
48
|
+
decimals (int, optional): Number of decimal places to round to. Defaults to 2.
|
|
49
|
+
|
|
50
|
+
Returns:
|
|
51
|
+
The processed object with the same structure as the input, but with all floating point numbers rounded to the specified precision. NumPy arrays and xarray DataArrays are converted to lists.
|
|
52
|
+
|
|
53
|
+
Examples:
|
|
54
|
+
>>> data = {'a': 3.14159, 'b': [1.234, 2.678]}
|
|
55
|
+
>>> round_nested_floats(data, decimals=2)
|
|
56
|
+
{'a': 3.14, 'b': [1.23, 2.68]}
|
|
57
|
+
|
|
58
|
+
>>> import numpy as np
|
|
59
|
+
>>> arr = np.array([1.234, 5.678])
|
|
60
|
+
>>> round_nested_floats(arr, decimals=1)
|
|
61
|
+
[1.2, 5.7]
|
|
62
|
+
"""
|
|
63
|
+
if isinstance(obj, dict):
|
|
64
|
+
return {k: round_nested_floats(v, decimals) for k, v in obj.items()}
|
|
65
|
+
elif isinstance(obj, list):
|
|
66
|
+
return [round_nested_floats(v, decimals) for v in obj]
|
|
67
|
+
elif isinstance(obj, np.floating):
|
|
68
|
+
return round(float(obj), decimals)
|
|
69
|
+
elif isinstance(obj, np.integer):
|
|
70
|
+
return int(obj)
|
|
71
|
+
elif isinstance(obj, np.bool_):
|
|
72
|
+
return bool(obj)
|
|
73
|
+
elif isinstance(obj, float):
|
|
74
|
+
return round(obj, decimals)
|
|
75
|
+
elif isinstance(obj, int):
|
|
76
|
+
return obj
|
|
77
|
+
elif isinstance(obj, np.ndarray):
|
|
78
|
+
return np.round(obj, decimals).tolist()
|
|
79
|
+
elif isinstance(obj, xr.DataArray):
|
|
80
|
+
return obj.round(decimals).values.tolist()
|
|
81
|
+
return obj
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# ============================================================================
|
|
85
|
+
# Centralized JSON and YAML I/O Functions
|
|
86
|
+
# ============================================================================
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def load_json(path: str | pathlib.Path) -> dict | list:
|
|
90
|
+
"""
|
|
91
|
+
Load data from a JSON file.
|
|
92
|
+
|
|
93
|
+
Args:
|
|
94
|
+
path: Path to the JSON file.
|
|
95
|
+
|
|
96
|
+
Returns:
|
|
97
|
+
Loaded data (typically dict or list).
|
|
98
|
+
|
|
99
|
+
Raises:
|
|
100
|
+
FileNotFoundError: If the file does not exist.
|
|
101
|
+
json.JSONDecodeError: If the file is not valid JSON.
|
|
102
|
+
"""
|
|
103
|
+
path = pathlib.Path(path)
|
|
104
|
+
with open(path, encoding='utf-8') as f:
|
|
105
|
+
return json.load(f)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def save_json(
|
|
109
|
+
data: dict | list,
|
|
110
|
+
path: str | pathlib.Path,
|
|
111
|
+
indent: int = 4,
|
|
112
|
+
ensure_ascii: bool = False,
|
|
113
|
+
**kwargs,
|
|
114
|
+
) -> None:
|
|
115
|
+
"""
|
|
116
|
+
Save data to a JSON file with consistent formatting.
|
|
117
|
+
|
|
118
|
+
Args:
|
|
119
|
+
data: Data to save (dict or list).
|
|
120
|
+
path: Path to save the JSON file.
|
|
121
|
+
indent: Number of spaces for indentation (default: 4).
|
|
122
|
+
ensure_ascii: If False, allow Unicode characters (default: False).
|
|
123
|
+
**kwargs: Additional arguments to pass to json.dump().
|
|
124
|
+
"""
|
|
125
|
+
path = pathlib.Path(path)
|
|
126
|
+
with open(path, 'w', encoding='utf-8') as f:
|
|
127
|
+
json.dump(data, f, indent=indent, ensure_ascii=ensure_ascii, **kwargs)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def load_yaml(path: str | pathlib.Path) -> dict | list:
    """
    Read and parse a YAML file using the safe loader.

    Args:
        path: Location of the YAML file on disk.

    Returns:
        The parsed content (typically dict or list). Empty files yield {}
        instead of None.

    Raises:
        FileNotFoundError: If no file exists at the given path.
        yaml.YAMLError: If the file content is not valid YAML.
    """
    with pathlib.Path(path).open(encoding='utf-8') as stream:
        parsed = yaml.safe_load(stream)
    return parsed or {}
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _load_yaml_unsafe(path: str | pathlib.Path) -> dict | list:
    """
    INTERNAL: Load YAML allowing arbitrary tags. Do not use on untrusted input.

    Exists solely to read internally-generated files that may carry custom
    YAML tags; never point it at user-provided files.

    Args:
        path: Location of the YAML file on disk.

    Returns:
        The parsed content (typically dict or list). Empty files yield {}.
    """
    with pathlib.Path(path).open(encoding='utf-8') as stream:
        parsed = yaml.unsafe_load(stream)
    return parsed or {}
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def save_yaml(
    data: dict | list,
    path: str | pathlib.Path,
    indent: int = 4,
    width: int = 1000,
    allow_unicode: bool = True,
    sort_keys: bool = False,
    **kwargs,
) -> None:
    """
    Write data to a YAML file with consistent formatting.

    Args:
        data: Data to serialize (dict or list).
        path: Destination path for the YAML file.
        indent: Number of spaces used for indentation (default: 4).
        width: Maximum line width before wrapping (default: 1000).
        allow_unicode: If True, Unicode characters are written as-is (default: True).
        sort_keys: If True, dictionary keys are emitted sorted (default: False).
        **kwargs: Forwarded verbatim to yaml.safe_dump().
    """
    target = pathlib.Path(path)
    with target.open('w', encoding='utf-8') as stream:
        yaml.safe_dump(
            data,
            stream,
            indent=indent,
            width=width,
            allow_unicode=allow_unicode,
            sort_keys=sort_keys,
            default_flow_style=False,  # block style for mappings
            **kwargs,
        )
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def load_config_file(path: str | pathlib.Path) -> dict:
    """
    Load a configuration file, automatically detecting JSON or YAML format.

    This function intelligently tries to load the file based on its extension,
    with fallback support if the primary format fails.

    Supported extensions:
        - .json: Tries JSON first, falls back to YAML
        - .yaml, .yml: Tries YAML first, falls back to JSON
        - Others: Tries YAML, then JSON (no warning on fallback)

    Args:
        path: Path to the configuration file.

    Returns:
        Loaded configuration as a dictionary.

    Raises:
        FileNotFoundError: If the file does not exist.
        ValueError: If neither JSON nor YAML parsing succeeds.
    """
    path = pathlib.Path(path)

    if not path.exists():
        raise FileNotFoundError(f'Configuration file not found: {path}')

    # Normalize extension to lowercase for case-insensitive matching
    suffix = path.suffix.lower()

    if suffix == '.json':
        return _load_config_with_fallback(path, primary='JSON', warn=True)
    elif suffix in ['.yaml', '.yml']:
        return _load_config_with_fallback(path, primary='YAML', warn=True)
    else:
        # Unknown extension, try YAML first (more common for config)
        return _load_config_with_fallback(path, primary='YAML', warn=False)


def _load_config_with_fallback(path: pathlib.Path, primary: str, warn: bool) -> dict:
    """Parse *path* with the primary format, falling back to the other one.

    Args:
        path: Path to the configuration file (must exist).
        primary: Format to try first, 'JSON' or 'YAML'.
        warn: If True, log a warning when falling back to the other format.

    Returns:
        Loaded configuration.

    Raises:
        ValueError: If both parsers fail (chained from the fallback's error).
    """
    # Map each format name to its loader and the parse error it raises.
    loaders = {
        'JSON': (load_json, json.JSONDecodeError),
        'YAML': (load_yaml, yaml.YAMLError),
    }
    fallback = 'YAML' if primary == 'JSON' else 'JSON'
    load_primary, primary_error = loaders[primary]
    load_fallback, fallback_error = loaders[fallback]

    try:
        return load_primary(path)
    except primary_error:
        if warn:
            logger.warning(f'Failed to parse {path} as {primary}, trying {fallback}')
        try:
            return load_fallback(path)
        except fallback_error as e:
            raise ValueError(f'Failed to parse {path} as {primary} or {fallback}') from e
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def _save_yaml_multiline(data, output_file='formatted_output.yaml'):
|
|
38
266
|
"""
|
|
39
267
|
Save dictionary data to YAML with proper multi-line string formatting.
|
|
40
268
|
Handles complex string patterns including backticks, special characters,
|
|
@@ -62,14 +290,14 @@ def _save_to_yaml(data, output_file='formatted_output.yaml'):
|
|
|
62
290
|
# Use plain style for simple strings
|
|
63
291
|
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
|
|
64
292
|
|
|
65
|
-
# Add the string representer to SafeDumper
|
|
66
|
-
yaml.add_representer(str, represent_str, Dumper=yaml.SafeDumper)
|
|
67
|
-
|
|
68
293
|
# Configure dumper options for better formatting
|
|
69
294
|
class CustomDumper(yaml.SafeDumper):
|
|
70
295
|
def increase_indent(self, flow=False, indentless=False):
|
|
71
296
|
return super().increase_indent(flow, False)
|
|
72
297
|
|
|
298
|
+
# Bind representer locally to CustomDumper to avoid global side effects
|
|
299
|
+
CustomDumper.add_representer(str, represent_str)
|
|
300
|
+
|
|
73
301
|
# Write to file with settings that ensure proper formatting
|
|
74
302
|
with open(output_file, 'w', encoding='utf-8') as file:
|
|
75
303
|
yaml.dump(
|
|
@@ -80,7 +308,7 @@ def _save_to_yaml(data, output_file='formatted_output.yaml'):
|
|
|
80
308
|
default_flow_style=False, # Use block style for mappings
|
|
81
309
|
width=1000, # Set a reasonable line width
|
|
82
310
|
allow_unicode=True, # Support Unicode characters
|
|
83
|
-
indent=
|
|
311
|
+
indent=4, # Set consistent indentation
|
|
84
312
|
)
|
|
85
313
|
|
|
86
314
|
|
|
@@ -190,7 +418,7 @@ def document_linopy_model(model: linopy.Model, path: pathlib.Path | None = None)
|
|
|
190
418
|
if path is not None:
|
|
191
419
|
if path.suffix not in ['.yaml', '.yml']:
|
|
192
420
|
raise ValueError(f'Invalid file extension for path {path}. Only .yaml and .yml are supported')
|
|
193
|
-
|
|
421
|
+
_save_yaml_multiline(documentation, str(path))
|
|
194
422
|
|
|
195
423
|
return documentation
|
|
196
424
|
|
|
@@ -199,7 +427,6 @@ def save_dataset_to_netcdf(
|
|
|
199
427
|
ds: xr.Dataset,
|
|
200
428
|
path: str | pathlib.Path,
|
|
201
429
|
compression: int = 0,
|
|
202
|
-
engine: Literal['netcdf4', 'scipy', 'h5netcdf'] = 'h5netcdf',
|
|
203
430
|
) -> None:
|
|
204
431
|
"""
|
|
205
432
|
Save a dataset to a netcdf file. Store all attrs as JSON strings in 'attrs' attributes.
|
|
@@ -216,16 +443,6 @@ def save_dataset_to_netcdf(
|
|
|
216
443
|
if path.suffix not in ['.nc', '.nc4']:
|
|
217
444
|
raise ValueError(f'Invalid file extension for path {path}. Only .nc and .nc4 are supported')
|
|
218
445
|
|
|
219
|
-
apply_encoding = False
|
|
220
|
-
if compression != 0:
|
|
221
|
-
if importlib.util.find_spec(engine) is not None:
|
|
222
|
-
apply_encoding = True
|
|
223
|
-
else:
|
|
224
|
-
logger.warning(
|
|
225
|
-
f'Dataset was exported without compression due to missing dependency "{engine}".'
|
|
226
|
-
f'Install {engine} via `pip install {engine}`.'
|
|
227
|
-
)
|
|
228
|
-
|
|
229
446
|
ds = ds.copy(deep=True)
|
|
230
447
|
ds.attrs = {'attrs': json.dumps(ds.attrs)}
|
|
231
448
|
|
|
@@ -242,9 +459,9 @@ def save_dataset_to_netcdf(
|
|
|
242
459
|
ds.to_netcdf(
|
|
243
460
|
path,
|
|
244
461
|
encoding=None
|
|
245
|
-
if
|
|
462
|
+
if compression == 0
|
|
246
463
|
else {data_var: {'zlib': True, 'complevel': compression} for data_var in ds.data_vars},
|
|
247
|
-
engine=
|
|
464
|
+
engine='netcdf4',
|
|
248
465
|
)
|
|
249
466
|
|
|
250
467
|
|
|
@@ -258,7 +475,7 @@ def load_dataset_from_netcdf(path: str | pathlib.Path) -> xr.Dataset:
|
|
|
258
475
|
Returns:
|
|
259
476
|
Dataset: Loaded dataset with restored attrs.
|
|
260
477
|
"""
|
|
261
|
-
ds = xr.load_dataset(str(path), engine='
|
|
478
|
+
ds = xr.load_dataset(str(path), engine='netcdf4')
|
|
262
479
|
|
|
263
480
|
# Restore Dataset attrs
|
|
264
481
|
if 'attrs' in ds.attrs:
|