pypsa2smspp 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,63 @@
1

"""
pypsa2smspp package init

Exposes high-level transformation and network correction utilities.
"""

import logging

# Package-level logger shared by all pypsa2smspp modules.
logger = logging.getLogger("pypsa2smspp")
logger.setLevel(logging.INFO)

if not logger.handlers:  # avoid attaching multiple handlers on repeated imports
    console_handler = logging.StreamHandler()
    # Handler lets everything through; filtering is done by the logger level.
    console_handler.setLevel(logging.DEBUG)

    formatter = logging.Formatter(
        '[%(asctime)s] %(levelname)s - %(name)s - %(message)s'
    )
    console_handler.setFormatter(formatter)

    logger.addHandler(console_handler)

    # Optional but recommended in a library: keep records from bubbling up
    # to the root logger so the host application controls its own output.
    logger.propagate = False


# Transformation logic (PyPSA <-> SMS++)
from pypsa2smspp.transformation import Transformation
from pypsa2smspp.transformation_config import TransformationConfig

# PyPSA network correction tools
from pypsa2smspp.network_correction import (
    clean_marginal_cost,
    clean_global_constraints,
    clean_e_sum,
    clean_efficiency_link,
    clean_ciclicity_storage,
    clean_marginal_cost_intermittent,
    clean_storage_units,
    clean_stores,
    parse_txt_file,
    clean_p_min_pu,
    one_bus_network,
)

# Public API of the package.
__all__ = [
    "Transformation",
    "TransformationConfig",
    # network correction tools
    "clean_marginal_cost",
    "clean_global_constraints",
    "clean_e_sum",
    "clean_efficiency_link",
    "clean_ciclicity_storage",
    "clean_marginal_cost_intermittent",
    "clean_storage_units",
    "clean_stores",
    "parse_txt_file",
    "clean_p_min_pu",
    "one_bus_network",
]
@@ -0,0 +1,59 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Thu Jun 26 15:50:01 2025
4
+
5
+ @author: aless
6
+ """
7
+
8
+ """
9
+ constants.py
10
+
11
+ This module contains structural constants used throughout the PyPSA2SMSpp
12
+ transformation process. These values define generic mappings and categories
13
+ used repeatedly in the Transformation class and related utilities.
14
+
15
+ They are not meant to be modified by the user and do not depend on any
16
+ specific instance or configuration of the network.
17
+ """
18
+
19
# Shorthand dimension label -> full SMS++ dimension name.
# The literal key "1" denotes a fixed singleton dimension.
conversion_dict = {
    "T": "TimeHorizon",                     # number of time steps
    "NU": "NumberUnits",
    "NE": "NumberElectricalGenerators",
    "N": "NumberNodes",
    "L": "NumberLines",
    "Li": "Links",
    "NA": "NumberArcs",
    "NR": "NumberReservoirs",
    "NP": "TotalNumberPieces",
    "Nass": "NumAssets",
    "NB": "NumberBranches",
    "NDL": "NumberDesignLines",
    "NDLL": "NumberDesignLines_lines",      # design lines that are Lines
    "NDLLi": "NumberDesignLines_links",     # design lines that are Links
    "1": 1,                                 # singleton dimension
}
37
+
38
# PyPSA component type -> name of the attribute holding its nominal capacity.
nominal_attrs = {
    "Generator": "p_nom",
    "Line": "s_nom",
    "Transformer": "s_nom",
    "Link": "p_nom",
    "Store": "e_nom",        # energy (not power) capacity
    "StorageUnit": "p_nom",
}
47
+
48
# Carrier names treated as renewable, i.e. mapped to IntermittentUnitBlocks.
# Order is preserved from the original definition.
renewable_carriers = [
    "solar", "solar-hsat",                                   # solar variants
    "onwind", "offwind-ac", "offwind-dc", "offwind-float",   # wind variants
    "PV", "wind",                                            # generic aliases
    "ror",                                                   # run-of-river hydro
]
@@ -0,0 +1,22 @@
1
+ transformation:
2
+ merge_links: true
3
+ expansion_ucblock: false # whether to embed capacity expansion in the UCBlock; TODO: consolidate with run.mode (ucblock | investmentblock)
4
+ max_hours_stores: 1
5
+
6
+ run:
7
+ mode: investmentblock # ucblock | investmentblock
8
+
9
+ smspp:
10
+ ucblock:
11
+ output_prefix: uc
12
+ template: UCBlock/uc_solverconfig_grb
13
+ investmentblock:
14
+ output_prefix: inv
15
+ template: InvestmentBlock/BSPar.txt
16
+ inner_block_name: InvestmentBlock
17
+ log_executable_call: true
18
+
19
+ io:
20
+ workdir: output
21
+ name: test_case
22
+ overwrite: true
Binary file
pypsa2smspp/inverse.py ADDED
@@ -0,0 +1,143 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Thu Jun 26 15:55:15 2025
4
+
5
+ @author: aless
6
+ """
7
+
8
+ """
9
+ inverse.py
10
+
11
+ This module handles the inverse transformation from an SMS++ solution
12
+ back into a PyPSA-compatible xarray.Dataset. It reconstructs time-series
13
+ and scalar variables from SMS unit blocks and prepares them for use with
14
+ assign_solution in PyPSA.
15
+ """
16
+
17
+ import numpy as np
18
+ import xarray as xr
19
+
20
+
21
def normalize_key(key: str) -> str:
    """Return *key* lower-cased with every single space turned into an underscore."""
    lowered = key.lower()
    # Splitting on the explicit separator keeps consecutive spaces one-to-one
    # with underscores, exactly like str.replace(" ", "_").
    return "_".join(lowered.split(" "))
26
+
27
+
28
def component_definition(n, unit_block: dict) -> str:
    """
    Map an SMS++ unit block type to the corresponding PyPSA component name.

    Raises
    ------
    ValueError
        If the block type is not recognized.
    """
    block = unit_block['block']

    # Fixed mappings that do not depend on the network.
    fixed = {
        "IntermittentUnitBlock": "Generator",
        "ThermalUnitBlock": "Generator",
        "SlackUnitBlock": "Generator",
        "HydroUnitBlock": "StorageUnit",
        "DCNetworkBlock_lines": "Line",
        "DCNetworkBlock_links": "Link",
    }
    if block in fixed:
        return fixed[block]

    if block == "BatteryUnitBlock":
        # A battery is a StorageUnit when its name appears among the
        # network's storage units, otherwise it is a Store.
        return "StorageUnit" if unit_block['name'] in n.storage_units.index else "Store"

    raise ValueError(f"Unknown unit block type: {block}")
50
+
51
+
52
def evaluate_function(func, normalized_keys, unit_block, df):
    """
    Call an inverse-transformation function, resolving its arguments.

    Each positional parameter of ``func`` is normalized and looked up first
    among the unit-block keys (via ``normalized_keys``, which maps normalized
    names back to the original block keys) and, failing that, in the row of
    the component dataframe ``df`` belonging to this unit.
    """
    code = func.__code__
    resolved = []
    for raw_name in code.co_varnames[:code.co_argcount]:
        name = normalize_key(raw_name)
        if name in normalized_keys:
            resolved.append(unit_block[normalized_keys[name]])
        else:
            # Fall back to the static component data for this unit.
            resolved.append(df.loc[unit_block['name']][name])
    return func(*resolved)
65
+
66
+
67
def dataarray_components(n, value, component, unit_block, key):
    """
    Prepare (value, dims, coords, var_name) for an xarray.DataArray that is
    compliant with PyPSA 1.0 assign_solution():
      - static per-unit values get dims ('name',)
      - time series get dims ('snapshot', 'name')
    """
    # Unmask masked arrays; masked scalars become NaN.
    if isinstance(value, np.ma.MaskedArray):
        value = value.filled(np.nan)

    value = np.asarray(value)
    unit_name = unit_block["name"]
    n_steps = len(n.snapshots)

    static_dims = ["name"]
    static_coords = {"name": [unit_name]}
    series_dims = ["snapshot", "name"]
    series_coords = {"snapshot": n.snapshots, "name": [unit_name]}

    if value.ndim == 0:
        # Scalar -> one value for this unit.
        value, dims, coords = value.reshape(1), static_dims, static_coords

    elif value.ndim == 1:
        if len(value) == n_steps:
            # (T,) -> (T, 1) time series.
            value, dims, coords = value[:, np.newaxis], series_dims, series_coords
        else:
            # Non-temporal vector -> collapse to a single per-unit value.
            # NOTE(review): multi-element vectors are summed here — confirm
            # upstream that summing is the intended aggregation.
            value = np.array([value.sum()]) if value.size > 1 else value.reshape(1)
            dims, coords = static_dims, static_coords

    elif value.ndim == 2:
        if value.shape == (n_steps, 1):
            dims, coords = series_dims, series_coords
        elif value.shape == (1, n_steps):
            value = value.T
            dims, coords = series_dims, series_coords
        else:
            raise ValueError(f"Unsupported shape for variable {key}: {value.shape}")
    else:
        raise ValueError(f"Unsupported ndim for variable {key}: {value.ndim}")

    var_name = f"{component}-{key}"  # e.g., 'Generator-p', 'Store-e_nom', 'Link-p'
    return value, dims, coords, var_name
115
+
116
+
117
+
118
def block_to_dataarrays(n, unit_name, unit_block, component, config) -> dict:
    """
    Convert one SMS++ unit block into a dict of xarray.DataArrays keyed by
    '<Component>-<variable>' names, using the inverse mapping stored on
    ``config`` for this block type.
    """
    inverse_attr = f"{unit_block['block']}_inverse"
    if not hasattr(config, inverse_attr):
        print(f"Block {unit_block['block']} not yet implemented")
        return {}
    unitblock_parameters = getattr(config, inverse_attr)

    normalized_keys = {normalize_key(k): k for k in unit_block}
    # Static dataframe of the PyPSA component this block maps onto.
    df = getattr(n, config.component_mapping[component])

    converted = {}
    for key, func in unitblock_parameters.items():
        if not callable(func):
            continue
        value = evaluate_function(func, normalized_keys, unit_block, df)
        # Collapse genuine 2-D matrices (both dims > 1) over axis 0 before
        # shaping, matching the per-unit aggregation done downstream.
        if isinstance(value, np.ndarray) and value.ndim == 2 and min(value.shape) > 1:
            value = value.sum(axis=0)
        value, dims, coords, var_name = dataarray_components(n, value, component, unit_block, key)
        converted[var_name] = xr.DataArray(value, dims=dims, coords=coords, name=var_name)

    return converted
143
+
@@ -0,0 +1,233 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Thu Jun 26 15:57:30 2025
4
+
5
+ @author: aless
6
+ """
7
+
8
+ """
9
+ io_parser.py
10
+
11
+ This module handles the parsing of SMS++ output files (both .txt and NetCDF formats)
12
+ and prepares data structures that can be used to populate the Transformation class
13
+ or to re-assign results into a PyPSA network.
14
+
15
+ It includes:
16
+ - parsing unit blocks from .txt file
17
+ - parsing solution objects from SMSNetwork
18
+ - conversion of parsed data into xarray or PyPSA structures
19
+ """
20
+
21
+ import numpy as np
22
+ import re
23
+ import xarray as xr
24
+
25
+
26
def parse_txt_to_unitblocks(file_path: str, unitblocks: dict) -> None:
    """
    Parse an SMS++ textual solution file and populate ``unitblocks`` in place.

    The file contains block headers such as ``ThermalUnitBlock 3`` followed by
    variable lines like ``ActivePower = [1.0 2.0]`` or indexed variants such
    as ``Flow [0] = [...]``. ``Elapsed time: ... s`` lines are skipped.

    Parameters
    ----------
    file_path : str
        Path to the text file.
    unitblocks : dict
        Dictionary of unitblocks to populate with parsed data. Keys of the
        form ``<BlockType>_<number>`` must already exist for every block
        encountered in the file.
    """
    # Compile patterns once instead of on every line (they were previously
    # re-evaluated per line inside the loop).
    time_re = re.compile(r"Elapsed time:\s*([\deE\+\.-]+)\s*s")
    block_re = re.compile(r"(ThermalUnitBlock|BatteryUnitBlock|IntermittentUnitBlock|HydroUnitBlock)\s*(\d+)")
    value_re = re.compile(r"([\w\s]+?)(?:\s*\[(\d+)\])?\s+=\s+\[([^\]]*)\]")

    current_block_key = None  # key of the block whose variables we are reading

    with open(file_path, "r") as file:
        for line in file:
            if time_re.search(line):
                continue  # Skip timing info

            block_match = block_re.search(line)
            if block_match:
                block_type, number = block_match.groups()
                number = int(number)
                current_block_key = f"{block_type}_{number}"
                unitblocks[current_block_key]["block"] = block_type
                unitblocks[current_block_key]["enumerate"] = number
                continue

            match = value_re.match(line)
            if match and current_block_key:
                key_base, sub_index, values = match.groups()
                key_base = key_base.strip()
                values_array = np.array([float(x) for x in values.split()])

                if sub_index is not None:
                    # Indexed entry, e.g. "Flow [2] = [...]": keep a dict of
                    # index -> array; promote a previously unindexed value to
                    # index 0 so nothing is lost.
                    sub_index = int(sub_index)
                    blk = unitblocks[current_block_key]
                    if key_base in blk and not isinstance(blk[key_base], dict):
                        blk[key_base] = {0: blk[key_base]}
                    if key_base not in blk:
                        blk[key_base] = {}
                    blk[key_base][sub_index] = values_array
                else:
                    unitblocks[current_block_key][key_base] = values_array
71
+
72
+
73
def assign_design_variables_to_unitblocks(unitblocks, block_names_investment, design_vars):
    """
    Attach design-variable values to the unitblocks that received investments.

    Parameters
    ----------
    unitblocks : dict
        Dictionary of unitblocks, modified in place.
    block_names_investment : list of str
        Names of the unitblocks that received investments.
    design_vars : np.ndarray
        Design variable values, aligned with ``block_names_investment``.

    Raises
    ------
    ValueError
        If the number of design variables differs from the number of blocks.
    KeyError
        If an investment block name is missing from ``unitblocks``.
    """
    if len(block_names_investment) != len(design_vars):
        raise ValueError("Mismatch between design variables and investment blocks")

    for name, design_value in zip(block_names_investment, design_vars):
        if name not in unitblocks:
            raise KeyError(f"DesignVariable refers to unknown unitblock '{name}'")
        unitblocks[name]["DesignVariable"] = design_value
98
+
99
+
100
def split_merged_dcnetworkblocks(unitblocks, delimiter="__", reuse_index_for_first=True, logger=print):
    """
    Split merged DCNetworkBlock_* entries of a unitblocks dict into two blocks.

    A merged block carries a name of the form ``<charge>__<discharge>``; its
    FlowValue is separated into a non-negative charging flow (positive part)
    and a non-negative discharging flow (negated negative part), one per
    resulting block.

    Parameters
    ----------
    unitblocks : dict
        Dictionary of unitblocks (as built by parse_solution_to_unitblocks).
    delimiter : str, default="__"
        String that separates the two original link names in the merged name.
    reuse_index_for_first : bool, default=True
        If True, the first split block keeps the original index
        (e.g., DCNetworkBlock_7) and the second gets a fresh index.
        If False, the merged entry is removed and both blocks get fresh indices.
    logger : callable, default=print
        Logging function.

    Returns
    -------
    dict
        The same dictionary, with merged blocks split in place.
    """

    def fresh_index():
        # Smallest integer strictly greater than every trailing _<digits> index.
        used = [
            int(kk.split("_")[-1])
            for kk in unitblocks
            if "_" in kk and kk.split("_")[-1].isdigit()
        ]
        return (max(used) + 1) if used else 0

    merged_keys = [
        k
        for k in list(unitblocks)
        if k.startswith("DCNetworkBlock_")
        and isinstance(unitblocks[k].get("name", ""), str)
        and delimiter in unitblocks[k].get("name", "")
    ]

    if not merged_keys:
        logger("[split] No merged DCNetworkBlock entries found; nothing to do.")
        return unitblocks

    for k in merged_keys:
        blk = unitblocks[k]
        merged_name = blk.get("name", "")
        pieces = merged_name.split(delimiter)
        if len(pieces) != 2:
            logger(f"[split] Skipping '{k}' because name does not split cleanly: {merged_name}")
            continue
        name_ch, name_dis = pieces[0].strip(), pieces[1].strip()

        flow = blk.get("FlowValue", None)
        if flow is None:
            logger(f"[split] Block '{k}' has no FlowValue; skipping.")
            continue

        charge_block = dict(blk)
        charge_block["name"] = name_ch
        charge_block["FlowValue"] = np.maximum(flow, 0.0)

        discharge_block = dict(blk)
        discharge_block["name"] = name_dis
        discharge_block["FlowValue"] = np.maximum(-flow, 0.0)

        if reuse_index_for_first:
            # First block recycles the original index; second gets a fresh one.
            idx_first = int(k.split("_")[-1])
            idx_second = fresh_index()
        else:
            del unitblocks[k]
            idx_first = fresh_index()
            idx_second = idx_first + 1

        charge_block["enumerate"] = f"UnitBlock_{idx_first}"
        discharge_block["enumerate"] = f"UnitBlock_{idx_second}"
        unitblocks[f"DCNetworkBlock_{idx_first}"] = charge_block
        unitblocks[f"DCNetworkBlock_{idx_second}"] = discharge_block

        logger(f"[split] '{k}' -> '{name_ch}' + '{name_dis}'")

    return unitblocks
198
+
199
+
200
class FakeVariable:
    """
    A dummy wrapper used to emulate PyPSA-style model.variable.solution attributes.

    It exposes only the ``solution`` attribute, which is what downstream
    solution-assignment code reads.
    """
    def __init__(self, solution):
        # solution: the solved values for one variable (an xarray object
        # in the intended usage — see prepare_solution).
        self.solution = solution
206
+
207
+
208
def prepare_solution(n, ds: xr.Dataset, objective_smspp: float) -> None:
    """
    Attach a fake PyPSA-compatible model to ``n`` that wraps the SMS++ solution.

    Parameters
    ----------
    n : pypsa.Network
        The original PyPSA network; modified in place via ``n._model``.
    ds : xarray.Dataset
        The solution dataset; each data variable becomes a FakeVariable.
    objective_smspp : float
        Objective value reported by SMS++.

    Returns
    -------
    None (modifies n in place)
    """
    def _bare(type_name):
        # Build an empty attribute container mimicking a model sub-object.
        return type(type_name, (), {})()

    model = _bare("FakeModel")
    model.variables = {name: FakeVariable(solution=dataarray) for name, dataarray in ds.items()}

    model.parameters = _bare("FakeParameters")
    model.parameters.snapshots = xr.DataArray(n.snapshots, dims=["snapshot"])

    model.constraints = _bare("FakeConstraints")
    model.constraints.snapshots = xr.DataArray(n.snapshots, dims=["snapshot"])

    model.objective = _bare("FakeObjective")
    model.objective.value = objective_smspp

    n._model = model