pyconvexity-0.4.6.post1-py3-none-any.whl → pyconvexity-0.4.8.post1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyconvexity/__init__.py +15 -0
- pyconvexity/_version.py +1 -1
- pyconvexity/dashboard.py +265 -0
- pyconvexity/data/schema/01_core_schema.sql +1 -1
- pyconvexity/data/schema/03_validation_data.sql +2 -2
- pyconvexity/solvers/pypsa/__init__.py +6 -0
- pyconvexity/solvers/pypsa/clearing_price.py +678 -0
- pyconvexity/solvers/pypsa/storage.py +98 -0
- {pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/METADATA +1 -1
- {pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/RECORD +12 -10
- {pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/top_level.txt +0 -0
pyconvexity/__init__.py CHANGED
@@ -96,6 +96,15 @@ from pyconvexity.timeseries import (
     numpy_to_timeseries,
 )
 
+# Dashboard configuration for Convexity app
+from pyconvexity.dashboard import (
+    DashboardConfig,
+    set_dashboard_config,
+    get_dashboard_config,
+    clear_dashboard_config,
+    auto_layout,
+)
+
 # High-level API functions
 __all__ = [
     # Version info
@@ -171,6 +180,12 @@ __all__ = [
     "get_multiple_timeseries",
     "timeseries_to_numpy",
     "numpy_to_timeseries",
+    # Dashboard configuration
+    "DashboardConfig",
+    "set_dashboard_config",
+    "get_dashboard_config",
+    "clear_dashboard_config",
+    "auto_layout",
 ]
 
 # Data module imports
pyconvexity/_version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.4.6.post1"
+__version__ = "0.4.8.post1"
pyconvexity/dashboard.py ADDED
@@ -0,0 +1,265 @@
"""
Dashboard configuration for Convexity app visualization.

Allows programmatic configuration of the analytics dashboard layout
that will be displayed when the model is loaded in the Convexity app.

Example:
    >>> from pyconvexity.dashboard import set_dashboard_config, DashboardConfig, auto_layout
    >>>
    >>> charts = [
    ...     {
    ...         "id": "dispatch-1",
    ...         "title": "Generation by Carrier",
    ...         "visible": True,
    ...         "view": {
    ...             "timeseries": {
    ...                 "component": "Generator",
    ...                 "attribute": "p",
    ...                 "group_by": "carrier"
    ...             }
    ...         }
    ...     },
    ...     {
    ...         "id": "lmp-1",
    ...         "title": "Locational Marginal Prices",
    ...         "visible": True,
    ...         "view": {
    ...             "timeseries": {
    ...                 "component": "Bus",
    ...                 "attribute": "marginal_price"
    ...             }
    ...         }
    ...     }
    ... ]
    >>>
    >>> config = DashboardConfig(charts=charts, layout=auto_layout(charts))
    >>> set_dashboard_config(conn, config)
"""

import json
import sqlite3
from dataclasses import dataclass, field, asdict
from typing import Any, Dict, List, Optional


@dataclass
class DashboardConfig:
    """
    Dashboard configuration for the Convexity app analytics view.

    Attributes:
        charts: List of chart configurations. Each chart is a dict with:
            - id: Unique identifier (e.g., "dispatch-1", "lmp-main")
            - title: Display title
            - visible: Whether chart is shown
            - view: Chart type configuration (see below)
            - filters: Optional chart-specific filters

        layout: List of layout positions. Each position is a dict with:
            - i: Chart ID (must match a chart's id)
            - x: Grid column (0-11)
            - y: Grid row
            - w: Width in grid units (max 12)
            - h: Height in grid units

        selected_scenario_id: Pre-selected scenario ID (optional)
        selected_ensemble_name: Pre-selected ensemble name (optional)
        selected_bus_id: Pre-selected bus ID for filtering (optional)

    Chart View Types:
        Timeseries (dispatch, LMP, etc.):
            {"timeseries": {"component": "Generator", "attribute": "p", "group_by": "carrier"}}
            {"timeseries": {"component": "Bus", "attribute": "marginal_price"}}
            {"timeseries": {"component": "Load", "attribute": "p"}}

        Network map:
            {"network": {"network": true}}

        Statistics/Summary:
            {"statistic": {"statistic": "optimal_capacity", "metric": "capacity"}}
            {"statistic": {"statistic": "total_cost"}}
    """
    charts: List[Dict[str, Any]]
    layout: List[Dict[str, Any]]
    selected_scenario_id: Optional[int] = None
    selected_ensemble_name: Optional[str] = None
    selected_bus_id: Optional[int] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        result = {
            "charts": self.charts,
            "layout": self.layout,
        }
        # Only include optional fields if set (matches Rust's skip_serializing_if)
        if self.selected_scenario_id is not None:
            result["selected_scenario_id"] = self.selected_scenario_id
        if self.selected_ensemble_name is not None:
            result["selected_ensemble_name"] = self.selected_ensemble_name
        if self.selected_bus_id is not None:
            result["selected_bus_id"] = self.selected_bus_id
        return result


def set_dashboard_config(conn: sqlite3.Connection, config: DashboardConfig) -> int:
    """
    Save dashboard configuration to the database.

    This configuration will be loaded by the Convexity app when the model
    is opened, setting up the analytics dashboard with the specified charts
    and layout.

    Args:
        conn: Database connection
        config: Dashboard configuration

    Returns:
        Row ID of the stored configuration

    Example:
        >>> config = DashboardConfig(
        ...     charts=[{"id": "dispatch-1", "title": "Dispatch", "visible": True,
        ...              "view": {"timeseries": {"component": "Generator", "attribute": "p", "group_by": "carrier"}}}],
        ...     layout=[{"i": "dispatch-1", "x": 0, "y": 0, "w": 12, "h": 40}]
        ... )
        >>> set_dashboard_config(conn, config)
    """
    data_json = json.dumps(config.to_dict())
    data_bytes = data_json.encode('utf-8')

    # Check if analytics config exists
    cursor = conn.execute(
        "SELECT id FROM network_data_store WHERE category = 'analytics_view' AND name = 'default'"
    )
    row = cursor.fetchone()

    if row:
        # Update existing
        row_id = row[0]
        conn.execute(
            "UPDATE network_data_store SET data = ?, updated_at = datetime('now') WHERE id = ?",
            (data_bytes, row_id)
        )
    else:
        # Insert new
        conn.execute(
            """INSERT INTO network_data_store (category, name, data_format, data, created_at, updated_at)
               VALUES ('analytics_view', 'default', 'json', ?, datetime('now'), datetime('now'))""",
            (data_bytes,)
        )
        row_id = conn.execute("SELECT last_insert_rowid()").fetchone()[0]

    conn.commit()
    return row_id


def get_dashboard_config(conn: sqlite3.Connection) -> Optional[DashboardConfig]:
    """
    Get the current dashboard configuration from the database.

    Args:
        conn: Database connection

    Returns:
        DashboardConfig if one exists, None otherwise
    """
    cursor = conn.execute(
        """SELECT data FROM network_data_store
           WHERE category = 'analytics_view'
           ORDER BY updated_at DESC LIMIT 1"""
    )
    row = cursor.fetchone()

    if not row:
        return None

    data_bytes = row[0]
    if isinstance(data_bytes, bytes):
        data_str = data_bytes.decode('utf-8')
    else:
        data_str = data_bytes

    data = json.loads(data_str)

    return DashboardConfig(
        charts=data.get("charts", []),
        layout=data.get("layout", []),
        selected_scenario_id=data.get("selected_scenario_id"),
        selected_ensemble_name=data.get("selected_ensemble_name"),
        selected_bus_id=data.get("selected_bus_id"),
    )


def auto_layout(charts: List[Dict[str, Any]], cols: int = 12) -> List[Dict[str, Any]]:
    """
    Automatically generate layout positions for charts.

    Places charts in a vertical stack, each taking full width.
    Timeseries charts get height 40, others get height 20.

    Args:
        charts: List of chart configurations
        cols: Grid columns (default 12)

    Returns:
        List of layout position dicts

    Example:
        >>> charts = [
        ...     {"id": "dispatch-1", "title": "Dispatch", "visible": True,
        ...      "view": {"timeseries": {...}}},
        ...     {"id": "network-1", "title": "Network", "visible": True,
        ...      "view": {"network": {"network": True}}}
        ... ]
        >>> layout = auto_layout(charts)
        >>> # Returns: [{"i": "dispatch-1", "x": 0, "y": 0, "w": 12, "h": 40}, ...]
    """
    layout = []
    y = 0

    for chart in charts:
        if not chart.get("visible", True):
            continue

        chart_id = chart["id"]
        view = chart.get("view", {})

        # Determine chart dimensions based on type
        if "timeseries" in view:
            w, h = 12, 40
        elif "network" in view:
            w, h = 6, 40
        elif "statistic" in view:
            w, h = 6, 20
        else:
            w, h = 12, 20

        layout.append({
            "i": chart_id,
            "x": 0,
            "y": y,
            "w": w,
            "h": h,
        })

        y += h

    return layout


def clear_dashboard_config(conn: sqlite3.Connection) -> bool:
    """
    Remove any existing dashboard configuration.

    Args:
        conn: Database connection

    Returns:
        True if a config was deleted, False if none existed
    """
    cursor = conn.execute(
        "DELETE FROM network_data_store WHERE category = 'analytics_view' AND name = 'default'"
    )
    conn.commit()
    return cursor.rowcount > 0
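A round-trip sketch of the new module follows. It is illustrative, not from the package: the stand-in network_data_store table is trimmed to just the columns the functions above touch, whereas a real pyconvexity model database ships the full schema from 01_core_schema.sql.

import sqlite3

from pyconvexity.dashboard import (
    DashboardConfig,
    auto_layout,
    clear_dashboard_config,
    get_dashboard_config,
    set_dashboard_config,
)

conn = sqlite3.connect(":memory:")
# Stand-in table (illustrative): only the columns used by the functions above.
conn.execute("""
    CREATE TABLE network_data_store (
        id INTEGER PRIMARY KEY,
        category TEXT,
        name TEXT,
        data_format TEXT,
        data BLOB,
        created_at DATETIME,
        updated_at DATETIME
    )
""")

charts = [{
    "id": "dispatch-1",
    "title": "Generation by Carrier",
    "visible": True,
    "view": {"timeseries": {"component": "Generator", "attribute": "p",
                            "group_by": "carrier"}},
}]
config = DashboardConfig(charts=charts, layout=auto_layout(charts))

row_id = set_dashboard_config(conn, config)           # first call inserts a row
assert set_dashboard_config(conn, config) == row_id   # second call updates it in place

loaded = get_dashboard_config(conn)
assert loaded.layout == [{"i": "dispatch-1", "x": 0, "y": 0, "w": 12, "h": 40}]

assert clear_dashboard_config(conn) is True           # row deleted
assert get_dashboard_config(conn) is None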
pyconvexity/data/schema/01_core_schema.sql CHANGED
@@ -54,7 +54,7 @@ CREATE TABLE carriers (
     nice_name TEXT, -- Display name
     max_growth REAL DEFAULT NULL, -- MW - can be infinite
     max_relative_growth REAL DEFAULT 0.0, -- MW
-    curtailable BOOLEAN DEFAULT
+    curtailable BOOLEAN DEFAULT TRUE, -- Whether the carrier can be curtailed
 
     -- Metadata
     created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
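The effect of the new column default can be checked with a stand-in table trimmed to the changed column (illustrative only; the real carriers table has many more columns, and the TRUE literal requires SQLite 3.23+):

import sqlite3

conn = sqlite3.connect(":memory:")
# Stand-in carriers table: just the column this hunk changes.
conn.execute("CREATE TABLE carriers (name TEXT PRIMARY KEY, curtailable BOOLEAN DEFAULT TRUE)")
conn.execute("INSERT INTO carriers (name) VALUES ('wind')")
# The default applies when curtailable is not given explicitly:
print(conn.execute("SELECT curtailable FROM carriers").fetchone()[0])  # -> 1 (TRUE)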
pyconvexity/data/schema/03_validation_data.sql CHANGED
@@ -33,8 +33,8 @@ INSERT INTO attribute_validation_rules (component_type, attribute_name, display_
 ('BUS', 'q', 'Reactive Power', 'float', 'MVar', '0', 'timeseries', FALSE, FALSE, 'reactive power (positive if net generation at bus)', NULL, NULL, 'electrical', TRUE),
 ('BUS', 'v_mag_pu', 'Voltage Magnitude', 'float', 'per unit', '1', 'timeseries', FALSE, FALSE, 'Voltage magnitude, per unit of v_nom', NULL, NULL, 'electrical', TRUE),
 ('BUS', 'v_ang', 'Voltage Angle', 'float', 'radians', '0', 'timeseries', FALSE, FALSE, 'Voltage angle', NULL, NULL, 'electrical', TRUE),
-('BUS', 'marginal_price', 'Marginal Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Locational marginal price from LOPF from power balance constraint', NULL, NULL, 'costs', TRUE),
-('BUS', '
+('BUS', 'marginal_price', 'Marginal Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Locational marginal price from LOPF from power balance constraint (shadow price). Includes effects of UC constraints, ramping limits, and other binding constraints.', NULL, NULL, 'costs', TRUE),
+('BUS', 'clearing_price', 'Clearing Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Pay-as-clear price: marginal cost of the cheapest available source (generator, storage, or import via uncongested link) with spare capacity. Calculated as min of: (1) local generators/storage with spare capacity, (2) adjacent bus local marginal + link cost adjusted for efficiency, for uncongested links, (3) unmet load price in scarcity. Differs from marginal_price which is the LP shadow price.', NULL, NULL, 'costs', TRUE);
 
 -- ============================================================================
 -- GENERATOR ATTRIBUTES
pyconvexity/solvers/pypsa/__init__.py CHANGED
@@ -3,6 +3,10 @@ PyPSA solver integration for PyConvexity.
 
 Provides high-level and low-level APIs for building PyPSA networks from database,
 solving them, and storing results back to the database.
+
+After solving, the following prices are stored on each bus:
+- marginal_price: LP shadow price of power balance constraint (from PyPSA)
+- clearing_price: Pay-as-clear price based on cheapest source with spare capacity
 """
 
 from pyconvexity.solvers.pypsa.api import (
@@ -13,6 +17,7 @@ from pyconvexity.solvers.pypsa.api import (
     apply_constraints,
     store_solve_results,
 )
+from pyconvexity.solvers.pypsa.clearing_price import ClearingPriceCalculator
 
 __all__ = [
     "solve_network",
@@ -21,4 +26,5 @@ __all__ = [
     "load_network_components",
     "apply_constraints",
     "store_solve_results",
+    "ClearingPriceCalculator",
 ]
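With the re-export in place, the calculator can be driven directly. A sketch, assuming conn is an open model-database connection and network is an already-solved pypsa.Network, mirroring the usage shown in the class docstring of the new module below:

from pyconvexity.solvers.pypsa import ClearingPriceCalculator

calculator = ClearingPriceCalculator(verbose=False)  # defaults: storage excluded, £10k unmet-load threshold
clearing_prices = calculator.calculate_all_buses(conn, network, scenario_id=None)
for bus_name, prices in clearing_prices.items():
    print(f"{bus_name}: mean clearing price {prices.mean():.2f}")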
pyconvexity/solvers/pypsa/clearing_price.py ADDED
@@ -0,0 +1,678 @@
"""
Clearing Price Calculator
=========================

Calculates pay-as-clear prices for all buses after network solve.

The clearing price at each bus represents the cost of the marginal MW
that could be supplied to that bus, considering:
- Local generators with spare capacity (dispatch < available capacity)
- Local storage units with spare discharge capacity
- Imports via uncongested links from adjacent buses

This differs from PyPSA's marginal_price (shadow price) which includes
effects of UC constraints, ramping limits, and other binding constraints.

Algorithm (Single-Pass "Local Marginal"):
=========================================
For each bus at each timestep:
1. Find the cheapest LOCAL source with spare capacity:
   - Generators where p < p_nom * p_max_pu (has spare capacity)
   - Storage units where p < p_nom * p_max_pu (can discharge more)
   - Exclude "unmet load" penalty generators (marginal_cost > threshold)

2. Find the cheapest IMPORT option via uncongested inbound links:
   - For each link where this bus is the destination (bus1):
     - Check if link has spare import capacity: p0 < p_nom * p_max_pu
     - Import price = (local_marginal[source_bus] + link.marginal_cost) / link.efficiency
   - For bidirectional links, check both directions

3. clearing_price = min(local_marginal, cheapest_import)

4. If no source has spare capacity (scarcity):
   - Use the unmet load penalty price

Note on Multi-hop Imports:
--------------------------
This single-pass algorithm uses the LOCAL marginal price of adjacent buses,
not their full clearing prices. This means multi-hop import economics are
not captured. For example, if Bus A can import from Bus B, and Bus B can
import cheaply from Bus C, this algorithm won't reflect Bus A's ability
to effectively import from C via B.

This is a deliberate simplification that:
- Avoids iteration/recursion for meshed networks
- Matches a "local market" interpretation where each bus sees instantaneous offers
- Is correct for radial networks (tree topology)

For most practical networks (radial or nearly radial), this gives accurate results.
"""

import logging
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Tuple, Any

import numpy as np
import pandas as pd

logger = logging.getLogger(__name__)


@dataclass
class LinkInfo:
    """Information about a link for clearing price calculation."""
    link_name: str
    adjacent_bus: str
    marginal_cost: np.ndarray  # Per timestep
    efficiency: float
    spare_capacity_mask: np.ndarray  # Boolean per timestep: True if has spare capacity


@dataclass
class PriceSetter:
    """Tracks which asset sets the clearing price."""
    asset_name: str
    asset_type: str  # 'generator', 'storage', 'link_import', or 'unmet_load'
    marginal_cost: float
    bus: str


class ClearingPriceCalculator:
    """
    Calculate pay-as-clear prices for all buses in a solved network.

    The clearing price is the marginal cost of the cheapest source with
    spare capacity that could supply the next MW to a bus. This differs
    from the LP shadow price (marginal_price) which includes constraint effects.

    Example usage:
        calculator = ClearingPriceCalculator()
        clearing_prices = calculator.calculate_all_buses(conn, network, scenario_id)
        # Returns: {'GB_Main': array([45.2, 46.1, ...]), 'FR': array([...]), ...}
    """

    def __init__(
        self,
        unmet_load_threshold: float = 10000.0,
        spare_capacity_tolerance: float = 0.01,
        min_dispatch_threshold: float = 1.0,
        verbose: bool = True,
        include_storage: bool = False,
        min_marginal_cost: float = 1.0,
    ):
        """
        Initialize the calculator.

        Args:
            unmet_load_threshold: Marginal cost above which a generator is
                considered an "unmet load" penalty generator and excluded
                from normal clearing price calculation.
            spare_capacity_tolerance: Fraction tolerance for "at capacity".
                A source is considered to have spare capacity if
                dispatch < available_capacity * (1 - tolerance).
            min_dispatch_threshold: Minimum dispatch (MW) to consider a source
                as "dispatching". Handles numerical noise.
            verbose: Enable detailed logging of price-setting assets.
            include_storage: Whether to include storage units in clearing price.
                Default False because storage marginal_cost in PyPSA is typically
                ~0 (no fuel cost) and doesn't represent the market clearing price.
                In pay-as-clear markets, storage is a price-taker, not a price-setter.
            min_marginal_cost: Minimum marginal cost to consider for price setting.
                Sources with marginal_cost below this are excluded (e.g., to filter
                out renewables with mc=0 that shouldn't set the clearing price).
        """
        self.unmet_load_threshold = unmet_load_threshold
        self.spare_capacity_tolerance = spare_capacity_tolerance
        self.min_dispatch_threshold = min_dispatch_threshold
        self.verbose = verbose
        self.include_storage = include_storage
        self.min_marginal_cost = min_marginal_cost

        # Track price setters for logging
        self._price_setters: Dict[str, List[Optional[PriceSetter]]] = {}

    def calculate_all_buses(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int] = None,
    ) -> Dict[str, np.ndarray]:
        """
        Calculate clearing prices for all buses in the network.

        Args:
            conn: Database connection (for component lookups if needed)
            network: Solved PyPSA Network object
            scenario_id: Scenario ID (for logging)

        Returns:
            Dict mapping bus_name -> array of clearing prices per timestep.
            Length of each array equals len(network.snapshots).
        """
        n_periods = len(network.snapshots)
        bus_names = list(network.buses.index)

        logger.debug(f"Clearing price calculation: {len(bus_names)} buses, {n_periods} periods")

        # Reset price setters tracking
        self._price_setters = {bus: [None] * n_periods for bus in bus_names}

        # Step 1: Calculate local marginal price at each bus
        local_marginals, local_setters = self._calculate_local_marginals(network, n_periods)

        # Step 2: Build link adjacency map (which buses can import from where)
        link_adjacency = self._build_link_adjacency(network, n_periods)

        # Step 3: Calculate clearing prices (single pass)
        clearing_prices = {}
        for bus_name in bus_names:
            clearing_prices[bus_name], setters = self._calculate_bus_clearing_price(
                bus_name,
                local_marginals,
                local_setters,
                link_adjacency,
                n_periods,
            )
            self._price_setters[bus_name] = setters

        # Log summary for key buses only
        self._log_clearing_price_summary(clearing_prices, n_periods)

        return clearing_prices

    def _calculate_local_marginals(
        self,
        network: "pypsa.Network",
        n_periods: int
    ) -> Tuple[Dict[str, np.ndarray], Dict[str, List[Optional[PriceSetter]]]]:
        """
        For each bus, calculate the marginal cost of the cheapest local source
        (generator or storage unit) with spare capacity at each timestep.

        Returns:
            Tuple of:
            - Dict mapping bus_name -> array of local marginal prices
            - Dict mapping bus_name -> list of PriceSetter objects (one per timestep)
        """
        bus_names = list(network.buses.index)
        local_marginals = {bus: np.full(n_periods, np.inf) for bus in bus_names}
        local_setters = {bus: [None] * n_periods for bus in bus_names}

        # Process generators
        gen_stats = self._process_generators(network, n_periods, local_marginals, local_setters)

        # Process storage units (discharge as source) - only if enabled
        # By default, storage is excluded because:
        # 1. Storage marginal_cost in PyPSA is typically ~0 (no fuel cost)
        # 2. In pay-as-clear markets, storage is a price-taker, not a price-setter
        # 3. Storage arbitrages between periods; its cost is opportunity cost, not marginal cost
        if self.include_storage:
            storage_stats = self._process_storage_units(network, n_periods, local_marginals, local_setters)
        else:
            storage_stats = {'processed': 0, 'with_spare': 0}

        # Process stores (if they can inject power)
        self._process_stores(network, n_periods, local_marginals, local_setters)

        logger.debug(f"  Generators: {gen_stats['processed']} processed, {gen_stats['with_spare']} with spare capacity")
        logger.debug(f"  Storage units: {storage_stats['processed']} processed, {storage_stats['with_spare']} with spare capacity")

        return local_marginals, local_setters

    def _process_generators(
        self,
        network: "pypsa.Network",
        n_periods: int,
        local_marginals: Dict[str, np.ndarray],
        local_setters: Dict[str, List[Optional[PriceSetter]]],
    ) -> Dict[str, int]:
        """Process generators to find local marginals at each bus."""
        stats = {'processed': 0, 'with_spare': 0, 'skipped_unmet': 0, 'skipped_no_pnom': 0}

        if network.generators.empty:
            return stats

        generators = network.generators

        # Get dispatch timeseries
        if hasattr(network.generators_t, 'p') and not network.generators_t.p.empty:
            p_dispatch = network.generators_t.p
        else:
            logger.warning("  No generator dispatch data found (generators_t.p empty)")
            return stats

        # Get p_max_pu timeseries (or static)
        if hasattr(network.generators_t, 'p_max_pu') and not network.generators_t.p_max_pu.empty:
            p_max_pu_ts = network.generators_t.p_max_pu
        else:
            p_max_pu_ts = None

        # Get marginal_cost timeseries (or static)
        if hasattr(network.generators_t, 'marginal_cost') and not network.generators_t.marginal_cost.empty:
            marginal_cost_ts = network.generators_t.marginal_cost
        else:
            marginal_cost_ts = None

        # First pass: regular generators (not unmet load)
        for gen_name in generators.index:
            gen = generators.loc[gen_name]
            bus = gen['bus']

            if bus not in local_marginals:
                continue

            # Get p_nom
            p_nom = gen.get('p_nom', 0)
            if p_nom <= 0:
                stats['skipped_no_pnom'] += 1
                continue

            # Get dispatch values
            if gen_name not in p_dispatch.columns:
                continue
            p_values = p_dispatch[gen_name].values
            if len(p_values) != n_periods:
                p_values = self._pad_or_truncate(p_values, n_periods)

            # Get p_max_pu values
            if p_max_pu_ts is not None and gen_name in p_max_pu_ts.columns:
                p_max_pu_values = p_max_pu_ts[gen_name].values
            else:
                p_max_pu_values = np.full(n_periods, gen.get('p_max_pu', 1.0))
            if len(p_max_pu_values) != n_periods:
                p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

            # Get marginal_cost values
            if marginal_cost_ts is not None and gen_name in marginal_cost_ts.columns:
                mc_values = marginal_cost_ts[gen_name].values
            else:
                mc_values = np.full(n_periods, gen.get('marginal_cost', 0.0))
            if len(mc_values) != n_periods:
                mc_values = self._pad_or_truncate(mc_values, n_periods)

            # Skip unmet load generators (handle in second pass)
            if np.any(mc_values > self.unmet_load_threshold):
                stats['skipped_unmet'] += 1
                continue

            stats['processed'] += 1

            # Calculate available capacity
            available = p_nom * p_max_pu_values

            # Find timesteps where generator has spare capacity
            spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)
            n_spare = spare_capacity_mask.sum()

            if n_spare > 0:
                stats['with_spare'] += 1

            # Log details for this generator
            if self.verbose and n_spare > 0:
                mean_mc = np.mean(mc_values)
                mean_dispatch = np.mean(p_values)
                mean_available = np.mean(available)
                logger.debug(f"    {gen_name} @ {bus}: mc={mean_mc:.2f}, dispatch={mean_dispatch:.1f}MW, available={mean_available:.1f}MW, spare_periods={n_spare}/{n_periods}")

            # Update local marginal where this generator is cheaper and has spare capacity
            # Also filter by min_marginal_cost (e.g., to exclude renewables with mc=0)
            for t in range(n_periods):
                if (spare_capacity_mask[t]
                        and mc_values[t] >= self.min_marginal_cost
                        and mc_values[t] < local_marginals[bus][t]):
                    local_marginals[bus][t] = mc_values[t]
                    local_setters[bus][t] = PriceSetter(
                        asset_name=gen_name,
                        asset_type='generator',
                        marginal_cost=mc_values[t],
                        bus=bus
                    )

        # Second pass: handle unmet load generators (only set price if no other source available)
        for gen_name in generators.index:
            gen = generators.loc[gen_name]
            bus = gen['bus']

            if bus not in local_marginals:
                continue

            p_nom = gen.get('p_nom', 0)
            if p_nom <= 0:
                continue

            # Get marginal_cost values
            if marginal_cost_ts is not None and gen_name in marginal_cost_ts.columns:
                mc_values = marginal_cost_ts[gen_name].values
            else:
                mc_values = np.full(n_periods, gen.get('marginal_cost', 0.0))
            if len(mc_values) != n_periods:
                mc_values = self._pad_or_truncate(mc_values, n_periods)

            # Only process unmet load generators
            if not np.any(mc_values > self.unmet_load_threshold):
                continue

            # Get dispatch and check spare capacity
            if gen_name not in p_dispatch.columns:
                continue
            p_values = p_dispatch[gen_name].values
            if len(p_values) != n_periods:
                p_values = self._pad_or_truncate(p_values, n_periods)

            if p_max_pu_ts is not None and gen_name in p_max_pu_ts.columns:
                p_max_pu_values = p_max_pu_ts[gen_name].values
            else:
                p_max_pu_values = np.full(n_periods, gen.get('p_max_pu', 1.0))
            if len(p_max_pu_values) != n_periods:
                p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

            available = p_nom * p_max_pu_values
            spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)

            # Only use unmet load price where local_marginal is still inf (no other source)
            n_set = 0
            for t in range(n_periods):
                if spare_capacity_mask[t] and np.isinf(local_marginals[bus][t]):
                    local_marginals[bus][t] = mc_values[t]
                    local_setters[bus][t] = PriceSetter(
                        asset_name=gen_name,
                        asset_type='unmet_load',
                        marginal_cost=mc_values[t],
                        bus=bus
                    )
                    n_set += 1

            if n_set > 0:
                logger.debug(f"    UNMET LOAD {gen_name}: set price for {n_set} periods (no other source)")

        return stats

    def _process_storage_units(
        self,
        network: "pypsa.Network",
        n_periods: int,
        local_marginals: Dict[str, np.ndarray],
        local_setters: Dict[str, List[Optional[PriceSetter]]],
    ) -> Dict[str, int]:
        """Process storage units (discharge capacity) to find local marginals."""
        stats = {'processed': 0, 'with_spare': 0}

        if network.storage_units.empty:
            return stats

        storage_units = network.storage_units

        # Get dispatch timeseries (positive = discharge)
        if hasattr(network.storage_units_t, 'p') and not network.storage_units_t.p.empty:
            p_dispatch = network.storage_units_t.p
        else:
            logger.warning("  No storage unit dispatch data found")
            return stats

        for su_name in storage_units.index:
            su = storage_units.loc[su_name]
            bus = su['bus']

            if bus not in local_marginals:
                continue

            # Get p_nom (discharge capacity)
            p_nom = su.get('p_nom', 0)
            if p_nom <= 0:
                continue

            # Get dispatch values
            if su_name not in p_dispatch.columns:
                continue
            p_values = p_dispatch[su_name].values
            if len(p_values) != n_periods:
                p_values = self._pad_or_truncate(p_values, n_periods)

            # Get p_max_pu
            if hasattr(network.storage_units_t, 'p_max_pu') and su_name in network.storage_units_t.p_max_pu.columns:
                p_max_pu_values = network.storage_units_t.p_max_pu[su_name].values
            else:
                p_max_pu_values = np.full(n_periods, su.get('p_max_pu', 1.0))
            if len(p_max_pu_values) != n_periods:
                p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

            # Get marginal_cost
            if hasattr(network.storage_units_t, 'marginal_cost') and su_name in network.storage_units_t.marginal_cost.columns:
                mc_values = network.storage_units_t.marginal_cost[su_name].values
            else:
                mc_values = np.full(n_periods, su.get('marginal_cost', 0.0))
            if len(mc_values) != n_periods:
                mc_values = self._pad_or_truncate(mc_values, n_periods)

            stats['processed'] += 1

            # Calculate available discharge capacity
            available = p_nom * p_max_pu_values

            # Spare capacity for discharge: current discharge < max discharge
            spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)
            n_spare = spare_capacity_mask.sum()

            if n_spare > 0:
                stats['with_spare'] += 1

            if self.verbose and n_spare > 0:
                logger.debug(f"    {su_name} @ {bus}: mc={np.mean(mc_values):.2f}, spare_periods={n_spare}/{n_periods}")

            # Update local marginal
            for t in range(n_periods):
                if spare_capacity_mask[t] and mc_values[t] < local_marginals[bus][t]:
                    local_marginals[bus][t] = mc_values[t]
                    local_setters[bus][t] = PriceSetter(
                        asset_name=su_name,
                        asset_type='storage',
                        marginal_cost=mc_values[t],
                        bus=bus
                    )

        return stats

    def _process_stores(
        self,
        network: "pypsa.Network",
        n_periods: int,
        local_marginals: Dict[str, np.ndarray],
        local_setters: Dict[str, List[Optional[PriceSetter]]],
    ) -> None:
        """Process stores (if they can inject power) to find local marginals."""
        if network.stores.empty:
            return

        # Stores are complex - skip for now
        # They don't have a fixed p_nom like generators/storage_units
        logger.debug(f"  Skipping {len(network.stores)} stores (complex capacity constraints)")

    def _build_link_adjacency(
        self,
        network: "pypsa.Network",
        n_periods: int,
    ) -> Dict[str, List[LinkInfo]]:
        """
        Build a map of bus -> list of inbound link options.
        """
        link_adjacency: Dict[str, List[LinkInfo]] = {}

        if network.links.empty:
            return link_adjacency

        links = network.links

        # Get link flow timeseries
        if hasattr(network.links_t, 'p0') and not network.links_t.p0.empty:
            p0_dispatch = network.links_t.p0
        else:
            p0_dispatch = None
            logger.warning("  No link flow data found (links_t.p0 empty)")

        for link_name in links.index:
            link = links.loc[link_name]
            bus0 = link['bus0']
            bus1 = link['bus1']

            p_nom = link.get('p_nom', 0)
            if p_nom <= 0:
                continue

            efficiency = link.get('efficiency', 1.0)
            if pd.isna(efficiency) or efficiency <= 0:
                efficiency = 1.0

            # Get marginal_cost
            if hasattr(network.links_t, 'marginal_cost') and link_name in network.links_t.marginal_cost.columns:
                mc_values = network.links_t.marginal_cost[link_name].values
            else:
                mc_values = np.full(n_periods, link.get('marginal_cost', 0.0))
            if len(mc_values) != n_periods:
                mc_values = self._pad_or_truncate(mc_values, n_periods)

            # Get p_max_pu
            if hasattr(network.links_t, 'p_max_pu') and link_name in network.links_t.p_max_pu.columns:
                p_max_pu_values = network.links_t.p_max_pu[link_name].values
            else:
                p_max_pu_values = np.full(n_periods, link.get('p_max_pu', 1.0))
            if len(p_max_pu_values) != n_periods:
                p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

            # Get p_min_pu
            if hasattr(network.links_t, 'p_min_pu') and link_name in network.links_t.p_min_pu.columns:
                p_min_pu_values = network.links_t.p_min_pu[link_name].values
            else:
                p_min_pu_values = np.full(n_periods, link.get('p_min_pu', 0.0))
            if len(p_min_pu_values) != n_periods:
                p_min_pu_values = self._pad_or_truncate(p_min_pu_values, n_periods)

            # Get actual flow
            if p0_dispatch is not None and link_name in p0_dispatch.columns:
                p0_values = p0_dispatch[link_name].values
            else:
                p0_values = np.zeros(n_periods)
            if len(p0_values) != n_periods:
                p0_values = self._pad_or_truncate(p0_values, n_periods)

            # Direction 1: bus0 -> bus1 (positive flow)
            max_forward = p_nom * p_max_pu_values
            spare_forward = p0_values < max_forward * (1 - self.spare_capacity_tolerance)

            if bus1 not in link_adjacency:
                link_adjacency[bus1] = []
            link_adjacency[bus1].append(LinkInfo(
                link_name=link_name,
                adjacent_bus=bus0,
                marginal_cost=mc_values,
                efficiency=efficiency,
                spare_capacity_mask=spare_forward,
            ))

            logger.debug(f"  {link_name}: {bus0} -> {bus1}, p_nom={p_nom:.0f}MW, eff={efficiency:.2f}, spare_periods={spare_forward.sum()}")

            # Direction 2: bus1 -> bus0 (negative flow, if allowed)
            if np.any(p_min_pu_values < 0):
                max_reverse = p_nom * np.abs(p_min_pu_values)
                current_reverse = np.maximum(-p0_values, 0)
                spare_reverse = current_reverse < max_reverse * (1 - self.spare_capacity_tolerance)

                if bus0 not in link_adjacency:
                    link_adjacency[bus0] = []
                link_adjacency[bus0].append(LinkInfo(
                    link_name=f"{link_name}_reverse",
                    adjacent_bus=bus1,
                    marginal_cost=mc_values,
                    efficiency=efficiency,
                    spare_capacity_mask=spare_reverse,
                ))

                logger.debug(f"  {link_name}_reverse: {bus1} -> {bus0}, spare_periods={spare_reverse.sum()}")

        return link_adjacency

    def _calculate_bus_clearing_price(
        self,
        bus_name: str,
        local_marginals: Dict[str, np.ndarray],
        local_setters: Dict[str, List[Optional[PriceSetter]]],
        link_adjacency: Dict[str, List[LinkInfo]],
        n_periods: int,
    ) -> Tuple[np.ndarray, List[Optional[PriceSetter]]]:
        """
        Calculate clearing price for a single bus.

        Returns:
            Tuple of (clearing_prices array, list of PriceSetter for each timestep)
        """
        clearing_prices = np.copy(local_marginals.get(bus_name, np.full(n_periods, np.inf)))
        setters = list(local_setters.get(bus_name, [None] * n_periods))

        # Check import options
        n_import_better = 0
        if bus_name in link_adjacency:
            for link_info in link_adjacency[bus_name]:
                adj_bus = link_info.adjacent_bus
                adj_marginal = local_marginals.get(adj_bus, np.full(n_periods, np.inf))

                for t in range(n_periods):
                    if link_info.spare_capacity_mask[t]:
                        # Import price = (adjacent marginal + link cost) / efficiency
                        import_price = (adj_marginal[t] + link_info.marginal_cost[t]) / link_info.efficiency
                        if import_price < clearing_prices[t]:
                            clearing_prices[t] = import_price
                            setters[t] = PriceSetter(
                                asset_name=link_info.link_name,
                                asset_type='link_import',
                                marginal_cost=import_price,
                                bus=bus_name
                            )
                            n_import_better += 1

        # Handle remaining inf values (true scarcity)
        n_scarcity = np.isinf(clearing_prices).sum()
        clearing_prices = np.where(np.isinf(clearing_prices), self.unmet_load_threshold * 10, clearing_prices)

        # Log warning only for problematic buses
        n_zeros = (clearing_prices == 0).sum()
        if n_zeros > 0 or n_scarcity > 0:
            logger.warning(f"  {bus_name}: zeros={n_zeros}, scarcity={n_scarcity}")

        return clearing_prices, setters

    def _log_clearing_price_summary(
        self,
        clearing_prices: Dict[str, np.ndarray],
        n_periods: int,
    ) -> None:
        """Log compact summary of clearing prices."""
        # Log summary for key buses (GB_Main if present, otherwise the first three)
        key_buses = ['GB_Main'] if 'GB_Main' in clearing_prices else list(clearing_prices.keys())[:3]

        for bus_name in key_buses:
            prices = clearing_prices.get(bus_name)
            if prices is None:
                continue

            setters = self._price_setters.get(bus_name, [])

            # Count price setters by type
            setter_counts: Dict[str, int] = {}
            for setter in setters:
                key = setter.asset_type if setter else 'none'
                setter_counts[key] = setter_counts.get(key, 0) + 1

            # Summary stats
            valid = prices[(prices > 0) & (prices < self.unmet_load_threshold)]
            setters_str = ", ".join(f"{k}:{v}" for k, v in sorted(setter_counts.items(), key=lambda x: -x[1]))

            if len(valid) > 0:
                logger.info(f"  Clearing prices [{bus_name}]: mean=£{np.mean(valid):.2f}, range=[£{np.min(valid):.2f}, £{np.max(valid):.2f}], setters: {setters_str}")

    def _pad_or_truncate(self, arr: np.ndarray, target_length: int) -> np.ndarray:
        """Pad array with last value or truncate to target length."""
        arr = np.asarray(arr)
        if len(arr) >= target_length:
            return arr[:target_length]
        else:
            padding = np.full(target_length - len(arr), arr[-1] if len(arr) > 0 else 0)
            return np.concatenate([arr, padding])
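To make step 2 of the algorithm concrete, here is the import-price formula evaluated with made-up numbers (purely illustrative, not from any real network):

# Import price = (adjacent bus local marginal + link cost) / link efficiency.
# Dividing by efficiency accounts for losses: delivering 1 MWh over a 97%-
# efficient link means sending ~1.031 MWh from the adjacent bus.
adjacent_local_marginal = 42.0   # £/MWh at the exporting bus (hypothetical)
link_marginal_cost = 1.5         # £/MWh link fee (hypothetical)
link_efficiency = 0.97

import_price = (adjacent_local_marginal + link_marginal_cost) / link_efficiency
print(round(import_price, 2))    # 44.85; this competes against the local marginal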
pyconvexity/solvers/pypsa/storage.py CHANGED
@@ -4,12 +4,16 @@ Result storage functionality for PyPSA solver integration.
 Handles storing solve results back to the database with proper validation and error handling.
 """
 
+import logging
 import uuid
 import pandas as pd
 import numpy as np
 from typing import Dict, Any, Optional, Callable
 
 from pyconvexity.core.types import StaticValue
+
+logger = logging.getLogger(__name__)
+
 from pyconvexity.models import (
     list_components_by_type,
     set_static_attribute,
@@ -60,6 +64,12 @@ class ResultStorage:
         # Store component results
         component_stats = self._store_component_results(conn, network, scenario_id)
 
+        # Calculate and store clearing prices for all buses
+        clearing_prices_stored = self._calculate_and_store_clearing_prices(
+            conn, network, scenario_id
+        )
+        component_stats["stored_clearing_prices"] = clearing_prices_stored
+
         # Calculate network statistics first
         network_stats = self._calculate_network_statistics(
             conn, network, solve_result
@@ -193,6 +203,94 @@ class ResultStorage:
                 results_stats["errors"] += 1
         return results_stats
 
+    def _calculate_and_store_clearing_prices(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+    ) -> int:
+        """
+        Calculate and store clearing prices for all buses.
+
+        The clearing price at each bus is the pay-as-clear price: the marginal
+        cost of the cheapest source (generator, storage, or import) with spare
+        capacity. This differs from the marginal_price (LP shadow price).
+
+        Args:
+            conn: Database connection
+            network: Solved PyPSA Network object
+            scenario_id: Scenario ID for result storage
+
+        Returns:
+            Number of buses with clearing prices stored
+        """
+        logger.info(f"=== CALCULATING AND STORING CLEARING PRICES (scenario_id={scenario_id}) ===")
+
+        try:
+            from .clearing_price import ClearingPriceCalculator
+
+            calculator = ClearingPriceCalculator(verbose=True)
+            clearing_prices = calculator.calculate_all_buses(conn, network, scenario_id)
+
+            if not clearing_prices:
+                logger.warning("No clearing prices calculated - clearing_prices dict is empty")
+                return 0
+
+            # Log what we got from the calculator
+            logger.info(f"Clearing prices calculated for {len(clearing_prices)} buses")
+            for bus_name, prices in clearing_prices.items():
+                n_zeros = np.sum(prices == 0)
+                n_inf = np.sum(np.isinf(prices))
+                valid = prices[(prices > 0) & np.isfinite(prices)]
+                if len(valid) > 0:
+                    logger.info(f"  {bus_name}: {len(prices)} periods, mean=£{np.mean(valid):.2f}, "
+                                f"zeros={n_zeros}, inf={n_inf}, range=[£{np.min(valid):.2f}, £{np.max(valid):.2f}]")
+                else:
+                    logger.warning(f"  {bus_name}: {len(prices)} periods, ALL ZERO OR INF (zeros={n_zeros}, inf={n_inf})")
+
+            # Get bus component IDs
+            buses = list_components_by_type(conn, "BUS")
+            bus_name_to_id = {bus.name: bus.id for bus in buses}
+            logger.info(f"Found {len(buses)} buses in database: {list(bus_name_to_id.keys())}")
+
+            stored_count = 0
+            for bus_name, prices in clearing_prices.items():
+                if bus_name not in bus_name_to_id:
+                    logger.warning(f"  {bus_name}: not found in database - skipping")
+                    continue
+
+                bus_id = bus_name_to_id[bus_name]
+                values = [float(p) if np.isfinite(p) else 0.0 for p in prices]
+
+                # Log what we're about to store
+                n_zeros = sum(1 for v in values if v == 0)
+                if n_zeros > 0:
+                    logger.warning(f"  {bus_name}: storing {len(values)} values, {n_zeros} zeros")
+
+                try:
+                    set_timeseries_attribute(
+                        conn, bus_id, "clearing_price", values, scenario_id
+                    )
+                    stored_count += 1
+                    logger.info(f"  ✅ {bus_name} (id={bus_id}): stored {len(values)} clearing prices")
+                except Exception as e:
+                    logger.error(f"  ❌ {bus_name} (id={bus_id}): failed to store clearing_price: {e}")
+                    import traceback
+                    traceback.print_exc()
+                    continue
+
+            logger.info(f"=== CLEARING PRICES: Stored {stored_count}/{len(clearing_prices)} buses ===")
+            return stored_count
+
+        except ImportError as e:
+            logger.error(f"ClearingPriceCalculator not available - skipping clearing price calculation: {e}")
+            return 0
+        except Exception as e:
+            logger.error(f"Failed to calculate/store clearing prices: {e}")
+            import traceback
+            traceback.print_exc()
+            return 0
+
     def _store_component_type_results(
         self,
         conn,
{pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/RECORD CHANGED
@@ -1,5 +1,6 @@
-pyconvexity/__init__.py,sha256=
-pyconvexity/_version.py,sha256=
+pyconvexity/__init__.py,sha256=P67QJ8npf-QWmBX12im__eICLoRz8cByQ5OJXiyIBmA,5706
+pyconvexity/_version.py,sha256=E6iPG1WE6yyF-fToZYo4ZM-iwegVLaXOKuEXlAJ_kvg,28
+pyconvexity/dashboard.py,sha256=7x04Hr-EwzTAf-YJdHzfV83Gf2etltwtzwh_bCYJ5lk,8579
 pyconvexity/timeseries.py,sha256=QdKbiqjAlxkJATyKm2Kelx1Ea2PsAnnCYfVLU5VER1Y,11085
 pyconvexity/core/__init__.py,sha256=gdyyHNqOc4h9Nfe9u6NA936GNzH6coGNCMgBvvvOnGE,1196
 pyconvexity/core/database.py,sha256=vwCmuN0B0xwImh6L0bFR4vNWHw_wVfYSG1KwsUjK4iY,14831
@@ -9,9 +10,9 @@ pyconvexity/data/README.md,sha256=-tyDHVjqzfWbVvgM4yYYx8cysmgvFXI6plVQNxSHBmo,31
 pyconvexity/data/__init__.py,sha256=CFFwuIKS0qBk0HVLSByOK-oA5qm4krstJTUGFwUZyjo,509
 pyconvexity/data/loaders/__init__.py,sha256=6xPtOmH2n1mNby7ZjA-2Mk9F48Q246RNsyMnCnJ6gwA,60
 pyconvexity/data/loaders/cache.py,sha256=R-DUIiFpphjyi5EitcUZwzwUdZeqN6poYVyuNpKzB4g,7040
-pyconvexity/data/schema/01_core_schema.sql,sha256=
+pyconvexity/data/schema/01_core_schema.sql,sha256=8uz5_adp5IdW9kqrpK9pAlaWpeyHQFIej5GhhElSYu4,16902
 pyconvexity/data/schema/02_data_metadata.sql,sha256=BbpTkH1s7IbZQkDBRF2kL_UR9tzMEWDBYS3VBkwDRu0,4323
-pyconvexity/data/schema/03_validation_data.sql,sha256=
+pyconvexity/data/schema/03_validation_data.sql,sha256=fepFx1n-Gvls-MbGmVOEhxp82xFZXfNitpNw5k6R_IM,89083
 pyconvexity/data/sources/__init__.py,sha256=Dn6_oS7wB-vLjMj2YeXlmIl6hNjACbicimSabKxIWnc,108
 pyconvexity/data/sources/gem.py,sha256=v8OYCMsb2t-8u-YmK8vzMsgI9ArUAOAXMZZQOFpJ-nI,14923
 pyconvexity/io/__init__.py,sha256=FCyvRDfBUrrNei-y5JVod6MMN1bkPMSSfE0fpKi1aKQ,751
@@ -27,16 +28,17 @@ pyconvexity/models/network.py,sha256=P2Cuxv2lX9gWDwIBDDFqLs1sznhAXYquYNYstaMPjfU
 pyconvexity/models/results.py,sha256=6j1H4AwVmp94L97gl_sGnE8izMxkU5o89guKIU8JdtE,4169
 pyconvexity/models/scenarios.py,sha256=-0UPUDXf6r9mFriA-z2fD5KKMARm2PUBjLba49S9mCI,5867
 pyconvexity/solvers/__init__.py,sha256=t1gOUTqbYDCtIvKPqGVY1fjKwqJi2Od9bGeIO7bPvJE,667
-pyconvexity/solvers/pypsa/__init__.py,sha256=
+pyconvexity/solvers/pypsa/__init__.py,sha256=W3kmb8a5M0t9ktyf-1NIBcCGLN723RnNk12N2uFAjBM,881
 pyconvexity/solvers/pypsa/api.py,sha256=CWKslptTlZrSbuHy916_PHhCG8nO9SCfjTXkJZylLM8,17512
 pyconvexity/solvers/pypsa/batch_loader.py,sha256=ZgOcZqMnMS3TOYTq2Ly2O4cuwhNNAicu3EDq1Fj38OI,11929
 pyconvexity/solvers/pypsa/builder.py,sha256=1ZU68Wtl_jQSXHzspKQDkR6bxAVU1nKvPfnPUl0aO3k,23256
+pyconvexity/solvers/pypsa/clearing_price.py,sha256=HdAk7GPfJFVI4t6mL0zQGEOMAvuyfpl0yNCnah1ZGH0,29164
 pyconvexity/solvers/pypsa/constraints.py,sha256=20WliFDhPQGMAsS4VOTU8LZJpsFpLVRHpNsZW49GTcc,16397
 pyconvexity/solvers/pypsa/solver.py,sha256=M-s-VUCnRD8Jdh22PCUA-gWgYp1eH6_sgpoSzcv6kNQ,59762
-pyconvexity/solvers/pypsa/storage.py,sha256=
+pyconvexity/solvers/pypsa/storage.py,sha256=0C5WBHjm5gnU7KOqPogWE_j6dS_ZOv5JFSVM-1GY2DE,96100
 pyconvexity/validation/__init__.py,sha256=VJNZlFoWABsWwUKktNk2jbtXIepH5omvC0WtsTS7o3o,583
 pyconvexity/validation/rules.py,sha256=GiNadc8hvbWBr09vUkGiLLTmSdvtNSeGLFwvCjlikYY,9241
-pyconvexity-0.4.
-pyconvexity-0.4.
-pyconvexity-0.4.
-pyconvexity-0.4.
+pyconvexity-0.4.8.post1.dist-info/METADATA,sha256=hd4yVPZttXDtysXtC-y_E3qxNz2nD173nUAiavPaBEA,4973
+pyconvexity-0.4.8.post1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyconvexity-0.4.8.post1.dist-info/top_level.txt,sha256=wFPEDXVaebR3JO5Tt3HNse-ws5aROCcxEco15d6j64s,12
+pyconvexity-0.4.8.post1.dist-info/RECORD,,
{pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/WHEEL
File without changes

{pyconvexity-0.4.6.post1.dist-info → pyconvexity-0.4.8.post1.dist-info}/top_level.txt
File without changes