pyconvexity 0.4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic. Click here for more details.

Files changed (44) hide show
  1. pyconvexity/__init__.py +241 -0
  2. pyconvexity/_version.py +1 -0
  3. pyconvexity/core/__init__.py +60 -0
  4. pyconvexity/core/database.py +485 -0
  5. pyconvexity/core/errors.py +106 -0
  6. pyconvexity/core/types.py +400 -0
  7. pyconvexity/dashboard.py +265 -0
  8. pyconvexity/data/README.md +101 -0
  9. pyconvexity/data/__init__.py +17 -0
  10. pyconvexity/data/loaders/__init__.py +3 -0
  11. pyconvexity/data/loaders/cache.py +213 -0
  12. pyconvexity/data/schema/01_core_schema.sql +420 -0
  13. pyconvexity/data/schema/02_data_metadata.sql +120 -0
  14. pyconvexity/data/schema/03_validation_data.sql +507 -0
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/gem.py +442 -0
  17. pyconvexity/io/__init__.py +26 -0
  18. pyconvexity/io/excel_exporter.py +1226 -0
  19. pyconvexity/io/excel_importer.py +1381 -0
  20. pyconvexity/io/netcdf_exporter.py +191 -0
  21. pyconvexity/io/netcdf_importer.py +1802 -0
  22. pyconvexity/models/__init__.py +195 -0
  23. pyconvexity/models/attributes.py +730 -0
  24. pyconvexity/models/carriers.py +159 -0
  25. pyconvexity/models/components.py +611 -0
  26. pyconvexity/models/network.py +503 -0
  27. pyconvexity/models/results.py +148 -0
  28. pyconvexity/models/scenarios.py +234 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +30 -0
  31. pyconvexity/solvers/pypsa/api.py +446 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +296 -0
  33. pyconvexity/solvers/pypsa/builder.py +655 -0
  34. pyconvexity/solvers/pypsa/clearing_price.py +678 -0
  35. pyconvexity/solvers/pypsa/constraints.py +405 -0
  36. pyconvexity/solvers/pypsa/solver.py +1442 -0
  37. pyconvexity/solvers/pypsa/storage.py +2096 -0
  38. pyconvexity/timeseries.py +330 -0
  39. pyconvexity/validation/__init__.py +25 -0
  40. pyconvexity/validation/rules.py +312 -0
  41. pyconvexity-0.4.8.dist-info/METADATA +148 -0
  42. pyconvexity-0.4.8.dist-info/RECORD +44 -0
  43. pyconvexity-0.4.8.dist-info/WHEEL +5 -0
  44. pyconvexity-0.4.8.dist-info/top_level.txt +1 -0
@@ -0,0 +1,678 @@
1
+ """
2
+ Clearing Price Calculator
3
+ =========================
4
+
5
+ Calculates pay-as-clear prices for all buses after network solve.
6
+
7
+ The clearing price at each bus represents the cost of the marginal MW
8
+ that could be supplied to that bus, considering:
9
+ - Local generators with spare capacity (dispatch < available capacity)
10
+ - Local storage units with spare discharge capacity
11
+ - Imports via uncongested links from adjacent buses
12
+
13
+ This differs from PyPSA's marginal_price (shadow price) which includes
14
+ effects of UC constraints, ramping limits, and other binding constraints.
15
+
16
+ Algorithm (Single-Pass "Local Marginal"):
17
+ =========================================
18
+ For each bus at each timestep:
19
+ 1. Find the cheapest LOCAL source with spare capacity:
20
+ - Generators where p < p_nom * p_max_pu (has spare capacity)
21
+ - Storage units where p < p_nom * p_max_pu (can discharge more)
22
+ - Exclude "unmet load" penalty generators (marginal_cost > threshold)
23
+
24
+ 2. Find the cheapest IMPORT option via uncongested inbound links:
25
+ - For each link where this bus is the destination (bus1):
26
+ - Check if link has spare import capacity: p0 < p_nom * p_max_pu
27
+ - Import price = (local_marginal[source_bus] + link.marginal_cost) / link.efficiency
28
+ - For bidirectional links, check both directions
29
+
30
+ 3. clearing_price = min(local_marginal, cheapest_import)
31
+
32
+ 4. If no source has spare capacity (scarcity):
33
+ - Use the unmet load penalty price
34
+
35
+ Note on Multi-hop Imports:
36
+ --------------------------
37
+ This single-pass algorithm uses the LOCAL marginal price of adjacent buses,
38
+ not their full clearing prices. This means multi-hop import economics are
39
+ not captured. For example, if Bus A can import from Bus B, and Bus B can
40
+ import cheaply from Bus C, this algorithm won't reflect Bus A's ability
41
+ to effectively import from C via B.
42
+
43
+ This is a deliberate simplification that:
44
+ - Avoids iteration/recursion for meshed networks
45
+ - Matches a "local market" interpretation where each bus sees instantaneous offers
46
+ - Is correct for radial networks (tree topology)
47
+
48
+ For most practical networks (radial or nearly radial), this gives accurate results.
49
+ """
50
+
51
+ import logging
52
+ from dataclasses import dataclass, field
53
+ from typing import Dict, List, Optional, Tuple, Any
54
+
55
+ import numpy as np
56
+ import pandas as pd
57
+
58
+ logger = logging.getLogger(__name__)
59
+
60
+
61
@dataclass
class LinkInfo:
    """One directed import option into a bus, for clearing price calculation.

    Built by ``_build_link_adjacency``: each PyPSA link yields a forward
    entry (bus0 -> bus1), and additionally a reverse entry (bus1 -> bus0,
    named ``<link>_reverse``) when its ``p_min_pu`` allows negative flow.
    """
    # Link name; reverse-direction entries carry a "_reverse" suffix.
    link_name: str
    # Bus on the sending end of this direction (where the import comes from).
    adjacent_bus: str
    marginal_cost: np.ndarray  # Per timestep
    # Scalar transfer efficiency; builder coerces NaN/non-positive values to 1.0.
    efficiency: float
    spare_capacity_mask: np.ndarray  # Boolean per timestep: True if has spare capacity
69
+
70
+
71
@dataclass
class PriceSetter:
    """Tracks which asset sets the clearing price at one bus/timestep.

    Used for diagnostics/logging (see ``_log_clearing_price_summary``).
    """
    asset_name: str
    asset_type: str  # 'generator', 'storage', 'link_import', or 'unmet_load'
    # Price this asset set; for link imports this is the delivered import price.
    marginal_cost: float
    # Bus at which the price was set.
    bus: str
78
+
79
+
80
+ class ClearingPriceCalculator:
81
+ """
82
+ Calculate pay-as-clear prices for all buses in a solved network.
83
+
84
+ The clearing price is the marginal cost of the cheapest source with
85
+ spare capacity that could supply the next MW to a bus. This differs
86
+ from the LP shadow price (marginal_price) which includes constraint effects.
87
+
88
+ Example usage:
89
+ calculator = ClearingPriceCalculator()
90
+ clearing_prices = calculator.calculate_all_buses(conn, network, scenario_id)
91
+ # Returns: {'GB_Main': array([45.2, 46.1, ...]), 'FR': array([...]), ...}
92
+ """
93
+
94
def __init__(
    self,
    unmet_load_threshold: float = 10000.0,
    spare_capacity_tolerance: float = 0.01,
    min_dispatch_threshold: float = 1.0,
    verbose: bool = True,
    include_storage: bool = False,
    min_marginal_cost: float = 1.0,
):
    """Configure the calculator.

    Args:
        unmet_load_threshold: Generators whose marginal cost exceeds this
            value are treated as "unmet load" penalty units and only set
            a bus's price when no other source is available.
        spare_capacity_tolerance: Fractional headroom tolerance; a source
            counts as having spare capacity only when
            dispatch < available_capacity * (1 - tolerance).
        min_dispatch_threshold: Minimum dispatch (MW) for a source to be
            considered "dispatching"; absorbs numerical noise.
        verbose: Emit per-asset debug logging of price-setting details.
        include_storage: Allow storage units to set the clearing price.
            Off by default: PyPSA storage marginal_cost is typically ~0
            (no fuel cost) and storage acts as a price-taker, not a
            price-setter, in pay-as-clear markets.
        min_marginal_cost: Sources cheaper than this never set the price
            (e.g. filters out renewables with mc=0).
    """
    # Thresholds and tolerances.
    self.unmet_load_threshold = unmet_load_threshold
    self.spare_capacity_tolerance = spare_capacity_tolerance
    self.min_dispatch_threshold = min_dispatch_threshold
    self.min_marginal_cost = min_marginal_cost

    # Behaviour switches.
    self.verbose = verbose
    self.include_storage = include_storage

    # Per-bus record of which asset set the price at each timestep;
    # repopulated by calculate_all_buses().
    self._price_setters: Dict[str, List[Optional[PriceSetter]]] = {}
133
+
134
def calculate_all_buses(
    self,
    conn,
    network: "pypsa.Network",
    scenario_id: Optional[int] = None,
) -> Dict[str, np.ndarray]:
    """Compute pay-as-clear prices for every bus of a solved network.

    Args:
        conn: Database connection (not read by this method; kept for
            interface compatibility).
        network: Solved PyPSA Network object.
        scenario_id: Scenario ID (not read by this method; kept for
            interface compatibility).

    Returns:
        Mapping of bus name -> array of clearing prices, one entry per
        network snapshot.
    """
    periods = len(network.snapshots)
    buses = list(network.buses.index)

    logger.debug(f"Clearing price calculation: {len(buses)} buses, {periods} periods")

    # Fresh tracking of which asset sets the price at each bus/timestep.
    self._price_setters = {b: [None] * periods for b in buses}

    # Pass 1: cheapest LOCAL source with spare capacity at every bus.
    marginals, marginal_setters = self._calculate_local_marginals(network, periods)

    # Pass 2: inbound (possibly uncongested) link options per bus.
    adjacency = self._build_link_adjacency(network, periods)

    # Pass 3: combine local and import options, one bus at a time.
    result: Dict[str, np.ndarray] = {}
    for bus in buses:
        prices, bus_setters = self._calculate_bus_clearing_price(
            bus,
            marginals,
            marginal_setters,
            adjacency,
            periods,
        )
        result[bus] = prices
        self._price_setters[bus] = bus_setters

    # Compact summary line(s) for key buses only.
    self._log_clearing_price_summary(result, periods)

    return result
182
+
183
def _calculate_local_marginals(
    self,
    network: "pypsa.Network",
    n_periods: int
) -> Tuple[Dict[str, np.ndarray], Dict[str, List[Optional[PriceSetter]]]]:
    """Find, per bus and timestep, the cheapest LOCAL source with headroom.

    Every bus starts at +inf ("no source found"); each processing step may
    only lower the price where a cheaper source with spare capacity exists.

    Returns:
        Tuple of:
        - bus_name -> array of local marginal prices (inf where sourceless)
        - bus_name -> per-timestep PriceSetter (or None) for that source
    """
    buses = list(network.buses.index)
    marginals = {b: np.full(n_periods, np.inf) for b in buses}
    setters: Dict[str, List[Optional[PriceSetter]]] = {b: [None] * n_periods for b in buses}

    # Generators always participate.
    gen_stats = self._process_generators(network, n_periods, marginals, setters)

    # Storage participates only when explicitly enabled: its marginal_cost
    # is typically ~0 (opportunity cost, not fuel cost), so in pay-as-clear
    # markets it is a price-taker rather than a price-setter.
    storage_stats = (
        self._process_storage_units(network, n_periods, marginals, setters)
        if self.include_storage
        else {'processed': 0, 'with_spare': 0}
    )

    # Stores are currently skipped inside _process_stores.
    self._process_stores(network, n_periods, marginals, setters)

    logger.debug(f" Generators: {gen_stats['processed']} processed, {gen_stats['with_spare']} with spare capacity")
    logger.debug(f" Storage units: {storage_stats['processed']} processed, {storage_stats['with_spare']} with spare capacity")

    return marginals, setters
221
+
222
def _process_generators(
    self,
    network: "pypsa.Network",
    n_periods: int,
    local_marginals: Dict[str, np.ndarray],
    local_setters: Dict[str, List[Optional[PriceSetter]]],
) -> Dict[str, int]:
    """Process generators to find local marginals at each bus.

    Runs two passes over all generators:
    1. Regular generators (mc never above ``unmet_load_threshold``): where a
       generator has spare capacity and its marginal cost is both >=
       ``min_marginal_cost`` and below the current bus marginal, it becomes
       the bus's local price setter for that timestep.
    2. Unmet-load penalty generators: used only as a fallback, at timesteps
       where the bus marginal is still +inf (no other source available).

    Args:
        network: Solved PyPSA network (reads generators and generators_t).
        n_periods: Number of snapshots; all per-timestep arrays are coerced
            to this length via _pad_or_truncate.
        local_marginals: Mutated in place; bus -> per-timestep price array.
        local_setters: Mutated in place; bus -> per-timestep PriceSetter.

    Returns:
        Stats dict with keys 'processed', 'with_spare', 'skipped_unmet',
        'skipped_no_pnom'.
    """
    stats = {'processed': 0, 'with_spare': 0, 'skipped_unmet': 0, 'skipped_no_pnom': 0}

    if network.generators.empty:
        return stats

    generators = network.generators

    # Get dispatch timeseries; without it nothing can be assessed.
    if hasattr(network.generators_t, 'p') and not network.generators_t.p.empty:
        p_dispatch = network.generators_t.p
    else:
        logger.warning(" No generator dispatch data found (generators_t.p empty)")
        return stats

    # Get p_max_pu timeseries (or fall back to the static column per generator)
    if hasattr(network.generators_t, 'p_max_pu') and not network.generators_t.p_max_pu.empty:
        p_max_pu_ts = network.generators_t.p_max_pu
    else:
        p_max_pu_ts = None

    # Get marginal_cost timeseries (or fall back to the static column per generator)
    if hasattr(network.generators_t, 'marginal_cost') and not network.generators_t.marginal_cost.empty:
        marginal_cost_ts = network.generators_t.marginal_cost
    else:
        marginal_cost_ts = None

    # First pass: regular generators (not unmet load)
    for gen_name in generators.index:
        gen = generators.loc[gen_name]
        bus = gen['bus']

        # Generator attached to a bus outside this network view: ignore.
        if bus not in local_marginals:
            continue

        # Get p_nom; a generator with no rated capacity can never supply
        # the marginal MW.
        p_nom = gen.get('p_nom', 0)
        if p_nom <= 0:
            stats['skipped_no_pnom'] += 1
            continue

        # Get dispatch values (skip generators absent from the solution).
        if gen_name not in p_dispatch.columns:
            continue
        p_values = p_dispatch[gen_name].values
        if len(p_values) != n_periods:
            p_values = self._pad_or_truncate(p_values, n_periods)

        # Get p_max_pu values (timeseries preferred, static fallback).
        if p_max_pu_ts is not None and gen_name in p_max_pu_ts.columns:
            p_max_pu_values = p_max_pu_ts[gen_name].values
        else:
            p_max_pu_values = np.full(n_periods, gen.get('p_max_pu', 1.0))
        if len(p_max_pu_values) != n_periods:
            p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

        # Get marginal_cost values (timeseries preferred, static fallback).
        if marginal_cost_ts is not None and gen_name in marginal_cost_ts.columns:
            mc_values = marginal_cost_ts[gen_name].values
        else:
            mc_values = np.full(n_periods, gen.get('marginal_cost', 0.0))
        if len(mc_values) != n_periods:
            mc_values = self._pad_or_truncate(mc_values, n_periods)

        # Skip unmet load generators here (handled in the second pass);
        # a single above-threshold timestep flags the whole generator.
        if np.any(mc_values > self.unmet_load_threshold):
            stats['skipped_unmet'] += 1
            continue

        stats['processed'] += 1

        # Calculate available capacity per timestep.
        available = p_nom * p_max_pu_values

        # Find timesteps where generator has spare capacity (strictly below
        # its available capacity, less the configured tolerance band).
        spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)
        n_spare = spare_capacity_mask.sum()

        if n_spare > 0:
            stats['with_spare'] += 1

        # Log details for this generator
        if self.verbose and n_spare > 0:
            mean_mc = np.mean(mc_values)
            mean_dispatch = np.mean(p_values)
            mean_available = np.mean(available)
            logger.debug(f" {gen_name} @ {bus}: mc={mean_mc:.2f}, dispatch={mean_dispatch:.1f}MW, available={mean_available:.1f}MW, spare_periods={n_spare}/{n_periods}")

        # Update local marginal where this generator is cheaper and has spare capacity.
        # Also filter by min_marginal_cost (e.g., to exclude renewables with mc=0).
        for t in range(n_periods):
            if (spare_capacity_mask[t]
                    and mc_values[t] >= self.min_marginal_cost
                    and mc_values[t] < local_marginals[bus][t]):
                local_marginals[bus][t] = mc_values[t]
                local_setters[bus][t] = PriceSetter(
                    asset_name=gen_name,
                    asset_type='generator',
                    marginal_cost=mc_values[t],
                    bus=bus
                )

    # Second pass: handle unmet load generators (only set price if no other source available)
    for gen_name in generators.index:
        gen = generators.loc[gen_name]
        bus = gen['bus']

        if bus not in local_marginals:
            continue

        p_nom = gen.get('p_nom', 0)
        if p_nom <= 0:
            continue

        # Get marginal_cost values (same fallback chain as the first pass).
        if marginal_cost_ts is not None and gen_name in marginal_cost_ts.columns:
            mc_values = marginal_cost_ts[gen_name].values
        else:
            mc_values = np.full(n_periods, gen.get('marginal_cost', 0.0))
        if len(mc_values) != n_periods:
            mc_values = self._pad_or_truncate(mc_values, n_periods)

        # Only process unmet load generators in this pass.
        if not np.any(mc_values > self.unmet_load_threshold):
            continue

        # Get dispatch and check spare capacity
        if gen_name not in p_dispatch.columns:
            continue
        p_values = p_dispatch[gen_name].values
        if len(p_values) != n_periods:
            p_values = self._pad_or_truncate(p_values, n_periods)

        if p_max_pu_ts is not None and gen_name in p_max_pu_ts.columns:
            p_max_pu_values = p_max_pu_ts[gen_name].values
        else:
            p_max_pu_values = np.full(n_periods, gen.get('p_max_pu', 1.0))
        if len(p_max_pu_values) != n_periods:
            p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

        available = p_nom * p_max_pu_values
        spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)

        # Only use unmet load price where local_marginal is still inf (no other source).
        # Note: unlike the first pass, no min_marginal_cost filter applies here.
        n_set = 0
        for t in range(n_periods):
            if spare_capacity_mask[t] and np.isinf(local_marginals[bus][t]):
                local_marginals[bus][t] = mc_values[t]
                local_setters[bus][t] = PriceSetter(
                    asset_name=gen_name,
                    asset_type='unmet_load',
                    marginal_cost=mc_values[t],
                    bus=bus
                )
                n_set += 1

        if n_set > 0:
            logger.debug(f" UNMET LOAD {gen_name}: set price for {n_set} periods (no other source)")

    return stats
389
+
390
def _process_storage_units(
    self,
    network: "pypsa.Network",
    n_periods: int,
    local_marginals: Dict[str, np.ndarray],
    local_setters: Dict[str, List[Optional[PriceSetter]]],
) -> Dict[str, int]:
    """Process storage units (discharge capacity) to find local marginals.

    Only called when ``include_storage`` is enabled. A storage unit sets the
    bus price at timesteps where it could discharge more (p < p_nom *
    p_max_pu, within tolerance) and its marginal cost beats the current bus
    marginal.

    NOTE(review): unlike _process_generators, this applies no
    min_marginal_cost floor and no unmet-load-threshold exclusion — a
    zero-cost storage unit can therefore set the price here. Confirm this
    asymmetry is intended.

    Args:
        network: Solved PyPSA network (reads storage_units / storage_units_t).
        n_periods: Number of snapshots; per-timestep arrays are coerced to
            this length via _pad_or_truncate.
        local_marginals: Mutated in place; bus -> per-timestep price array.
        local_setters: Mutated in place; bus -> per-timestep PriceSetter.

    Returns:
        Stats dict with keys 'processed' and 'with_spare'.
    """
    stats = {'processed': 0, 'with_spare': 0}

    if network.storage_units.empty:
        return stats

    storage_units = network.storage_units

    # Get dispatch timeseries (positive = discharge)
    if hasattr(network.storage_units_t, 'p') and not network.storage_units_t.p.empty:
        p_dispatch = network.storage_units_t.p
    else:
        logger.warning(" No storage unit dispatch data found")
        return stats

    for su_name in storage_units.index:
        su = storage_units.loc[su_name]
        bus = su['bus']

        # Attached to a bus outside this network view: ignore.
        if bus not in local_marginals:
            continue

        # Get p_nom (discharge capacity); zero-rated units can't supply.
        p_nom = su.get('p_nom', 0)
        if p_nom <= 0:
            continue

        # Get dispatch values (skip units absent from the solution).
        if su_name not in p_dispatch.columns:
            continue
        p_values = p_dispatch[su_name].values
        if len(p_values) != n_periods:
            p_values = self._pad_or_truncate(p_values, n_periods)

        # Get p_max_pu (timeseries preferred, static fallback).
        if hasattr(network.storage_units_t, 'p_max_pu') and su_name in network.storage_units_t.p_max_pu.columns:
            p_max_pu_values = network.storage_units_t.p_max_pu[su_name].values
        else:
            p_max_pu_values = np.full(n_periods, su.get('p_max_pu', 1.0))
        if len(p_max_pu_values) != n_periods:
            p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

        # Get marginal_cost (timeseries preferred, static fallback).
        if hasattr(network.storage_units_t, 'marginal_cost') and su_name in network.storage_units_t.marginal_cost.columns:
            mc_values = network.storage_units_t.marginal_cost[su_name].values
        else:
            mc_values = np.full(n_periods, su.get('marginal_cost', 0.0))
        if len(mc_values) != n_periods:
            mc_values = self._pad_or_truncate(mc_values, n_periods)

        stats['processed'] += 1

        # Calculate available discharge capacity per timestep.
        available = p_nom * p_max_pu_values

        # Spare capacity for discharge: current discharge < max discharge.
        # (Charging shows as negative p, which trivially satisfies this.)
        spare_capacity_mask = p_values < available * (1 - self.spare_capacity_tolerance)
        n_spare = spare_capacity_mask.sum()

        if n_spare > 0:
            stats['with_spare'] += 1

        if self.verbose and n_spare > 0:
            logger.debug(f" {su_name} @ {bus}: mc={np.mean(mc_values):.2f}, spare_periods={n_spare}/{n_periods}")

        # Update local marginal where this unit is cheaper and has headroom.
        for t in range(n_periods):
            if spare_capacity_mask[t] and mc_values[t] < local_marginals[bus][t]:
                local_marginals[bus][t] = mc_values[t]
                local_setters[bus][t] = PriceSetter(
                    asset_name=su_name,
                    asset_type='storage',
                    marginal_cost=mc_values[t],
                    bus=bus
                )

    return stats
474
+
475
def _process_stores(
    self,
    network: "pypsa.Network",
    n_periods: int,
    local_marginals: Dict[str, np.ndarray],
    local_setters: Dict[str, List[Optional[PriceSetter]]],
) -> None:
    """Placeholder for Store components; currently an intentional no-op.

    Stores lack a fixed power rating comparable to a generator's or
    storage unit's p_nom, so they are not yet folded into the local
    marginal search. When stores exist, this only logs that they were
    skipped; local_marginals/local_setters are never modified.
    """
    if not network.stores.empty:
        # No p_nom-style bound on injection makes the spare-capacity test
        # used elsewhere inapplicable here.
        logger.debug(f" Skipping {len(network.stores)} stores (complex capacity constraints)")
489
+
490
def _build_link_adjacency(
    self,
    network: "pypsa.Network",
    n_periods: int,
) -> Dict[str, List[LinkInfo]]:
    """
    Build a map of bus -> list of inbound link options.

    For every link with p_nom > 0, an entry is added under bus1 (imports
    via forward flow from bus0). If the link permits negative flow
    (any p_min_pu < 0), a second entry named "<link>_reverse" is added
    under bus0 (imports via reverse flow from bus1).

    Spare capacity is judged against the solved flow p0: forward headroom
    is p0 < p_nom * p_max_pu, reverse headroom is max(-p0, 0) <
    p_nom * |p_min_pu| (both less the configured tolerance band).

    NOTE(review): the reverse entry reuses the same marginal_cost and
    efficiency as the forward direction — confirm that is the intended
    treatment for losses/fees on reverse flow.

    Returns:
        Dict mapping destination bus -> list of LinkInfo import options.
    """
    link_adjacency: Dict[str, List[LinkInfo]] = {}

    if network.links.empty:
        return link_adjacency

    links = network.links

    # Get link flow timeseries; missing flows are treated as zero below.
    if hasattr(network.links_t, 'p0') and not network.links_t.p0.empty:
        p0_dispatch = network.links_t.p0
    else:
        p0_dispatch = None
        logger.warning(" No link flow data found (links_t.p0 empty)")

    for link_name in links.index:
        link = links.loc[link_name]
        bus0 = link['bus0']
        bus1 = link['bus1']

        # Zero-rated links can never carry the marginal MW.
        p_nom = link.get('p_nom', 0)
        if p_nom <= 0:
            continue

        # Sanitize efficiency: NaN or non-positive values are coerced to 1.0
        # so the import-price division stays well-defined.
        efficiency = link.get('efficiency', 1.0)
        if pd.isna(efficiency) or efficiency <= 0:
            efficiency = 1.0

        # Get marginal_cost (timeseries preferred, static fallback).
        if hasattr(network.links_t, 'marginal_cost') and link_name in network.links_t.marginal_cost.columns:
            mc_values = network.links_t.marginal_cost[link_name].values
        else:
            mc_values = np.full(n_periods, link.get('marginal_cost', 0.0))
        if len(mc_values) != n_periods:
            mc_values = self._pad_or_truncate(mc_values, n_periods)

        # Get p_max_pu (timeseries preferred, static fallback).
        if hasattr(network.links_t, 'p_max_pu') and link_name in network.links_t.p_max_pu.columns:
            p_max_pu_values = network.links_t.p_max_pu[link_name].values
        else:
            p_max_pu_values = np.full(n_periods, link.get('p_max_pu', 1.0))
        if len(p_max_pu_values) != n_periods:
            p_max_pu_values = self._pad_or_truncate(p_max_pu_values, n_periods)

        # Get p_min_pu (timeseries preferred, static fallback); negative
        # values indicate the link may flow in reverse.
        if hasattr(network.links_t, 'p_min_pu') and link_name in network.links_t.p_min_pu.columns:
            p_min_pu_values = network.links_t.p_min_pu[link_name].values
        else:
            p_min_pu_values = np.full(n_periods, link.get('p_min_pu', 0.0))
        if len(p_min_pu_values) != n_periods:
            p_min_pu_values = self._pad_or_truncate(p_min_pu_values, n_periods)

        # Get actual flow (zeros when no solved flow is available).
        if p0_dispatch is not None and link_name in p0_dispatch.columns:
            p0_values = p0_dispatch[link_name].values
        else:
            p0_values = np.zeros(n_periods)
        if len(p0_values) != n_periods:
            p0_values = self._pad_or_truncate(p0_values, n_periods)

        # Direction 1: bus0 -> bus1 (positive flow)
        max_forward = p_nom * p_max_pu_values
        spare_forward = p0_values < max_forward * (1 - self.spare_capacity_tolerance)

        if bus1 not in link_adjacency:
            link_adjacency[bus1] = []
        link_adjacency[bus1].append(LinkInfo(
            link_name=link_name,
            adjacent_bus=bus0,
            marginal_cost=mc_values,
            efficiency=efficiency,
            spare_capacity_mask=spare_forward,
        ))

        logger.debug(f" {link_name}: {bus0} -> {bus1}, p_nom={p_nom:.0f}MW, eff={efficiency:.2f}, spare_periods={spare_forward.sum()}")

        # Direction 2: bus1 -> bus0 (negative flow, if allowed)
        if np.any(p_min_pu_values < 0):
            max_reverse = p_nom * np.abs(p_min_pu_values)
            # Current reverse usage: only the negative part of p0 counts.
            current_reverse = np.maximum(-p0_values, 0)
            spare_reverse = current_reverse < max_reverse * (1 - self.spare_capacity_tolerance)

            if bus0 not in link_adjacency:
                link_adjacency[bus0] = []
            link_adjacency[bus0].append(LinkInfo(
                link_name=f"{link_name}_reverse",
                adjacent_bus=bus1,
                marginal_cost=mc_values,
                efficiency=efficiency,
                spare_capacity_mask=spare_reverse,
            ))

            logger.debug(f" {link_name}_reverse: {bus1} -> {bus0}, spare_periods={spare_reverse.sum()}")

    return link_adjacency
592
+
593
def _calculate_bus_clearing_price(
    self,
    bus_name: str,
    local_marginals: Dict[str, np.ndarray],
    local_setters: Dict[str, List[Optional[PriceSetter]]],
    link_adjacency: Dict[str, List[LinkInfo]],
    n_periods: int,
) -> Tuple[np.ndarray, List[Optional[PriceSetter]]]:
    """Combine local and import options into one bus's clearing price.

    Starts from the bus's local marginal price and lowers it wherever an
    inbound link with spare capacity offers cheaper delivered energy.
    Timesteps left at +inf (no source anywhere) are priced at 10x the
    unmet-load threshold to mark genuine scarcity.

    Returns:
        Tuple of (clearing_prices array, per-timestep PriceSetter list).
    """
    prices = np.copy(local_marginals.get(bus_name, np.full(n_periods, np.inf)))
    setters = list(local_setters.get(bus_name, [None] * n_periods))

    # Try every inbound link; keep whichever delivered price is cheapest.
    n_import_better = 0
    for link in link_adjacency.get(bus_name, []):
        neighbour_marginal = local_marginals.get(link.adjacent_bus, np.full(n_periods, np.inf))

        for t in range(n_periods):
            if not link.spare_capacity_mask[t]:
                continue
            # Delivered price: neighbour's local marginal plus link fee,
            # grossed up for transfer losses.
            offer = (neighbour_marginal[t] + link.marginal_cost[t]) / link.efficiency
            if offer < prices[t]:
                prices[t] = offer
                setters[t] = PriceSetter(
                    asset_name=link.link_name,
                    asset_type='link_import',
                    marginal_cost=offer,
                    bus=bus_name
                )
                n_import_better += 1

    # Any timestep still at +inf has no available source: true scarcity.
    n_scarcity = np.isinf(prices).sum()
    prices = np.where(np.isinf(prices), self.unmet_load_threshold * 10, prices)

    # Flag suspicious outcomes (zero prices or scarcity periods) loudly.
    n_zeros = (prices == 0).sum()
    if n_zeros > 0 or n_scarcity > 0:
        logger.warning(f" {bus_name}: zeros={n_zeros}, scarcity={n_scarcity}")

    return prices, setters
641
+
642
+ def _log_clearing_price_summary(
643
+ self,
644
+ clearing_prices: Dict[str, np.ndarray],
645
+ n_periods: int,
646
+ ) -> None:
647
+ """Log compact summary of clearing prices."""
648
+ # Log summary for key buses (GB_Main if present, otherwise all)
649
+ key_buses = ['GB_Main'] if 'GB_Main' in clearing_prices else list(clearing_prices.keys())[:3]
650
+
651
+ for bus_name in key_buses:
652
+ prices = clearing_prices.get(bus_name)
653
+ if prices is None:
654
+ continue
655
+
656
+ setters = self._price_setters.get(bus_name, [])
657
+
658
+ # Count price setters by type
659
+ setter_counts: Dict[str, int] = {}
660
+ for setter in setters:
661
+ key = setter.asset_type if setter else 'none'
662
+ setter_counts[key] = setter_counts.get(key, 0) + 1
663
+
664
+ # Summary stats
665
+ valid = prices[(prices > 0) & (prices < self.unmet_load_threshold)]
666
+ setters_str = ", ".join(f"{k}:{v}" for k, v in sorted(setter_counts.items(), key=lambda x: -x[1]))
667
+
668
+ if len(valid) > 0:
669
+ logger.info(f" Clearing prices [{bus_name}]: mean=£{np.mean(valid):.2f}, range=[£{np.min(valid):.2f}, £{np.max(valid):.2f}], setters: {setters_str}")
670
+
671
+ def _pad_or_truncate(self, arr: np.ndarray, target_length: int) -> np.ndarray:
672
+ """Pad array with last value or truncate to target length."""
673
+ arr = np.asarray(arr)
674
+ if len(arr) >= target_length:
675
+ return arr[:target_length]
676
+ else:
677
+ padding = np.full(target_length - len(arr), arr[-1] if len(arr) > 0 else 0)
678
+ return np.concatenate([arr, padding])