pyconvexity 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic. Click here for more details.

Files changed (35)
  1. pyconvexity/__init__.py +30 -6
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/data/README.md +101 -0
  4. pyconvexity/data/__init__.py +18 -0
  5. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  6. pyconvexity/data/loaders/__init__.py +3 -0
  7. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  8. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/cache.py +212 -0
  10. pyconvexity/data/sources/__init__.py +5 -0
  11. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  12. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  13. pyconvexity/data/sources/gem.py +412 -0
  14. pyconvexity/io/__init__.py +32 -0
  15. pyconvexity/io/excel_exporter.py +991 -0
  16. pyconvexity/io/excel_importer.py +1112 -0
  17. pyconvexity/io/netcdf_exporter.py +192 -0
  18. pyconvexity/io/netcdf_importer.py +599 -0
  19. pyconvexity/models/__init__.py +7 -0
  20. pyconvexity/models/attributes.py +3 -1
  21. pyconvexity/models/components.py +3 -0
  22. pyconvexity/models/scenarios.py +177 -0
  23. pyconvexity/solvers/__init__.py +29 -0
  24. pyconvexity/solvers/pypsa/__init__.py +24 -0
  25. pyconvexity/solvers/pypsa/api.py +398 -0
  26. pyconvexity/solvers/pypsa/batch_loader.py +311 -0
  27. pyconvexity/solvers/pypsa/builder.py +656 -0
  28. pyconvexity/solvers/pypsa/constraints.py +321 -0
  29. pyconvexity/solvers/pypsa/solver.py +1255 -0
  30. pyconvexity/solvers/pypsa/storage.py +2207 -0
  31. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/METADATA +5 -2
  32. pyconvexity-0.1.3.dist-info/RECORD +45 -0
  33. pyconvexity-0.1.1.dist-info/RECORD +0 -20
  34. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/WHEEL +0 -0
  35. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,177 @@
1
+ """
2
+ Scenario management operations for PyConvexity.
3
+
4
+ Provides operations for creating and managing scenarios within networks.
5
+ """
6
+
7
+ import sqlite3
8
+ import logging
9
+ from typing import List, Optional
10
+ from datetime import datetime
11
+
12
+ from pyconvexity.core.errors import ValidationError, DatabaseError
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
def create_scenario(
    conn: sqlite3.Connection,
    network_id: int,
    name: str,
    description: Optional[str] = None,
    is_master: bool = False,
) -> int:
    """
    Create a new scenario for a network.

    Args:
        conn: Database connection
        network_id: ID of the network
        name: Name of the scenario
        description: Optional description
        is_master: Whether this is a master scenario (default False)

    Returns:
        ID of the newly created scenario

    Raises:
        ValidationError: If network doesn't exist or scenario name conflicts
        DatabaseError: If creation fails
    """
    # Guard: the target network must exist before attaching a scenario to it.
    network_count = conn.execute(
        "SELECT COUNT(*) FROM networks WHERE id = ?", (network_id,)
    ).fetchone()[0]
    if network_count == 0:
        raise ValidationError(f"Network with ID {network_id} not found")

    # Scenario names must be unique within a single network.
    duplicate_count = conn.execute(
        "SELECT COUNT(*) FROM scenarios WHERE network_id = ? AND name = ?",
        (network_id, name),
    ).fetchone()[0]
    if duplicate_count > 0:
        raise ValidationError(f"Scenario with name '{name}' already exists in network {network_id}")

    # Insert the scenario (database triggers will handle master scenario uniqueness)
    insert_cursor = conn.execute(
        "INSERT INTO scenarios (network_id, name, description, is_master, created_at) "
        "VALUES (?, ?, ?, ?, datetime('now'))",
        (network_id, name, description, is_master),
    )

    scenario_id = insert_cursor.lastrowid
    if not scenario_id:
        raise DatabaseError("Failed to create scenario")

    logger.info(f"Created scenario '{name}' (ID: {scenario_id}) for network {network_id}")
    return scenario_id
68
+
69
+
70
def list_scenarios(conn: sqlite3.Connection, network_id: int) -> List[dict]:
    """
    List all scenarios for a network.

    Args:
        conn: Database connection
        network_id: ID of the network

    Returns:
        List of scenario dictionaries with keys: id, network_id, name, description, is_master, created_at

    Raises:
        DatabaseError: If query fails
    """
    # Master scenario first, then remaining scenarios in creation order.
    rows = conn.execute(
        "SELECT id, network_id, name, description, is_master, created_at "
        "FROM scenarios "
        "WHERE network_id = ? "
        "ORDER BY is_master DESC, created_at ASC",
        (network_id,),
    ).fetchall()

    # Column order matches the SELECT list; is_master is stored as 0/1 in SQLite
    # and is normalized to a Python bool here.
    columns = ('id', 'network_id', 'name', 'description', 'is_master', 'created_at')
    scenarios = [dict(zip(columns, row)) for row in rows]
    for scenario in scenarios:
        scenario['is_master'] = bool(scenario['is_master'])

    logger.debug(f"Found {len(scenarios)} scenarios for network {network_id}")
    return scenarios
106
+
107
+
108
def get_scenario(conn: sqlite3.Connection, scenario_id: int) -> dict:
    """
    Get a specific scenario by ID.

    Args:
        conn: Database connection
        scenario_id: ID of the scenario

    Returns:
        Scenario dictionary with keys: id, network_id, name, description, is_master, created_at

    Raises:
        ValidationError: If scenario not found
        DatabaseError: If query fails
    """
    row = conn.execute(
        "SELECT id, network_id, name, description, is_master, created_at "
        "FROM scenarios "
        "WHERE id = ?",
        (scenario_id,),
    ).fetchone()

    if not row:
        raise ValidationError(f"Scenario with ID {scenario_id} not found")

    # Unpack in SELECT order; is_master is stored as 0/1 and normalized to bool.
    row_id, network_id, name, description, is_master, created_at = row
    return {
        'id': row_id,
        'network_id': network_id,
        'name': name,
        'description': description,
        'is_master': bool(is_master),
        'created_at': created_at,
    }
143
+
144
+
145
def delete_scenario(conn: sqlite3.Connection, scenario_id: int) -> None:
    """
    Delete a scenario (cannot delete master scenarios).

    Args:
        conn: Database connection
        scenario_id: ID of the scenario to delete

    Raises:
        ValidationError: If scenario not found or is master scenario
        DatabaseError: If deletion fails
    """
    row = conn.execute(
        "SELECT is_master FROM scenarios WHERE id = ?",
        (scenario_id,),
    ).fetchone()

    if not row:
        raise ValidationError(f"Scenario with ID {scenario_id} not found")

    if row[0]:
        # Master scenarios are protected from deletion.
        raise ValidationError("Cannot delete master scenario")

    # Related component attributes are removed by the database's cascade rules.
    deleted_rows = conn.execute(
        "DELETE FROM scenarios WHERE id = ?", (scenario_id,)
    ).rowcount
    if deleted_rows == 0:
        raise DatabaseError("Failed to delete scenario")

    logger.info(f"Deleted scenario {scenario_id}")
@@ -0,0 +1,29 @@
1
+ """
2
+ Solver module for PyConvexity.
3
+
4
+ Provides interfaces to various optimization solvers for energy system modeling.
5
+ """
6
+
7
+ # Try to import PyPSA solver with graceful fallback
8
+ try:
9
+ from pyconvexity.solvers.pypsa import (
10
+ solve_network,
11
+ build_pypsa_network,
12
+ solve_pypsa_network,
13
+ load_network_components,
14
+ apply_constraints,
15
+ store_solve_results
16
+ )
17
+
18
+ __all__ = [
19
+ "solve_network",
20
+ "build_pypsa_network",
21
+ "solve_pypsa_network",
22
+ "load_network_components",
23
+ "apply_constraints",
24
+ "store_solve_results"
25
+ ]
26
+
27
+ except ImportError:
28
+ # PyPSA not available
29
+ __all__ = []
@@ -0,0 +1,24 @@
1
+ """
2
+ PyPSA solver integration for PyConvexity.
3
+
4
+ Provides high-level and low-level APIs for building PyPSA networks from database,
5
+ solving them, and storing results back to the database.
6
+ """
7
+
8
+ from pyconvexity.solvers.pypsa.api import (
9
+ solve_network,
10
+ build_pypsa_network,
11
+ solve_pypsa_network,
12
+ load_network_components,
13
+ apply_constraints,
14
+ store_solve_results
15
+ )
16
+
17
+ __all__ = [
18
+ "solve_network",
19
+ "build_pypsa_network",
20
+ "solve_pypsa_network",
21
+ "load_network_components",
22
+ "apply_constraints",
23
+ "store_solve_results"
24
+ ]
@@ -0,0 +1,398 @@
1
+ """
2
+ High-level API for PyPSA solver integration.
3
+
4
+ Provides user-friendly functions for the most common workflows.
5
+ """
6
+
7
+ import logging
8
+ from typing import Dict, Any, Optional, Callable
9
+
10
+ from pyconvexity.core.database import database_context
11
+ from pyconvexity.solvers.pypsa.builder import NetworkBuilder
12
+ from pyconvexity.solvers.pypsa.solver import NetworkSolver
13
+ from pyconvexity.solvers.pypsa.storage import ResultStorage
14
+ from pyconvexity.solvers.pypsa.constraints import ConstraintApplicator
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
def solve_network(
    db_path: str,
    network_id: int,
    scenario_id: Optional[int] = None,
    solver_name: str = "highs",
    solver_options: Optional[Dict[str, Any]] = None,
    constraints_dsl: Optional[str] = None,
    discount_rate: Optional[float] = None,
    progress_callback: Optional[Callable[[int, str], None]] = None,
    return_detailed_results: bool = True
) -> Dict[str, Any]:
    """
    Complete solve workflow: build PyPSA network from database, solve, store results.

    This is the main high-level function that most users should use. It handles
    the complete workflow of loading data from database, building a PyPSA network,
    solving it, and storing results back to the database.

    Args:
        db_path: Path to the database file
        network_id: ID of the network to solve
        scenario_id: Optional scenario ID (uses master scenario if None)
        solver_name: Solver to use (default: "highs")
        solver_options: Optional solver-specific options
        constraints_dsl: Optional DSL constraints to apply
        discount_rate: Optional discount rate for multi-period optimization
        progress_callback: Optional callback for progress updates (progress: int, message: str)
        return_detailed_results: If True, return comprehensive results; if False, return simple status

    Returns:
        Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise

    Raises:
        DatabaseError: If database operations fail
        ValidationError: If network data is invalid
        ImportError: If PyPSA is not available
    """
    if progress_callback:
        progress_callback(0, "Starting network solve...")

    with database_context(db_path) as conn:
        # Load network configuration with scenario awareness (matches old code)
        from pyconvexity.models import get_network_config
        network_config = get_network_config(conn, network_id, scenario_id)
        if progress_callback:
            progress_callback(8, "Loaded network configuration")

        # Explicit parameter overrides take precedence over stored configuration.
        effective_discount_rate = discount_rate if discount_rate is not None else network_config.get('discount_rate', 0.05)

        # Build network
        if progress_callback:
            progress_callback(10, "Building PyPSA network...")

        builder = NetworkBuilder()
        network = builder.build_network(conn, network_id, scenario_id, progress_callback)

        if progress_callback:
            progress_callback(50, f"Network built: {len(network.buses)} buses, {len(network.generators)} generators")

        # Apply constraints.
        # FIX: constraint application previously ran only when constraints_dsl OR
        # progress_callback was supplied, so database-stored constraints were
        # silently skipped for callers without a progress callback. Progress
        # reporting must never change solve semantics, so constraints are now
        # always applied.
        constraint_applicator = ConstraintApplicator()
        if progress_callback:
            progress_callback(60, "Applying constraints...")
        constraint_applicator.apply_constraints(conn, network_id, network, scenario_id, constraints_dsl)

        # Solve network
        if progress_callback:
            progress_callback(70, f"Solving with {solver_name}...")

        solver = NetworkSolver()
        solve_result = solver.solve_network(
            network,
            solver_name=solver_name,
            solver_options=solver_options,
            discount_rate=effective_discount_rate,  # Use effective discount rate from config
            conn=conn,
            network_id=network_id,
            scenario_id=scenario_id,
            constraint_applicator=constraint_applicator
        )

        if progress_callback:
            progress_callback(85, "Storing results...")

        # Store results - ALWAYS store results regardless of return_detailed_results flag
        storage = ResultStorage()
        storage_result = storage.store_results(
            conn, network_id, network, solve_result, scenario_id
        )

        if progress_callback:
            progress_callback(100, "Solve completed successfully")

        # Return simple status if requested (for sidecar/async usage)
        # Results are now stored in database regardless of this flag
        if not return_detailed_results:
            return {
                "success": solve_result.get("success", False),
                "message": "Solve completed successfully" if solve_result.get("success") else "Solve failed",
                "error": solve_result.get("error") if not solve_result.get("success") else None,
                "network_id": network_id,
                "scenario_id": scenario_id
            }

        # Combine results in comprehensive format for detailed analysis
        comprehensive_result = {
            **solve_result,
            "storage_stats": storage_result,
            "network_id": network_id,
            "scenario_id": scenario_id
        }

        # Transform to include sidecar-compatible format
        return _transform_to_comprehensive_format(comprehensive_result)
135
+
136
+
137
def build_pypsa_network(
    db_path: str,
    network_id: int,
    scenario_id: Optional[int] = None,
    progress_callback: Optional[Callable[[int, str], None]] = None
) -> 'pypsa.Network':
    """
    Build PyPSA network object from database.

    Loads all network data from the database and constructs a PyPSA Network
    object ready for solving or analysis. Useful when you want to inspect or
    modify the network before solving.

    Args:
        db_path: Path to the database file
        network_id: ID of the network to build
        scenario_id: Optional scenario ID (uses master scenario if None)
        progress_callback: Optional callback for progress updates

    Returns:
        PyPSA Network object ready for solving

    Raises:
        DatabaseError: If database operations fail
        ValidationError: If network data is invalid
        ImportError: If PyPSA is not available
    """
    with database_context(db_path) as conn:
        network_builder = NetworkBuilder()
        network = network_builder.build_network(
            conn, network_id, scenario_id, progress_callback
        )
    return network
167
+
168
+
169
def solve_pypsa_network(
    network: 'pypsa.Network',
    db_path: str,
    network_id: int,
    scenario_id: Optional[int] = None,
    solver_name: str = "highs",
    solver_options: Optional[Dict[str, Any]] = None,
    store_results: bool = True,
    progress_callback: Optional[Callable[[int, str], None]] = None
) -> Dict[str, Any]:
    """
    Solve PyPSA network and optionally store results back to database.

    Takes an existing PyPSA network (e.g., from build_pypsa_network), solves it,
    and optionally stores the results back to the database.

    Args:
        network: PyPSA Network object to solve
        db_path: Path to the database file (needed for result storage)
        network_id: ID of the network (for result storage)
        scenario_id: Optional scenario ID
        solver_name: Solver to use (default: "highs")
        solver_options: Optional solver-specific options
        store_results: Whether to store results back to database (default: True)
        progress_callback: Optional callback for progress updates

    Returns:
        Dictionary with solve results and statistics

    Raises:
        DatabaseError: If database operations fail (when store_results=True)
        ImportError: If PyPSA is not available
    """
    def _report(percent: int, message: str) -> None:
        # Forward progress only when the caller supplied a callback.
        if progress_callback:
            progress_callback(percent, message)

    _report(0, f"Solving network with {solver_name}...")

    # Run the solve itself; the result dict carries status and statistics.
    result = NetworkSolver().solve_network(
        network,
        solver_name=solver_name,
        solver_options=solver_options
    )

    _report(70, "Solve completed")

    if store_results:
        _report(80, "Storing results...")
        with database_context(db_path) as conn:
            result["storage_stats"] = ResultStorage().store_results(
                conn, network_id, network, result, scenario_id
            )

    _report(100, "Complete")
    return result
232
+
233
+
234
def load_network_components(
    db_path: str,
    network_id: int,
    scenario_id: Optional[int] = None
) -> Dict[str, Any]:
    """
    Load all network components and attributes as structured data.

    This low-level function loads network data without building a PyPSA network.
    Useful for analysis, validation, or building custom network representations.

    Args:
        db_path: Path to the database file
        network_id: ID of the network to load
        scenario_id: Optional scenario ID

    Returns:
        Dictionary containing all network components and metadata

    Raises:
        DatabaseError: If database operations fail
    """
    with database_context(db_path) as conn:
        network_data = NetworkBuilder().load_network_data(
            conn, network_id, scenario_id
        )
    return network_data
259
+
260
+
261
def apply_constraints(
    network: 'pypsa.Network',
    db_path: str,
    network_id: int,
    scenario_id: Optional[int] = None,
    constraints_dsl: Optional[str] = None
) -> None:
    """
    Apply custom constraints to PyPSA network.

    Applies database-stored constraints and optional DSL constraints to an
    existing PyPSA network. Modifies the network in-place.

    Args:
        network: PyPSA Network object to modify
        db_path: Path to the database file
        network_id: ID of the network
        scenario_id: Optional scenario ID
        constraints_dsl: Optional DSL constraints string

    Raises:
        DatabaseError: If database operations fail
        ValidationError: If constraints are invalid
    """
    with database_context(db_path) as conn:
        applicator = ConstraintApplicator()
        applicator.apply_constraints(
            conn, network_id, network, scenario_id, constraints_dsl
        )
290
+
291
+
292
def store_solve_results(
    network: 'pypsa.Network',
    db_path: str,
    network_id: int,
    scenario_id: Optional[int],
    solve_metadata: Dict[str, Any]
) -> Dict[str, Any]:
    """
    Store PyPSA solve results back to database.

    This low-level function stores solve results from a PyPSA network back
    to the database. Useful when you want full control over the solving process
    but still want to store results in the standard format.

    Args:
        network: Solved PyPSA Network object
        db_path: Path to the database file
        network_id: ID of the network
        scenario_id: Scenario ID for result storage
        solve_metadata: Dictionary with solve metadata (solver_name, solve_time, etc.)

    Returns:
        Dictionary with storage statistics

    Raises:
        DatabaseError: If database operations fail
    """
    with database_context(db_path) as conn:
        result_storage = ResultStorage()
        storage_stats = result_storage.store_results(
            conn, network_id, network, solve_metadata, scenario_id
        )
    return storage_stats
324
+
325
+
326
def _transform_to_comprehensive_format(pyconvexity_result: Dict[str, Any]) -> Dict[str, Any]:
    """
    Transform a pyconvexity result into a comprehensive format carrying both
    the original structure and sidecar-compatible fields.

    Keeps every key of the input result and adds "network_statistics",
    "component_storage_stats", "multi_period" and "years" so existing sidecar
    code keeps working while direct users see the original fields unchanged.

    Args:
        pyconvexity_result: Result from pyconvexity solve operations

    Returns:
        Comprehensive result with both original and sidecar-compatible fields
    """
    try:
        objective_value = pyconvexity_result.get("objective_value")

        # Nested storage statistics; missing sections default to empty dicts.
        storage_stats = pyconvexity_result.get("storage_stats", {})
        component_stats = storage_stats.get("component_stats", {})
        network_stats = storage_stats.get("network_stats", {})

        # Sidecar-compatible network summary. total_cost falls back to the
        # objective value when the storage layer did not report a cost.
        network_statistics = {
            "total_generation_mwh": network_stats.get("total_generation_mwh", 0.0),
            "total_load_mwh": network_stats.get("total_load_mwh", 0.0),
            "unmet_load_mwh": network_stats.get("unmet_load_mwh", 0.0),
            "total_cost": network_stats.get("total_cost", objective_value or 0.0),
        }
        count_keys = ("num_buses", "num_generators", "num_loads", "num_lines", "num_links")
        network_statistics.update({key: network_stats.get(key, 0) for key in count_keys})

        # Per-component storage counters; every counter defaults to 0.
        storage_keys = (
            "stored_bus_results",
            "stored_generator_results",
            "stored_unmet_load_results",
            "stored_load_results",
            "stored_line_results",
            "stored_link_results",
            "stored_storage_unit_results",
            "stored_store_results",
            "skipped_attributes",
            "errors",
        )
        component_storage_stats = {key: component_stats.get(key, 0) for key in storage_keys}

        # Original pyconvexity format first, then the compatibility fields.
        comprehensive_result = dict(pyconvexity_result)
        comprehensive_result["network_statistics"] = network_statistics
        comprehensive_result["component_storage_stats"] = component_storage_stats
        comprehensive_result["multi_period"] = pyconvexity_result.get("multi_period", False)
        comprehensive_result["years"] = pyconvexity_result.get("years", [])
        return comprehensive_result

    except Exception as e:
        logger.error(f"Failed to transform result to comprehensive format: {e}", exc_info=True)
        # Return original result with error info if transformation fails
        return {
            **pyconvexity_result,
            "transformation_error": str(e),
            "network_statistics": {},
            "component_storage_stats": {}
        }