pyconvexity 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (43)
  1. pyconvexity/__init__.py +57 -8
  2. pyconvexity/_version.py +1 -2
  3. pyconvexity/core/__init__.py +0 -2
  4. pyconvexity/core/database.py +158 -0
  5. pyconvexity/core/types.py +105 -18
  6. pyconvexity/data/README.md +101 -0
  7. pyconvexity/data/__init__.py +18 -0
  8. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  11. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  12. pyconvexity/data/loaders/cache.py +212 -0
  13. pyconvexity/data/schema/01_core_schema.sql +12 -12
  14. pyconvexity/data/schema/02_data_metadata.sql +17 -321
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  17. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  18. pyconvexity/data/sources/gem.py +412 -0
  19. pyconvexity/io/__init__.py +32 -0
  20. pyconvexity/io/excel_exporter.py +1012 -0
  21. pyconvexity/io/excel_importer.py +1109 -0
  22. pyconvexity/io/netcdf_exporter.py +192 -0
  23. pyconvexity/io/netcdf_importer.py +1602 -0
  24. pyconvexity/models/__init__.py +7 -0
  25. pyconvexity/models/attributes.py +209 -72
  26. pyconvexity/models/components.py +3 -0
  27. pyconvexity/models/network.py +17 -15
  28. pyconvexity/models/scenarios.py +177 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +24 -0
  31. pyconvexity/solvers/pypsa/api.py +421 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +304 -0
  33. pyconvexity/solvers/pypsa/builder.py +566 -0
  34. pyconvexity/solvers/pypsa/constraints.py +321 -0
  35. pyconvexity/solvers/pypsa/solver.py +1106 -0
  36. pyconvexity/solvers/pypsa/storage.py +1574 -0
  37. pyconvexity/timeseries.py +327 -0
  38. pyconvexity/validation/rules.py +2 -2
  39. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +5 -2
  40. pyconvexity-0.1.4.dist-info/RECORD +46 -0
  41. pyconvexity-0.1.2.dist-info/RECORD +0 -20
  42. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0
pyconvexity/solvers/pypsa/api.py
@@ -0,0 +1,421 @@
+ """
+ High-level API for PyPSA solver integration.
+
+ Provides user-friendly functions for the most common workflows.
+ """
+
+ import logging
+ from typing import Dict, Any, Optional, Callable
+
+ from pyconvexity.core.database import database_context
+ from pyconvexity.solvers.pypsa.builder import NetworkBuilder
+ from pyconvexity.solvers.pypsa.solver import NetworkSolver
+ from pyconvexity.solvers.pypsa.storage import ResultStorage
+ from pyconvexity.solvers.pypsa.constraints import ConstraintApplicator
+
+ logger = logging.getLogger(__name__)
+
+
+ def solve_network(
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int] = None,
+     solver_name: str = "highs",
+     solver_options: Optional[Dict[str, Any]] = None,
+     constraints_dsl: Optional[str] = None,
+     discount_rate: Optional[float] = None,
+     progress_callback: Optional[Callable[[int, str], None]] = None,
+     return_detailed_results: bool = True
+ ) -> Dict[str, Any]:
+     """
+     Complete solve workflow: build PyPSA network from database, solve, store results.
+
+     This is the main high-level function that most users should use. It handles
+     the complete workflow of loading data from database, building a PyPSA network,
+     solving it, and storing results back to the database.
+
+     Args:
+         db_path: Path to the database file
+         network_id: ID of the network to solve
+         scenario_id: Optional scenario ID (uses master scenario if None)
+         solver_name: Solver to use (default: "highs")
+         solver_options: Optional solver-specific options
+         constraints_dsl: Optional DSL constraints to apply
+         discount_rate: Optional discount rate for multi-period optimization
+         progress_callback: Optional callback for progress updates (progress: int, message: str)
+         return_detailed_results: If True, return comprehensive results; if False, return simple status
+
+     Returns:
+         Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise
+
+     Raises:
+         DatabaseError: If database operations fail
+         ValidationError: If network data is invalid
+         ImportError: If PyPSA is not available
+     """
+     if progress_callback:
+         progress_callback(0, "Starting network solve...")
+
+     with database_context(db_path) as conn:
+         # Load network configuration with scenario awareness (matches old code)
+         from pyconvexity.models import get_network_config
+         network_config = get_network_config(conn, network_id, scenario_id)
+         if progress_callback:
+             progress_callback(8, "Loaded network configuration")
+
+         # Use configuration values with parameter overrides
+         effective_discount_rate = discount_rate if discount_rate is not None else network_config.get('discount_rate', 0.05)
+
+         # Build network
+         if progress_callback:
+             progress_callback(10, "Building PyPSA network...")
+
+         builder = NetworkBuilder()
+         network = builder.build_network(conn, network_id, scenario_id, progress_callback)
+
+         if progress_callback:
+             progress_callback(50, f"Network built: {len(network.buses)} buses, {len(network.generators)} generators")
+
+         # Apply constraints
+         constraint_applicator = ConstraintApplicator()
+         if constraints_dsl or progress_callback:
+             if progress_callback:
+                 progress_callback(60, "Applying constraints...")
+         constraint_applicator.apply_constraints(conn, network_id, network, scenario_id, constraints_dsl)
+
+         # Solve network
+         if progress_callback:
+             progress_callback(70, f"Solving with {solver_name}...")
+
+         solver = NetworkSolver()
+         solve_result = solver.solve_network(
+             network,
+             solver_name=solver_name,
+             solver_options=solver_options,
+             discount_rate=effective_discount_rate,  # Use effective discount rate from config
+             conn=conn,
+             network_id=network_id,
+             scenario_id=scenario_id,
+             constraint_applicator=constraint_applicator
+         )
+
+         if progress_callback:
+             progress_callback(85, "Storing results...")
+
+         # Store results - ALWAYS store results regardless of return_detailed_results flag
+         storage = ResultStorage()
+         storage_result = storage.store_results(
+             conn, network_id, network, solve_result, scenario_id
+         )
+
+         if progress_callback:
+             progress_callback(95, "Solve completed successfully")
+
+         # Optimize database after successful solve (if solve was successful)
+         if solve_result.get("success", False):
+             try:
+                 if progress_callback:
+                     progress_callback(98, "Optimizing database...")
+
+                 from pyconvexity.core.database import should_optimize_database, optimize_database
+
+                 # Only optimize if there's significant free space (>5% threshold for post-solve)
+                 if should_optimize_database(conn, free_space_threshold_percent=5.0):
+                     logger.info("Running database optimization after successful solve")
+                     optimization_result = optimize_database(conn)
+                     logger.info(f"Database optimization completed: {optimization_result['space_reclaimed']:,} bytes reclaimed")
+                 else:
+                     logger.debug("Skipping database optimization - insufficient free space")
+
+             except Exception as e:
+                 # Don't fail the solve if optimization fails
+                 logger.warning(f"Database optimization failed (non-critical): {e}")
+
+         if progress_callback:
+             progress_callback(100, "Complete")
+
+         # Return simple status if requested (for sidecar/async usage)
+         # Results are now stored in database regardless of this flag
+         if not return_detailed_results:
+             return {
+                 "success": solve_result.get("success", False),
+                 "message": "Solve completed successfully" if solve_result.get("success") else "Solve failed",
+                 "error": solve_result.get("error") if not solve_result.get("success") else None,
+                 "network_id": network_id,
+                 "scenario_id": scenario_id
+             }
+
+         # Combine results in comprehensive format for detailed analysis
+         comprehensive_result = {
+             **solve_result,
+             "storage_stats": storage_result,
+             "network_id": network_id,
+             "scenario_id": scenario_id
+         }
+
+         # Transform to include sidecar-compatible format
+         return _transform_to_comprehensive_format(comprehensive_result)
+
+
+ def build_pypsa_network(
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int] = None,
+     progress_callback: Optional[Callable[[int, str], None]] = None
+ ) -> 'pypsa.Network':
+     """
+     Build PyPSA network object from database.
+
+     This function loads all network data from the database and constructs
+     a PyPSA Network object ready for solving or analysis. Useful when you
+     want to inspect or modify the network before solving.
+
+     Args:
+         db_path: Path to the database file
+         network_id: ID of the network to build
+         scenario_id: Optional scenario ID (uses master scenario if None)
+         progress_callback: Optional callback for progress updates
+
+     Returns:
+         PyPSA Network object ready for solving
+
+     Raises:
+         DatabaseError: If database operations fail
+         ValidationError: If network data is invalid
+         ImportError: If PyPSA is not available
+     """
+     with database_context(db_path) as conn:
+         builder = NetworkBuilder()
+         return builder.build_network(conn, network_id, scenario_id, progress_callback)
+
+
+ def solve_pypsa_network(
+     network: 'pypsa.Network',
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int] = None,
+     solver_name: str = "highs",
+     solver_options: Optional[Dict[str, Any]] = None,
+     store_results: bool = True,
+     progress_callback: Optional[Callable[[int, str], None]] = None
+ ) -> Dict[str, Any]:
+     """
+     Solve PyPSA network and optionally store results back to database.
+
+     This function takes an existing PyPSA network (e.g., from build_pypsa_network),
+     solves it, and optionally stores the results back to the database.
+
+     Args:
+         network: PyPSA Network object to solve
+         db_path: Path to the database file (needed for result storage)
+         network_id: ID of the network (for result storage)
+         scenario_id: Optional scenario ID
+         solver_name: Solver to use (default: "highs")
+         solver_options: Optional solver-specific options
+         store_results: Whether to store results back to database (default: True)
+         progress_callback: Optional callback for progress updates
+
+     Returns:
+         Dictionary with solve results and statistics
+
+     Raises:
+         DatabaseError: If database operations fail (when store_results=True)
+         ImportError: If PyPSA is not available
+     """
+     if progress_callback:
+         progress_callback(0, f"Solving network with {solver_name}...")
+
+     # Solve network
+     solver = NetworkSolver()
+     solve_result = solver.solve_network(
+         network,
+         solver_name=solver_name,
+         solver_options=solver_options
+     )
+
+     if progress_callback:
+         progress_callback(70, "Solve completed")
+
+     # Store results if requested
+     if store_results:
+         if progress_callback:
+             progress_callback(80, "Storing results...")
+
+         with database_context(db_path) as conn:
+             storage = ResultStorage()
+             storage_result = storage.store_results(
+                 conn, network_id, network, solve_result, scenario_id
+             )
+             solve_result["storage_stats"] = storage_result
+
+     if progress_callback:
+         progress_callback(100, "Complete")
+
+     return solve_result
+
+
+ def load_network_components(
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int] = None
+ ) -> Dict[str, Any]:
+     """
+     Load all network components and attributes as structured data.
+
+     This low-level function loads network data without building a PyPSA network.
+     Useful for analysis, validation, or building custom network representations.
+
+     Args:
+         db_path: Path to the database file
+         network_id: ID of the network to load
+         scenario_id: Optional scenario ID
+
+     Returns:
+         Dictionary containing all network components and metadata
+
+     Raises:
+         DatabaseError: If database operations fail
+     """
+     with database_context(db_path) as conn:
+         builder = NetworkBuilder()
+         return builder.load_network_data(conn, network_id, scenario_id)
+
+
+ def apply_constraints(
+     network: 'pypsa.Network',
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int] = None,
+     constraints_dsl: Optional[str] = None
+ ) -> None:
+     """
+     Apply custom constraints to PyPSA network.
+
+     This function applies database-stored constraints and optional DSL constraints
+     to an existing PyPSA network. Modifies the network in-place.
+
+     Args:
+         network: PyPSA Network object to modify
+         db_path: Path to the database file
+         network_id: ID of the network
+         scenario_id: Optional scenario ID
+         constraints_dsl: Optional DSL constraints string
+
+     Raises:
+         DatabaseError: If database operations fail
+         ValidationError: If constraints are invalid
+     """
+     with database_context(db_path) as conn:
+         constraint_applicator = ConstraintApplicator()
+         constraint_applicator.apply_constraints(
+             conn, network_id, network, scenario_id, constraints_dsl
+         )
+
+
+ def store_solve_results(
+     network: 'pypsa.Network',
+     db_path: str,
+     network_id: int,
+     scenario_id: Optional[int],
+     solve_metadata: Dict[str, Any]
+ ) -> Dict[str, Any]:
+     """
+     Store PyPSA solve results back to database.
+
+     This low-level function stores solve results from a PyPSA network back
+     to the database. Useful when you want full control over the solving process
+     but still want to store results in the standard format.
+
+     Args:
+         network: Solved PyPSA Network object
+         db_path: Path to the database file
+         network_id: ID of the network
+         scenario_id: Scenario ID for result storage
+         solve_metadata: Dictionary with solve metadata (solver_name, solve_time, etc.)
+
+     Returns:
+         Dictionary with storage statistics
+
+     Raises:
+         DatabaseError: If database operations fail
+     """
+     with database_context(db_path) as conn:
+         storage = ResultStorage()
+         return storage.store_results(
+             conn, network_id, network, solve_metadata, scenario_id
+         )
+
+
+ def _transform_to_comprehensive_format(pyconvexity_result: Dict[str, Any]) -> Dict[str, Any]:
+     """
+     Transform pyconvexity result to comprehensive format that includes both
+     the original structure and sidecar-compatible fields.
+
+     This ensures compatibility with existing sidecar code while providing
+     a clean API for direct pyconvexity users.
+
+     Args:
+         pyconvexity_result: Result from pyconvexity solve operations
+
+     Returns:
+         Comprehensive result with both original and sidecar-compatible fields
+     """
+     try:
+         # Extract basic solve information
+         success = pyconvexity_result.get("success", False)
+         status = pyconvexity_result.get("status", "unknown")
+         solve_time = pyconvexity_result.get("solve_time", 0.0)
+         objective_value = pyconvexity_result.get("objective_value")
+
+         # Extract storage stats
+         storage_stats = pyconvexity_result.get("storage_stats", {})
+         component_stats = storage_stats.get("component_stats", {})
+         network_stats = storage_stats.get("network_stats", {})
+
+         # Create comprehensive result that includes both formats
+         comprehensive_result = {
+             # Original pyconvexity format (for direct users)
+             **pyconvexity_result,
+
+             # Sidecar-compatible format (for backward compatibility)
+             "network_statistics": {
+                 "total_generation_mwh": network_stats.get("total_generation_mwh", 0.0),
+                 "total_load_mwh": network_stats.get("total_load_mwh", 0.0),
+                 "unmet_load_mwh": network_stats.get("unmet_load_mwh", 0.0),
+                 "total_cost": network_stats.get("total_cost", objective_value or 0.0),
+                 "num_buses": network_stats.get("num_buses", 0),
+                 "num_generators": network_stats.get("num_generators", 0),
+                 "num_loads": network_stats.get("num_loads", 0),
+                 "num_lines": network_stats.get("num_lines", 0),
+                 "num_links": network_stats.get("num_links", 0)
+             },
+
+             "component_storage_stats": {
+                 "stored_bus_results": component_stats.get("stored_bus_results", 0),
+                 "stored_generator_results": component_stats.get("stored_generator_results", 0),
+                 "stored_unmet_load_results": component_stats.get("stored_unmet_load_results", 0),
+                 "stored_load_results": component_stats.get("stored_load_results", 0),
+                 "stored_line_results": component_stats.get("stored_line_results", 0),
+                 "stored_link_results": component_stats.get("stored_link_results", 0),
+                 "stored_storage_unit_results": component_stats.get("stored_storage_unit_results", 0),
+                 "stored_store_results": component_stats.get("stored_store_results", 0),
+                 "skipped_attributes": component_stats.get("skipped_attributes", 0),
+                 "errors": component_stats.get("errors", 0)
+             },
+
+             # Additional compatibility fields
+             "multi_period": pyconvexity_result.get("multi_period", False),
+             "years": pyconvexity_result.get("years", [])
+         }
+
+         return comprehensive_result
+
+     except Exception as e:
+         logger.error(f"Failed to transform result to comprehensive format: {e}", exc_info=True)
+         # Return original result with error info if transformation fails
+         return {
+             **pyconvexity_result,
+             "transformation_error": str(e),
+             "network_statistics": {},
+             "component_storage_stats": {}
+         }
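
For orientation, the sketch below shows how the high-level entry points added in this file could be called once 0.1.4 is installed. It is a minimal illustration based only on the signatures and docstrings in the diff above; the import path mirrors the module location shown here, while the database path, network ID, and progress callback are placeholder assumptions rather than values taken from the package.

# Minimal usage sketch; "model.db" and network_id=1 are illustrative placeholders.
from pyconvexity.solvers.pypsa.api import (
    solve_network,
    build_pypsa_network,
    solve_pypsa_network,
)

def report(progress: int, message: str) -> None:
    # Progress callback signature per the docstrings: (progress: int, message: str)
    print(f"[{progress:3d}%] {message}")

# One-call workflow: build the network from the database, solve with HiGHS,
# and store the results back to the database.
result = solve_network(
    db_path="model.db",
    network_id=1,
    scenario_id=None,          # None falls back to the master scenario
    solver_name="highs",
    progress_callback=report,
)
print(result.get("success"), result.get("objective_value"))

# Two-step alternative: build first, inspect or modify the PyPSA network,
# then solve and store separately.
network = build_pypsa_network("model.db", network_id=1)
result = solve_pypsa_network(network, "model.db", network_id=1, store_results=True)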