pyconvexity-0.1.2-py3-none-any.whl → pyconvexity-0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pyconvexity might be problematic.

Files changed (43)
  1. pyconvexity/__init__.py +57 -8
  2. pyconvexity/_version.py +1 -2
  3. pyconvexity/core/__init__.py +0 -2
  4. pyconvexity/core/database.py +158 -0
  5. pyconvexity/core/types.py +105 -18
  6. pyconvexity/data/README.md +101 -0
  7. pyconvexity/data/__init__.py +18 -0
  8. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  11. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  12. pyconvexity/data/loaders/cache.py +212 -0
  13. pyconvexity/data/schema/01_core_schema.sql +12 -12
  14. pyconvexity/data/schema/02_data_metadata.sql +17 -321
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  17. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  18. pyconvexity/data/sources/gem.py +412 -0
  19. pyconvexity/io/__init__.py +32 -0
  20. pyconvexity/io/excel_exporter.py +1012 -0
  21. pyconvexity/io/excel_importer.py +1109 -0
  22. pyconvexity/io/netcdf_exporter.py +192 -0
  23. pyconvexity/io/netcdf_importer.py +1602 -0
  24. pyconvexity/models/__init__.py +7 -0
  25. pyconvexity/models/attributes.py +209 -72
  26. pyconvexity/models/components.py +3 -0
  27. pyconvexity/models/network.py +17 -15
  28. pyconvexity/models/scenarios.py +177 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +24 -0
  31. pyconvexity/solvers/pypsa/api.py +421 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +304 -0
  33. pyconvexity/solvers/pypsa/builder.py +566 -0
  34. pyconvexity/solvers/pypsa/constraints.py +321 -0
  35. pyconvexity/solvers/pypsa/solver.py +1106 -0
  36. pyconvexity/solvers/pypsa/storage.py +1574 -0
  37. pyconvexity/timeseries.py +327 -0
  38. pyconvexity/validation/rules.py +2 -2
  39. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +5 -2
  40. pyconvexity-0.1.4.dist-info/RECORD +46 -0
  41. pyconvexity-0.1.2.dist-info/RECORD +0 -20
  42. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0
@@ -20,6 +20,10 @@ from pyconvexity.models.network import (
     get_component_counts, get_master_scenario_id, resolve_scenario_id
 )
 
+from pyconvexity.models.scenarios import (
+    create_scenario, list_scenarios, get_scenario, delete_scenario
+)
+
 __all__ = [
     # Component operations
     "get_component_type", "get_component", "list_components_by_type",
@@ -33,4 +37,7 @@ __all__ = [
     "create_network", "get_network_info", "get_network_time_periods", "list_networks",
     "create_carrier", "list_carriers", "get_network_config", "set_network_config",
     "get_component_counts", "get_master_scenario_id", "resolve_scenario_id",
+
+    # Scenario operations
+    "create_scenario", "list_scenarios", "get_scenario", "delete_scenario",
 ]
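The four scenario helpers are re-exported at the package root, so both import paths below resolve to the same functions (a small sketch):

```python
# Both spellings import the same function after this change
from pyconvexity import create_scenario
from pyconvexity.models.scenarios import create_scenario as create_scenario_direct

assert create_scenario is create_scenario_direct
```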
@@ -8,14 +8,14 @@ with support for both static values and timeseries data.
 import sqlite3
 import json
 import logging
-from typing import Dict, Any, Optional, List
+from typing import Dict, Any, Optional, List, Union
 import pandas as pd
 from io import BytesIO
 import pyarrow as pa
 import pyarrow.parquet as pq
 
 from pyconvexity.core.types import (
-    StaticValue, TimeseriesPoint, AttributeValue, TimePeriod
+    StaticValue, Timeseries, TimeseriesMetadata, AttributeValue, TimePeriod
 )
 from pyconvexity.core.errors import (
     ComponentNotFound, AttributeNotFound, ValidationError, TimeseriesError
@@ -86,7 +86,7 @@ def set_timeseries_attribute(
     conn: sqlite3.Connection,
     component_id: int,
     attribute_name: str,
-    timeseries: List[TimeseriesPoint],
+    timeseries: Union[Timeseries, List[float]],
     scenario_id: Optional[int] = None
 ) -> None:
     """
@@ -96,7 +96,7 @@ def set_timeseries_attribute(
         conn: Database connection
        component_id: Component ID
         attribute_name: Name of the attribute
-        timeseries: List of timeseries points
+        timeseries: Timeseries object or list of float values
         scenario_id: Scenario ID (uses master scenario if None)
 
     Raises:
@@ -116,25 +116,34 @@ def set_timeseries_attribute(
     if not rule.allows_timeseries:
         raise ValidationError(f"Attribute '{attribute_name}' for {component_type} does not allow timeseries values")
 
-    # 4. Serialize timeseries to Parquet
-    parquet_data = serialize_timeseries_to_parquet(timeseries)
+    # 4. Convert input to values array
+    if isinstance(timeseries, Timeseries):
+        values = timeseries.values
+    elif isinstance(timeseries, list) and all(isinstance(v, (int, float)) for v in timeseries):
+        # Direct values array
+        values = [float(v) for v in timeseries]
+    else:
+        raise ValueError("timeseries must be Timeseries or List[float]")
 
-    # 5. Resolve scenario ID (get master scenario if None)
+    # 5. Serialize to binary format (ultra-fast, matches Rust exactly)
+    binary_data = serialize_values_to_binary(values)
+
+    # 6. Resolve scenario ID (get master scenario if None)
     resolved_scenario_id = resolve_scenario_id(conn, component_id, scenario_id)
 
-    # 6. Remove any existing attribute for this scenario
+    # 7. Remove any existing attribute for this scenario
     cursor = conn.cursor()
     cursor.execute(
         "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
         (component_id, attribute_name, resolved_scenario_id)
     )
 
-    # 7. Insert new timeseries attribute
+    # 8. Insert new timeseries attribute
     cursor.execute(
         """INSERT INTO component_attributes
            (component_id, attribute_name, scenario_id, storage_type, timeseries_data, data_type, unit, is_input)
            VALUES (?, ?, ?, 'timeseries', ?, ?, ?, ?)""",
-        (component_id, attribute_name, resolved_scenario_id, parquet_data,
+        (component_id, attribute_name, resolved_scenario_id, binary_data,
          rule.data_type, rule.unit, rule.is_input)
     )
 
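The practical effect of this hunk: callers can pass a plain list of floats instead of building `TimeseriesPoint` objects. A minimal sketch of the new call shape (the database path, component ID, and attribute name are hypothetical; any pyconvexity-created database with a matching component works):

```python
import sqlite3
from pyconvexity.models.attributes import set_timeseries_attribute, get_attribute

conn = sqlite3.connect("energy_model.db")  # hypothetical pyconvexity database

# A bare list of floats is accepted; values are coerced with float() and
# stored as a packed little-endian Float32 array in component_attributes
hourly_load = [450.0, 432.5, 418.0, 401.2]  # one value per network time period
set_timeseries_attribute(conn, component_id=42, attribute_name="p_set",
                         timeseries=hourly_load)
conn.commit()

# Reading it back yields the new array-based Timeseries wrapper
attr = get_attribute(conn, 42, "p_set")
print(attr.as_timeseries().values)
```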
@@ -239,22 +248,19 @@ def get_attribute(
         if not timeseries_data:
             raise ValidationError("Timeseries attribute missing data")
 
-        # Get network_id from component to load time periods
-        cursor = conn.execute("SELECT network_id FROM components WHERE id = ?", (component_id,))
-        network_row = cursor.fetchone()
+        # Deserialize from binary format to new efficient Timeseries format
+        values = deserialize_values_from_binary(timeseries_data)
 
-        network_time_periods = None
-        if network_row:
-            network_id = network_row[0]
-            try:
-                from pyconvexity.models.network import get_network_time_periods
-                network_time_periods = get_network_time_periods(conn, network_id)
-            except Exception as e:
-                logger.warning(f"Failed to load network time periods for timestamp computation: {e}")
+        timeseries = Timeseries(
+            values=values,
+            length=len(values),
+            start_index=0,
+            data_type=data_type,
+            unit=unit,
+            is_input=True  # Default, could be enhanced with actual is_input from DB
+        )
 
-        # Deserialize from Parquet with proper timestamp computation
-        timeseries_points = deserialize_timeseries_from_parquet(timeseries_data, network_time_periods)
-        return AttributeValue.timeseries(timeseries_points)
+        return AttributeValue.timeseries(timeseries)
 
     else:
         raise ValidationError(f"Unknown storage type: {storage_type}")
@@ -322,64 +328,195 @@ def get_master_scenario_id(conn: sqlite3.Connection, network_id: int) -> int:
     return result[0]
 
 
-# Timeseries serialization functions
+# ============================================================================
+# EFFICIENT TIMESERIES SERIALIZATION - MATCHES RUST IMPLEMENTATION EXACTLY
+# ============================================================================
 
-def serialize_timeseries_to_parquet(timeseries: List[TimeseriesPoint]) -> bytes:
-    """Serialize timeseries to Parquet format - EXACT MATCH WITH RUST SCHEMA."""
-    # Define the exact schema to match Rust expectations
-    schema = pa.schema([
-        ('period_index', pa.int32()),
-        ('value', pa.float64())
-    ])
+def serialize_values_to_binary(values: List[float]) -> bytes:
+    """
+    Serialize f32 values to binary format - EXACT MATCH WITH RUST.
 
-    if not timeseries:
-        # Return empty parquet file with correct schema
-        empty_period_array = pa.array([], type=pa.int32())
-        empty_value_array = pa.array([], type=pa.float64())
-        table = pa.table([empty_period_array, empty_value_array], schema=schema)
-    else:
-        # Create PyArrow table with EXPLICIT schema to ensure data types match Rust
-        period_indices = [p.period_index for p in timeseries]
-        values = [p.value for p in timeseries]
-
-        # Create arrays with explicit types to ensure Int32 for period_index
-        period_array = pa.array(period_indices, type=pa.int32())
-        value_array = pa.array(values, type=pa.float64())
-
-        table = pa.table([period_array, value_array], schema=schema)
+    Ultra-fast binary format: just raw Float32 array, little-endian.
+    """
+    if not values:
+        return b''
+
+    import struct
+    buffer = bytearray(len(values) * 4)  # 4 bytes per Float32
 
-    # Serialize to Parquet bytes with SNAPPY compression (match Rust)
-    buffer = BytesIO()
-    pq.write_table(table, buffer, compression='snappy')
-    return buffer.getvalue()
+    for i, value in enumerate(values):
+        # Pack as little-endian Float32 to match Rust exactly
+        struct.pack_into('<f', buffer, i * 4, float(value))
+
+    return bytes(buffer)
 
 
-def deserialize_timeseries_from_parquet(data: bytes, network_time_periods: Optional[List[TimePeriod]] = None) -> List[TimeseriesPoint]:
-    """Deserialize timeseries from Parquet format - EXACT MATCH WITH RUST."""
+def deserialize_values_from_binary(data: bytes) -> List[float]:
+    """
+    Deserialize f32 values from binary format - EXACT MATCH WITH RUST.
+
+    Ultra-fast deserialization: read raw Float32 values only.
+    """
     if not data:
         return []
 
-    buffer = BytesIO(data)
-    table = pq.read_table(buffer)
+    # Ensure data length is multiple of 4 (Float32 size)
+    if len(data) % 4 != 0:
+        raise ValueError("Invalid binary data length - must be multiple of 4 bytes")
+
+    import struct
+    values = []
+
+    # Ultra-fast deserialization: read raw Float32 values
+    for i in range(0, len(data), 4):
+        value = struct.unpack('<f', data[i:i+4])[0]  # Little-endian Float32
+        values.append(value)
+
+    return values
+
+
+def get_timeseries_length_from_binary(data: bytes) -> int:
+    """Get the length of a timeseries without deserializing the full data."""
+    if not data:
+        return 0
+
+    # Ultra-fast: just divide by 4 bytes per Float32
+    if len(data) % 4 != 0:
+        raise ValueError("Invalid binary data length - must be multiple of 4 bytes")
+
+    return len(data) // 4
+
 
-    # Convert to pandas for easier handling
-    df = table.to_pandas()
+# ============================================================================
+# UNIFIED TIMESERIES FUNCTIONS - MATCH RUST API
+# ============================================================================
 
-    points = []
-    for _, row in df.iterrows():
-        period_index = int(row['period_index'])
+def get_timeseries(
+    conn: sqlite3.Connection,
+    component_id: int,
+    attribute_name: str,
+    scenario_id: Optional[int] = None,
+    start_index: Optional[int] = None,
+    end_index: Optional[int] = None,
+    max_points: Optional[int] = None
+) -> Timeseries:
+    """
+    Get timeseries data with unified interface matching Rust implementation.
+
+    Args:
+        conn: Database connection
+        component_id: Component ID
+        attribute_name: Name of the attribute
+        scenario_id: Scenario ID (uses master scenario if None)
+        start_index: Start index for range queries
+        end_index: End index for range queries
+        max_points: Maximum number of points (for sampling)
 
-        # Compute timestamp from period_index using network time periods if available
-        if network_time_periods and 0 <= period_index < len(network_time_periods):
-            timestamp = network_time_periods[period_index].timestamp
-        else:
-            # Fallback: use period_index as timestamp (matching previous behavior for compatibility)
-            timestamp = period_index
+    Returns:
+        Timeseries object with efficient array-based data
 
-        points.append(TimeseriesPoint(
-            timestamp=timestamp,
-            value=float(row['value']),
-            period_index=period_index
-        ))
+    Raises:
+        ComponentNotFound: If component doesn't exist
+        AttributeNotFound: If attribute doesn't exist
+    """
+    # Get the attribute value
+    attr_value = get_attribute(conn, component_id, attribute_name, scenario_id)
+
+    if not attr_value.is_timeseries():
+        raise ValueError(f"Attribute '{attribute_name}' is not a timeseries")
+
+    timeseries = attr_value.as_timeseries()
+    if not timeseries:
+        raise ValueError("Failed to get timeseries data")
+
+    # Apply range filtering if requested
+    if start_index is not None and end_index is not None:
+        timeseries = timeseries.slice(start_index, end_index)
+
+    # Apply sampling if requested
+    if max_points is not None:
+        timeseries = timeseries.sample(max_points)
+
+    return timeseries
+
 
-    return points
+def get_timeseries_metadata(
+    conn: sqlite3.Connection,
+    component_id: int,
+    attribute_name: str,
+    scenario_id: Optional[int] = None
+) -> TimeseriesMetadata:
+    """
+    Get timeseries metadata without loading the full data.
+
+    Args:
+        conn: Database connection
+        component_id: Component ID
+        attribute_name: Name of the attribute
+        scenario_id: Scenario ID (uses master scenario if None)
+
+    Returns:
+        TimeseriesMetadata with length and type information
+    """
+    # Get basic attribute info without loading full data
+    cursor = conn.cursor()
+
+    # Get network_id from component
+    cursor.execute("SELECT network_id FROM components WHERE id = ?", (component_id,))
+    result = cursor.fetchone()
+    if not result:
+        raise ComponentNotFound(component_id)
+
+    network_id = result[0]
+
+    # Get master scenario ID
+    master_scenario_id = get_master_scenario_id(conn, network_id)
+    current_scenario_id = scenario_id if scenario_id is not None else master_scenario_id
+
+    # Get timeseries metadata
+    cursor.execute(
+        """SELECT timeseries_data, data_type, unit, is_input
+           FROM component_attributes
+           WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id = ?""",
+        (component_id, attribute_name, current_scenario_id)
+    )
+    result = cursor.fetchone()
+
+    # Try fallback to master scenario if not found
+    if not result and current_scenario_id != master_scenario_id:
+        cursor.execute(
+            """SELECT timeseries_data, data_type, unit, is_input
+               FROM component_attributes
+               WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id = ?""",
+            (component_id, attribute_name, master_scenario_id)
+        )
+        result = cursor.fetchone()
+
+    if not result:
+        raise AttributeNotFound(component_id, attribute_name)
+
+    timeseries_data, data_type, unit, is_input = result
+
+    # Get length without full deserialization
+    length = get_timeseries_length_from_binary(timeseries_data)
+
+    # Get time range from network time periods
+    try:
+        from pyconvexity.models.network import get_network_time_periods
+        time_periods = get_network_time_periods(conn, network_id)
+        start_time = time_periods[0].timestamp if time_periods else 0
+        end_time = time_periods[-1].timestamp if time_periods else 0
+    except Exception:
+        start_time = 0
+        end_time = length - 1
+
+    return TimeseriesMetadata(
+        length=length,
+        start_time=start_time,
+        end_time=end_time,
+        start_index=0,
+        end_index=length,
+        data_type=data_type,
+        unit=unit,
+        is_input=is_input
+    )
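Because the on-disk format is now just a packed little-endian f32 array, the round trip can be checked with nothing but `struct`. A small sanity check of the wire format (not part of the package), illustrating why lengths must be a multiple of 4 bytes and where Float32 precision is lost:

```python
import struct

values = [1.0, 2.5, -3.25, 1e6]

# Same wire format as serialize_values_to_binary: one '<f' per value
blob = b''.join(struct.pack('<f', v) for v in values)
assert len(blob) == 4 * len(values)  # 4 bytes per Float32
assert len(blob) % 4 == 0            # invariant the deserializer enforces

# Decode in one unpack call instead of a loop - equivalent result
decoded = list(struct.unpack(f'<{len(values)}f', blob))
assert decoded == values  # these sample values are exactly representable in f32

# The precision trade-off: Float32 keeps ~7 significant digits
lossy = struct.unpack('<f', struct.pack('<f', 0.1))[0]
print(lossy)  # 0.10000000149011612
```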
@@ -451,6 +451,9 @@ def ensure_unmet_load_for_bus(
     from pyconvexity.models.attributes import set_static_attribute
     set_static_attribute(conn, unmet_load_id, "marginal_cost", StaticValue(1e6))
     set_static_attribute(conn, unmet_load_id, "p_nom", StaticValue(1e6))
+    set_static_attribute(conn, unmet_load_id, "p_max_pu", StaticValue(1.0))  # Can run at full capacity
+    set_static_attribute(conn, unmet_load_id, "p_min_pu", StaticValue(0.0))  # Can be turned off
+    set_static_attribute(conn, unmet_load_id, "sign", StaticValue(1.0))  # Positive power sign (generation)
     set_static_attribute(conn, unmet_load_id, "active", StaticValue(unmet_load_active))
 
 
@@ -9,7 +9,7 @@ import sqlite3
 import json
 import logging
 from typing import Dict, Any, Optional, List
-from datetime import datetime
+from datetime import datetime, timezone
 
 from pyconvexity.core.types import (
     CreateNetworkRequest, TimePeriod, Network
@@ -105,7 +105,7 @@ def get_network_time_periods(
     network_id: int
 ) -> List[TimePeriod]:
     """
-    Get network time periods.
+    Get network time periods using optimized storage.
 
     Args:
         conn: Database connection
@@ -115,28 +115,30 @@ def get_network_time_periods(
         List of TimePeriod objects ordered by period_index
     """
     cursor = conn.execute("""
-        SELECT timestamp, period_index
+        SELECT period_count, start_timestamp, interval_seconds
         FROM network_time_periods
-        WHERE network_id = ?
-        ORDER BY period_index
+        WHERE network_id = ?
     """, (network_id,))
 
+    row = cursor.fetchone()
+    if not row:
+        return []  # No time periods defined
+
+    period_count, start_timestamp, interval_seconds = row
+
+    # Generate all time periods computationally
     periods = []
-    for row in cursor.fetchall():
-        timestamp_str, period_index = row
+    for period_index in range(period_count):
+        timestamp = start_timestamp + (period_index * interval_seconds)
 
-        # Convert datetime string to Unix timestamp
-        try:
-            dt = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
-            timestamp = int(dt.timestamp())
-        except ValueError:
-            # Fallback: use period_index as timestamp
-            timestamp = period_index
+        # Format timestamp as string for compatibility - ALWAYS use UTC to avoid DST duplicates
+        dt = datetime.fromtimestamp(timestamp, tz=timezone.utc)
+        formatted_time = dt.strftime("%Y-%m-%d %H:%M:%S")
 
         periods.append(TimePeriod(
             timestamp=timestamp,
             period_index=period_index,
-            formatted_time=timestamp_str
+            formatted_time=formatted_time
         ))
 
     return periods
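The storage change replaces one row per period with a single (period_count, start_timestamp, interval_seconds) triple, so timestamps are now derived arithmetically. A quick sketch of that arithmetic for an hourly year (the concrete values are illustrative):

```python
from datetime import datetime, timezone

period_count = 8760          # hourly periods for one non-leap year
start_timestamp = 1672531200  # 2023-01-01 00:00:00 UTC
interval_seconds = 3600

# Same derivation the new get_network_time_periods uses
timestamps = [start_timestamp + i * interval_seconds for i in range(period_count)]

# UTC formatting avoids the duplicated wall-clock hour at DST transitions
first = datetime.fromtimestamp(timestamps[0], tz=timezone.utc)
last = datetime.fromtimestamp(timestamps[-1], tz=timezone.utc)
print(first.strftime("%Y-%m-%d %H:%M:%S"))  # 2023-01-01 00:00:00
print(last.strftime("%Y-%m-%d %H:%M:%S"))   # 2023-12-31 23:00:00
```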
@@ -0,0 +1,177 @@
+"""
+Scenario management operations for PyConvexity.
+
+Provides operations for creating and managing scenarios within networks.
+"""
+
+import sqlite3
+import logging
+from typing import List, Optional
+from datetime import datetime
+
+from pyconvexity.core.errors import ValidationError, DatabaseError
+
+logger = logging.getLogger(__name__)
+
+
+def create_scenario(
+    conn: sqlite3.Connection,
+    network_id: int,
+    name: str,
+    description: Optional[str] = None,
+    is_master: bool = False,
+) -> int:
+    """
+    Create a new scenario for a network.
+
+    Args:
+        conn: Database connection
+        network_id: ID of the network
+        name: Name of the scenario
+        description: Optional description
+        is_master: Whether this is a master scenario (default False)
+
+    Returns:
+        ID of the newly created scenario
+
+    Raises:
+        ValidationError: If network doesn't exist or scenario name conflicts
+        DatabaseError: If creation fails
+    """
+
+    # Validate network exists
+    cursor = conn.execute("SELECT COUNT(*) FROM networks WHERE id = ?", (network_id,))
+    if cursor.fetchone()[0] == 0:
+        raise ValidationError(f"Network with ID {network_id} not found")
+
+    # Check for name conflicts within the network
+    cursor = conn.execute(
+        "SELECT COUNT(*) FROM scenarios WHERE network_id = ? AND name = ?",
+        (network_id, name)
+    )
+    if cursor.fetchone()[0] > 0:
+        raise ValidationError(f"Scenario with name '{name}' already exists in network {network_id}")
+
+    # Insert the scenario (database triggers will handle master scenario uniqueness)
+    cursor = conn.execute(
+        "INSERT INTO scenarios (network_id, name, description, is_master, created_at) "
+        "VALUES (?, ?, ?, ?, datetime('now'))",
+        (network_id, name, description, is_master)
+    )
+
+    scenario_id = cursor.lastrowid
+    if not scenario_id:
+        raise DatabaseError("Failed to create scenario")
+
+    logger.info(f"Created scenario '{name}' (ID: {scenario_id}) for network {network_id}")
+    return scenario_id
+
+
+def list_scenarios(conn: sqlite3.Connection, network_id: int) -> List[dict]:
+    """
+    List all scenarios for a network.
+
+    Args:
+        conn: Database connection
+        network_id: ID of the network
+
+    Returns:
+        List of scenario dictionaries with keys: id, network_id, name, description, is_master, created_at
+
+    Raises:
+        DatabaseError: If query fails
+    """
+
+    cursor = conn.execute(
+        "SELECT id, network_id, name, description, is_master, created_at "
+        "FROM scenarios "
+        "WHERE network_id = ? "
+        "ORDER BY is_master DESC, created_at ASC",
+        (network_id,)
+    )
+
+    scenarios = []
+    for row in cursor.fetchall():
+        scenarios.append({
+            'id': row[0],
+            'network_id': row[1],
+            'name': row[2],
+            'description': row[3],
+            'is_master': bool(row[4]),
+            'created_at': row[5],
+        })
+
+    logger.debug(f"Found {len(scenarios)} scenarios for network {network_id}")
+    return scenarios
+
+
+def get_scenario(conn: sqlite3.Connection, scenario_id: int) -> dict:
+    """
+    Get a specific scenario by ID.
+
+    Args:
+        conn: Database connection
+        scenario_id: ID of the scenario
+
+    Returns:
+        Scenario dictionary with keys: id, network_id, name, description, is_master, created_at
+
+    Raises:
+        ValidationError: If scenario not found
+        DatabaseError: If query fails
+    """
+
+    cursor = conn.execute(
+        "SELECT id, network_id, name, description, is_master, created_at "
+        "FROM scenarios "
+        "WHERE id = ?",
+        (scenario_id,)
+    )
+
+    row = cursor.fetchone()
+    if not row:
+        raise ValidationError(f"Scenario with ID {scenario_id} not found")
+
+    return {
+        'id': row[0],
+        'network_id': row[1],
+        'name': row[2],
+        'description': row[3],
+        'is_master': bool(row[4]),
+        'created_at': row[5],
+    }
+
+
+def delete_scenario(conn: sqlite3.Connection, scenario_id: int) -> None:
+    """
+    Delete a scenario (cannot delete master scenarios).
+
+    Args:
+        conn: Database connection
+        scenario_id: ID of the scenario to delete
+
+    Raises:
+        ValidationError: If scenario not found or is master scenario
+        DatabaseError: If deletion fails
+    """
+
+    # Check if scenario exists and is not master
+    cursor = conn.execute(
+        "SELECT is_master FROM scenarios WHERE id = ?",
+        (scenario_id,)
+    )
+
+    row = cursor.fetchone()
+    if not row:
+        raise ValidationError(f"Scenario with ID {scenario_id} not found")
+
+    if row[0]:  # is_master
+        raise ValidationError("Cannot delete master scenario")
+
+    # Delete the scenario (this will cascade to delete related component attributes)
+    cursor = conn.execute("DELETE FROM scenarios WHERE id = ?", (scenario_id,))
+
+    if cursor.rowcount == 0:
+        raise DatabaseError("Failed to delete scenario")
+
+    logger.info(f"Deleted scenario {scenario_id}")
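Taken together, the new module is a small CRUD surface over the scenarios table. A usage sketch, assuming an existing pyconvexity database that already contains network 1 (the file name is hypothetical):

```python
import sqlite3
from pyconvexity import (
    create_scenario, list_scenarios, get_scenario, delete_scenario
)

conn = sqlite3.connect("energy_model.db")  # hypothetical pyconvexity database

# Create a non-master scenario alongside the network's master scenario
sid = create_scenario(conn, network_id=1, name="high_demand",
                      description="Demand +20% sensitivity")
conn.commit()

# Master scenarios sort first, then by creation time
for s in list_scenarios(conn, network_id=1):
    print(s["id"], s["name"], s["is_master"])

print(get_scenario(conn, sid)["description"])

# Master scenarios are protected; deletion only works on non-master rows
delete_scenario(conn, sid)
conn.commit()
```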
@@ -0,0 +1,29 @@
+"""
+Solver module for PyConvexity.
+
+Provides interfaces to various optimization solvers for energy system modeling.
+"""
+
+# Try to import PyPSA solver with graceful fallback
+try:
+    from pyconvexity.solvers.pypsa import (
+        solve_network,
+        build_pypsa_network,
+        solve_pypsa_network,
+        load_network_components,
+        apply_constraints,
+        store_solve_results
+    )
+
+    __all__ = [
+        "solve_network",
+        "build_pypsa_network",
+        "solve_pypsa_network",
+        "load_network_components",
+        "apply_constraints",
+        "store_solve_results"
+    ]
+
+except ImportError:
+    # PyPSA not available
+    __all__ = []
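Since `__all__` is emptied when PyPSA is missing, downstream code can feature-detect the solver layer instead of wrapping every call in try/except. One way to do that (a sketch; the error message is illustrative):

```python
import pyconvexity.solvers as solvers

if "solve_network" in solvers.__all__:
    # PyPSA and its solver stack imported cleanly
    from pyconvexity.solvers import solve_network
else:
    raise RuntimeError("PyPSA extras not installed; install pypsa to enable solving")
```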
@@ -0,0 +1,24 @@
+"""
+PyPSA solver integration for PyConvexity.
+
+Provides high-level and low-level APIs for building PyPSA networks from database,
+solving them, and storing results back to the database.
+"""
+
+from pyconvexity.solvers.pypsa.api import (
+    solve_network,
+    build_pypsa_network,
+    solve_pypsa_network,
+    load_network_components,
+    apply_constraints,
+    store_solve_results
+)
+
+__all__ = [
+    "solve_network",
+    "build_pypsa_network",
+    "solve_pypsa_network",
+    "load_network_components",
+    "apply_constraints",
+    "store_solve_results"
+]