pyconvexity 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (43)
  1. pyconvexity/__init__.py +87 -46
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/core/__init__.py +3 -5
  4. pyconvexity/core/database.py +111 -103
  5. pyconvexity/core/errors.py +16 -10
  6. pyconvexity/core/types.py +61 -54
  7. pyconvexity/data/__init__.py +0 -1
  8. pyconvexity/data/loaders/cache.py +65 -64
  9. pyconvexity/data/schema/01_core_schema.sql +134 -234
  10. pyconvexity/data/schema/02_data_metadata.sql +38 -168
  11. pyconvexity/data/schema/03_validation_data.sql +327 -264
  12. pyconvexity/data/sources/gem.py +169 -139
  13. pyconvexity/io/__init__.py +4 -10
  14. pyconvexity/io/excel_exporter.py +694 -480
  15. pyconvexity/io/excel_importer.py +817 -545
  16. pyconvexity/io/netcdf_exporter.py +66 -61
  17. pyconvexity/io/netcdf_importer.py +850 -619
  18. pyconvexity/models/__init__.py +109 -59
  19. pyconvexity/models/attributes.py +197 -178
  20. pyconvexity/models/carriers.py +70 -67
  21. pyconvexity/models/components.py +260 -236
  22. pyconvexity/models/network.py +202 -284
  23. pyconvexity/models/results.py +65 -55
  24. pyconvexity/models/scenarios.py +58 -88
  25. pyconvexity/solvers/__init__.py +5 -5
  26. pyconvexity/solvers/pypsa/__init__.py +3 -3
  27. pyconvexity/solvers/pypsa/api.py +150 -134
  28. pyconvexity/solvers/pypsa/batch_loader.py +165 -162
  29. pyconvexity/solvers/pypsa/builder.py +390 -291
  30. pyconvexity/solvers/pypsa/constraints.py +184 -162
  31. pyconvexity/solvers/pypsa/solver.py +968 -663
  32. pyconvexity/solvers/pypsa/storage.py +1377 -671
  33. pyconvexity/timeseries.py +63 -60
  34. pyconvexity/validation/__init__.py +14 -6
  35. pyconvexity/validation/rules.py +95 -84
  36. pyconvexity-0.4.1.dist-info/METADATA +46 -0
  37. pyconvexity-0.4.1.dist-info/RECORD +42 -0
  38. pyconvexity/data/schema/04_scenario_schema.sql +0 -122
  39. pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
  40. pyconvexity-0.4.0.dist-info/METADATA +0 -138
  41. pyconvexity-0.4.0.dist-info/RECORD +0 -44
  42. {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
--- a/pyconvexity/validation/rules.py
+++ b/pyconvexity/validation/rules.py
@@ -10,54 +10,58 @@ import logging
 from typing import Dict, Any, Optional, List
 
 from pyconvexity.core.types import (
-    ValidationRule, StaticValue, TimePeriod, TimeseriesValidationResult
-)
-from pyconvexity.core.errors import (
-    ValidationError, InvalidDataType
+    ValidationRule,
+    StaticValue,
+    TimePeriod,
+    TimeseriesValidationResult,
 )
+from pyconvexity.core.errors import ValidationError, InvalidDataType
 
 logger = logging.getLogger(__name__)
 
 
 def get_validation_rule(
-    conn: sqlite3.Connection,
-    component_type: str,
-    attribute_name: str
+    conn: sqlite3.Connection, component_type: str, attribute_name: str
 ) -> ValidationRule:
     """
     Get validation rule for a specific component type and attribute.
-
+
     Args:
         conn: Database connection
        component_type: Type of component (e.g., "BUS", "GENERATOR")
        attribute_name: Name of the attribute
-
+
     Returns:
        ValidationRule object with all validation information
-
+
     Raises:
        ValidationError: If no validation rule is found
    """
-    cursor = conn.execute("""
+    cursor = conn.execute(
+        """
        SELECT component_type, attribute_name, data_type, unit, default_value, allowed_storage_types,
               is_required, is_input, description
        FROM attribute_validation_rules
        WHERE component_type = ? AND attribute_name = ?
-    """, (component_type, attribute_name))
-
+    """,
+        (component_type, attribute_name),
+    )
+
    row = cursor.fetchone()
    if not row:
-        raise ValidationError(f"No validation rule found for {component_type}.{attribute_name}")
-
+        raise ValidationError(
+            f"No validation rule found for {component_type}.{attribute_name}"
+        )
+
    allowed_storage_types = row[5]
    allows_static = allowed_storage_types in ("static", "static_or_timeseries")
    allows_timeseries = allowed_storage_types in ("timeseries", "static_or_timeseries")
-
+
    # Parse default value
    default_value = None
    if row[4]:  # default_value_string
        default_value = parse_default_value(row[4])
-
+
    return ValidationRule(
        component_type=row[0],
        attribute_name=row[1],
@@ -70,58 +74,65 @@ def get_validation_rule(
        is_required=bool(row[6]),
        is_input=bool(row[7]),
        description=row[8],
-        default_value=default_value
+        default_value=default_value,
    )
 
 
 def list_validation_rules(
-    conn: sqlite3.Connection,
-    component_type: str
+    conn: sqlite3.Connection, component_type: str
 ) -> List[ValidationRule]:
    """
    List validation rules for a component type.
-
+
    Args:
        conn: Database connection
        component_type: Type of component
-
+
    Returns:
        List of ValidationRule objects
    """
-    cursor = conn.execute("""
+    cursor = conn.execute(
+        """
        SELECT component_type, attribute_name, data_type, unit, default_value, allowed_storage_types,
               is_required, is_input, description
        FROM attribute_validation_rules
        WHERE component_type = ?
        ORDER BY attribute_name
-    """, (component_type,))
-
+    """,
+        (component_type,),
+    )
+
    rules = []
    for row in cursor.fetchall():
        allowed_storage_types = row[5]
        allows_static = allowed_storage_types in ("static", "static_or_timeseries")
-        allows_timeseries = allowed_storage_types in ("timeseries", "static_or_timeseries")
-
+        allows_timeseries = allowed_storage_types in (
+            "timeseries",
+            "static_or_timeseries",
+        )
+
        # Parse default value
        default_value = None
        if row[4]:  # default_value_string
            default_value = parse_default_value(row[4])
-
-        rules.append(ValidationRule(
-            component_type=row[0],
-            attribute_name=row[1],
-            data_type=row[2],
-            unit=row[3],
-            default_value_string=row[4],
-            allowed_storage_types=allowed_storage_types,
-            allows_static=allows_static,
-            allows_timeseries=allows_timeseries,
-            is_required=bool(row[6]),
-            is_input=bool(row[7]),
-            description=row[8],
-            default_value=default_value
-        ))
-
+
+        rules.append(
+            ValidationRule(
+                component_type=row[0],
+                attribute_name=row[1],
+                data_type=row[2],
+                unit=row[3],
+                default_value_string=row[4],
+                allowed_storage_types=allowed_storage_types,
+                allows_static=allows_static,
+                allows_timeseries=allows_timeseries,
+                is_required=bool(row[6]),
+                is_input=bool(row[7]),
+                description=row[8],
+                default_value=default_value,
+            )
+        )
+
    return rules
 
 
@@ -129,20 +140,22 @@ def get_all_validation_rules(conn: sqlite3.Connection) -> Dict[str, Any]:
    """
    Get all validation rules from the database.
    This replaces the need to load the entire JSON file into memory.
-
+
    Args:
        conn: Database connection
-
+
    Returns:
        Dictionary mapping component types to their validation rules
    """
    try:
-        cursor = conn.execute("""
+        cursor = conn.execute(
+            """
            SELECT component_type, attribute_name, data_type, unit, default_value, allowed_storage_types,
                   is_required, is_input, description
            FROM attribute_validation_rules
-        """)
-
+        """
+        )
+
        rules = {}
        for row in cursor.fetchall():
            component_type = row[0]
@@ -154,20 +167,20 @@ def get_all_validation_rules(conn: sqlite3.Connection) -> Dict[str, Any]:
            is_required = bool(row[6])
            is_input = bool(row[7])
            description = row[8]
-
+
            if component_type not in rules:
                rules[component_type] = {}
-
+
            rules[component_type][attribute_name] = {
-                'data_type': data_type,
-                'unit': unit,
-                'default_value': default_value,
-                'allowed_storage_types': allowed_storage_types,
-                'is_required': is_required,
-                'is_input': is_input,
-                'description': description
+                "data_type": data_type,
+                "unit": unit,
+                "default_value": default_value,
+                "allowed_storage_types": allowed_storage_types,
+                "is_required": is_required,
+                "is_input": is_input,
+                "description": description,
            }
-
+
        return rules
    except Exception as e:
        logger.error(f"Error getting all validation rules: {e}")
@@ -177,66 +190,64 @@ def get_all_validation_rules(conn: sqlite3.Connection) -> Dict[str, Any]:
 def validate_static_value(value: StaticValue, rule: ValidationRule) -> None:
    """
    Validate static value against rule.
-
+
    Args:
        value: StaticValue to validate
        rule: ValidationRule to validate against
-
+
    Raises:
        InvalidDataType: If value type doesn't match rule
    """
    value_type = value.data_type()
-
+
    if value_type != rule.data_type:
        raise InvalidDataType(expected=rule.data_type, actual=value_type)
 
 
 def validate_timeseries_alignment(
-    conn: sqlite3.Connection,
-    network_id: int,
-    timeseries: List[float]
+    conn: sqlite3.Connection, timeseries: List[float]
 ) -> TimeseriesValidationResult:
    """
-    Validate timeseries alignment with network periods.
-
+    Validate timeseries alignment with network periods (single network per database).
+
    Args:
        conn: Database connection
-        network_id: Network ID
        timeseries: List of timeseries points to validate
-
+
    Returns:
        TimeseriesValidationResult with validation details
    """
    # Get network time periods
    from pyconvexity.models.network import get_network_time_periods
-    network_periods = get_network_time_periods(conn, network_id)
+
+    network_periods = get_network_time_periods(conn)
    network_period_indices = {p.period_index for p in network_periods}
-
+
    # Get provided period indices
    provided_period_indices = {p.period_index for p in timeseries}
-
+
    # Find missing and extra periods
    missing_periods = list(network_period_indices - provided_period_indices)
    extra_periods = list(provided_period_indices - network_period_indices)
-
+
    is_valid = len(missing_periods) == 0 and len(extra_periods) == 0
-
+
    return TimeseriesValidationResult(
        is_valid=is_valid,
        missing_periods=missing_periods,
        extra_periods=extra_periods,
        total_network_periods=len(network_periods),
-        provided_periods=len(timeseries)
+        provided_periods=len(timeseries),
    )
 
 
 def parse_default_value(s: str) -> Optional[StaticValue]:
    """
    Parse default value string.
-
+
    Args:
        s: String representation of default value
-
+
    Returns:
        StaticValue object or None if parsing fails
    """
@@ -265,30 +276,30 @@ def get_attribute_setter_info(
 ) -> Dict[str, Any]:
    """
    Get the appropriate function name for setting an attribute.
-
+
    Args:
        conn: Database connection
        component_type: Type of component
        attribute_name: Name of the attribute
-
+
    Returns:
        Dictionary with setter function information
-
+
    Raises:
        ValidationError: If attribute or data type is unknown
    """
    rule = get_validation_rule(conn, component_type, attribute_name)
-
+
    function_name = {
        "float": "set_float_attribute",
-        "int": "set_integer_attribute",
+        "int": "set_integer_attribute",
        "boolean": "set_boolean_attribute",
        "string": "set_string_attribute",
    }.get(rule.data_type)
-
+
    if not function_name:
        raise ValidationError(f"Unknown data type: {rule.data_type}")
-
+
    return {
        "function_name": function_name,
        "data_type": rule.data_type,
@@ -297,5 +308,5 @@ def get_attribute_setter_info(
        "is_required": rule.is_required,
        "default_value": rule.default_value_string,
        "unit": rule.unit,
-        "description": rule.description
+        "description": rule.description,
    }
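The most consequential change in this file is the signature of validate_timeseries_alignment: 0.4.1 assumes a single network per database, so the network_id argument is gone and callers must drop it. A minimal before/after sketch (the database path and the timeseries helper are illustrative placeholders, not part of the package):

    import sqlite3

    from pyconvexity.validation.rules import validate_timeseries_alignment

    conn = sqlite3.connect("model.db")   # hypothetical database file
    timeseries = load_timeseries_points()  # placeholder helper: points carrying a period_index

    # 0.4.0 required the network to be identified explicitly:
    #   validate_timeseries_alignment(conn, network_id=1, timeseries=timeseries)
    # 0.4.1 resolves periods from the database's single network:
    result = validate_timeseries_alignment(conn, timeseries)
    if not result.is_valid:
        print(result.missing_periods, result.extra_periods)
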
--- /dev/null
+++ b/pyconvexity-0.4.1.dist-info/METADATA
@@ -0,0 +1,46 @@
+Metadata-Version: 2.4
+Name: pyconvexity
+Version: 0.4.1
+Summary: Python library for energy system modeling and optimization with PyPSA
+Author-email: Convexity Team <info@convexity.com>
+License: MIT
+Project-URL: Homepage, https://github.com/bayesian-energy/convexity-js
+Project-URL: Repository, https://github.com/bayesian-energy/convexity-js
+Project-URL: Issues, https://github.com/bayesian-energy/convexity-js/issues
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+Requires-Dist: pandas>=1.5.0
+Requires-Dist: numpy>=1.21.0
+Requires-Dist: pyarrow>=10.0.0
+Provides-Extra: pypsa
+Requires-Dist: pypsa>=0.25.0; extra == "pypsa"
+Requires-Dist: networkx; extra == "pypsa"
+Requires-Dist: scipy; extra == "pypsa"
+Requires-Dist: xarray; extra == "pypsa"
+Provides-Extra: excel
+Requires-Dist: openpyxl>=3.0.0; extra == "excel"
+Requires-Dist: xlsxwriter>=3.0.0; extra == "excel"
+Provides-Extra: netcdf
+Requires-Dist: netcdf4>=1.6.0; extra == "netcdf"
+Requires-Dist: xarray>=2022.3.0; extra == "netcdf"
+Provides-Extra: data
+Requires-Dist: country-converter>=1.0.0; extra == "data"
+Requires-Dist: pyyaml>=6.0.0; extra == "data"
+Provides-Extra: dev
+Requires-Dist: pytest>=7.0.0; extra == "dev"
+Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+Requires-Dist: black>=22.0.0; extra == "dev"
+Requires-Dist: isort>=5.10.0; extra == "dev"
+Requires-Dist: mypy>=1.0.0; extra == "dev"
+Requires-Dist: pre-commit>=2.20.0; extra == "dev"
+Provides-Extra: all
+Requires-Dist: pyconvexity[data,excel,netcdf,pypsa]; extra == "all"
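The extras declared above gate the optional backends, installed with e.g. pip install "pyconvexity[pypsa,excel]". A small standard-library sketch for checking which optional backends are importable; the extras-to-module mapping is read off the Requires-Dist entries above and is an assumption for illustration:

    import importlib.util

    extras = {
        "pypsa": ["pypsa", "networkx", "scipy", "xarray"],
        "excel": ["openpyxl", "xlsxwriter"],
        "netcdf": ["netCDF4", "xarray"],  # the netcdf4 distribution imports as netCDF4
    }
    for extra, modules in extras.items():
        missing = [m for m in modules if importlib.util.find_spec(m) is None]
        print(f"[{extra}] " + ("ok" if not missing else "missing: " + ", ".join(missing)))
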
--- /dev/null
+++ b/pyconvexity-0.4.1.dist-info/RECORD
@@ -0,0 +1,42 @@
+pyconvexity/__init__.py,sha256=JHLMfIvvM3Q68qG1KeIBmavTrjurgieDb8R3UXp_WcM,4887
+pyconvexity/_version.py,sha256=pMtTmSUht-XtbR_7Doz6bsQqopJJd8rZ8I8zy2HwwoA,22
+pyconvexity/timeseries.py,sha256=QdKbiqjAlxkJATyKm2Kelx1Ea2PsAnnCYfVLU5VER1Y,11085
+pyconvexity/core/__init__.py,sha256=gdyyHNqOc4h9Nfe9u6NA936GNzH6coGNCMgBvvvOnGE,1196
+pyconvexity/core/database.py,sha256=vwCmuN0B0xwImh6L0bFR4vNWHw_wVfYSG1KwsUjK4iY,14831
+pyconvexity/core/errors.py,sha256=5ZUpqtQVROxizfZeLvOI-fDXYq6ZISqykJrD5oaLfTo,2808
+pyconvexity/core/types.py,sha256=VOg_VMxK3OY_aa5ASzG2KIBEodZ48iepQgNlotQsesA,11171
+pyconvexity/data/README.md,sha256=-tyDHVjqzfWbVvgM4yYYx8cysmgvFXI6plVQNxSHBmo,3156
+pyconvexity/data/__init__.py,sha256=CFFwuIKS0qBk0HVLSByOK-oA5qm4krstJTUGFwUZyjo,509
+pyconvexity/data/loaders/__init__.py,sha256=6xPtOmH2n1mNby7ZjA-2Mk9F48Q246RNsyMnCnJ6gwA,60
+pyconvexity/data/loaders/cache.py,sha256=R-DUIiFpphjyi5EitcUZwzwUdZeqN6poYVyuNpKzB4g,7040
+pyconvexity/data/schema/01_core_schema.sql,sha256=GA5ZoQrxVsGIBDH_YDVkIkHBpZQKedB3LaYATd_XgXs,15130
+pyconvexity/data/schema/02_data_metadata.sql,sha256=BbpTkH1s7IbZQkDBRF2kL_UR9tzMEWDBYS3VBkwDRu0,4323
+pyconvexity/data/schema/03_validation_data.sql,sha256=bxadwwN9Om0KYwRo3lXom33TJI5cg4n2n_X0rpK2CoA,88454
+pyconvexity/data/sources/__init__.py,sha256=Dn6_oS7wB-vLjMj2YeXlmIl6hNjACbicimSabKxIWnc,108
+pyconvexity/data/sources/gem.py,sha256=v8OYCMsb2t-8u-YmK8vzMsgI9ArUAOAXMZZQOFpJ-nI,14923
+pyconvexity/io/__init__.py,sha256=FCyvRDfBUrrNei-y5JVod6MMN1bkPMSSfE0fpKi1aKQ,751
+pyconvexity/io/excel_exporter.py,sha256=9MkZAVnHvsJSmfZ12w29GhDTsYI89fCGphjdo7s_ABs,50506
+pyconvexity/io/excel_importer.py,sha256=Q5petB0WXjbw0TIR4ofG3EkjdT8lBh21yJMbEgdtXfU,59347
+pyconvexity/io/netcdf_exporter.py,sha256=9EUPvDSYR7RxYDfOBT7O9clpC9F4sIj1RaNayvOmb5I,7119
+pyconvexity/io/netcdf_importer.py,sha256=WATZutznl7uMfcbi7957qxdP5NRQs3c1dV6mhBJotdc,70783
+pyconvexity/models/__init__.py,sha256=dmU2I24yHh2azL-FMNX09-InkBhzdYzzJtIq3mlBBv0,4090
+pyconvexity/models/attributes.py,sha256=fvAeMGJNZyMdvMLwGvrGG39SRUzqC43AWJZlFGOC63c,17733
+pyconvexity/models/carriers.py,sha256=L_WuDMW13k8aaA-obsDPxjmpZgZELiIAZuNtxq7YLpg,3447
+pyconvexity/models/components.py,sha256=vxznTAvAzF99ILcc1DdLtj4K6k8aqclwlH1VhLcFAMM,17570
+pyconvexity/models/network.py,sha256=A5_GVxVeuthAmVKfoSA4ANhVXKwuRizke3E7eR0P8rM,11535
+pyconvexity/models/results.py,sha256=6j1H4AwVmp94L97gl_sGnE8izMxkU5o89guKIU8JdtE,4169
+pyconvexity/models/scenarios.py,sha256=i2i9wu0WBtvzv5TdwL3ssAAkaUH6KHlBE-dfqL9xLWE,2800
+pyconvexity/solvers/__init__.py,sha256=t1gOUTqbYDCtIvKPqGVY1fjKwqJi2Od9bGeIO7bPvJE,667
+pyconvexity/solvers/pypsa/__init__.py,sha256=nudu0AOYEfPhpGHZ1Q9pUgjGeeIJd_zeULc975iyluE,555
+pyconvexity/solvers/pypsa/api.py,sha256=9cK5cnJxRI6rkCvqELwMcTFGbmFYJgDsXD6UKCQ5Yfw,17897
+pyconvexity/solvers/pypsa/batch_loader.py,sha256=w525_lqanKtARKgicajhBiDwIJzkaU_HbkL1I82gDqg,12361
+pyconvexity/solvers/pypsa/builder.py,sha256=Nx6fSNzbW4KYbqrgQD-dSlnzvGmypCwcjXwm5KNFq9c,24351
+pyconvexity/solvers/pypsa/constraints.py,sha256=20WliFDhPQGMAsS4VOTU8LZJpsFpLVRHpNsZW49GTcc,16397
+pyconvexity/solvers/pypsa/solver.py,sha256=RjHece-2j3dvzR0zVJYLoSBJnSxbAYPx2f0MN_h6acM,74176
+pyconvexity/solvers/pypsa/storage.py,sha256=7IoT2DpWBKe6jTdKe11a_48d8AuI1TzNMe7LSI3PG-g,107947
+pyconvexity/validation/__init__.py,sha256=VJNZlFoWABsWwUKktNk2jbtXIepH5omvC0WtsTS7o3o,583
+pyconvexity/validation/rules.py,sha256=GiNadc8hvbWBr09vUkGiLLTmSdvtNSeGLFwvCjlikYY,9241
+pyconvexity-0.4.1.dist-info/METADATA,sha256=nwuABYUrl4WhAtRhh6fv2meBJPHW-e1cPjkm8OwiDSM,1953
+pyconvexity-0.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyconvexity-0.4.1.dist-info/top_level.txt,sha256=wFPEDXVaebR3JO5Tt3HNse-ws5aROCcxEco15d6j64s,12
+pyconvexity-0.4.1.dist-info/RECORD,,
--- a/pyconvexity/data/schema/04_scenario_schema.sql
+++ /dev/null
@@ -1,122 +0,0 @@
--- ============================================================================
--- SCENARIO-AWARE ATTRIBUTES AND CONSTRAINTS SCHEMA
--- Support for multiple scenarios within networks and Python constraint execution
--- Version 1.0.0
--- ============================================================================
-
--- ============================================================================
--- SCENARIOS TABLE
--- ============================================================================
-
--- Scenarios table - represents different modeling scenarios within a network
--- The "Main" scenario is the master scenario (explicit, not implicit)
--- "Master" refers to the fact that this is the main reference scenario
-CREATE TABLE scenarios (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    network_id INTEGER NOT NULL,
-    name TEXT NOT NULL,
-    description TEXT,
-    is_master BOOLEAN DEFAULT FALSE,
-    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
-
-    CONSTRAINT fk_scenarios_network
-        FOREIGN KEY (network_id) REFERENCES networks(id) ON DELETE CASCADE,
-    CONSTRAINT uq_scenarios_network_name
-        UNIQUE (network_id, name)
-);
-
--- Index for efficient scenario lookups
-CREATE INDEX idx_scenarios_network ON scenarios(network_id);
-CREATE INDEX idx_scenarios_master ON scenarios(is_master);
-
--- ============================================================================
--- SCENARIO MANAGEMENT TRIGGERS
--- ============================================================================
-
--- Ensure exactly one master scenario per network
-CREATE TRIGGER ensure_single_master_scenario
-    BEFORE INSERT ON scenarios
-    FOR EACH ROW
-    WHEN NEW.is_master = TRUE
-BEGIN
-    UPDATE scenarios
-    SET is_master = FALSE
-    WHERE network_id = NEW.network_id AND is_master = TRUE;
-END;
-
--- Ensure exactly one master scenario per network on update
-CREATE TRIGGER ensure_single_master_scenario_update
-    BEFORE UPDATE ON scenarios
-    FOR EACH ROW
-    WHEN NEW.is_master = TRUE AND OLD.is_master = FALSE
-BEGIN
-    UPDATE scenarios
-    SET is_master = FALSE
-    WHERE network_id = NEW.network_id AND is_master = TRUE AND id != NEW.id;
-END;
-
--- Prevent deletion of master scenario
-CREATE TRIGGER prevent_master_scenario_deletion
-    BEFORE DELETE ON scenarios
-    FOR EACH ROW
-    WHEN OLD.is_master = TRUE
-BEGIN
-    SELECT RAISE(ABORT, 'Cannot delete master scenario');
-END;
-
--- ============================================================================
--- SCENARIO UTILITY FUNCTIONS
--- ============================================================================
-
--- Create a view for easy attribute resolution with scenario inheritance
-CREATE VIEW component_attributes_with_scenario AS
-SELECT
-    ca.component_id,
-    ca.attribute_name,
-    ca.storage_type,
-    ca.static_value,
-    ca.timeseries_data,
-    ca.data_type,
-    ca.unit,
-    ca.is_input,
-    COALESCE(ca.scenario_id, 0) as scenario_id,
-    CASE
-        WHEN ca.scenario_id IS NULL THEN 'Main'
-        ELSE s.name
-    END as scenario_name,
-    ca.created_at,
-    ca.updated_at
-FROM component_attributes ca
-LEFT JOIN scenarios s ON ca.scenario_id = s.id;
-
--- ============================================================================
--- AUTOMATIC MASTER SCENARIO CREATION
--- ============================================================================
-
--- Trigger to automatically create "Main" scenario (the master scenario) when a network is created
--- This ensures every network has exactly one master scenario that serves as the main reference
-CREATE TRIGGER create_master_scenario_for_network
-    AFTER INSERT ON networks
-    FOR EACH ROW
-BEGIN
-    INSERT INTO scenarios (network_id, name, description, is_master)
-    VALUES (NEW.id, 'Main', 'Main scenario (default)', TRUE);
-END;
-
--- ============================================================================
--- COMPONENT VALIDATION FOR CONSTRAINT
--- ============================================================================
-
--- Note: CONSTRAINT components can have NULL carrier_id - this is now enforced
--- by the CHECK constraint in the components table schema
-
--- ============================================================================
--- INITIALIZATION
--- ============================================================================
-
--- This schema extends the existing core schema with scenario support
--- "Main" scenarios (master scenarios) are automatically created for existing networks
-INSERT INTO scenarios (network_id, name, description, is_master)
-SELECT id, 'Main', 'Main scenario (default)', TRUE
-FROM networks
-WHERE id NOT IN (SELECT network_id FROM scenarios WHERE is_master = TRUE);
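For reference, the triggers deleted above enforced "exactly one master scenario per network" by demoting any existing master on insert. A self-contained sketch of that semantics against an in-memory SQLite database (the networks table is reduced to the columns the trigger needs):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE networks (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT);
        CREATE TABLE scenarios (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            network_id INTEGER NOT NULL,
            name TEXT NOT NULL,
            is_master BOOLEAN DEFAULT FALSE,
            UNIQUE (network_id, name)
        );
        CREATE TRIGGER ensure_single_master_scenario
        BEFORE INSERT ON scenarios
        FOR EACH ROW
        WHEN NEW.is_master = TRUE
        BEGIN
            UPDATE scenarios SET is_master = FALSE
            WHERE network_id = NEW.network_id AND is_master = TRUE;
        END;
    """)
    conn.execute("INSERT INTO networks (name) VALUES ('demo')")
    conn.execute("INSERT INTO scenarios (network_id, name, is_master) VALUES (1, 'Main', TRUE)")
    conn.execute("INSERT INTO scenarios (network_id, name, is_master) VALUES (1, 'High demand', TRUE)")
    print(conn.execute("SELECT name, is_master FROM scenarios ORDER BY id").fetchall())
    # [('Main', 0), ('High demand', 1)] -- the old master was demoted automatically
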
--- a/pyconvexity/data/schema/migrate_add_geometries.sql
+++ /dev/null
@@ -1,73 +0,0 @@
--- ============================================================================
--- MIGRATION: Add Component Geometries Support
--- Adds the component_geometries table to existing databases
--- This migration is safe and backwards compatible - existing functionality is unchanged
--- ============================================================================
-
--- Check if the table already exists before creating
-CREATE TABLE IF NOT EXISTS component_geometries (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    component_id INTEGER NOT NULL UNIQUE,
-
-    -- GeoJSON geometry stored as JSON text
-    -- Supports: Point, LineString, Polygon, MultiPolygon, MultiPoint, MultiLineString, GeometryCollection
-    geometry TEXT NOT NULL,
-
-    -- Cache the geometry type for faster queries and validation
-    geometry_type TEXT NOT NULL CHECK (geometry_type IN (
-        'Point', 'LineString', 'Polygon', 'MultiPolygon',
-        'MultiPoint', 'MultiLineString', 'GeometryCollection'
-    )),
-
-    -- Cache bounding box for spatial indexing and quick filtering
-    bbox_min_lng REAL,
-    bbox_min_lat REAL,
-    bbox_max_lng REAL,
-    bbox_max_lat REAL,
-
-    -- Metadata
-    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
-    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
-
-    CONSTRAINT fk_geometry_component
-        FOREIGN KEY (component_id) REFERENCES components(id) ON DELETE CASCADE
-);
-
--- Create indexes if they don't exist
-CREATE INDEX IF NOT EXISTS idx_component_geometries_component
-    ON component_geometries(component_id);
-
-CREATE INDEX IF NOT EXISTS idx_component_geometries_type
-    ON component_geometries(geometry_type);
-
-CREATE INDEX IF NOT EXISTS idx_component_geometries_bbox
-    ON component_geometries(bbox_min_lng, bbox_min_lat, bbox_max_lng, bbox_max_lat);
-
--- Create trigger for automatic timestamp updates
-DROP TRIGGER IF EXISTS update_component_geometries_timestamp;
-
-CREATE TRIGGER update_component_geometries_timestamp
-    BEFORE UPDATE ON component_geometries
-    FOR EACH ROW
-BEGIN
-    UPDATE component_geometries
-    SET updated_at = CURRENT_TIMESTAMP
-    WHERE id = NEW.id;
-END;
-
--- Update schema version
-UPDATE system_metadata
-SET value = '2.5.0',
-    updated_at = CURRENT_TIMESTAMP,
-    description = 'Database schema version - Added component_geometries table'
-WHERE key = 'schema_version';
-
--- Add metadata about the migration
-INSERT OR REPLACE INTO system_metadata (key, value, description)
-VALUES ('geometries_migration_applied', datetime('now'), 'Timestamp when component geometries migration was applied');
-
--- Verify the migration
-SELECT
-    'Migration completed successfully!' as message,
-    COUNT(*) as existing_geometries
-FROM component_geometries;
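The bbox_* columns this removed migration cached are derivable from the GeoJSON in the geometry column. A hedged sketch of that derivation, assuming coordinate-bearing geometry types only (GeometryCollection, which has no top-level coordinates member, is out of scope):

    import json

    def geojson_bbox(geometry_json: str):
        """Return (min_lng, min_lat, max_lng, max_lat) for a GeoJSON geometry."""
        coords = json.loads(geometry_json)["coordinates"]

        def walk(node):
            # Leaves are [lng, lat] (or [lng, lat, alt]) positions.
            if isinstance(node[0], (int, float)):
                yield node[0], node[1]
            else:
                for child in node:
                    yield from walk(child)

        lngs, lats = zip(*walk(coords))
        return min(lngs), min(lats), max(lngs), max(lats)

    print(geojson_bbox('{"type": "LineString", "coordinates": [[4.9, 52.4], [13.4, 52.5]]}'))
    # (4.9, 52.4, 13.4, 52.5)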