pyconvexity-0.4.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyconvexity has been flagged and may be problematic.
- pyconvexity/__init__.py +241 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/dashboard.py +265 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +507 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +191 -0
- pyconvexity/io/netcdf_importer.py +1802 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +30 -0
- pyconvexity/solvers/pypsa/api.py +446 -0
- pyconvexity/solvers/pypsa/batch_loader.py +296 -0
- pyconvexity/solvers/pypsa/builder.py +655 -0
- pyconvexity/solvers/pypsa/clearing_price.py +678 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1442 -0
- pyconvexity/solvers/pypsa/storage.py +2096 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.8.dist-info/METADATA +148 -0
- pyconvexity-0.4.8.dist-info/RECORD +44 -0
- pyconvexity-0.4.8.dist-info/WHEEL +5 -0
- pyconvexity-0.4.8.dist-info/top_level.txt +1 -0
@@ -0,0 +1,730 @@
"""
Attribute management operations for PyConvexity.

Provides operations for setting, getting, and managing component attributes
with support for both static values and timeseries data.
"""

import sqlite3
import json
import logging
from typing import Dict, Any, Optional, List, Union
import pandas as pd
from io import BytesIO
import pyarrow as pa
import pyarrow.parquet as pq

from pyconvexity.core.types import (
    StaticValue,
    Timeseries,
    TimeseriesMetadata,
    AttributeValue,
    TimePeriod,
)
from pyconvexity.core.errors import (
    ComponentNotFound,
    AttributeNotFound,
    ValidationError,
    TimeseriesError,
)

logger = logging.getLogger(__name__)


def set_static_attribute(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    value: StaticValue,
    scenario_id: Optional[int] = None,
) -> None:
    """
    Set a static attribute value for a component in a specific scenario.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        value: Static value to set
        scenario_id: Scenario ID (uses master scenario if None)

    Raises:
        ComponentNotFound: If component doesn't exist
        ValidationError: If attribute doesn't allow static values or validation fails
    """
    # 1. Get component type
    from pyconvexity.models.components import get_component_type

    component_type = get_component_type(conn, component_id)

    # 2. Get validation rule
    from pyconvexity.validation.rules import get_validation_rule, validate_static_value

    rule = get_validation_rule(conn, component_type, attribute_name)

    # 3. Check if static values are allowed
    if not rule.allows_static:
        raise ValidationError(
            f"Attribute '{attribute_name}' for {component_type} does not allow static values"
        )

    # 4. Validate data type
    validate_static_value(value, rule)

    # 5. Use scenario_id directly (NULL for base network)
    resolved_scenario_id = scenario_id

    # 6. Remove any existing attribute for this scenario
    cursor = conn.cursor()
    if resolved_scenario_id is None:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
            (component_id, attribute_name, resolved_scenario_id),
        )

    # 7. Insert new static attribute (store as JSON in static_value TEXT column)
    json_value = value.to_json()

    cursor.execute(
        """INSERT INTO component_attributes
        (component_id, attribute_name, scenario_id, storage_type, static_value, data_type, unit, is_input)
        VALUES (?, ?, ?, 'static', ?, ?, ?, ?)""",
        (
            component_id,
            attribute_name,
            resolved_scenario_id,
            json_value,
            rule.data_type,
            rule.unit,
            rule.is_input,
        ),
    )


def set_timeseries_attribute(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    timeseries: Union[Timeseries, List[float]],
    scenario_id: Optional[int] = None,
) -> None:
    """
    Set a timeseries attribute value for a component in a specific scenario.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        timeseries: Timeseries object or list of float values
        scenario_id: Scenario ID (uses master scenario if None)

    Raises:
        ComponentNotFound: If component doesn't exist
        ValidationError: If attribute doesn't allow timeseries values
        TimeseriesError: If timeseries serialization fails
    """
    # 1. Get component type
    from pyconvexity.models.components import get_component_type

    component_type = get_component_type(conn, component_id)

    # 2. Get validation rule
    from pyconvexity.validation.rules import get_validation_rule

    rule = get_validation_rule(conn, component_type, attribute_name)

    # 3. Check if timeseries values are allowed
    if not rule.allows_timeseries:
        raise ValidationError(
            f"Attribute '{attribute_name}' for {component_type} does not allow timeseries values"
        )

    # 4. Convert input to values array
    if isinstance(timeseries, Timeseries):
        values = timeseries.values
    elif isinstance(timeseries, list) and all(
        isinstance(v, (int, float)) for v in timeseries
    ):
        # Direct values array
        values = [float(v) for v in timeseries]
    else:
        raise ValueError("timeseries must be Timeseries or List[float]")

    # 5. Serialize to binary format (ultra-fast, matches Rust exactly)
    binary_data = serialize_values_to_binary(values)

    # 6. Use scenario_id directly (NULL for base network)
    resolved_scenario_id = scenario_id

    # 7. Remove any existing attribute for this scenario
    cursor = conn.cursor()
    if resolved_scenario_id is None:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
            (component_id, attribute_name, resolved_scenario_id),
        )

    # 8. Insert new timeseries attribute
    cursor.execute(
        """INSERT INTO component_attributes
        (component_id, attribute_name, scenario_id, storage_type, timeseries_data, data_type, unit, is_input)
        VALUES (?, ?, ?, 'timeseries', ?, ?, ?, ?)""",
        (
            component_id,
            attribute_name,
            resolved_scenario_id,
            binary_data,
            rule.data_type,
            rule.unit,
            rule.is_input,
        ),
    )


def get_attribute(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    scenario_id: Optional[int] = None,
) -> AttributeValue:
    """
    Get an attribute value with scenario fallback logic.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        scenario_id: Scenario ID (uses master scenario if None)

    Returns:
        AttributeValue containing either static or timeseries data

    Raises:
        ComponentNotFound: If component doesn't exist
        AttributeNotFound: If attribute doesn't exist
    """
    cursor = conn.cursor()

    # Determine which scenario to check first
    current_scenario_id = scenario_id

    # First try to get the attribute from the current scenario
    if current_scenario_id is None:
        cursor.execute(
            """SELECT storage_type, static_value, timeseries_data, data_type, unit
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL""",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            """SELECT storage_type, static_value, timeseries_data, data_type, unit
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?""",
            (component_id, attribute_name, current_scenario_id),
        )
    result = cursor.fetchone()

    # If not found in current scenario and current scenario is not base (NULL), try base scenario as fallback
    if not result and current_scenario_id is not None:
        cursor.execute(
            """SELECT storage_type, static_value, timeseries_data, data_type, unit
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL""",
            (component_id, attribute_name),
        )
        result = cursor.fetchone()

    if not result:
        raise AttributeNotFound(component_id, attribute_name)

    storage_type, static_value_json, timeseries_data, data_type, unit = result

    # Handle the deserialization based on storage type
    if storage_type == "static":
        if not static_value_json:
            raise ValidationError("Static attribute missing value")

        # Parse JSON value
        json_value = json.loads(static_value_json)

        # Convert based on data type
        if data_type == "float":
            if isinstance(json_value, (int, float)):
                static_value = StaticValue(float(json_value))
            else:
                raise ValidationError("Expected float value")
        elif data_type == "int":
            if isinstance(json_value, (int, float)):
                static_value = StaticValue(int(json_value))
            else:
                raise ValidationError("Expected integer value")
        elif data_type == "boolean":
            if isinstance(json_value, bool):
                static_value = StaticValue(json_value)
            else:
                raise ValidationError("Expected boolean value")
        elif data_type == "string":
            if isinstance(json_value, str):
                static_value = StaticValue(json_value)
            else:
                raise ValidationError("Expected string value")
        else:
            raise ValidationError(f"Unknown data type: {data_type}")

        return AttributeValue.static(static_value)

    elif storage_type == "timeseries":
        if not timeseries_data:
            raise ValidationError("Timeseries attribute missing data")

        # Deserialize from binary format to new efficient Timeseries format
        values = deserialize_values_from_binary(timeseries_data)

        timeseries = Timeseries(
            values=values,
            length=len(values),
            start_index=0,
            data_type=data_type,
            unit=unit,
            is_input=True,  # Default, could be enhanced with actual is_input from DB
        )

        return AttributeValue.timeseries(timeseries)

    else:
        raise ValidationError(f"Unknown storage type: {storage_type}")


def delete_attribute(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    scenario_id: Optional[int] = None,
) -> None:
    """
    Delete an attribute from a specific scenario.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        scenario_id: Scenario ID (uses master scenario if None)

    Raises:
        AttributeNotFound: If attribute doesn't exist
    """
    # Use scenario_id directly (NULL for base network)
    resolved_scenario_id = scenario_id

    cursor = conn.cursor()
    if resolved_scenario_id is None:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
            (component_id, attribute_name, resolved_scenario_id),
        )

    if cursor.rowcount == 0:
        raise AttributeNotFound(component_id, attribute_name)


# ============================================================================
# EFFICIENT TIMESERIES SERIALIZATION - MATCHES RUST IMPLEMENTATION EXACTLY
# ============================================================================


def serialize_values_to_binary(values: List[float]) -> bytes:
    """
    Serialize f32 values to binary format - EXACT MATCH WITH RUST.

    Ultra-fast binary format: just raw Float32 array, little-endian.
    """
    if not values:
        return b""

    import struct

    buffer = bytearray(len(values) * 4)  # 4 bytes per Float32

    for i, value in enumerate(values):
        # Pack as little-endian Float32 to match Rust exactly
        struct.pack_into("<f", buffer, i * 4, float(value))

    return bytes(buffer)


def deserialize_values_from_binary(data: bytes) -> List[float]:
    """
    Deserialize f32 values from binary format - EXACT MATCH WITH RUST.

    Ultra-fast deserialization: read raw Float32 values only.
    """
    if not data:
        return []

    # Ensure data length is multiple of 4 (Float32 size)
    if len(data) % 4 != 0:
        raise ValueError("Invalid binary data length - must be multiple of 4 bytes")

    import struct

    values = []

    # Ultra-fast deserialization: read raw Float32 values
    for i in range(0, len(data), 4):
        value = struct.unpack("<f", data[i : i + 4])[0]  # Little-endian Float32
        values.append(value)

    return values


def get_timeseries_length_from_binary(data: bytes) -> int:
    """Get the length of a timeseries without deserializing the full data."""
    if not data:
        return 0

    # Ultra-fast: just divide by 4 bytes per Float32
    if len(data) % 4 != 0:
        raise ValueError("Invalid binary data length - must be multiple of 4 bytes")

    return len(data) // 4


# ============================================================================
# UNIFIED TIMESERIES FUNCTIONS - MATCH RUST API
# ============================================================================


def get_timeseries(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    scenario_id: Optional[int] = None,
    start_index: Optional[int] = None,
    end_index: Optional[int] = None,
    max_points: Optional[int] = None,
) -> Timeseries:
    """
    Get timeseries data with unified interface matching Rust implementation.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        scenario_id: Scenario ID (uses master scenario if None)
        start_index: Start index for range queries
        end_index: End index for range queries
        max_points: Maximum number of points (for sampling)

    Returns:
        Timeseries object with efficient array-based data

    Raises:
        ComponentNotFound: If component doesn't exist
        AttributeNotFound: If attribute doesn't exist
    """
    # Get the attribute value
    attr_value = get_attribute(conn, component_id, attribute_name, scenario_id)

    if not attr_value.is_timeseries():
        raise ValueError(f"Attribute '{attribute_name}' is not a timeseries")

    timeseries = attr_value.as_timeseries()
    if not timeseries:
        raise ValueError("Failed to get timeseries data")

    # Apply range filtering if requested
    if start_index is not None and end_index is not None:
        timeseries = timeseries.slice(start_index, end_index)

    # Apply sampling if requested
    if max_points is not None:
        timeseries = timeseries.sample(max_points)

    return timeseries


# ============================================================================
# ACTUAL VALUE FUNCTIONS
# ============================================================================


def set_actual_static_value(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    value: StaticValue,
) -> None:
    """
    Set an actual (measured/true) static value for a component attribute.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Attribute name
        value: Static value to set
    """
    from pyconvexity.models.scenarios import get_or_create_actual_scenario

    actual_scenario_id = get_or_create_actual_scenario(conn)
    set_static_attribute(conn, component_id, attribute_name, value, actual_scenario_id)


def set_actual_timeseries_value(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    timeseries: Union[Timeseries, List[float]],
) -> None:
    """
    Set an actual (measured/true) timeseries value for a component attribute.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Attribute name
        timeseries: Timeseries object or list of float values
    """
    from pyconvexity.models.scenarios import get_or_create_actual_scenario

    actual_scenario_id = get_or_create_actual_scenario(conn)
    set_timeseries_attribute(
        conn, component_id, attribute_name, timeseries, actual_scenario_id
    )


def get_actual_value(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
) -> Optional[AttributeValue]:
    """
    Get the actual value for a component attribute (if it exists).

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Attribute name

    Returns:
        AttributeValue or None if no actual value exists
    """
    from pyconvexity.models.scenarios import get_actual_scenario_id

    actual_scenario_id = get_actual_scenario_id(conn)
    if actual_scenario_id is None:
        return None

    try:
        return get_attribute_direct(conn, component_id, attribute_name, actual_scenario_id)
    except AttributeNotFound:
        return None


def get_attribute_direct(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    scenario_id: Optional[int] = None,
) -> AttributeValue:
    """
    Get an attribute value directly for a specific scenario (no fallback logic).

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Attribute name
        scenario_id: Scenario ID (NULL for base network)

    Returns:
        AttributeValue

    Raises:
        AttributeNotFound: If attribute doesn't exist in the specified scenario
    """
    cursor = conn.cursor()

    if scenario_id is None:
        cursor.execute(
            """SELECT storage_type, static_value, timeseries_data, data_type, unit
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL""",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            """SELECT storage_type, static_value, timeseries_data, data_type, unit
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?""",
            (component_id, attribute_name, scenario_id),
        )

    result = cursor.fetchone()
    if not result:
        raise AttributeNotFound(component_id, attribute_name)

    storage_type, static_value_json, timeseries_data, data_type, unit = result

    # Handle deserialization based on storage type
    if storage_type == "static":
        if not static_value_json:
            raise ValidationError("Static attribute missing value")

        import json

        json_value = json.loads(static_value_json)

        if data_type == "float":
            static_value = StaticValue(float(json_value))
        elif data_type == "int":
            static_value = StaticValue(int(json_value))
        elif data_type == "boolean":
            static_value = StaticValue(bool(json_value))
        elif data_type == "string":
            static_value = StaticValue(str(json_value))
        else:
            raise ValidationError(f"Unknown data type: {data_type}")

        return AttributeValue.static(static_value)

    elif storage_type == "timeseries":
        if not timeseries_data:
            raise ValidationError("Timeseries attribute missing data")

        values = deserialize_values_from_binary(timeseries_data)

        timeseries = Timeseries(
            values=values,
            length=len(values),
            start_index=0,
            data_type=data_type,
            unit=unit,
            is_input=True,
        )

        return AttributeValue.timeseries(timeseries)

    else:
        raise ValidationError(f"Unknown storage type: {storage_type}")


def clear_actual_value(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
) -> None:
    """
    Clear (delete) the actual value for a component attribute.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Attribute name
    """
    from pyconvexity.models.scenarios import get_actual_scenario_id

    actual_scenario_id = get_actual_scenario_id(conn)
    if actual_scenario_id is None:
        return  # No actual scenario, nothing to clear

    try:
        delete_attribute(conn, component_id, attribute_name, actual_scenario_id)
    except AttributeNotFound:
        pass  # Already doesn't exist


def get_timeseries_metadata(
    conn: sqlite3.Connection,
    component_id: int,
    attribute_name: str,
    scenario_id: Optional[int] = None,
) -> TimeseriesMetadata:
    """
    Get timeseries metadata without loading the full data.

    Args:
        conn: Database connection
        component_id: Component ID
        attribute_name: Name of the attribute
        scenario_id: Scenario ID (uses master scenario if None)

    Returns:
        TimeseriesMetadata with length and type information
    """
    # Get basic attribute info without loading full data
    cursor = conn.cursor()

    # Get current scenario ID
    current_scenario_id = scenario_id

    # Get timeseries metadata
    if current_scenario_id is None:
        cursor.execute(
            """SELECT timeseries_data, data_type, unit, is_input
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id IS NULL""",
            (component_id, attribute_name),
        )
    else:
        cursor.execute(
            """SELECT timeseries_data, data_type, unit, is_input
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id = ?""",
            (component_id, attribute_name, current_scenario_id),
        )
    result = cursor.fetchone()

    # Try fallback to base scenario if not found in current scenario
    if not result and current_scenario_id is not None:
        cursor.execute(
            """SELECT timeseries_data, data_type, unit, is_input
            FROM component_attributes
            WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id IS NULL""",
            (component_id, attribute_name),
        )
        result = cursor.fetchone()

    if not result:
        raise AttributeNotFound(component_id, attribute_name)

    timeseries_data, data_type, unit, is_input = result

    # Get length without full deserialization
    length = get_timeseries_length_from_binary(timeseries_data)

    # Get time range from network time periods
    try:
        from pyconvexity.models.network import get_network_time_periods

        time_periods = get_network_time_periods(conn)
        start_time = time_periods[0].timestamp if time_periods else 0
        end_time = time_periods[-1].timestamp if time_periods else 0
    except Exception:
        start_time = 0
        end_time = length - 1

    return TimeseriesMetadata(
        length=length,
        start_time=start_time,
        end_time=end_time,
        start_index=0,
        end_index=length,
        data_type=data_type,
        unit=unit,
        is_input=is_input,
    )