pyconvexity 0.3.8.post7__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registries; it is provided for informational purposes only.
- pyconvexity/__init__.py +87 -46
- pyconvexity/_version.py +1 -1
- pyconvexity/core/__init__.py +3 -5
- pyconvexity/core/database.py +111 -103
- pyconvexity/core/errors.py +16 -10
- pyconvexity/core/types.py +61 -54
- pyconvexity/data/__init__.py +0 -1
- pyconvexity/data/loaders/cache.py +65 -64
- pyconvexity/data/schema/01_core_schema.sql +134 -234
- pyconvexity/data/schema/02_data_metadata.sql +38 -168
- pyconvexity/data/schema/03_validation_data.sql +327 -264
- pyconvexity/data/sources/gem.py +169 -139
- pyconvexity/io/__init__.py +4 -10
- pyconvexity/io/excel_exporter.py +694 -480
- pyconvexity/io/excel_importer.py +817 -545
- pyconvexity/io/netcdf_exporter.py +66 -61
- pyconvexity/io/netcdf_importer.py +850 -619
- pyconvexity/models/__init__.py +109 -59
- pyconvexity/models/attributes.py +197 -178
- pyconvexity/models/carriers.py +70 -67
- pyconvexity/models/components.py +260 -236
- pyconvexity/models/network.py +202 -284
- pyconvexity/models/results.py +65 -55
- pyconvexity/models/scenarios.py +58 -88
- pyconvexity/solvers/__init__.py +5 -5
- pyconvexity/solvers/pypsa/__init__.py +3 -3
- pyconvexity/solvers/pypsa/api.py +150 -134
- pyconvexity/solvers/pypsa/batch_loader.py +165 -162
- pyconvexity/solvers/pypsa/builder.py +390 -291
- pyconvexity/solvers/pypsa/constraints.py +184 -162
- pyconvexity/solvers/pypsa/solver.py +968 -666
- pyconvexity/solvers/pypsa/storage.py +1377 -671
- pyconvexity/timeseries.py +63 -60
- pyconvexity/validation/__init__.py +14 -6
- pyconvexity/validation/rules.py +95 -84
- pyconvexity-0.4.1.dist-info/METADATA +46 -0
- pyconvexity-0.4.1.dist-info/RECORD +42 -0
- pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
- pyconvexity/data/schema/04_scenario_schema.sql +0 -122
- pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
- pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
- pyconvexity-0.3.8.post7.dist-info/METADATA +0 -138
- pyconvexity-0.3.8.post7.dist-info/RECORD +0 -49
- {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
pyconvexity/models/attributes.py
CHANGED
@@ -15,10 +15,17 @@ import pyarrow as pa
 import pyarrow.parquet as pq

 from pyconvexity.core.types import (
-    StaticValue,
+    StaticValue,
+    Timeseries,
+    TimeseriesMetadata,
+    AttributeValue,
+    TimePeriod,
 )
 from pyconvexity.core.errors import (
-    ComponentNotFound,
+    ComponentNotFound,
+    AttributeNotFound,
+    ValidationError,
+    TimeseriesError,
 )

 logger = logging.getLogger(__name__)
@@ -29,56 +36,73 @@ def set_static_attribute(
     component_id: int,
     attribute_name: str,
     value: StaticValue,
-    scenario_id: Optional[int] = None
+    scenario_id: Optional[int] = None,
 ) -> None:
     """
     Set a static attribute value for a component in a specific scenario.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
         attribute_name: Name of the attribute
         value: Static value to set
         scenario_id: Scenario ID (uses master scenario if None)
-
+
     Raises:
         ComponentNotFound: If component doesn't exist
         ValidationError: If attribute doesn't allow static values or validation fails
     """
     # 1. Get component type
     from pyconvexity.models.components import get_component_type
+
     component_type = get_component_type(conn, component_id)
-
+
     # 2. Get validation rule
     from pyconvexity.validation.rules import get_validation_rule, validate_static_value
+
     rule = get_validation_rule(conn, component_type, attribute_name)
-
+
     # 3. Check if static values are allowed
     if not rule.allows_static:
-        raise ValidationError(
-
+        raise ValidationError(
+            f"Attribute '{attribute_name}' for {component_type} does not allow static values"
+        )
+
     # 4. Validate data type
     validate_static_value(value, rule)
-
-    # 5.
-    resolved_scenario_id =
-
+
+    # 5. Use scenario_id directly (NULL for base network)
+    resolved_scenario_id = scenario_id
+
     # 6. Remove any existing attribute for this scenario
     cursor = conn.cursor()
-
-
-
-
-
+    if resolved_scenario_id is None:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
+            (component_id, attribute_name),
+        )
+    else:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
+            (component_id, attribute_name, resolved_scenario_id),
+        )
+
     # 7. Insert new static attribute (store as JSON in static_value TEXT column)
     json_value = value.to_json()
-
+
     cursor.execute(
         """INSERT INTO component_attributes
         (component_id, attribute_name, scenario_id, storage_type, static_value, data_type, unit, is_input)
         VALUES (?, ?, ?, 'static', ?, ?, ?, ?)""",
-        (
-
+        (
+            component_id,
+            attribute_name,
+            resolved_scenario_id,
+            json_value,
+            rule.data_type,
+            rule.unit,
+            rule.is_input,
+        ),
     )


@@ -87,18 +111,18 @@ def set_timeseries_attribute(
     component_id: int,
     attribute_name: str,
     timeseries: Union[Timeseries, List[float]],
-    scenario_id: Optional[int] = None
+    scenario_id: Optional[int] = None,
 ) -> None:
     """
     Set a timeseries attribute value for a component in a specific scenario.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
         attribute_name: Name of the attribute
         timeseries: Timeseries object or list of float values
         scenario_id: Scenario ID (uses master scenario if None)
-
+
     Raises:
         ComponentNotFound: If component doesn't exist
         ValidationError: If attribute doesn't allow timeseries values
@@ -106,45 +130,64 @@ def set_timeseries_attribute(
     """
     # 1. Get component type
     from pyconvexity.models.components import get_component_type
+
     component_type = get_component_type(conn, component_id)
-
+
     # 2. Get validation rule
     from pyconvexity.validation.rules import get_validation_rule
+
     rule = get_validation_rule(conn, component_type, attribute_name)
-
+
     # 3. Check if timeseries values are allowed
     if not rule.allows_timeseries:
-        raise ValidationError(
-
+        raise ValidationError(
+            f"Attribute '{attribute_name}' for {component_type} does not allow timeseries values"
+        )
+
     # 4. Convert input to values array
     if isinstance(timeseries, Timeseries):
         values = timeseries.values
-    elif isinstance(timeseries, list) and all(
+    elif isinstance(timeseries, list) and all(
+        isinstance(v, (int, float)) for v in timeseries
+    ):
         # Direct values array
         values = [float(v) for v in timeseries]
     else:
         raise ValueError("timeseries must be Timeseries or List[float]")
-
+
     # 5. Serialize to binary format (ultra-fast, matches Rust exactly)
     binary_data = serialize_values_to_binary(values)
-
-    # 6.
-    resolved_scenario_id =
-
+
+    # 6. Use scenario_id directly (NULL for base network)
+    resolved_scenario_id = scenario_id
+
     # 7. Remove any existing attribute for this scenario
     cursor = conn.cursor()
-
-
-
-
-
+    if resolved_scenario_id is None:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
+            (component_id, attribute_name),
+        )
+    else:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
+            (component_id, attribute_name, resolved_scenario_id),
+        )
+
     # 8. Insert new timeseries attribute
     cursor.execute(
         """INSERT INTO component_attributes
         (component_id, attribute_name, scenario_id, storage_type, timeseries_data, data_type, unit, is_input)
         VALUES (?, ?, ?, 'timeseries', ?, ?, ?, ?)""",
-        (
-
+        (
+            component_id,
+            attribute_name,
+            resolved_scenario_id,
+            binary_data,
+            rule.data_type,
+            rule.unit,
+            rule.is_input,
+        ),
     )


@@ -152,72 +195,69 @@ def get_attribute(
     conn: sqlite3.Connection,
     component_id: int,
     attribute_name: str,
-    scenario_id: Optional[int] = None
+    scenario_id: Optional[int] = None,
 ) -> AttributeValue:
     """
     Get an attribute value with scenario fallback logic.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
         attribute_name: Name of the attribute
         scenario_id: Scenario ID (uses master scenario if None)
-
+
     Returns:
         AttributeValue containing either static or timeseries data
-
+
     Raises:
         ComponentNotFound: If component doesn't exist
         AttributeNotFound: If attribute doesn't exist
     """
-
-    # Get network_id from component to find master scenario
     cursor = conn.cursor()
-
-    result = cursor.fetchone()
-    if not result:
-        raise ComponentNotFound(component_id)
-
-    network_id = result[0]
-
-    # Get master scenario ID
-    master_scenario_id = get_master_scenario_id(conn, network_id)
-
+
     # Determine which scenario to check first
-    current_scenario_id = scenario_id
-
+    current_scenario_id = scenario_id
+
     # First try to get the attribute from the current scenario
-
-
-
-
-
-
-
-
-    # If not found in current scenario and current scenario is not master, try master scenario
-    if not result and current_scenario_id != master_scenario_id:
+    if current_scenario_id is None:
+        cursor.execute(
+            """SELECT storage_type, static_value, timeseries_data, data_type, unit
+            FROM component_attributes
+            WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL""",
+            (component_id, attribute_name),
+        )
+    else:
         cursor.execute(
             """SELECT storage_type, static_value, timeseries_data, data_type, unit
             FROM component_attributes
             WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?""",
-            (component_id, attribute_name,
+            (component_id, attribute_name, current_scenario_id),
+        )
+    result = cursor.fetchone()
+
+    # If not found in current scenario and current scenario is not base (NULL), try base scenario as fallback
+    if not result and current_scenario_id is not None:
+        cursor.execute(
+            """SELECT storage_type, static_value, timeseries_data, data_type, unit
+            FROM component_attributes
+            WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL""",
+            (component_id, attribute_name),
         )
         result = cursor.fetchone()
-
+
     if not result:
         raise AttributeNotFound(component_id, attribute_name)
-
+
     storage_type, static_value_json, timeseries_data, data_type, unit = result
-
+
     # Handle the deserialization based on storage type
     if storage_type == "static":
         if not static_value_json:
             raise ValidationError("Static attribute missing value")
-
+
         # Parse JSON value
         json_value = json.loads(static_value_json)
-
+
         # Convert based on data type
         if data_type == "float":
             if isinstance(json_value, (int, float)):
@@ -241,27 +281,27 @@ def get_attribute(
             raise ValidationError("Expected string value")
         else:
             raise ValidationError(f"Unknown data type: {data_type}")
-
+
         return AttributeValue.static(static_value)
-
+
     elif storage_type == "timeseries":
         if not timeseries_data:
             raise ValidationError("Timeseries attribute missing data")
-
+
         # Deserialize from binary format to new efficient Timeseries format
         values = deserialize_values_from_binary(timeseries_data)
-
+
         timeseries = Timeseries(
             values=values,
             length=len(values),
             start_index=0,
             data_type=data_type,
             unit=unit,
-            is_input=True  # Default, could be enhanced with actual is_input from DB
+            is_input=True,  # Default, could be enhanced with actual is_input from DB
         )
-
+
         return AttributeValue.timeseries(timeseries)
-
+
     else:
         raise ValidationError(f"Unknown storage type: {storage_type}")

@@ -270,91 +310,68 @@ def delete_attribute(
     conn: sqlite3.Connection,
     component_id: int,
     attribute_name: str,
-    scenario_id: Optional[int] = None
+    scenario_id: Optional[int] = None,
 ) -> None:
     """
     Delete an attribute from a specific scenario.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
         attribute_name: Name of the attribute
         scenario_id: Scenario ID (uses master scenario if None)
-
+
     Raises:
         AttributeNotFound: If attribute doesn't exist
     """
-    #
-    resolved_scenario_id =
-
-    cursor = conn.cursor()
-    cursor.execute(
-        "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
-        (component_id, attribute_name, resolved_scenario_id)
-    )
-
-    if cursor.rowcount == 0:
-        raise AttributeNotFound(component_id, attribute_name)
-
+    # Use scenario_id directly (NULL for base network)
+    resolved_scenario_id = scenario_id

-# Helper functions
-
-def resolve_scenario_id(conn: sqlite3.Connection, component_id: int, scenario_id: Optional[int]) -> int:
-    """Resolve scenario ID - if None, get master scenario ID."""
-    if scenario_id is not None:
-        return scenario_id
-
-    # Get network_id from component, then get master scenario
     cursor = conn.cursor()
-
-
-
-
-
-
-
-
+    if resolved_scenario_id is None:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id IS NULL",
+            (component_id, attribute_name),
+        )
+    else:
+        cursor.execute(
+            "DELETE FROM component_attributes WHERE component_id = ? AND attribute_name = ? AND scenario_id = ?",
+            (component_id, attribute_name, resolved_scenario_id),
+        )

-
-
-    cursor = conn.cursor()
-    cursor.execute(
-        "SELECT id FROM scenarios WHERE network_id = ? AND is_master = TRUE",
-        (network_id,)
-    )
-    result = cursor.fetchone()
-    if not result:
-        raise ValidationError(f"No master scenario found for network {network_id}")
-    return result[0]
+    if cursor.rowcount == 0:
+        raise AttributeNotFound(component_id, attribute_name)


 # ============================================================================
 # EFFICIENT TIMESERIES SERIALIZATION - MATCHES RUST IMPLEMENTATION EXACTLY
 # ============================================================================

+
 def serialize_values_to_binary(values: List[float]) -> bytes:
     """
     Serialize f32 values to binary format - EXACT MATCH WITH RUST.
-
+
     Ultra-fast binary format: just raw Float32 array, little-endian.
     """
     if not values:
-        return b
-
+        return b""
+
     import struct
+
     buffer = bytearray(len(values) * 4)  # 4 bytes per Float32
-
+
     for i, value in enumerate(values):
         # Pack as little-endian Float32 to match Rust exactly
-        struct.pack_into(
-
+        struct.pack_into("<f", buffer, i * 4, float(value))
+
     return bytes(buffer)


 def deserialize_values_from_binary(data: bytes) -> List[float]:
     """
     Deserialize f32 values from binary format - EXACT MATCH WITH RUST.
-
+
     Ultra-fast deserialization: read raw Float32 values only.
     """
     if not data:
@@ -365,11 +382,12 @@ def deserialize_values_from_binary(data: bytes) -> List[float]:
         raise ValueError("Invalid binary data length - must be multiple of 4 bytes")

     import struct
+
     values = []
-
+
     # Ultra-fast deserialization: read raw Float32 values
     for i in range(0, len(data), 4):
-        value = struct.unpack(
+        value = struct.unpack("<f", data[i : i + 4])[0]  # Little-endian Float32
         values.append(value)

     return values
@@ -379,11 +397,11 @@ def get_timeseries_length_from_binary(data: bytes) -> int:
     """Get the length of a timeseries without deserializing the full data."""
     if not data:
         return 0
-
+
     # Ultra-fast: just divide by 4 bytes per Float32
     if len(data) % 4 != 0:
         raise ValueError("Invalid binary data length - must be multiple of 4 bytes")
-
+
     return len(data) // 4


@@ -391,6 +409,7 @@ def get_timeseries_length_from_binary(data: bytes) -> int:
 # UNIFIED TIMESERIES FUNCTIONS - MATCH RUST API
 # ============================================================================

+
 def get_timeseries(
     conn: sqlite3.Connection,
     component_id: int,
@@ -398,11 +417,11 @@ def get_timeseries(
     scenario_id: Optional[int] = None,
     start_index: Optional[int] = None,
     end_index: Optional[int] = None,
-    max_points: Optional[int] = None
+    max_points: Optional[int] = None,
 ) -> Timeseries:
     """
     Get timeseries data with unified interface matching Rust implementation.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
@@ -411,32 +430,32 @@ def get_timeseries(
         start_index: Start index for range queries
         end_index: End index for range queries
         max_points: Maximum number of points (for sampling)
-
+
     Returns:
         Timeseries object with efficient array-based data
-
+
     Raises:
         ComponentNotFound: If component doesn't exist
         AttributeNotFound: If attribute doesn't exist
     """
     # Get the attribute value
     attr_value = get_attribute(conn, component_id, attribute_name, scenario_id)
-
+
     if not attr_value.is_timeseries():
         raise ValueError(f"Attribute '{attribute_name}' is not a timeseries")
-
+
     timeseries = attr_value.as_timeseries()
     if not timeseries:
         raise ValueError("Failed to get timeseries data")
-
+
     # Apply range filtering if requested
     if start_index is not None and end_index is not None:
         timeseries = timeseries.slice(start_index, end_index)
-
+
     # Apply sampling if requested
     if max_points is not None:
         timeseries = timeseries.sample(max_points)
-
+
     return timeseries


@@ -444,72 +463,72 @@ def get_timeseries_metadata(
     conn: sqlite3.Connection,
     component_id: int,
     attribute_name: str,
-    scenario_id: Optional[int] = None
+    scenario_id: Optional[int] = None,
 ) -> TimeseriesMetadata:
     """
     Get timeseries metadata without loading the full data.
-
+
     Args:
         conn: Database connection
         component_id: Component ID
         attribute_name: Name of the attribute
         scenario_id: Scenario ID (uses master scenario if None)
-
+
     Returns:
         TimeseriesMetadata with length and type information
     """
     # Get basic attribute info without loading full data
     cursor = conn.cursor()
-
-    # Get
-
-
-    if not result:
-        raise ComponentNotFound(component_id)
-
-    network_id = result[0]
-
-    # Get master scenario ID
-    master_scenario_id = get_master_scenario_id(conn, network_id)
-    current_scenario_id = scenario_id if scenario_id is not None else master_scenario_id
-
+
+    # Get current scenario ID
+    current_scenario_id = scenario_id
+
     # Get timeseries metadata
-
-
-
-
-
-
-
-
-    # Try fallback to master scenario if not found
-    if not result and current_scenario_id != master_scenario_id:
+    if current_scenario_id is None:
+        cursor.execute(
+            """SELECT timeseries_data, data_type, unit, is_input
+            FROM component_attributes
+            WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id IS NULL""",
+            (component_id, attribute_name),
+        )
+    else:
         cursor.execute(
             """SELECT timeseries_data, data_type, unit, is_input
             FROM component_attributes
             WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id = ?""",
-            (component_id, attribute_name,
+            (component_id, attribute_name, current_scenario_id),
+        )
+    result = cursor.fetchone()
+
+    # Try fallback to base scenario if not found in current scenario
+    if not result and current_scenario_id is not None:
+        cursor.execute(
+            """SELECT timeseries_data, data_type, unit, is_input
+            FROM component_attributes
+            WHERE component_id = ? AND attribute_name = ? AND storage_type = 'timeseries' AND scenario_id IS NULL""",
+            (component_id, attribute_name),
        )
        result = cursor.fetchone()
-
+
    if not result:
        raise AttributeNotFound(component_id, attribute_name)
-
+
    timeseries_data, data_type, unit, is_input = result
-
+
    # Get length without full deserialization
    length = get_timeseries_length_from_binary(timeseries_data)
-
+
    # Get time range from network time periods
    try:
        from pyconvexity.models.network import get_network_time_periods
-
+
+        time_periods = get_network_time_periods(conn)
        start_time = time_periods[0].timestamp if time_periods else 0
        end_time = time_periods[-1].timestamp if time_periods else 0
    except Exception:
        start_time = 0
        end_time = length - 1
-
+
    return TimeseriesMetadata(
        length=length,
        start_time=start_time,
@@ -518,5 +537,5 @@ def get_timeseries_metadata(
        end_index=length,
        data_type=data_type,
        unit=unit,
-        is_input=is_input
+        is_input=is_input,
    )