pyconvexity 0.1.3__py3-none-any.whl → 0.3.7__py3-none-any.whl
This diff compares the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of pyconvexity has been flagged as possibly problematic.
- pyconvexity/__init__.py +27 -2
- pyconvexity/_version.py +1 -2
- pyconvexity/core/__init__.py +0 -2
- pyconvexity/core/database.py +158 -0
- pyconvexity/core/types.py +105 -18
- pyconvexity/data/schema/01_core_schema.sql +12 -12
- pyconvexity/data/schema/02_data_metadata.sql +17 -321
- pyconvexity/data/sources/gem.py +5 -5
- pyconvexity/io/excel_exporter.py +34 -13
- pyconvexity/io/excel_importer.py +48 -51
- pyconvexity/io/netcdf_importer.py +1054 -51
- pyconvexity/models/attributes.py +209 -72
- pyconvexity/models/network.py +17 -15
- pyconvexity/solvers/pypsa/api.py +24 -1
- pyconvexity/solvers/pypsa/batch_loader.py +37 -44
- pyconvexity/solvers/pypsa/builder.py +68 -152
- pyconvexity/solvers/pypsa/solver.py +104 -253
- pyconvexity/solvers/pypsa/storage.py +740 -1373
- pyconvexity/timeseries.py +327 -0
- pyconvexity/validation/rules.py +2 -2
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.3.7.dist-info}/METADATA +1 -1
- pyconvexity-0.3.7.dist-info/RECORD +41 -0
- pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
- pyconvexity-0.1.3.dist-info/RECORD +0 -45
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.3.7.dist-info}/WHEEL +0 -0
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.3.7.dist-info}/top_level.txt +0 -0
pyconvexity/io/excel_importer.py
CHANGED
@@ -13,7 +13,7 @@ import json
 
 # Import functions directly from pyconvexity
 from pyconvexity.core.database import open_connection
-from pyconvexity.core.types import StaticValue, TimeseriesPoint
+from pyconvexity.core.types import StaticValue, CreateNetworkRequest
 from pyconvexity.core.errors import AttributeNotFound, ValidationError
 from pyconvexity.models import (
     list_components_by_type, create_component, update_component, create_network,
@@ -22,6 +22,8 @@ from pyconvexity.models import (
     get_network_info, delete_attribute
 )
 from pyconvexity.validation import get_validation_rule
+from pyconvexity.timeseries import set_timeseries
+from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn
 
 logger = logging.getLogger(__name__)
 
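The second new import aliases a connection-level writer that replaces the old per-point TimeseriesPoint inserts. A minimal sketch of the call pattern, assuming only what this diff shows: the argument order comes from the set_timeseries_conn call in the last hunk of this file, while the database path, component id, scenario id, and attribute name "p_set" are illustrative placeholders.

    from pyconvexity.core.database import open_connection
    from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn

    component_id, scenario_id = 42, None   # placeholder identifiers
    values = [0.0, 1.5, 2.25]              # one float per network time period, in period order

    conn = open_connection("model.db")     # illustrative path
    set_timeseries_conn(conn, component_id, "p_set", values, scenario_id)

The notable shift is that the writer takes a plain list of floats ordered by period index rather than explicit (timestamp, value, period_index) points.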
@@ -302,14 +304,20 @@ class ExcelModelImporter:
 
         self.logger.info(f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution")
 
-        # Insert time periods
-
-
-
-
-
-
-
+        # Insert optimized time periods metadata
+        period_count = len(timestamps)
+        start_timestamp = int(timestamps[0].timestamp())
+
+        # Calculate interval in seconds
+        if len(timestamps) > 1:
+            interval_seconds = int((timestamps[1] - timestamps[0]).total_seconds())
+        else:
+            interval_seconds = 3600  # Default to hourly
+
+        conn.execute("""
+            INSERT INTO network_time_periods (network_id, period_count, start_timestamp, interval_seconds)
+            VALUES (?, ?, ?, ?)
+        """, (network_id, period_count, start_timestamp, interval_seconds))
 
         self.logger.info(f"Successfully created {len(timestamps)} time periods for network {network_id}")
 
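The rewritten block stores one metadata row per network (period_count, start_timestamp, interval_seconds) instead of one row per period, which assumes uniformly spaced periods; the interval calculation above relies on that. The full timestamp index can be rebuilt from the three stored values on read. A minimal sketch of that round trip with pandas, using illustrative data:

    import pandas as pd

    # Values as the INSERT above would write them (illustrative data)
    period_count = 8760             # one year at hourly resolution
    start_timestamp = 1704067200    # 2024-01-01 00:00:00 UTC as Unix seconds
    interval_seconds = 3600

    # Rebuild the timestamps the old schema materialized row by row
    index = pd.date_range(
        start=pd.Timestamp(start_timestamp, unit="s"),
        periods=period_count,
        freq=pd.Timedelta(seconds=interval_seconds),
    )
    assert len(index) == period_count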
@@ -765,76 +773,65 @@ class ExcelModelImporter:
         component = next((c for c in components if c.name == component_name), None)
 
         if component:
-            # Create timeseries data
-
-            matched_timestamps = 0
-            fallback_timestamps = 0
-            skipped_timestamps = 0
+            # Create timeseries data using efficient array format
+            timeseries_values = []
             filled_missing_values = 0
 
-
-
-
+            # Debug: Show first few timestamps for comparison
+            if len(timestamps) > 0 and len(network_time_periods) > 0:
+                excel_first = str(timestamps.iloc[0]).strip()
+                excel_last = str(timestamps.iloc[-1]).strip() if len(timestamps) > 1 else excel_first
+                network_first = network_time_periods[0].formatted_time
+                network_last = network_time_periods[-1].formatted_time if len(network_time_periods) > 1 else network_first
+
+                self.logger.info(f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':")
+                self.logger.info(f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)")
+                self.logger.info(f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)")
+
+            # Take the first N values from Excel where N = expected network periods
+            # This puts responsibility on user to format Excel correctly
+            max_periods = min(len(timestamps), len(network_time_periods), len(df[column]))
+
+            for i in range(max_periods):
+                value = df[column].iloc[i]
 
                 # Handle missing values - use 0.0 as default
                 if pd.isna(value):
                     actual_value = 0.0
                     filled_missing_values += 1
-                    self.logger.debug(f"Using default value 0.0 for missing data at timestamp {timestamp_str} in {component_name}.{attr_name}")
                 else:
                     try:
                         actual_value = float(value)
                     except (ValueError, TypeError):
                         actual_value = 0.0
                         filled_missing_values += 1
-                        self.logger.debug(f"Using default value 0.0 for invalid data '{value}' at timestamp {timestamp_str} in {component_name}.{attr_name}")
-
-                # Try to find matching time period
-                if timestamp_str in time_period_map:
-                    period = time_period_map[timestamp_str]
-                    timeseries_point = TimeseriesPoint(
-                        timestamp=period.timestamp,
-                        value=actual_value,
-                        period_index=period.period_index
-                    )
-                    matched_timestamps += 1
-                else:
-                    # Fallback: try to parse timestamp and use index
-                    try:
-                        parsed_timestamp = pd.to_datetime(timestamp_str)
-                        timeseries_point = TimeseriesPoint(
-                            timestamp=int(parsed_timestamp.timestamp()),
-                            value=actual_value,
-                            period_index=i
-                        )
-                        fallback_timestamps += 1
-                    except Exception as e:
-                        skipped_timestamps += 1
-                        self.logger.warning(f"Failed to parse timestamp '{timestamp_str}' in {component_name}.{attr_name}: {e}")
-                        continue
 
-
+                timeseries_values.append(actual_value)
 
-            final_ts_length = len(
+            final_ts_length = len(timeseries_values)
             self.logger.info(f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
                              f"Excel rows={excel_ts_length}, "
-                             f"Matched={matched_timestamps}, "
-                             f"Fallback={fallback_timestamps}, "
-                             f"Skipped={skipped_timestamps}, "
+                             f"Network periods={expected_length}, "
+                             f"Used={max_periods}, "
                              f"Filled missing={filled_missing_values}, "
                              f"Final length={final_ts_length}")
 
             if filled_missing_values > 0:
                 self.logger.warning(f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'")
 
+            if excel_ts_length != expected_length:
+                self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
+                                    f"Excel has {excel_ts_length} rows, network expects {expected_length} periods")
+
             if final_ts_length != expected_length:
                 self.logger.warning(f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
                                     f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})")
 
-            if
+            if timeseries_values:
                 try:
-
-
+                    # Use new efficient timeseries API
+                    set_timeseries_conn(
+                        conn, component.id, attr_name, timeseries_values, scenario_id
                     )
                     imported['attributes'] += 1
                     self.logger.info(f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'")