pyconvexity 0.1.3__py3-none-any.whl → 0.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pyconvexity might be problematic.

@@ -13,7 +13,7 @@ import json
 
 # Import functions directly from pyconvexity
 from pyconvexity.core.database import open_connection
-from pyconvexity.core.types import StaticValue, TimeseriesPoint, CreateNetworkRequest
+from pyconvexity.core.types import StaticValue, CreateNetworkRequest
 from pyconvexity.core.errors import AttributeNotFound, ValidationError
 from pyconvexity.models import (
     list_components_by_type, create_component, update_component, create_network,
@@ -22,6 +22,8 @@ from pyconvexity.models import (
     get_network_info, delete_attribute
 )
 from pyconvexity.validation import get_validation_rule
+from pyconvexity.timeseries import set_timeseries
+from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn
 
 logger = logging.getLogger(__name__)
 
@@ -302,14 +304,20 @@ class ExcelModelImporter:
 
         self.logger.info(f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution")
 
-        # Insert time periods into database
-        for period_index, timestamp in enumerate(timestamps):
-            timestamp_str = timestamp.strftime('%Y-%m-%d %H:%M:%S')
-
-            conn.execute("""
-                INSERT INTO network_time_periods (network_id, timestamp, period_index)
-                VALUES (?, ?, ?)
-            """, (network_id, timestamp_str, period_index))
+        # Insert optimized time periods metadata
+        period_count = len(timestamps)
+        start_timestamp = int(timestamps[0].timestamp())
+
+        # Calculate interval in seconds
+        if len(timestamps) > 1:
+            interval_seconds = int((timestamps[1] - timestamps[0]).total_seconds())
+        else:
+            interval_seconds = 3600  # Default to hourly
+
+        conn.execute("""
+            INSERT INTO network_time_periods (network_id, period_count, start_timestamp, interval_seconds)
+            VALUES (?, ?, ?, ?)
+        """, (network_id, period_count, start_timestamp, interval_seconds))
 
         self.logger.info(f"Successfully created {len(timestamps)} time periods for network {network_id}")
 
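The time-period change above replaces one database row per period with a single compact metadata row (period_count, start_timestamp, interval_seconds). As a rough illustration of what that representation implies, the sketch below expands such a row back into per-period timestamps; the helper name and the assumption that start_timestamp is a Unix epoch in UTC are illustrative, not part of the pyconvexity API.

from datetime import datetime, timedelta, timezone

def expand_time_periods(period_count: int, start_timestamp: int, interval_seconds: int):
    # Hypothetical helper: rebuild per-period timestamps from the compact
    # (period_count, start_timestamp, interval_seconds) metadata row.
    start = datetime.fromtimestamp(start_timestamp, tz=timezone.utc)
    step = timedelta(seconds=interval_seconds)
    return [start + i * step for i in range(period_count)]

# Example: 24 hourly periods starting 2024-01-01 00:00 UTC
periods = expand_time_periods(24, 1704067200, 3600)
print(periods[0], periods[-1])  # 2024-01-01 00:00:00+00:00, 2024-01-01 23:00:00+00:00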
@@ -765,76 +773,65 @@ class ExcelModelImporter:
                 component = next((c for c in components if c.name == component_name), None)
 
                 if component:
-                    # Create timeseries data
-                    timeseries_data = []
-                    matched_timestamps = 0
-                    fallback_timestamps = 0
-                    skipped_timestamps = 0
+                    # Create timeseries data using efficient array format
+                    timeseries_values = []
                     filled_missing_values = 0
 
-                    for i, (timestamp, value) in enumerate(zip(timestamps, df[column])):
-                        # Convert timestamp string to match network time periods
-                        timestamp_str = str(timestamp).strip()
+                    # Debug: Show first few timestamps for comparison
+                    if len(timestamps) > 0 and len(network_time_periods) > 0:
+                        excel_first = str(timestamps.iloc[0]).strip()
+                        excel_last = str(timestamps.iloc[-1]).strip() if len(timestamps) > 1 else excel_first
+                        network_first = network_time_periods[0].formatted_time
+                        network_last = network_time_periods[-1].formatted_time if len(network_time_periods) > 1 else network_first
+
+                        self.logger.info(f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':")
+                        self.logger.info(f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)")
+                        self.logger.info(f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)")
+
+                    # Take the first N values from Excel where N = expected network periods
+                    # This puts responsibility on user to format Excel correctly
+                    max_periods = min(len(timestamps), len(network_time_periods), len(df[column]))
+
+                    for i in range(max_periods):
+                        value = df[column].iloc[i]
 
                         # Handle missing values - use 0.0 as default
                         if pd.isna(value):
                             actual_value = 0.0
                             filled_missing_values += 1
-                            self.logger.debug(f"Using default value 0.0 for missing data at timestamp {timestamp_str} in {component_name}.{attr_name}")
                         else:
                             try:
                                 actual_value = float(value)
                             except (ValueError, TypeError):
                                 actual_value = 0.0
                                 filled_missing_values += 1
-                                self.logger.debug(f"Using default value 0.0 for invalid data '{value}' at timestamp {timestamp_str} in {component_name}.{attr_name}")
-
-                        # Try to find matching time period
-                        if timestamp_str in time_period_map:
-                            period = time_period_map[timestamp_str]
-                            timeseries_point = TimeseriesPoint(
-                                timestamp=period.timestamp,
-                                value=actual_value,
-                                period_index=period.period_index
-                            )
-                            matched_timestamps += 1
-                        else:
-                            # Fallback: try to parse timestamp and use index
-                            try:
-                                parsed_timestamp = pd.to_datetime(timestamp_str)
-                                timeseries_point = TimeseriesPoint(
-                                    timestamp=int(parsed_timestamp.timestamp()),
-                                    value=actual_value,
-                                    period_index=i
-                                )
-                                fallback_timestamps += 1
-                            except Exception as e:
-                                skipped_timestamps += 1
-                                self.logger.warning(f"Failed to parse timestamp '{timestamp_str}' in {component_name}.{attr_name}: {e}")
-                                continue
 
-                        timeseries_data.append(timeseries_point)
+                        timeseries_values.append(actual_value)
 
-                    final_ts_length = len(timeseries_data)
+                    final_ts_length = len(timeseries_values)
                     self.logger.info(f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
                                      f"Excel rows={excel_ts_length}, "
-                                     f"Matched={matched_timestamps}, "
-                                     f"Fallback={fallback_timestamps}, "
-                                     f"Skipped={skipped_timestamps}, "
+                                     f"Network periods={expected_length}, "
+                                     f"Used={max_periods}, "
                                      f"Filled missing={filled_missing_values}, "
                                      f"Final length={final_ts_length}")
 
                     if filled_missing_values > 0:
                         self.logger.warning(f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'")
 
+                    if excel_ts_length != expected_length:
+                        self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
+                                            f"Excel has {excel_ts_length} rows, network expects {expected_length} periods")
+
                     if final_ts_length != expected_length:
                         self.logger.warning(f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
                                             f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})")
 
-                    if timeseries_data:
+                    if timeseries_values:
                         try:
-                            set_timeseries_attribute(
-                                conn, component.id, attr_name, timeseries_data, scenario_id
+                            # Use new efficient timeseries API
+                            set_timeseries_conn(
+                                conn, component.id, attr_name, timeseries_values, scenario_id
                             )
                             imported['attributes'] += 1
                             self.logger.info(f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'")
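With this change the importer passes a plain list of floats, one per network time period, instead of building TimeseriesPoint objects. Below is a minimal usage sketch of that call pattern using the imports shown earlier in this diff; the database path, component id, attribute name, and the behaviour of open_connection are placeholders and assumptions, and only the argument order of set_timeseries_conn is taken from the hunk above.

from pyconvexity.core.database import open_connection
from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn

# Placeholder identifiers for illustration only.
db_path = "model.db"      # assumed: open_connection accepts a path and returns a connection
component_id = 42         # assumed component id
attr_name = "p_max_pu"    # assumed attribute name
scenario_id = None        # assumed: None selects the base scenario

# One float per network time period, in period order; the list length should
# match the period_count stored in network_time_periods.
timeseries_values = [0.0, 0.25, 0.5, 0.75, 1.0]

conn = open_connection(db_path)
set_timeseries_conn(conn, component_id, attr_name, timeseries_values, scenario_id)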