ecopipeline 1.0.4__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ecopipeline/event_tracking/Alarm.py +317 -0
- ecopipeline/event_tracking/__init__.py +20 -2
- ecopipeline/event_tracking/alarms/AbnormalCOP.py +76 -0
- ecopipeline/event_tracking/alarms/BackupUse.py +94 -0
- ecopipeline/event_tracking/alarms/BalancingValve.py +78 -0
- ecopipeline/event_tracking/alarms/BlownFuse.py +72 -0
- ecopipeline/event_tracking/alarms/Boundary.py +90 -0
- ecopipeline/event_tracking/alarms/HPWHInlet.py +73 -0
- ecopipeline/event_tracking/alarms/HPWHOutage.py +96 -0
- ecopipeline/event_tracking/alarms/HPWHOutlet.py +85 -0
- ecopipeline/event_tracking/alarms/LSInconsist.py +114 -0
- ecopipeline/event_tracking/alarms/PowerRatio.py +111 -0
- ecopipeline/event_tracking/alarms/SOOChange.py +127 -0
- ecopipeline/event_tracking/alarms/ShortCycle.py +59 -0
- ecopipeline/event_tracking/alarms/TMSetpoint.py +127 -0
- ecopipeline/event_tracking/alarms/TempRange.py +84 -0
- ecopipeline/event_tracking/alarms/__init__.py +0 -0
- ecopipeline/event_tracking/event_tracking.py +517 -704
- ecopipeline/extract/__init__.py +2 -2
- ecopipeline/extract/extract.py +84 -0
- ecopipeline/load/__init__.py +2 -2
- ecopipeline/load/load.py +304 -3
- ecopipeline/transform/transform.py +1 -1
- ecopipeline/utils/ConfigManager.py +15 -2
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/METADATA +1 -1
- ecopipeline-1.1.0.dist-info/RECORD +41 -0
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/WHEEL +1 -1
- ecopipeline-1.0.4.dist-info/RECORD +0 -25
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/licenses/LICENSE +0 -0
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/top_level.txt +0 -0
ecopipeline/extract/__init__.py
CHANGED
@@ -1,4 +1,4 @@
- from .extract import get_noaa_data, json_to_df, extract_files, get_last_full_day_from_db, get_db_row_from_time, extract_new, csv_to_df, get_sub_dirs, msa_to_df, fm_api_to_df, small_planet_control_to_df, dent_csv_to_df, flow_csv_to_df, pull_egauge_data, egauge_csv_to_df, remove_char_sequence_from_csv_header, tb_api_to_df, skycentrics_api_to_df,get_OAT_open_meteo
+ from .extract import get_noaa_data, json_to_df, extract_files, get_last_full_day_from_db, get_db_row_from_time, extract_new, csv_to_df, get_sub_dirs, msa_to_df, fm_api_to_df, small_planet_control_to_df, dent_csv_to_df, flow_csv_to_df, pull_egauge_data, egauge_csv_to_df, remove_char_sequence_from_csv_header, tb_api_to_df, skycentrics_api_to_df,get_OAT_open_meteo, licor_cloud_api_to_df
  __all__ = ["get_noaa_data", "json_to_df", "extract_files", "get_last_full_day_from_db", "get_db_row_from_time", 'extract_new', "csv_to_df", "get_sub_dirs", "msa_to_df", "fm_api_to_df",
             "small_planet_control_to_df","dent_csv_to_df","flow_csv_to_df","pull_egauge_data", "egauge_csv_to_df","remove_char_sequence_from_csv_header", "tb_api_to_df", "skycentrics_api_to_df",
-            "get_OAT_open_meteo"]
+            "get_OAT_open_meteo","licor_cloud_api_to_df"]
ecopipeline/extract/extract.py
CHANGED
@@ -862,6 +862,90 @@ def pull_egauge_data(config: ConfigManager, eGauge_ids: list, eGauge_usr : str,
 
     os.chdir(original_directory)
 
+def licor_cloud_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True) -> pd.DataFrame:
+    """
+    Connects to the LI-COR Cloud API to pull sensor data and returns a dataframe.
+
+    The function queries the LI-COR Cloud API for sensor data within the specified time range.
+    Each sensor's data is returned as a separate column in the dataframe, indexed by timestamp.
+
+    Parameters
+    ----------
+    config : ecopipeline.ConfigManager
+        The ConfigManager object that holds configuration data for the pipeline. The config manager
+        must contain the api_token and api_device_id (device serial number) for authentication
+        with the LI-COR Cloud API.
+    startTime : datetime
+        The start time for data extraction. If None, defaults to 28 hours before endTime.
+    endTime : datetime
+        The end time for data extraction. If None, defaults to the current time.
+    create_csv : bool
+        If True, saves the extracted data to a CSV file in the data directory (default True).
+
+    Returns
+    -------
+    pd.DataFrame:
+        Pandas DataFrame with sensor serial numbers as column headers and timestamps as the index.
+        The index is in UTC and may need to be converted to the appropriate timezone.
+        Returns an empty DataFrame if the API call fails.
+    """
+    df = pd.DataFrame()
+    api_device_id = config.api_device_id
+    if endTime is None:
+        endTime = datetime.now()
+    if startTime is None:
+        # 28 hours to ensure encapsulation of last day
+        startTime = endTime - timedelta(hours=28)
+
+    url = f'https://api.licor.cloud/v2/data'
+    token = config.api_token
+    params = {
+        'deviceSerialNumber': api_device_id,
+        'startTime': f'{int(startTime.timestamp())*1000}',
+        'endTime': f'{int(endTime.timestamp())*1000}'
+    }
+    # Headers
+    headers = {
+        'accept': 'application/json',
+        'Authorization': f'Bearer {token}'
+    }
+
+    try:
+        response = requests.get(url, headers=headers, params=params)
+        if response.status_code == 200:
+            response_json = response.json()
+            data = {}
+            if 'sensors' in response_json.keys():
+                for sensor in response_json['sensors']:
+                    sensor_id = sensor['sensorSerialNumber']
+                    for measurement in sensor.get('data', []):
+                        try:
+                            records = measurement.get('records', [])
+                            series = pd.Series(
+                                data={record[0]: _get_float_value(record[1]) for record in records}
+                            )
+                            data[sensor_id] = series
+                        except:
+                            print(f"Could not convert {sensor_id} values to floats.")
+            df = pd.DataFrame(data)
+            df.index = pd.to_datetime(df.index, unit='ms')
+            df = df.sort_index()
+        else:
+            print(f"Failed to make GET request. Status code: {response.status_code} {response.json()}")
+            df = pd.DataFrame()
+    except Exception as e:
+        traceback.print_exc()
+        print(f"An error occurred: {e}")
+        df = pd.DataFrame()
+    # save to file
+    if create_csv:
+        filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
+        original_directory = os.getcwd()
+        os.chdir(config.data_directory)
+        df.to_csv(filename, index_label='time_pt')
+        os.chdir(original_directory)
+    return df
+
 def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True, query_hours : float = 1,
                  sensor_keys : list = [], seperate_keys : bool = False, device_id_overwrite : str = None, csv_prefix : str = ""):
     """
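A minimal usage sketch of the new extractor follows. It assumes a config.ini whose [data] section supplies api_token and device_id; the config path, the ConfigManager constructor argument, and the timezone are illustrative, not taken from the package.

# Sketch only: pull the last 28 hours of LI-COR Cloud data without writing a CSV.
from datetime import datetime, timedelta
from ecopipeline import ConfigManager
from ecopipeline.extract import licor_cloud_api_to_df

config = ConfigManager(config_file_path="config.ini")  # constructor argument assumed
end = datetime.now()
start = end - timedelta(hours=28)
licor_df = licor_cloud_api_to_df(config, startTime=start, endTime=end, create_csv=False)

# The returned index is in UTC; convert it if local timestamps are needed.
if not licor_df.empty:
    licor_df.index = licor_df.index.tz_localize("UTC").tz_convert("America/Los_Angeles")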
ecopipeline/load/__init__.py
CHANGED
@@ -1,3 +1,3 @@
- from .load import check_table_exists, create_new_table, load_overwrite_database, load_event_table, report_data_loss, load_data_statistics
+ from .load import check_table_exists, create_new_table, load_overwrite_database, load_event_table, report_data_loss, load_data_statistics, load_alarms
  __all__ = ["check_table_exists", "create_new_table", "load_overwrite_database", "load_event_table", "report_data_loss",
-            "load_data_statistics"]
+            "load_data_statistics", "load_alarms"]
ecopipeline/load/load.py
CHANGED
@@ -302,6 +302,14 @@ def load_event_table(config : ConfigManager, event_df: pd.DataFrame, site_name :
     bool:
         A boolean value indicating if the data was successfully written to the database.
     """
+    if event_df.empty:
+        print("No events to load. DataFrame is empty.")
+        return True
+    if site_name is None:
+        site_name = config.get_site_name()
+    if 'alarm_type' in event_df.columns:
+        print("Alarm dataframe detected... redirecting dataframe to load_alarms() function...")
+        return load_alarms(config, event_df, site_name)
     # define constants
     proj_cop_filters = ['MV_COMMISSIONED','PLANT_COMMISSIONED','DATA_LOSS_COP','SYSTEM_MAINTENANCE','SYSTEM_TESTING']
     optim_cop_filters = ['MV_COMMISSIONED','PLANT_COMMISSIONED','DATA_LOSS_COP','INSTALLATION_ERROR_COP',
@@ -320,8 +328,6 @@ def load_event_table(config : ConfigManager, event_df: pd.DataFrame, site_name :
     print(f"Attempting to write data for {event_df.index[0]} to {event_df.index[-1]} into {table_name}")
 
     # Get string of all column names for sql insert
-    if site_name is None:
-        site_name = config.get_site_name()
     column_names = f"start_time_pt,site_name"
     column_types = ["datetime","varchar(25)","datetime",
                     "ENUM('MISC_EVENT','DATA_LOSS','DATA_LOSS_COP','SITE_VISIT','SYSTEM_MAINTENANCE','EQUIPMENT_MALFUNCTION','PARTIAL_OCCUPANCY','INSTALLATION_ERROR','ALARM','SILENT_ALARM','MV_COMMISSIONED','PLANT_COMMISSIONED','INSTALLATION_ERROR_COP','SOO_PERIOD','SOO_PERIOD_COP','SYSTEM_TESTING')",
@@ -543,4 +549,299 @@ def _generate_mysql_update(row, index, table_name, primary_key):
     else:
         statement = ""
 
-    return statement, values
+    return statement, values
+
+
+def load_alarms(config: ConfigManager, alarm_df: pd.DataFrame, site_name: str = None) -> bool:
+    """
+    Loads alarm data from central_alarm_df_creator() output into the alarm and alarm_inst tables.
+
+    For each alarm instance in the DataFrame:
+    - Checks if a matching alarm record exists (same site_name, alarm_type, variable_name)
+    - If no matching alarm exists, creates a new record in the alarm table
+    - Inserts the alarm instance (with start/end times and certainty) into the alarm_inst table
+
+    Certainty-based overlap handling for alarm instances:
+    - If new alarm has higher certainty than existing overlapping instance: existing is split
+      around the new alarm so each time segment has the highest certainty available
+    - If new alarm has lower certainty than existing: only non-overlapping portions of new
+      alarm are inserted
+    - If same certainty: existing instance is extended to encompass both time periods
+
+    Parameters
+    ----------
+    config : ecopipeline.ConfigManager
+        The ConfigManager object that holds configuration data for the pipeline.
+    alarm_df : pd.DataFrame
+        The pandas DataFrame output from central_alarm_df_creator(). Must have columns:
+        start_time_pt, end_time_pt, alarm_type, variable_name. Optional column: certainty
+        (defaults to 3 if not present). Certainty values: 3=high, 2=med, 1=low.
+    site_name : str
+        The name of the site to associate alarms with. If None, defaults to config.get_site_name()
+
+    Returns
+    -------
+    bool:
+        A boolean value indicating if the data was successfully written to the database.
+    """
+    if alarm_df.empty:
+        print("No alarms to load. DataFrame is empty.")
+        return True
+
+    # Validate required columns
+    required_columns = ['start_time_pt', 'end_time_pt', 'alarm_type', 'variable_name']
+    missing_columns = [col for col in required_columns if col not in alarm_df.columns]
+    if missing_columns:
+        raise Exception(f"alarm_df is missing required columns: {missing_columns}")
+
+    # Sort by start_time_pt to process alarms in chronological order
+    alarm_df = alarm_df.sort_values(by='start_time_pt').reset_index(drop=True)
+
+    if site_name is None:
+        site_name = config.get_site_name()
+
+    dbname = config.get_db_name()
+    alarm_table = "alarm"
+    alarm_inst_table = "alarm_inst"
+
+    connection, cursor = config.connect_db()
+
+    try:
+        # Check if tables exist
+        if not check_table_exists(cursor, alarm_table, dbname):
+            create_table_statement = """
+            CREATE TABLE alarm (
+                id INT AUTO_INCREMENT PRIMARY KEY,
+                var_names_id VARCHAR(40),
+                start_time_pt DATETIME NOT NULL,
+                end_time_pt DATETIME NULL,
+                site_name VARCHAR(20),
+                alarm_type VARCHAR(20),
+                variable_name VARCHAR(70),
+                silenced BOOLEAN,
+                closing_event_id INT NULL,
+                FOREIGN KEY (closing_event_id) REFERENCES site_events(id),
+                UNIQUE INDEX unique_alarm (site_name, alarm_type, variable_name, start_time_pt, end_time_pt)
+            );
+            """
+            cursor.execute(create_table_statement)
+        if not check_table_exists(cursor, alarm_inst_table, dbname):
+            create_table_statement = """
+            CREATE TABLE alarm_inst (
+                inst_id INT AUTO_INCREMENT PRIMARY KEY,
+                id INT,
+                start_time_pt DATETIME NOT NULL,
+                end_time_pt DATETIME NOT NULL,
+                certainty INT NOT NULL,
+                FOREIGN KEY (id) REFERENCES alarm(id)
+            );
+            """
+            cursor.execute(create_table_statement)
+
+        # Get existing alarms for this site
+        cursor.execute(
+            f"SELECT id, alarm_type, variable_name, start_time_pt, end_time_pt FROM {alarm_table} WHERE site_name = %s",
+            (site_name,)
+        )
+        existing_alarms = cursor.fetchall()
+        # Create lookup dict: (alarm_type, variable_name) -> list of (alarm_id, start_time, end_time)
+        # Using a list because there can be multiple alarms with same type/variable but different date ranges
+        alarm_lookup = {}
+        for row in existing_alarms:
+            key = (row[1], row[2])  # (alarm_type, variable_name)
+            if key not in alarm_lookup:
+                alarm_lookup[key] = []
+            alarm_lookup[key].append({
+                'id': row[0],
+                'start_time': row[3],
+                'end_time': row[4]
+            })
+
+        # SQL statements
+        insert_alarm_sql = f"""
+            INSERT INTO {alarm_table} (var_names_id, start_time_pt, end_time_pt, site_name, alarm_type, variable_name, silenced)
+            VALUES (%s, %s, %s, %s, %s, %s, %s)
+        """
+        update_alarm_dates_sql = f"""
+            UPDATE {alarm_table} SET start_time_pt = %s, end_time_pt = %s WHERE id = %s
+        """
+        insert_inst_sql = f"""
+            INSERT INTO {alarm_inst_table} (id, start_time_pt, end_time_pt, certainty)
+            VALUES (%s, %s, %s, %s)
+        """
+        update_inst_sql = f"""
+            UPDATE {alarm_inst_table} SET start_time_pt = %s, end_time_pt = %s WHERE inst_id = %s
+        """
+        delete_inst_sql = f"""
+            DELETE FROM {alarm_inst_table} WHERE inst_id = %s
+        """
+
+        new_alarms = 0
+        updated_alarms = 0
+        new_instances = 0
+        updated_instances = 0
+        max_gap_days = 3
+
+        for _, row in alarm_df.iterrows():
+            start_time = row['start_time_pt']
+            end_time = row['end_time_pt']
+            alarm_type = row['alarm_type']
+            variable_name = row['variable_name']
+            certainty = row.get('certainty', 3)  # Default to high certainty if not specified
+
+            lookup_key = (alarm_type, variable_name)
+            alarm_id = None
+
+            if lookup_key in alarm_lookup:
+                # Find matching alarm based on date range logic
+                for alarm_record in alarm_lookup[lookup_key]:
+                    alarm_start = alarm_record['start_time']
+                    alarm_end = alarm_record['end_time']
+
+                    # Case 1: Alarm dates encapsulate row dates - just use this alarm
+                    if alarm_start <= start_time and alarm_end >= end_time:
+                        alarm_id = alarm_record['id']
+                        break
+
+                    # Calculate gap between date ranges
+                    if end_time < alarm_start:
+                        gap = (alarm_start - end_time).days
+                    elif start_time > alarm_end:
+                        gap = (start_time - alarm_end).days
+                    else:
+                        gap = 0  # Overlapping
+
+                    # Case 2: Overlapping or within 3 days - extend the alarm dates
+                    if gap <= max_gap_days:
+                        alarm_id = alarm_record['id']
+                        new_start = min(alarm_start, start_time)
+                        new_end = max(alarm_end, end_time)
+
+                        # Only update if dates actually changed
+                        if new_start != alarm_start or new_end != alarm_end:
+                            cursor.execute(update_alarm_dates_sql, (new_start, new_end, alarm_id))
+                            # Update the lookup cache
+                            alarm_record['start_time'] = new_start
+                            alarm_record['end_time'] = new_end
+                            updated_alarms += 1
+                        break
+
+            # Case 3: No matching alarm found (gap > 3 days for all existing alarms)
+            # Will create a new alarm below
+
+            if alarm_id is None:
+                # Create new alarm record
+                cursor.execute(insert_alarm_sql, (
+                    "No ID",  # TODO add actual ID?
+                    start_time,
+                    end_time,
+                    site_name,
+                    alarm_type,
+                    variable_name,
+                    False  # silenced = False by default
+                ))
+                # Retrieve the ID from database to handle concurrent inserts safely
+                cursor.execute(
+                    f"""SELECT id FROM {alarm_table}
+                        WHERE site_name = %s AND alarm_type = %s AND variable_name = %s
+                        AND start_time_pt = %s AND end_time_pt = %s""",
+                    (site_name, alarm_type, variable_name, start_time, end_time)
+                )
+                result = cursor.fetchone()
+                if result is None:
+                    raise Exception(f"Failed to retrieve alarm ID after insert for {alarm_type}/{variable_name}")
+                alarm_id = result[0]
+                # Add to lookup cache
+                if lookup_key not in alarm_lookup:
+                    alarm_lookup[lookup_key] = []
+                alarm_lookup[lookup_key].append({
+                    'id': alarm_id,
+                    'start_time': start_time,
+                    'end_time': end_time
+                })
+                new_alarms += 1
+
+            # Get existing alarm instances for this alarm_id that might overlap
+            cursor.execute(
+                f"""SELECT inst_id, start_time_pt, end_time_pt, certainty
+                    FROM {alarm_inst_table}
+                    WHERE id = %s AND start_time_pt <= %s AND end_time_pt >= %s""",
+                (alarm_id, end_time, start_time)
+            )
+            existing_instances = cursor.fetchall()
+
+            # Track segments of the new alarm to insert (may be split by higher-certainty existing alarms)
+            new_segments = [(start_time, end_time, certainty)]
+
+            for existing in existing_instances:
+                existing_inst_id, existing_start, existing_end, existing_certainty = existing
+
+                # Process each new segment against this existing instance
+                updated_segments = []
+                for seg_start, seg_end, seg_certainty in new_segments:
+                    # Check if there's overlap
+                    if seg_end <= existing_start or seg_start >= existing_end:
+                        # No overlap, keep segment as is
+                        updated_segments.append((seg_start, seg_end, seg_certainty))
+                        continue
+
+                    # There is overlap - handle based on certainty comparison
+                    if existing_certainty < seg_certainty:
+                        # Case 1: New alarm has higher certainty - split existing around new
+                        # Part before new alarm (if any)
+                        if existing_start < seg_start:
+                            cursor.execute(update_inst_sql, (existing_start, seg_start, existing_inst_id))
+                            updated_instances += 1
+                            # Insert the part after new alarm (if any)
+                            if existing_end > seg_end:
+                                cursor.execute(insert_inst_sql, (alarm_id, seg_end, existing_end, existing_certainty))
+                                new_instances += 1
+                        elif existing_end > seg_end:
+                            # No part before, but there's a part after
+                            cursor.execute(update_inst_sql, (seg_end, existing_end, existing_inst_id))
+                            updated_instances += 1
+                        else:
+                            # Existing is completely encompassed by new - delete it
+                            cursor.execute(delete_inst_sql, (existing_inst_id,))
+                        # Keep the new segment as is
+                        updated_segments.append((seg_start, seg_end, seg_certainty))
+
+                    elif existing_certainty > seg_certainty:
+                        # Case 2: Existing has higher certainty - trim new segment to non-overlapping parts
+                        # Part before existing (if any)
+                        if seg_start < existing_start:
+                            updated_segments.append((seg_start, existing_start, seg_certainty))
+                        # Part after existing (if any)
+                        if seg_end > existing_end:
+                            updated_segments.append((existing_end, seg_end, seg_certainty))
+                        # The overlapping part of new segment is discarded
+
+                    else:
+                        # Case 3: Same certainty - merge to encompass both
+                        merged_start = min(seg_start, existing_start)
+                        merged_end = max(seg_end, existing_end)
+                        cursor.execute(update_inst_sql, (merged_start, merged_end, existing_inst_id))
+                        updated_instances += 1
+                        # Remove this segment from new_segments (it's been merged into existing)
+                        # Don't add to updated_segments
+
+                new_segments = updated_segments
+
+            # Insert any remaining new segments
+            for seg_start, seg_end, seg_certainty in new_segments:
+                if seg_start < seg_end:  # Only insert valid segments
+                    cursor.execute(insert_inst_sql, (alarm_id, seg_start, seg_end, seg_certainty))
+                    new_instances += 1
+
+        connection.commit()
+        print(f"Successfully loaded alarms: {new_alarms} new alarm records, {updated_alarms} updated alarm records, {new_instances} new instances, {updated_instances} updated instances.")
+        return True
+
+    except Exception as e:
+        print(f"Error loading alarms: {e}")
+        connection.rollback()
+        return False
+
+    finally:
+        cursor.close()
+        connection.close()
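A short sketch of feeding load_alarms() follows. Only the required column names and the certainty convention come from the docstring above; the alarm_type, variable_name, and timestamp values are invented for illustration, as is the ConfigManager constructor argument.

# Sketch only: two overlapping instances of the same alarm at different certainties.
import pandas as pd
from ecopipeline import ConfigManager
from ecopipeline.load import load_alarms

alarm_df = pd.DataFrame([
    # medium-certainty instance covering a full day (values invented)
    {"start_time_pt": pd.Timestamp("2025-01-06 00:00"), "end_time_pt": pd.Timestamp("2025-01-07 00:00"),
     "alarm_type": "HPWH_OUTAGE", "variable_name": "PowerIn_HPWH1", "certainty": 2},
    # high-certainty instance overlapping the middle of the same day
    {"start_time_pt": pd.Timestamp("2025-01-06 12:00"), "end_time_pt": pd.Timestamp("2025-01-06 18:00"),
     "alarm_type": "HPWH_OUTAGE", "variable_name": "PowerIn_HPWH1", "certainty": 3},
])

config = ConfigManager(config_file_path="config.ini")  # constructor argument assumed
success = load_alarms(config, alarm_df)  # site_name falls back to config.get_site_name()

Per the overlap rules above, the higher-certainty second row should split the first instance around it, leaving a certainty-2 segment before 12:00, a certainty-3 segment from 12:00 to 18:00, and a certainty-2 segment after 18:00 in alarm_inst.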
ecopipeline/transform/transform.py
CHANGED
@@ -352,7 +352,7 @@ def convert_temp_resistance_type(df : pd.DataFrame, column_name : str, sensor_mo
     return df
 
 def estimate_power(df : pd.DataFrame, new_power_column : str, current_a_column : str, current_b_column : str, current_c_column : str,
-                   assumed_voltage : float = 208, power_factor : float = 1):
+                   assumed_voltage : float = 208, power_factor : float = 1) -> pd.DataFrame:
     """
     df: pd.DataFrame
         Pandas dataframe with minute-to-minute data
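Only the return annotation changes here. For context, a hypothetical call would look like the following; the import path and all column names are placeholders, not names from the package, and the dataframe is a stand-in for real minute-to-minute current data.

# Sketch only: estimate power from three phase-current columns.
import pandas as pd
from ecopipeline.transform import estimate_power  # import path assumed

df = pd.DataFrame({
    "Current_A": [10.0, 12.0],
    "Current_B": [9.5, 11.8],
    "Current_C": [10.2, 12.1],
})
df = estimate_power(df, new_power_column="Power_kW",
                    current_a_column="Current_A", current_b_column="Current_B",
                    current_c_column="Current_C",
                    assumed_voltage=208, power_factor=1)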
ecopipeline/utils/ConfigManager.py
CHANGED
@@ -7,6 +7,7 @@ from datetime import datetime
 import base64
 import hashlib
 import hmac
+import pandas as pd
 
 class ConfigManager:
     """
@@ -79,9 +80,10 @@ class ConfigManager:
             self.api_pw = configure.get('data', 'api_pw')
             self.api_device_id = configure.get('data','device_id')
             configured_data_method = True
-        elif 'api_token' in configure['data']
+        elif 'api_token' in configure['data']:
             self.api_token = configure.get('data', 'api_token')
-
+            if 'api_secret' in configure['data']:
+                self.api_secret = configure.get('data', 'api_secret')
             self.api_device_id = configure.get('data','device_id')
             configured_data_method = True
         if not configured_data_method:
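For reference, a sketch of the [data] section this branch parses is below; the key names mirror the configure.get('data', ...) calls above, and all values are placeholders.

# Sketch only: the token-based [data] keys ConfigManager looks for on this branch.
import configparser

example_ini = """
[data]
api_token = YOUR_LICOR_CLOUD_TOKEN
api_secret = OPTIONAL_SECRET_IF_REQUIRED
device_id = SENSOR_DEVICE_SERIAL
"""

configure = configparser.ConfigParser()
configure.read_string(example_ini)
assert "api_token" in configure["data"]  # selects the api_token branch above
assert "device_id" in configure["data"]  # required by both credential branches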
@@ -281,3 +283,14 @@ class ConfigManager:
                              hashlib.sha1).digest())
         token = '{}:{}'.format(self.api_token, signature.decode())
         return token, date_str
+
+    def get_ls_df(self, ls_file_name : str = 'load_shift.csv') -> pd.DataFrame:
+        full_ls_filename = f"{self.input_directory}load_shift.csv"
+        if ls_file_name != "" and os.path.exists(full_ls_filename):
+            ls_df = pd.read_csv(full_ls_filename)
+            ls_df['startDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['startTime'])
+            ls_df['endDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['endTime'])
+            return ls_df
+        else:
+            print(f"The loadshift file '{full_ls_filename}' does not exist. Thus loadshifting will not be added to daily dataframe.")
+            return pd.DataFrame()
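A sketch of the load_shift.csv layout that get_ls_df() parses, and a call against it, follows. Only the date, startTime, and endTime columns are taken from the code above; the example rows and the ConfigManager constructor argument are illustrative. Note that on this branch the ls_file_name argument is accepted but the path is still built from the literal 'load_shift.csv', so only the default file name in input_directory is actually read.

# Sketch only. load_shift.csv in the configured input_directory might look like:
#
#   date,startTime,endTime
#   1/6/2025,16:00,21:00
#   1/7/2025,16:00,21:00

from ecopipeline import ConfigManager

config = ConfigManager(config_file_path="config.ini")  # constructor argument assumed
ls_df = config.get_ls_df()  # reads <input_directory>/load_shift.csv
if not ls_df.empty:
    print(ls_df[["startDateTime", "endDateTime"]])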
ecopipeline-1.1.0.dist-info/RECORD
ADDED
@@ -0,0 +1,41 @@
+ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
+ecopipeline/event_tracking/Alarm.py,sha256=R8xlBieQIsORjEP_CdC_RW7vlq1XbGIpGnrpEyJNo64,15661
+ecopipeline/event_tracking/__init__.py,sha256=QT49yHZMw7L4zFw-paN0TVTKTOGeH_6-WjbuqebcD-4,1215
+ecopipeline/event_tracking/event_tracking.py,sha256=fJizghOnyhSne5uHDxR9Up67Qx0LUFik53LS_DQeriM,44261
+ecopipeline/event_tracking/alarms/AbnormalCOP.py,sha256=VeTLt7Fyr3GPflM2K0yLzKapW0gVnTePZaljV526HP4,4587
+ecopipeline/event_tracking/alarms/BackupUse.py,sha256=LrUq5R9Pyklh4yyuBsGjiPJwRsTWACbIXsMJI5rTx8k,5526
+ecopipeline/event_tracking/alarms/BalancingValve.py,sha256=z3NanIgjRIfqujnfUZFoeqNu0FrHU0qjnTRMkCKdfgs,4662
+ecopipeline/event_tracking/alarms/BlownFuse.py,sha256=0toXQ1Q5lQuw6I7Fm-yHUtcCrwXWJnDerVm8Dht21z4,4465
+ecopipeline/event_tracking/alarms/Boundary.py,sha256=07tGsf4tMFaS_WgclDX8XI7wH8bZm5-yyG8QBChvxC8,5121
+ecopipeline/event_tracking/alarms/HPWHInlet.py,sha256=wF3fzTqABJ7QMrdKODVlGHwCEB3Hj3njaaAKhQItYe4,4359
+ecopipeline/event_tracking/alarms/HPWHOutage.py,sha256=E2p5xJuJrpTXp8bJNM5Z4Qou5whtGwLEzPBdIBUpRsc,6045
+ecopipeline/event_tracking/alarms/HPWHOutlet.py,sha256=hQTSoykL9tsfby_prmty5xSxw2O2qri4vfbvSi_ny5w,5247
+ecopipeline/event_tracking/alarms/LSInconsist.py,sha256=9gPFhw5DT89tpu4MIXuJ2MbKu20wAlUVYFD9a32Eqrk,5748
+ecopipeline/event_tracking/alarms/PowerRatio.py,sha256=1PUFKimzCJ7PqbtzkMUe8imlGbkBX8tM9L7qpGMhyhM,5652
+ecopipeline/event_tracking/alarms/SOOChange.py,sha256=UJFoMTM0GSgcVSJcxZhmKu67RkFcaSmHIKvFlxxkblc,8092
+ecopipeline/event_tracking/alarms/ShortCycle.py,sha256=5dlBE-QrEdU20DDl6CUFIB86aojPNfEeNwiQTbd6XXY,3215
+ecopipeline/event_tracking/alarms/TMSetpoint.py,sha256=C7bhkJrHpJ7ENjGw1Y-r4VIhTeiIJC1yoQmw2KUrQDc,8624
+ecopipeline/event_tracking/alarms/TempRange.py,sha256=veMQ14Hm-Toom5wsJNe49DwPFS5MZMWp-dMTglUfAb0,5090
+ecopipeline/event_tracking/alarms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ecopipeline/extract/__init__.py,sha256=tGMph8ExJG_fYdGnlVbKfGwv8b4t8d7dcH2hekscpeg,822
+ecopipeline/extract/extract.py,sha256=OeNwxJ9uWZLJjtpm9_qzVpRRaqyPKjn39WlW4SpzDYE,62062
+ecopipeline/load/__init__.py,sha256=1D1BPhYzfGeHzE5dDtinGi3M7SFr9IfkSulHJKubM58,320
+ecopipeline/load/load.py,sha256=uQ4nnnui3WmXFGqxI7Me1HPxdLsNybHMbbqkr_oiqHQ,38762
+ecopipeline/transform/__init__.py,sha256=FjGcNpYNEYPdYQhogNRrQlKH2hGNNLv55jig1KaGaHY,2686
+ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
+ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
+ecopipeline/transform/transform.py,sha256=KVj3kkDa_dQOqRU4l4cHK01L0UPv3FTusQ_Dk2ez8RA,57270
+ecopipeline/utils/ConfigManager.py,sha256=E3YzdolFcRMVwxNk359Y0XVShqYquQ9otizPkkk9FU8,13880
+ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
+ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
+ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
+ecopipeline/utils/pkls/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ecopipeline/utils/pkls/tasseron_resistance_to_temp_3.pkl,sha256=9UpCZ3rSu0mU4LoTBg2M6Q-t7aGLaejEcAo801vfr7U,2013
+ecopipeline/utils/pkls/tasseron_temp_to_resistance_2.pkl,sha256=Uq6I2dl5GcR5wb5QxurgDP4A2D4-N3neDL3BKtx53A4,2047
+ecopipeline/utils/pkls/veris_resistance_to_temp_3.pkl,sha256=CVbUWJvOQXg2nZ-0GP9FWtU-ffMGcwg3ok5q669bmf8,1472
+ecopipeline/utils/pkls/veris_temp_to_resistance_2.pkl,sha256=JiEp4SxR9eq-olKd8TKAG37iHMscJE_2SSHizGqBdno,1472
+ecopipeline-1.1.0.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ecopipeline-1.1.0.dist-info/METADATA,sha256=igrtMpk3zjLaGqZZnup-AGQK2zyVAta1wufec-YgB-E,2363
+ecopipeline-1.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ecopipeline-1.1.0.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
+ecopipeline-1.1.0.dist-info/RECORD,,
ecopipeline-1.0.4.dist-info/RECORD
REMOVED
@@ -1,25 +0,0 @@
-ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
-ecopipeline/event_tracking/__init__.py,sha256=1saCNVWbcp7bwz1kXfKa2d8aUoGWNWvWSj7IJY4fzc8,219
-ecopipeline/event_tracking/event_tracking.py,sha256=LASkal4MgGLN7UzrAjbKw3eaM9JwKwt3YpkIraRSgiE,51172
-ecopipeline/extract/__init__.py,sha256=j_8-q_yrPRySwYyloMv5v2XQeYYyYfX0N-MW2ZDA4rg,775
-ecopipeline/extract/extract.py,sha256=MykzAchL_0LY0NG9TOAadpm5MSgjn7lPRI8AvSIMUBk,58530
-ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
-ecopipeline/load/load.py,sha256=PaSGWOZI0Xg44_SWN7htn2DPIAU_s8mOtCGibXq25tM,24614
-ecopipeline/transform/__init__.py,sha256=FjGcNpYNEYPdYQhogNRrQlKH2hGNNLv55jig1KaGaHY,2686
-ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
-ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
-ecopipeline/transform/transform.py,sha256=TaWFD7mrcTuLX-hmV8klyMFKFfEjzJI21FLkY37fjkY,57254
-ecopipeline/utils/ConfigManager.py,sha256=_9MZE9S-wOlQboAHzVk7VSrtxqR_lLu1hVJ-iulp4ag,13174
-ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
-ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
-ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
-ecopipeline/utils/pkls/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ecopipeline/utils/pkls/tasseron_resistance_to_temp_3.pkl,sha256=9UpCZ3rSu0mU4LoTBg2M6Q-t7aGLaejEcAo801vfr7U,2013
-ecopipeline/utils/pkls/tasseron_temp_to_resistance_2.pkl,sha256=Uq6I2dl5GcR5wb5QxurgDP4A2D4-N3neDL3BKtx53A4,2047
-ecopipeline/utils/pkls/veris_resistance_to_temp_3.pkl,sha256=CVbUWJvOQXg2nZ-0GP9FWtU-ffMGcwg3ok5q669bmf8,1472
-ecopipeline/utils/pkls/veris_temp_to_resistance_2.pkl,sha256=JiEp4SxR9eq-olKd8TKAG37iHMscJE_2SSHizGqBdno,1472
-ecopipeline-1.0.4.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ecopipeline-1.0.4.dist-info/METADATA,sha256=wE8t_wMfNclwSkN2kFK5mqazOXqvuI2VHqfMgrK1CxU,2363
-ecopipeline-1.0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ecopipeline-1.0.4.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
-ecopipeline-1.0.4.dist-info/RECORD,,
{ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/licenses/LICENSE
File without changes
{ecopipeline-1.0.4.dist-info → ecopipeline-1.1.0.dist-info}/top_level.txt
File without changes