ecopipeline 1.0.5__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. ecopipeline/event_tracking/Alarm.py +317 -0
  2. ecopipeline/event_tracking/__init__.py +18 -1
  3. ecopipeline/event_tracking/alarms/AbnormalCOP.py +76 -0
  4. ecopipeline/event_tracking/alarms/BackupUse.py +94 -0
  5. ecopipeline/event_tracking/alarms/BalancingValve.py +78 -0
  6. ecopipeline/event_tracking/alarms/BlownFuse.py +72 -0
  7. ecopipeline/event_tracking/alarms/Boundary.py +90 -0
  8. ecopipeline/event_tracking/alarms/HPWHInlet.py +73 -0
  9. ecopipeline/event_tracking/alarms/HPWHOutage.py +96 -0
  10. ecopipeline/event_tracking/alarms/HPWHOutlet.py +85 -0
  11. ecopipeline/event_tracking/alarms/LSInconsist.py +114 -0
  12. ecopipeline/event_tracking/alarms/PowerRatio.py +111 -0
  13. ecopipeline/event_tracking/alarms/SOOChange.py +127 -0
  14. ecopipeline/event_tracking/alarms/ShortCycle.py +59 -0
  15. ecopipeline/event_tracking/alarms/TMSetpoint.py +127 -0
  16. ecopipeline/event_tracking/alarms/TempRange.py +84 -0
  17. ecopipeline/event_tracking/alarms/__init__.py +0 -0
  18. ecopipeline/event_tracking/event_tracking.py +119 -1177
  19. ecopipeline/extract/extract.py +51 -0
  20. ecopipeline/extract/zip_to_lat_long.csv +41490 -0
  21. ecopipeline/load/__init__.py +2 -2
  22. ecopipeline/load/load.py +304 -3
  23. ecopipeline/utils/ConfigManager.py +30 -0
  24. {ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/METADATA +1 -1
  25. ecopipeline-1.1.1.dist-info/RECORD +42 -0
  26. {ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/WHEEL +1 -1
  27. ecopipeline-1.0.5.dist-info/RECORD +0 -25
  28. {ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/licenses/LICENSE +0 -0
  29. {ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/top_level.txt +0 -0
ecopipeline/load/__init__.py CHANGED
@@ -1,3 +1,3 @@
- from .load import check_table_exists, create_new_table, load_overwrite_database, load_event_table, report_data_loss, load_data_statistics
+ from .load import check_table_exists, create_new_table, load_overwrite_database, load_event_table, report_data_loss, load_data_statistics, load_alarms
  __all__ = ["check_table_exists", "create_new_table", "load_overwrite_database", "load_event_table", "report_data_loss",
- "load_data_statistics"]
+ "load_data_statistics", "load_alarms"]
ecopipeline/load/load.py CHANGED
@@ -302,6 +302,14 @@ def load_event_table(config : ConfigManager, event_df: pd.DataFrame, site_name :
      bool:
          A boolean value indicating if the data was successfully written to the database.
      """
+     if event_df.empty:
+         print("No events to load. DataFrame is empty.")
+         return True
+     if site_name is None:
+         site_name = config.get_site_name()
+     if 'alarm_type' in event_df.columns:
+         print("Alarm dataframe detected... redirecting dataframe to load_alarms() function...")
+         return load_alarms(config, event_df, site_name)
      # define constants
      proj_cop_filters = ['MV_COMMISSIONED','PLANT_COMMISSIONED','DATA_LOSS_COP','SYSTEM_MAINTENANCE','SYSTEM_TESTING']
      optim_cop_filters = ['MV_COMMISSIONED','PLANT_COMMISSIONED','DATA_LOSS_COP','INSTALLATION_ERROR_COP',
@@ -320,8 +328,6 @@ def load_event_table(config : ConfigManager, event_df: pd.DataFrame, site_name :
      print(f"Attempting to write data for {event_df.index[0]} to {event_df.index[-1]} into {table_name}")
  
      # Get string of all column names for sql insert
-     if site_name is None:
-         site_name = config.get_site_name()
      column_names = f"start_time_pt,site_name"
      column_types = ["datetime","varchar(25)","datetime",
                      "ENUM('MISC_EVENT','DATA_LOSS','DATA_LOSS_COP','SITE_VISIT','SYSTEM_MAINTENANCE','EQUIPMENT_MALFUNCTION','PARTIAL_OCCUPANCY','INSTALLATION_ERROR','ALARM','SILENT_ALARM','MV_COMMISSIONED','PLANT_COMMISSIONED','INSTALLATION_ERROR_COP','SOO_PERIOD','SOO_PERIOD_COP','SYSTEM_TESTING')",
@@ -543,4 +549,299 @@ def _generate_mysql_update(row, index, table_name, primary_key):
      else:
          statement = ""
  
-     return statement, values
+     return statement, values
+
+
+ def load_alarms(config: ConfigManager, alarm_df: pd.DataFrame, site_name: str = None) -> bool:
+     """
+     Loads alarm data from central_alarm_df_creator() output into the alarm and alarm_inst tables.
+
+     For each alarm instance in the DataFrame:
+     - Checks if a matching alarm record exists (same site_name, alarm_type, variable_name)
+     - If no matching alarm exists, creates a new record in the alarm table
+     - Inserts the alarm instance (with start/end times and certainty) into the alarm_inst table
+
+     Certainty-based overlap handling for alarm instances:
+     - If new alarm has higher certainty than existing overlapping instance: existing is split
+       around the new alarm so each time segment has the highest certainty available
+     - If new alarm has lower certainty than existing: only non-overlapping portions of new
+       alarm are inserted
+     - If same certainty: existing instance is extended to encompass both time periods
+
+     Parameters
+     ----------
+     config : ecopipeline.ConfigManager
+         The ConfigManager object that holds configuration data for the pipeline.
+     alarm_df : pd.DataFrame
+         The pandas DataFrame output from central_alarm_df_creator(). Must have columns:
+         start_time_pt, end_time_pt, alarm_type, variable_name. Optional column: certainty
+         (defaults to 3 if not present). Certainty values: 3=high, 2=med, 1=low.
+     site_name : str
+         The name of the site to associate alarms with. If None, defaults to config.get_site_name()
+
+     Returns
+     -------
+     bool:
+         A boolean value indicating if the data was successfully written to the database.
+     """
+     if alarm_df.empty:
+         print("No alarms to load. DataFrame is empty.")
+         return True
+
+     # Validate required columns
+     required_columns = ['start_time_pt', 'end_time_pt', 'alarm_type', 'variable_name']
+     missing_columns = [col for col in required_columns if col not in alarm_df.columns]
+     if missing_columns:
+         raise Exception(f"alarm_df is missing required columns: {missing_columns}")
+
+     # Sort by start_time_pt to process alarms in chronological order
+     alarm_df = alarm_df.sort_values(by='start_time_pt').reset_index(drop=True)
+
+     if site_name is None:
+         site_name = config.get_site_name()
+
+     dbname = config.get_db_name()
+     alarm_table = "alarm"
+     alarm_inst_table = "alarm_inst"
+
+     connection, cursor = config.connect_db()
+
+     try:
+         # Check if tables exist
+         if not check_table_exists(cursor, alarm_table, dbname):
+             create_table_statement = """
+                 CREATE TABLE alarm (
+                     id INT AUTO_INCREMENT PRIMARY KEY,
+                     var_names_id VARCHAR(40),
+                     start_time_pt DATETIME NOT NULL,
+                     end_time_pt DATETIME NULL,
+                     site_name VARCHAR(20),
+                     alarm_type VARCHAR(20),
+                     variable_name VARCHAR(70),
+                     silenced BOOLEAN,
+                     closing_event_id INT NULL,
+                     FOREIGN KEY (closing_event_id) REFERENCES site_events(id),
+                     UNIQUE INDEX unique_alarm (site_name, alarm_type, variable_name, start_time_pt, end_time_pt)
+                 );
+             """
+             cursor.execute(create_table_statement)
+         if not check_table_exists(cursor, alarm_inst_table, dbname):
+             create_table_statement = """
+                 CREATE TABLE alarm_inst (
+                     inst_id INT AUTO_INCREMENT PRIMARY KEY,
+                     id INT,
+                     start_time_pt DATETIME NOT NULL,
+                     end_time_pt DATETIME NOT NULL,
+                     certainty INT NOT NULL,
+                     FOREIGN KEY (id) REFERENCES alarm(id)
+                 );
+             """
+             cursor.execute(create_table_statement)
+
+         # Get existing alarms for this site
+         cursor.execute(
+             f"SELECT id, alarm_type, variable_name, start_time_pt, end_time_pt FROM {alarm_table} WHERE site_name = %s",
+             (site_name,)
+         )
+         existing_alarms = cursor.fetchall()
+         # Create lookup dict: (alarm_type, variable_name) -> list of (alarm_id, start_time, end_time)
+         # Using a list because there can be multiple alarms with same type/variable but different date ranges
+         alarm_lookup = {}
+         for row in existing_alarms:
+             key = (row[1], row[2])  # (alarm_type, variable_name)
+             if key not in alarm_lookup:
+                 alarm_lookup[key] = []
+             alarm_lookup[key].append({
+                 'id': row[0],
+                 'start_time': row[3],
+                 'end_time': row[4]
+             })
+
+         # SQL statements
+         insert_alarm_sql = f"""
+             INSERT INTO {alarm_table} (var_names_id, start_time_pt, end_time_pt, site_name, alarm_type, variable_name, silenced)
+             VALUES (%s, %s, %s, %s, %s, %s, %s)
+         """
+         update_alarm_dates_sql = f"""
+             UPDATE {alarm_table} SET start_time_pt = %s, end_time_pt = %s WHERE id = %s
+         """
+         insert_inst_sql = f"""
+             INSERT INTO {alarm_inst_table} (id, start_time_pt, end_time_pt, certainty)
+             VALUES (%s, %s, %s, %s)
+         """
+         update_inst_sql = f"""
+             UPDATE {alarm_inst_table} SET start_time_pt = %s, end_time_pt = %s WHERE inst_id = %s
+         """
+         delete_inst_sql = f"""
+             DELETE FROM {alarm_inst_table} WHERE inst_id = %s
+         """
+
+         new_alarms = 0
+         updated_alarms = 0
+         new_instances = 0
+         updated_instances = 0
+         max_gap_days = 3
+
+         for _, row in alarm_df.iterrows():
+             start_time = row['start_time_pt']
+             end_time = row['end_time_pt']
+             alarm_type = row['alarm_type']
+             variable_name = row['variable_name']
+             certainty = row.get('certainty', 3)  # Default to high certainty if not specified
+
+             lookup_key = (alarm_type, variable_name)
+             alarm_id = None
+
+             if lookup_key in alarm_lookup:
+                 # Find matching alarm based on date range logic
+                 for alarm_record in alarm_lookup[lookup_key]:
+                     alarm_start = alarm_record['start_time']
+                     alarm_end = alarm_record['end_time']
+
+                     # Case 1: Alarm dates encapsulate row dates - just use this alarm
+                     if alarm_start <= start_time and alarm_end >= end_time:
+                         alarm_id = alarm_record['id']
+                         break
+
+                     # Calculate gap between date ranges
+                     if end_time < alarm_start:
+                         gap = (alarm_start - end_time).days
+                     elif start_time > alarm_end:
+                         gap = (start_time - alarm_end).days
+                     else:
+                         gap = 0  # Overlapping
+
+                     # Case 2: Overlapping or within 3 days - extend the alarm dates
+                     if gap <= max_gap_days:
+                         alarm_id = alarm_record['id']
+                         new_start = min(alarm_start, start_time)
+                         new_end = max(alarm_end, end_time)
+
+                         # Only update if dates actually changed
+                         if new_start != alarm_start or new_end != alarm_end:
+                             cursor.execute(update_alarm_dates_sql, (new_start, new_end, alarm_id))
+                             # Update the lookup cache
+                             alarm_record['start_time'] = new_start
+                             alarm_record['end_time'] = new_end
+                             updated_alarms += 1
+                         break
+
+                 # Case 3: No matching alarm found (gap > 3 days for all existing alarms)
+                 # Will create a new alarm below
+
+             if alarm_id is None:
+                 # Create new alarm record
+                 cursor.execute(insert_alarm_sql, (
+                     "No ID",  # TODO add actual ID?
+                     start_time,
+                     end_time,
+                     site_name,
+                     alarm_type,
+                     variable_name,
+                     False  # silenced = False by default
+                 ))
+                 # Retrieve the ID from database to handle concurrent inserts safely
+                 cursor.execute(
+                     f"""SELECT id FROM {alarm_table}
+                         WHERE site_name = %s AND alarm_type = %s AND variable_name = %s
+                         AND start_time_pt = %s AND end_time_pt = %s""",
+                     (site_name, alarm_type, variable_name, start_time, end_time)
+                 )
+                 result = cursor.fetchone()
+                 if result is None:
+                     raise Exception(f"Failed to retrieve alarm ID after insert for {alarm_type}/{variable_name}")
+                 alarm_id = result[0]
+                 # Add to lookup cache
+                 if lookup_key not in alarm_lookup:
+                     alarm_lookup[lookup_key] = []
+                 alarm_lookup[lookup_key].append({
+                     'id': alarm_id,
+                     'start_time': start_time,
+                     'end_time': end_time
+                 })
+                 new_alarms += 1
+
+             # Get existing alarm instances for this alarm_id that might overlap
+             cursor.execute(
+                 f"""SELECT inst_id, start_time_pt, end_time_pt, certainty
+                     FROM {alarm_inst_table}
+                     WHERE id = %s AND start_time_pt <= %s AND end_time_pt >= %s""",
+                 (alarm_id, end_time, start_time)
+             )
+             existing_instances = cursor.fetchall()
+
+             # Track segments of the new alarm to insert (may be split by higher-certainty existing alarms)
+             new_segments = [(start_time, end_time, certainty)]
+
+             for existing in existing_instances:
+                 existing_inst_id, existing_start, existing_end, existing_certainty = existing
+
+                 # Process each new segment against this existing instance
+                 updated_segments = []
+                 for seg_start, seg_end, seg_certainty in new_segments:
+                     # Check if there's overlap
+                     if seg_end <= existing_start or seg_start >= existing_end:
+                         # No overlap, keep segment as is
+                         updated_segments.append((seg_start, seg_end, seg_certainty))
+                         continue
+
+                     # There is overlap - handle based on certainty comparison
+                     if existing_certainty < seg_certainty:
+                         # Case 1: New alarm has higher certainty - split existing around new
+                         # Part before new alarm (if any)
+                         if existing_start < seg_start:
+                             cursor.execute(update_inst_sql, (existing_start, seg_start, existing_inst_id))
+                             updated_instances += 1
+                             # Insert the part after new alarm (if any)
+                             if existing_end > seg_end:
+                                 cursor.execute(insert_inst_sql, (alarm_id, seg_end, existing_end, existing_certainty))
+                                 new_instances += 1
+                         elif existing_end > seg_end:
+                             # No part before, but there's a part after
+                             cursor.execute(update_inst_sql, (seg_end, existing_end, existing_inst_id))
+                             updated_instances += 1
+                         else:
+                             # Existing is completely encompassed by new - delete it
+                             cursor.execute(delete_inst_sql, (existing_inst_id,))
+                         # Keep the new segment as is
+                         updated_segments.append((seg_start, seg_end, seg_certainty))
+
+                     elif existing_certainty > seg_certainty:
+                         # Case 2: Existing has higher certainty - trim new segment to non-overlapping parts
+                         # Part before existing (if any)
+                         if seg_start < existing_start:
+                             updated_segments.append((seg_start, existing_start, seg_certainty))
+                         # Part after existing (if any)
+                         if seg_end > existing_end:
+                             updated_segments.append((existing_end, seg_end, seg_certainty))
+                         # The overlapping part of new segment is discarded
+
+                     else:
+                         # Case 3: Same certainty - merge to encompass both
+                         merged_start = min(seg_start, existing_start)
+                         merged_end = max(seg_end, existing_end)
+                         cursor.execute(update_inst_sql, (merged_start, merged_end, existing_inst_id))
+                         updated_instances += 1
+                         # Remove this segment from new_segments (it's been merged into existing)
+                         # Don't add to updated_segments
+
+                 new_segments = updated_segments
+
+             # Insert any remaining new segments
+             for seg_start, seg_end, seg_certainty in new_segments:
+                 if seg_start < seg_end:  # Only insert valid segments
+                     cursor.execute(insert_inst_sql, (alarm_id, seg_start, seg_end, seg_certainty))
+                     new_instances += 1
+
+         connection.commit()
+         print(f"Successfully loaded alarms: {new_alarms} new alarm records, {updated_alarms} updated alarm records, {new_instances} new instances, {updated_instances} updated instances.")
+         return True
+
+     except Exception as e:
+         print(f"Error loading alarms: {e}")
+         connection.rollback()
+         return False
+
+     finally:
+         cursor.close()
+         connection.close()
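The new load_alarms() function reconciles overlapping alarm instances by certainty before anything is written to alarm_inst. The sketch below is not part of the package; it only illustrates how a caller might hand the function a DataFrame. The alarm_type and variable_name values are hypothetical, and the ConfigManager is assumed to have been constructed elsewhere in the pipeline.

import pandas as pd
from ecopipeline.load import load_alarms

def load_example_alarms(config) -> bool:
    # Two instances of the same hypothetical alarm with different certainty
    # levels (3 = high, 1 = low). Because the rows overlap, load_alarms keeps
    # the high-certainty instance intact and trims the low-certainty row to
    # the time span it alone covers before writing it to alarm_inst.
    alarm_df = pd.DataFrame({
        "start_time_pt": pd.to_datetime(["2024-01-01 00:00", "2024-01-01 12:00"]),
        "end_time_pt": pd.to_datetime(["2024-01-02 00:00", "2024-01-03 00:00"]),
        "alarm_type": ["HPWH_OUTAGE", "HPWH_OUTAGE"],
        "variable_name": ["PowerIn_HPWH1", "PowerIn_HPWH1"],
        "certainty": [3, 1],
    })
    # site_name=None falls back to config.get_site_name() inside load_alarms()
    return load_alarms(config, alarm_df, site_name=None)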
ecopipeline/utils/ConfigManager.py CHANGED
@@ -222,6 +222,36 @@ class ConfigManager:
          print(f"Successfully connected to database.")
          return connection, connection.cursor()
  
+     def connect_siteConfig_db(self) -> (mysql.connector.MySQLConnection, mysql.connector.cursor.MySQLCursor):
+         """
+         Create a connection with the mySQL server.
+
+         Parameters
+         ----------
+         None
+
+         Returns
+         -------
+         mysql.connector.MySQLConnection, mysql.connector.cursor.MySQLCursor:
+             A connection and cursor object. The cursor can be used to execute
+             mySQL queries and the connection object can be used to save those changes.
+         """
+         connection = None
+         try:
+             connection = mysql.connector.connect(
+                 host=self.db_connection_info['host'],
+                 user=self.db_connection_info['user'],
+                 password=self.db_connection_info['password'],
+                 database="SiteConfig"
+             )
+         except mysql.connector.Error:
+             print("Unable to connect to database with given credentials.")
+             return None, None
+
+         print(f"Successfully connected to database.")
+         return connection, connection.cursor()
+
      def get_fm_token(self) -> str:
          # for getting feild manager api token
          if self.api_usr is None or self.api_pw is None:
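The new connect_siteConfig_db() helper returns (None, None) when the credentials are rejected, so callers should check the connection before using the cursor. A minimal usage sketch, not from the package; the SHOW TABLES statement is just a placeholder read against the SiteConfig database:

def read_site_config_tables(config):
    # config is an ecopipeline.ConfigManager that already holds db_connection_info
    connection, cursor = config.connect_siteConfig_db()
    if connection is None:
        return None  # credentials were rejected
    try:
        cursor.execute("SHOW TABLES")  # any read-only query works here
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()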
{ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ecopipeline
- Version: 1.0.5
+ Version: 1.1.1
  Summary: Contains functions for use in Ecotope Datapipelines
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: GNU General Public License (GPL)
ecopipeline-1.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,42 @@
+ ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
+ ecopipeline/event_tracking/Alarm.py,sha256=R8xlBieQIsORjEP_CdC_RW7vlq1XbGIpGnrpEyJNo64,15661
+ ecopipeline/event_tracking/__init__.py,sha256=QT49yHZMw7L4zFw-paN0TVTKTOGeH_6-WjbuqebcD-4,1215
+ ecopipeline/event_tracking/event_tracking.py,sha256=fJizghOnyhSne5uHDxR9Up67Qx0LUFik53LS_DQeriM,44261
+ ecopipeline/event_tracking/alarms/AbnormalCOP.py,sha256=VeTLt7Fyr3GPflM2K0yLzKapW0gVnTePZaljV526HP4,4587
+ ecopipeline/event_tracking/alarms/BackupUse.py,sha256=LrUq5R9Pyklh4yyuBsGjiPJwRsTWACbIXsMJI5rTx8k,5526
+ ecopipeline/event_tracking/alarms/BalancingValve.py,sha256=z3NanIgjRIfqujnfUZFoeqNu0FrHU0qjnTRMkCKdfgs,4662
+ ecopipeline/event_tracking/alarms/BlownFuse.py,sha256=0toXQ1Q5lQuw6I7Fm-yHUtcCrwXWJnDerVm8Dht21z4,4465
+ ecopipeline/event_tracking/alarms/Boundary.py,sha256=07tGsf4tMFaS_WgclDX8XI7wH8bZm5-yyG8QBChvxC8,5121
+ ecopipeline/event_tracking/alarms/HPWHInlet.py,sha256=wF3fzTqABJ7QMrdKODVlGHwCEB3Hj3njaaAKhQItYe4,4359
+ ecopipeline/event_tracking/alarms/HPWHOutage.py,sha256=E2p5xJuJrpTXp8bJNM5Z4Qou5whtGwLEzPBdIBUpRsc,6045
+ ecopipeline/event_tracking/alarms/HPWHOutlet.py,sha256=hQTSoykL9tsfby_prmty5xSxw2O2qri4vfbvSi_ny5w,5247
+ ecopipeline/event_tracking/alarms/LSInconsist.py,sha256=9gPFhw5DT89tpu4MIXuJ2MbKu20wAlUVYFD9a32Eqrk,5748
+ ecopipeline/event_tracking/alarms/PowerRatio.py,sha256=1PUFKimzCJ7PqbtzkMUe8imlGbkBX8tM9L7qpGMhyhM,5652
+ ecopipeline/event_tracking/alarms/SOOChange.py,sha256=UJFoMTM0GSgcVSJcxZhmKu67RkFcaSmHIKvFlxxkblc,8092
+ ecopipeline/event_tracking/alarms/ShortCycle.py,sha256=5dlBE-QrEdU20DDl6CUFIB86aojPNfEeNwiQTbd6XXY,3215
+ ecopipeline/event_tracking/alarms/TMSetpoint.py,sha256=C7bhkJrHpJ7ENjGw1Y-r4VIhTeiIJC1yoQmw2KUrQDc,8624
+ ecopipeline/event_tracking/alarms/TempRange.py,sha256=veMQ14Hm-Toom5wsJNe49DwPFS5MZMWp-dMTglUfAb0,5090
+ ecopipeline/event_tracking/alarms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ecopipeline/extract/__init__.py,sha256=tGMph8ExJG_fYdGnlVbKfGwv8b4t8d7dcH2hekscpeg,822
+ ecopipeline/extract/extract.py,sha256=JAWCfyWet1uo5uf8DL-GtWU5ijCWi7AUt4TxNKH4pFs,64653
+ ecopipeline/extract/zip_to_lat_long.csv,sha256=rQpdyZxui34nNV-lLhjtbYkYrTz_IZzs9WSm7IjMMPY,2585933
+ ecopipeline/load/__init__.py,sha256=1D1BPhYzfGeHzE5dDtinGi3M7SFr9IfkSulHJKubM58,320
+ ecopipeline/load/load.py,sha256=uQ4nnnui3WmXFGqxI7Me1HPxdLsNybHMbbqkr_oiqHQ,38762
+ ecopipeline/transform/__init__.py,sha256=FjGcNpYNEYPdYQhogNRrQlKH2hGNNLv55jig1KaGaHY,2686
+ ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
+ ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
+ ecopipeline/transform/transform.py,sha256=KVj3kkDa_dQOqRU4l4cHK01L0UPv3FTusQ_Dk2ez8RA,57270
+ ecopipeline/utils/ConfigManager.py,sha256=GJxUQ9psm_uiU3j0LBsn7h0OFGvxHWS8dAomkaTLR3A,14960
+ ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
+ ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
+ ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
+ ecopipeline/utils/pkls/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ecopipeline/utils/pkls/tasseron_resistance_to_temp_3.pkl,sha256=9UpCZ3rSu0mU4LoTBg2M6Q-t7aGLaejEcAo801vfr7U,2013
+ ecopipeline/utils/pkls/tasseron_temp_to_resistance_2.pkl,sha256=Uq6I2dl5GcR5wb5QxurgDP4A2D4-N3neDL3BKtx53A4,2047
+ ecopipeline/utils/pkls/veris_resistance_to_temp_3.pkl,sha256=CVbUWJvOQXg2nZ-0GP9FWtU-ffMGcwg3ok5q669bmf8,1472
+ ecopipeline/utils/pkls/veris_temp_to_resistance_2.pkl,sha256=JiEp4SxR9eq-olKd8TKAG37iHMscJE_2SSHizGqBdno,1472
+ ecopipeline-1.1.1.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ecopipeline-1.1.1.dist-info/METADATA,sha256=k1RWYAYwxdQaTYlD70WJcE6g-6Arqv9JAiziTKzn-Bg,2363
+ ecopipeline-1.1.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ ecopipeline-1.1.1.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
+ ecopipeline-1.1.1.dist-info/RECORD,,
{ecopipeline-1.0.5.dist-info → ecopipeline-1.1.1.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.10.1)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
  
ecopipeline-1.0.5.dist-info/RECORD DELETED
@@ -1,25 +0,0 @@
- ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
- ecopipeline/event_tracking/__init__.py,sha256=91KN-WmYN6q1WbTJO-EE9QQUPCAQ1AbqH7krRsqaAug,402
- ecopipeline/event_tracking/event_tracking.py,sha256=TNuCNcr-5z9hEfu5CXyoMDvDyeBT05KB2X1U5PBtxeg,101489
- ecopipeline/extract/__init__.py,sha256=tGMph8ExJG_fYdGnlVbKfGwv8b4t8d7dcH2hekscpeg,822
- ecopipeline/extract/extract.py,sha256=OeNwxJ9uWZLJjtpm9_qzVpRRaqyPKjn39WlW4SpzDYE,62062
- ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
- ecopipeline/load/load.py,sha256=PaSGWOZI0Xg44_SWN7htn2DPIAU_s8mOtCGibXq25tM,24614
- ecopipeline/transform/__init__.py,sha256=FjGcNpYNEYPdYQhogNRrQlKH2hGNNLv55jig1KaGaHY,2686
- ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
- ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
- ecopipeline/transform/transform.py,sha256=KVj3kkDa_dQOqRU4l4cHK01L0UPv3FTusQ_Dk2ez8RA,57270
- ecopipeline/utils/ConfigManager.py,sha256=E3YzdolFcRMVwxNk359Y0XVShqYquQ9otizPkkk9FU8,13880
- ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
- ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
- ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
- ecopipeline/utils/pkls/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ecopipeline/utils/pkls/tasseron_resistance_to_temp_3.pkl,sha256=9UpCZ3rSu0mU4LoTBg2M6Q-t7aGLaejEcAo801vfr7U,2013
- ecopipeline/utils/pkls/tasseron_temp_to_resistance_2.pkl,sha256=Uq6I2dl5GcR5wb5QxurgDP4A2D4-N3neDL3BKtx53A4,2047
- ecopipeline/utils/pkls/veris_resistance_to_temp_3.pkl,sha256=CVbUWJvOQXg2nZ-0GP9FWtU-ffMGcwg3ok5q669bmf8,1472
- ecopipeline/utils/pkls/veris_temp_to_resistance_2.pkl,sha256=JiEp4SxR9eq-olKd8TKAG37iHMscJE_2SSHizGqBdno,1472
- ecopipeline-1.0.5.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ecopipeline-1.0.5.dist-info/METADATA,sha256=eU8B5fLOWV_TQimqs2Ws-PgTZt88XRy8xgAmh9Dn5OQ,2363
- ecopipeline-1.0.5.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
- ecopipeline-1.0.5.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
- ecopipeline-1.0.5.dist-info/RECORD,,