ecopipeline 1.0.4__py3-none-any.whl → 1.0.5__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their respective public registries.
- ecopipeline/event_tracking/__init__.py +3 -2
- ecopipeline/event_tracking/event_tracking.py +910 -39
- ecopipeline/extract/__init__.py +2 -2
- ecopipeline/extract/extract.py +84 -0
- ecopipeline/transform/transform.py +1 -1
- ecopipeline/utils/ConfigManager.py +15 -2
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/METADATA +1 -1
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/RECORD +11 -11
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/WHEEL +1 -1
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/licenses/LICENSE +0 -0
- {ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/top_level.txt +0 -0
ecopipeline/extract/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-from .extract import get_noaa_data, json_to_df, extract_files, get_last_full_day_from_db, get_db_row_from_time, extract_new, csv_to_df, get_sub_dirs, msa_to_df, fm_api_to_df, small_planet_control_to_df, dent_csv_to_df, flow_csv_to_df, pull_egauge_data, egauge_csv_to_df, remove_char_sequence_from_csv_header, tb_api_to_df, skycentrics_api_to_df,get_OAT_open_meteo
+from .extract import get_noaa_data, json_to_df, extract_files, get_last_full_day_from_db, get_db_row_from_time, extract_new, csv_to_df, get_sub_dirs, msa_to_df, fm_api_to_df, small_planet_control_to_df, dent_csv_to_df, flow_csv_to_df, pull_egauge_data, egauge_csv_to_df, remove_char_sequence_from_csv_header, tb_api_to_df, skycentrics_api_to_df,get_OAT_open_meteo, licor_cloud_api_to_df
 __all__ = ["get_noaa_data", "json_to_df", "extract_files", "get_last_full_day_from_db", "get_db_row_from_time", 'extract_new', "csv_to_df", "get_sub_dirs", "msa_to_df", "fm_api_to_df",
            "small_planet_control_to_df","dent_csv_to_df","flow_csv_to_df","pull_egauge_data", "egauge_csv_to_df","remove_char_sequence_from_csv_header", "tb_api_to_df", "skycentrics_api_to_df",
-           "get_OAT_open_meteo"]
+           "get_OAT_open_meteo","licor_cloud_api_to_df"]
ecopipeline/extract/extract.py
CHANGED
@@ -862,6 +862,90 @@ def pull_egauge_data(config: ConfigManager, eGauge_ids: list, eGauge_usr : str,
 
     os.chdir(original_directory)
 
+def licor_cloud_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True) -> pd.DataFrame:
+    """
+    Connects to the LI-COR Cloud API to pull sensor data and returns a dataframe.
+
+    The function queries the LI-COR Cloud API for sensor data within the specified time range.
+    Each sensor's data is returned as a separate column in the dataframe, indexed by timestamp.
+
+    Parameters
+    ----------
+    config : ecopipeline.ConfigManager
+        The ConfigManager object that holds configuration data for the pipeline. The config manager
+        must contain the api_token and api_device_id (device serial number) for authentication
+        with the LI-COR Cloud API.
+    startTime : datetime
+        The start time for data extraction. If None, defaults to 28 hours before endTime.
+    endTime : datetime
+        The end time for data extraction. If None, defaults to the current time.
+    create_csv : bool
+        If True, saves the extracted data to a CSV file in the data directory (default True).
+
+    Returns
+    -------
+    pd.DataFrame:
+        Pandas DataFrame with sensor serial numbers as column headers and timestamps as the index.
+        The index is in UTC and may need to be converted to the appropriate timezone.
+        Returns an empty DataFrame if the API call fails.
+    """
+    df = pd.DataFrame()
+    api_device_id = config.api_device_id
+    if endTime is None:
+        endTime = datetime.now()
+    if startTime is None:
+        # 28 hours to ensure encapsulation of last day
+        startTime = endTime - timedelta(hours=28)
+
+    url = f'https://api.licor.cloud/v2/data'
+    token = config.api_token
+    params = {
+        'deviceSerialNumber': api_device_id,
+        'startTime': f'{int(startTime.timestamp())*1000}',
+        'endTime': f'{int(endTime.timestamp())*1000}'
+    }
+    # Headers
+    headers = {
+        'accept': 'application/json',
+        'Authorization': f'Bearer {token}'
+    }
+
+    try:
+        response = requests.get(url, headers=headers, params=params)
+        if response.status_code == 200:
+            response_json = response.json()
+            data = {}
+            if 'sensors' in response_json.keys():
+                for sensor in response_json['sensors']:
+                    sensor_id = sensor['sensorSerialNumber']
+                    for measurement in sensor.get('data', []):
+                        try:
+                            records = measurement.get('records', [])
+                            series = pd.Series(
+                                data={record[0]: _get_float_value(record[1]) for record in records}
+                            )
+                            data[sensor_id] = series
+                        except:
+                            print(f"Could not convert {sensor_id} values to floats.")
+            df = pd.DataFrame(data)
+            df.index = pd.to_datetime(df.index, unit='ms')
+            df = df.sort_index()
+        else:
+            print(f"Failed to make GET request. Status code: {response.status_code} {response.json()}")
+            df = pd.DataFrame()
+    except Exception as e:
+        traceback.print_exc()
+        print(f"An error occurred: {e}")
+        df = pd.DataFrame()
+    # save to file
+    if create_csv:
+        filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
+        original_directory = os.getcwd()
+        os.chdir(config.data_directory)
+        df.to_csv(filename, index_label='time_pt')
+        os.chdir(original_directory)
+    return df
+
 def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True, query_hours : float = 1,
                  sensor_keys : list = [], seperate_keys : bool = False, device_id_overwrite : str = None, csv_prefix : str = ""):
     """
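A minimal usage sketch for the new extractor follows. Only the function name, its parameters, and the UTC-indexed return value come from the diff above; the ConfigManager constructor argument and the target timezone are illustrative assumptions.

from datetime import datetime, timedelta

from ecopipeline import ConfigManager
from ecopipeline.extract import licor_cloud_api_to_df

# Hypothetical config file path; the config must provide api_token and
# device_id for the token-based authentication branch of ConfigManager.
config = ConfigManager("config.ini")

end = datetime.now()
start = end - timedelta(hours=28)  # mirrors the function's default window

df = licor_cloud_api_to_df(config, startTime=start, endTime=end, create_csv=False)

# The index is epoch milliseconds parsed with pd.to_datetime(..., unit='ms'),
# i.e. UTC; convert before joining local-time data (timezone is an example).
if not df.empty:
    df.index = df.index.tz_localize('UTC').tz_convert('US/Pacific')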
ecopipeline/transform/transform.py
CHANGED
@@ -352,7 +352,7 @@ def convert_temp_resistance_type(df : pd.DataFrame, column_name : str, sensor_mo
     return df
 
 def estimate_power(df : pd.DataFrame, new_power_column : str, current_a_column : str, current_b_column : str, current_c_column : str,
-                   assumed_voltage : float = 208, power_factor : float = 1):
+                   assumed_voltage : float = 208, power_factor : float = 1) -> pd.DataFrame:
     """
     df: pd.DataFrame
         Pandas dataframe with minute-to-minute data
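The hunk above only adds a return annotation to estimate_power; the function body is not shown in this diff. For orientation, the sketch below works through one common way to estimate real power from three per-phase currents at an assumed line-to-line voltage, the balanced three-phase relation P = sqrt(3) * V_LL * I * PF. It illustrates the arithmetic and is not necessarily the formula transform.py uses.

import math

import pandas as pd

def estimate_power_sketch(df: pd.DataFrame,
                          current_cols=('current_a', 'current_b', 'current_c'),
                          assumed_voltage: float = 208.0,
                          power_factor: float = 1.0) -> pd.Series:
    # Balanced three-phase relation: P = sqrt(3) * V_LL * I * PF (watts),
    # taking the mean of the three phase currents as I.
    i_avg = df[list(current_cols)].mean(axis=1)
    return math.sqrt(3) * assumed_voltage * i_avg * power_factor

example = pd.DataFrame({'current_a': [10.0], 'current_b': [9.5], 'current_c': [10.5]})
watts = estimate_power_sketch(example)  # ~3602 W at ~10 A per phase and 208 V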
ecopipeline/utils/ConfigManager.py
CHANGED
@@ -7,6 +7,7 @@ from datetime import datetime
 import base64
 import hashlib
 import hmac
+import pandas as pd
 
 class ConfigManager:
     """
@@ -79,9 +80,10 @@ class ConfigManager:
             self.api_pw = configure.get('data', 'api_pw')
             self.api_device_id = configure.get('data','device_id')
             configured_data_method = True
-        elif 'api_token' in configure['data']
+        elif 'api_token' in configure['data']:
             self.api_token = configure.get('data', 'api_token')
-
+            if 'api_secret' in configure['data']:
+                self.api_secret = configure.get('data', 'api_secret')
             self.api_device_id = configure.get('data','device_id')
             configured_data_method = True
         if not configured_data_method:
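Given the configparser lookups in this hunk (and the colon fix that makes the elif branch valid Python), a [data] section like the one below selects the token-based authentication path. The key names come from the diff; the values, and whatever other sections the config file requires, are placeholders.

import configparser

# Hypothetical config contents exercising the corrected elif branch.
sample = """
[data]
api_token = your-api-token-here
api_secret = your-api-secret-here
device_id = SN-12345678
"""

configure = configparser.ConfigParser()
configure.read_string(sample)

assert 'api_token' in configure['data']    # takes the token branch
assert 'api_secret' in configure['data']   # optional: only read when present
print(configure.get('data', 'device_id'))  # -> SN-12345678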
@@ -281,3 +283,14 @@ class ConfigManager:
                                  hashlib.sha1).digest())
         token = '{}:{}'.format(self.api_token, signature.decode())
         return token, date_str
+
+    def get_ls_df(self, ls_file_name : str = 'load_shift.csv') -> pd.DataFrame:
+        full_ls_filename = f"{self.input_directory}load_shift.csv"
+        if ls_file_name != "" and os.path.exists(full_ls_filename):
+            ls_df = pd.read_csv(full_ls_filename)
+            ls_df['startDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['startTime'])
+            ls_df['endDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['endTime'])
+            return ls_df
+        else:
+            print(f"The loadshift file '{full_ls_filename}' does not exist. Thus loadshifting will not be added to daily dataframe.")
+            return pd.DataFrame()
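The new get_ls_df method expects a load_shift.csv in the configured input directory with date, startTime, and endTime columns. (Note that the f-string hard-codes load_shift.csv, so the ls_file_name argument only acts as an empty-string guard.) A sketch of the parsing with made-up rows:

import io

import pandas as pd

# Column names come from the diff; the rows are illustrative.
sample_csv = """date,startTime,endTime
2024-07-01,16:00,21:00
2024-07-02,16:00,21:00
"""

ls_df = pd.read_csv(io.StringIO(sample_csv))
ls_df['startDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['startTime'])
ls_df['endDateTime'] = pd.to_datetime(ls_df['date'] + ' ' + ls_df['endTime'])
# startDateTime: 2024-07-01 16:00:00, endDateTime: 2024-07-01 21:00:00, etc.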
{ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/RECORD
CHANGED
@@ -1,15 +1,15 @@
 ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
-ecopipeline/event_tracking/__init__.py,sha256=
-ecopipeline/event_tracking/event_tracking.py,sha256=
-ecopipeline/extract/__init__.py,sha256=
-ecopipeline/extract/extract.py,sha256=
+ecopipeline/event_tracking/__init__.py,sha256=91KN-WmYN6q1WbTJO-EE9QQUPCAQ1AbqH7krRsqaAug,402
+ecopipeline/event_tracking/event_tracking.py,sha256=TNuCNcr-5z9hEfu5CXyoMDvDyeBT05KB2X1U5PBtxeg,101489
+ecopipeline/extract/__init__.py,sha256=tGMph8ExJG_fYdGnlVbKfGwv8b4t8d7dcH2hekscpeg,822
+ecopipeline/extract/extract.py,sha256=OeNwxJ9uWZLJjtpm9_qzVpRRaqyPKjn39WlW4SpzDYE,62062
 ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
 ecopipeline/load/load.py,sha256=PaSGWOZI0Xg44_SWN7htn2DPIAU_s8mOtCGibXq25tM,24614
 ecopipeline/transform/__init__.py,sha256=FjGcNpYNEYPdYQhogNRrQlKH2hGNNLv55jig1KaGaHY,2686
 ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
 ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
-ecopipeline/transform/transform.py,sha256=
-ecopipeline/utils/ConfigManager.py,sha256=
+ecopipeline/transform/transform.py,sha256=KVj3kkDa_dQOqRU4l4cHK01L0UPv3FTusQ_Dk2ez8RA,57270
+ecopipeline/utils/ConfigManager.py,sha256=E3YzdolFcRMVwxNk359Y0XVShqYquQ9otizPkkk9FU8,13880
 ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
 ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
 ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
@@ -18,8 +18,8 @@ ecopipeline/utils/pkls/tasseron_resistance_to_temp_3.pkl,sha256=9UpCZ3rSu0mU4LoT
 ecopipeline/utils/pkls/tasseron_temp_to_resistance_2.pkl,sha256=Uq6I2dl5GcR5wb5QxurgDP4A2D4-N3neDL3BKtx53A4,2047
 ecopipeline/utils/pkls/veris_resistance_to_temp_3.pkl,sha256=CVbUWJvOQXg2nZ-0GP9FWtU-ffMGcwg3ok5q669bmf8,1472
 ecopipeline/utils/pkls/veris_temp_to_resistance_2.pkl,sha256=JiEp4SxR9eq-olKd8TKAG37iHMscJE_2SSHizGqBdno,1472
-ecopipeline-1.0.
-ecopipeline-1.0.
-ecopipeline-1.0.
-ecopipeline-1.0.
-ecopipeline-1.0.
+ecopipeline-1.0.5.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ecopipeline-1.0.5.dist-info/METADATA,sha256=eU8B5fLOWV_TQimqs2Ws-PgTZt88XRy8xgAmh9Dn5OQ,2363
+ecopipeline-1.0.5.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+ecopipeline-1.0.5.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
+ecopipeline-1.0.5.dist-info/RECORD,,
{ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/licenses/LICENSE
File without changes
{ecopipeline-1.0.4.dist-info → ecopipeline-1.0.5.dist-info}/top_level.txt
File without changes