ecopipeline 0.8.8__tar.gz → 0.8.10__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ecopipeline-0.8.8/src/ecopipeline.egg-info → ecopipeline-0.8.10}/PKG-INFO +1 -1
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/setup.cfg +1 -1
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/extract/extract.py +23 -7
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/transform/transform.py +3 -7
- {ecopipeline-0.8.8 → ecopipeline-0.8.10/src/ecopipeline.egg-info}/PKG-INFO +1 -1
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/LICENSE +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/README.md +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/pyproject.toml +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/setup.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/event_tracking/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/event_tracking/event_tracking.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/extract/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/load/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/load/load.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/transform/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/transform/bayview.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/transform/lbnl.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/utils/ConfigManager.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/utils/__init__.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline/utils/unit_convert.py +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline.egg-info/SOURCES.txt +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline.egg-info/dependency_links.txt +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline.egg-info/requires.txt +0 -0
- {ecopipeline-0.8.8 → ecopipeline-0.8.10}/src/ecopipeline.egg-info/top_level.txt +0 -0
setup.cfg

```diff
@@ -1,6 +1,6 @@
 [metadata]
 name = ecopipeline
-version = 0.8.8
+version = 0.8.10
 authors = ["Carlos Bello, <bellocarlos@seattleu.edu>, Emil Fahrig <fahrigemil@seattleu.edu>, Casey Mang <cmang@seattleu.edu>, Julian Harris <harrisjulian@seattleu.edu>, Roger Tram <rtram@seattleu.edu>, Nolan Price <nolan@ecotope.com>"]
 description = Contains functions for use in Ecotope Datapipelines
 long_description = file: README.md
```
src/ecopipeline/extract/extract.py

```diff
@@ -732,11 +732,17 @@ def fm_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: dat
         time_diff = endTime - startTime
         midpointTime = startTime + time_diff / 2
         # recursively construct the df
-        df_1 = fm_api_to_df(config, startTime, midpointTime)
-        df_2 = fm_api_to_df(config, midpointTime, endTime)
+        df_1 = fm_api_to_df(config, startTime, midpointTime, create_csv=False)
+        df_2 = fm_api_to_df(config, midpointTime, endTime, create_csv=False)
         df = pd.concat([df_1, df_2])
         df = df.sort_index()
         df = df.groupby(df.index).mean()
+        if create_csv:
+            filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
+            original_directory = os.getcwd()
+            os.chdir(config.data_directory)
+            df.to_csv(filename, index_label='time_pt')
+            os.chdir(original_directory)
         return df
 
     print(f"Failed to make GET request. Status code: {response.status_code} {response.json()}")
```
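The change above threads create_csv=False through the recursive halves so that only the outermost fm_api_to_df call writes a cache CSV into config.data_directory. A minimal standalone sketch of that pattern follows; fetch_window, pull_recursive, data_dir, and max_hours are illustrative names, not part of ecopipeline, and the real function pulls from the field-manager API rather than generating data:

```python
import os
from datetime import datetime, timedelta
import pandas as pd

def fetch_window(start: datetime, end: datetime) -> pd.DataFrame:
    """Stand-in for a single API request covering [start, end)."""
    idx = pd.date_range(start, end, freq="min", inclusive="left")
    return pd.DataFrame({"value": range(len(idx))}, index=idx)

def pull_recursive(start: datetime, end: datetime, data_dir: str,
                   create_csv: bool = True, max_hours: int = 12) -> pd.DataFrame:
    """Halve the window until it fits in max_hours, then stitch the halves."""
    if end - timedelta(hours=max_hours) > start:
        mid = start + (end - start) / 2
        # Recurse with create_csv=False so only the top-level call writes a file,
        # mirroring the 0.8.10 change to fm_api_to_df.
        df = pd.concat([
            pull_recursive(start, mid, data_dir, create_csv=False, max_hours=max_hours),
            pull_recursive(mid, end, data_dir, create_csv=False, max_hours=max_hours),
        ]).sort_index()
        df = df.groupby(df.index).mean()  # average any duplicated timestamps
    else:
        df = fetch_window(start, end)
    if create_csv:
        # One cache file for the whole combined pull, named after the start time.
        path = os.path.join(data_dir, f"{start.strftime('%Y%m%d%H%M%S')}.csv")
        df.to_csv(path, index_label="time_pt")
    return df

if __name__ == "__main__":
    end = datetime(2024, 1, 2)
    # Writes one cache CSV into the current directory and prints 1680 rows (28 h of minutes).
    print(len(pull_recursive(end - timedelta(hours=28), end, data_dir=".")))
```

ecopipeline itself switches into config.data_directory with os.chdir before writing and switches back afterwards; the sketch joins the path instead, which avoids changing the process-wide working directory.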
src/ecopipeline/extract/extract.py

```diff
@@ -761,7 +767,7 @@ def pull_egauge_data(config: ConfigManager, eGauge_ids: list, eGauge_usr : str,
 
     os.chdir(original_directory)
 
-def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True):
+def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True, query_hours : int = 12):
     """
     Function connects to the things board manager api to pull data and returns a dataframe.
 
```
src/ecopipeline/extract/extract.py

```diff
@@ -779,11 +785,14 @@ def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: dat
         is local time from the data's index.
     create_csv : bool
         create csv files as you process such that API need not be relied upon for reprocessing
+    query_hours : int
+        number of hours to query at a time from ThingsBoard API
 
     Returns
     -------
     pd.DataFrame:
-        Pandas Dataframe containing data from the API pull with column headers the same as the variable names in the data from the pull
+        Pandas Dataframe containing data from the API pull with column headers the same as the variable names in the data from the pull.
+        Will return with index in UTC so needs to be converted after to appropriate timezone
     """
     if endTime is None:
         endTime = datetime.now()
```
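The added docstring note says the returned index is UTC and has to be converted afterwards. As a small illustration of what a caller might do with pandas, assuming the index comes back timezone-naive (if it is already tz-aware, only tz_convert is needed); the timezone name below is an arbitrary example, not something the package prescribes:

```python
import pandas as pd

# Toy frame standing in for an API result whose timestamps are UTC but
# timezone-naive (an assumption for this example).
df = pd.DataFrame(
    {"powerIn_kW": [1.2, 1.4, 1.3]},
    index=pd.to_datetime(["2024-01-01 00:00", "2024-01-01 00:01", "2024-01-01 00:02"]),
)
df.index.name = "time_pt"

# Mark the index as UTC, then convert to the site's local zone (example zone).
df_local = df.tz_localize("UTC").tz_convert("America/Los_Angeles")
print(df_local.index[0])  # 2023-12-31 16:00:00-08:00
```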
src/ecopipeline/extract/extract.py

```diff
@@ -791,15 +800,22 @@ def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: dat
         # 28 hours to ensure encapsulation of last day
         startTime = endTime - timedelta(hours=28)
 
-    if endTime - timedelta(hours=12) > startTime:
+    if endTime - timedelta(hours=query_hours) > startTime:
         time_diff = endTime - startTime
         midpointTime = startTime + time_diff / 2
         # recursively construct the df
-        df_1 = tb_api_to_df(config, startTime, midpointTime)
-        df_2 = tb_api_to_df(config, midpointTime, endTime)
+        df_1 = tb_api_to_df(config, startTime, midpointTime, create_csv=False,query_hours=query_hours)
+        df_2 = tb_api_to_df(config, midpointTime, endTime, create_csv=False,query_hours=query_hours)
         df = pd.concat([df_1, df_2])
         df = df.sort_index()
         df = df.groupby(df.index).mean()
+        if create_csv:
+            filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
+            original_directory = os.getcwd()
+            os.chdir(config.data_directory)
+            df.to_csv(filename, index_label='time_pt')
+            os.chdir(original_directory)
+
         return df
     url = f'https://thingsboard.cloud/api/plugins/telemetry/DEVICE/{config.api_device_id}/values/timeseries'
     token = config.get_thingsboard_token()
```
src/ecopipeline/transform/transform.py

```diff
@@ -1110,13 +1110,9 @@ def create_data_statistics_df(df: pd.DataFrame) -> pd.DataFrame:
     total_missing = df_full.isna().resample('D').sum().astype(int)
     # Function to calculate max consecutive missing values
     def max_consecutive_nans(x):
-
-
-
-            groups = (is_na != is_na.shift()).cumsum()
-            return is_na.groupby(groups).sum().max() or 0
-        except:
-            raise Exception(f"{x} is the problem!")
+        is_na = pd.Series(x).isna().reset_index(drop=True)
+        groups = (is_na != is_na.shift()).cumsum()
+        return is_na.groupby(groups).sum().max() or 0
 
     # Function to calculate average consecutive missing values
     def avg_consecutive_nans(x):
```
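The rewritten max_consecutive_nans drops the try/except and computes run lengths directly: comparing the NaN mask with its own shift starts a new group at every flip, and summing the mask within each group gives the length of each NaN run. A self-contained copy of that idea with a tiny worked example:

```python
import numpy as np
import pandas as pd

def max_consecutive_nans(x) -> int:
    """Longest run of consecutive NaNs in x (0 if there are none)."""
    is_na = pd.Series(x).isna().reset_index(drop=True)
    # A new group id starts wherever the mask flips between True and False.
    groups = (is_na != is_na.shift()).cumsum()
    # Summing the boolean mask inside each group counts NaNs per run.
    return is_na.groupby(groups).sum().max() or 0

print(max_consecutive_nans([1.0, np.nan, np.nan, 3.0, np.nan]))  # 2
print(max_consecutive_nans([1.0, 2.0, 3.0]))                     # 0
```

For [1.0, NaN, NaN, 3.0, NaN] the mask is F T T F T, the group ids are 1 2 2 3 4, and the per-group sums are 0, 2, 0, 1, so the longest run is 2.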