ecopipeline 0.6.5__tar.gz → 0.6.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ecopipeline-0.6.5/src/ecopipeline.egg-info → ecopipeline-0.6.7}/PKG-INFO +1 -1
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/setup.cfg +1 -1
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/extract/extract.py +1 -1
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/load/load.py +5 -4
- {ecopipeline-0.6.5 → ecopipeline-0.6.7/src/ecopipeline.egg-info}/PKG-INFO +1 -1
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/LICENSE +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/README.md +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/pyproject.toml +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/setup.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/__init__.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/extract/__init__.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/load/__init__.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/transform/__init__.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/transform/bayview.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/transform/lbnl.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/transform/transform.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/utils/ConfigManager.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/utils/__init__.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline/utils/unit_convert.py +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline.egg-info/SOURCES.txt +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline.egg-info/dependency_links.txt +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline.egg-info/requires.txt +0 -0
- {ecopipeline-0.6.5 → ecopipeline-0.6.7}/src/ecopipeline.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 [metadata]
 name = ecopipeline
-version = 0.6.5
+version = 0.6.7
 authors = ["Carlos Bello, <bellocarlos@seattleu.edu>, Emil Fahrig <fahrigemil@seattleu.edu>, Casey Mang <cmang@seattleu.edu>, Julian Harris <harrisjulian@seattleu.edu>, Roger Tram <rtram@seattleu.edu>, Nolan Price <nolan@ecotope.com>"]
 description = Contains functions for use in Ecotope Datapipelines
 long_description = file: README.md
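
The only packaging change is the version bump in setup.cfg (mirrored in PKG-INFO). A minimal sketch for confirming which release is installed, assuming the distribution name "ecopipeline":

# Sketch: report the installed ecopipeline release (assumes the package
# is installed under the distribution name "ecopipeline").
from importlib.metadata import PackageNotFoundError, version

try:
    print(version("ecopipeline"))  # expect "0.6.7" after upgrading
except PackageNotFoundError:
    print("ecopipeline is not installed")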
@@ -141,7 +141,7 @@ def extract_new(startTime: datetime, filenames: List[str], decihex = False, time
 
     if decihex:
         base_date = datetime(1970, 1, 1)
-        file_dates = [pd.Timestamp(base_date + timedelta(seconds = int(re.search(r'\.(.*?)_', filename).group(1), 16))) for filename in filenames] #convert decihex to dates, these are in utc
+        file_dates = [pd.Timestamp(base_date + timedelta(seconds = int(re.search(r'\.(.*?)_', filename.split("/")[-1]).group(1), 16))) for filename in filenames] #convert decihex to dates, these are in utc
         if timeZone == None:
             file_dates_local = [file_date.tz_localize('UTC').tz_localize(None) for file_date in file_dates] #convert utc
         else:
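
The only functional change in extract.py: the decihex timestamp regex now runs against the basename (filename.split("/")[-1]) rather than the full path, so a dot in a parent directory name can no longer capture the wrong token. A hedged, standalone sketch of the decoding step; the sample path below is invented for illustration:

# Hedged sketch of the decihex filename decoding shown in the hunk above.
# The sample path is hypothetical; only the regex and the hex-to-seconds
# conversion come from the package code.
import re
from datetime import datetime, timedelta

import pandas as pd

base_date = datetime(1970, 1, 1)
path = "input/site.A/device.63F0A2B1_data.csv"  # hypothetical full path

# 0.6.7 behavior: strip the directory part before matching.
name = path.split("/")[-1]                     # "device.63F0A2B1_data.csv"
token = re.search(r'\.(.*?)_', name).group(1)  # "63F0A2B1"
stamp = pd.Timestamp(base_date + timedelta(seconds=int(token, 16)))  # UTC
print(stamp)

Under 0.6.5 the same regex applied to the full path would capture "A/device.63F0A2B1" (the first "." sits in the directory name), and int(..., 16) would then raise ValueError.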
@@ -396,7 +396,8 @@ def load_event_table(config : ConfigManager, event_df: pd.DataFrame, site_name :
 
 def report_data_loss(config : ConfigManager, site_name : str = None):
     """
-    Logs data loss event in event database (assumes one exists)
+    Logs data loss event in event database (assumes one exists) as a DATA_LOSS_COP event to
+    note that COP calculations have been effected
 
     Parameters
     ----------
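
For orientation, a hedged sketch of calling report_data_loss from a site pipeline. The import paths follow this release's file layout (load.py under ecopipeline/load, ConfigManager.py under ecopipeline/utils) and the ConfigManager constructor argument is a placeholder; neither is confirmed by this diff.

# Hedged usage sketch; the import paths and the ConfigManager argument are
# assumptions based on the package layout, not on anything in this diff.
from ecopipeline.load.load import report_data_loss          # assumed module path
from ecopipeline.utils.ConfigManager import ConfigManager   # assumed module path

config = ConfigManager("config.ini")                 # hypothetical argument
report_data_loss(config, site_name="example_site")   # logs a DATA_LOSS_COP event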
@@ -418,11 +419,11 @@ def report_data_loss(config : ConfigManager, site_name : str = None):
         site_name = config.get_site_name()
     error_string = "Error processing data. Please check logs to resolve."
 
-    print(f"logging
+    print(f"logging DATA_LOSS_COP into {table_name}")
 
     # create SQL statement
     insert_str = "INSERT INTO " + table_name + " (start_time_pt, site_name, event_detail, event_type, last_modified_date, last_modified_by) VALUES "
-    insert_str += f"('{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}','{site_name}','{error_string}','
+    insert_str += f"('{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}','{site_name}','{error_string}','DATA_LOSS_COP','{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}','automatic_upload')"
 
     existing_rows = pd.DataFrame({
         'id' : []
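
The new row is written with event_type DATA_LOSS_COP and last_modified_by automatic_upload. A standalone sketch of the statement the code above builds; table_name and site_name are hypothetical values, everything else mirrors the diff:

# Sketch of the INSERT string built above; table_name and site_name are
# hypothetical, the column list and literals come from the diff.
from datetime import datetime

table_name = "site_events"    # hypothetical
site_name = "example_site"    # hypothetical
error_string = "Error processing data. Please check logs to resolve."

insert_str = "INSERT INTO " + table_name + " (start_time_pt, site_name, event_detail, event_type, last_modified_date, last_modified_by) VALUES "
insert_str += f"('{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}','{site_name}','{error_string}','DATA_LOSS_COP','{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}','automatic_upload')"
print(insert_str)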
@@ -438,7 +439,7 @@ def report_data_loss(config : ConfigManager, site_name : str = None):
     try:
         # find existing times in database for upsert statement
         cursor.execute(
-            f"SELECT id FROM {table_name} WHERE end_time_pt IS NULL AND site_name = '{site_name}' AND event_type = '
+            f"SELECT id FROM {table_name} WHERE end_time_pt IS NULL AND site_name = '{site_name}' AND event_type = 'DATA_LOSS_COP'")
         # Fetch the results into a DataFrame
         existing_rows = pd.DataFrame(cursor.fetchall(), columns=['id'])
 
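
Before inserting, report_data_loss checks for an already-open DATA_LOSS_COP event (end_time_pt IS NULL) for the site. A self-contained sketch of that check using an in-memory SQLite database as a stand-in; the real event table's engine and full schema are not shown in this diff, so the table below is a trimmed, hypothetical version:

# Self-contained sketch of the existence check above. SQLite and the trimmed
# schema are stand-ins; the pipeline's actual database engine is not
# specified in this diff.
import sqlite3

import pandas as pd

table_name = "site_events"   # hypothetical
site_name = "example_site"   # hypothetical

conn = sqlite3.connect(":memory:")
cursor = conn.cursor()
cursor.execute(f"CREATE TABLE {table_name} (id INTEGER PRIMARY KEY, start_time_pt TEXT, end_time_pt TEXT, site_name TEXT, event_detail TEXT, event_type TEXT)")
cursor.execute(f"INSERT INTO {table_name} (start_time_pt, end_time_pt, site_name, event_detail, event_type) VALUES ('2024-01-01 00:00:00', NULL, '{site_name}', 'Error processing data.', 'DATA_LOSS_COP')")

# find existing open DATA_LOSS_COP events, as in report_data_loss
cursor.execute(
    f"SELECT id FROM {table_name} WHERE end_time_pt IS NULL AND site_name = '{site_name}' AND event_type = 'DATA_LOSS_COP'")
existing_rows = pd.DataFrame(cursor.fetchall(), columns=['id'])
print(existing_rows)  # a non-empty frame means an open data-loss event already exists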
All other files are unchanged between 0.6.5 and 0.6.7.