tsp 1.8.1__py3-none-any.whl → 1.10.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tsp/__init__.py +11 -11
- tsp/__meta__.py +1 -1
- tsp/concatenation.py +159 -153
- tsp/core.py +1306 -1162
- tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv +4 -4
- tsp/data/2023-01-06_755-test.metadata.txt +208 -208
- tsp/data/NTGS_example_csv.csv +6 -6
- tsp/data/NTGS_example_slash_dates.csv +6 -6
- tsp/data/NTGS_gtr_example_excel.xlsx +0 -0
- tsp/data/example_geotop.csv +5240 -5240
- tsp/data/example_gtnp.csv +1298 -1298
- tsp/data/example_permos.csv +7 -7
- tsp/data/ntgs-db-multi.txt +3872 -0
- tsp/data/ntgs-db-single.txt +2251 -0
- tsp/data/test_geotop_has_space.txt +5 -5
- tsp/data/tsp_format_long.csv +10 -0
- tsp/data/tsp_format_wide_1.csv +7 -0
- tsp/data/tsp_format_wide_2.csv +7 -0
- tsp/dataloggers/AbstractReader.py +43 -43
- tsp/dataloggers/FG2.py +110 -110
- tsp/dataloggers/GP5W.py +114 -114
- tsp/dataloggers/Geoprecision.py +34 -34
- tsp/dataloggers/HOBO.py +930 -914
- tsp/dataloggers/RBRXL800.py +190 -190
- tsp/dataloggers/RBRXR420.py +371 -308
- tsp/dataloggers/Vemco.py +84 -0
- tsp/dataloggers/__init__.py +15 -15
- tsp/dataloggers/logr.py +196 -115
- tsp/dataloggers/test_files/004448.DAT +2543 -2543
- tsp/dataloggers/test_files/004531.DAT +17106 -17106
- tsp/dataloggers/test_files/004531.HEX +3587 -3587
- tsp/dataloggers/test_files/004534.HEX +3587 -3587
- tsp/dataloggers/test_files/010252.dat +1731 -1731
- tsp/dataloggers/test_files/010252.hex +1739 -1739
- tsp/dataloggers/test_files/010274.hex +1291 -1291
- tsp/dataloggers/test_files/010278.hex +3544 -3544
- tsp/dataloggers/test_files/012064.dat +1286 -1286
- tsp/dataloggers/test_files/012064.hex +1294 -1294
- tsp/dataloggers/test_files/012064_modified_start.hex +1294 -0
- tsp/dataloggers/test_files/012081.hex +3532 -3532
- tsp/dataloggers/test_files/013138_recovery_stamp.hex +1123 -0
- tsp/dataloggers/test_files/014037-2007.hex +95 -0
- tsp/dataloggers/test_files/019360_20160918_1146_SlumpIslandTopofHill.hex +11253 -0
- tsp/dataloggers/test_files/019360_20160918_1146_SlumpIslandTopofHill.xls +0 -0
- tsp/dataloggers/test_files/07B1592.DAT +1483 -1483
- tsp/dataloggers/test_files/07B1592.HEX +1806 -1806
- tsp/dataloggers/test_files/07B4450.DAT +2234 -2234
- tsp/dataloggers/test_files/07B4450.HEX +2559 -2559
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16.txt +36 -0
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16_raw.csv +2074 -0
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16_temp.csv +2074 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_cfg.txt +30 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_raw.csv +35 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_temp.csv +35 -0
- tsp/dataloggers/test_files/204087.xlsx +0 -0
- tsp/dataloggers/test_files/Asc-1455As02.000 +2982 -0
- tsp/dataloggers/test_files/Asc-1456As02.000 +2992 -0
- tsp/dataloggers/test_files/Asc-1457As02.000 +2917 -0
- tsp/dataloggers/test_files/BGC_BH15_019362_20140610_1253.hex +1729 -0
- tsp/dataloggers/test_files/Bin2944.csv +759 -0
- tsp/dataloggers/test_files/Bin5494.csv +2972 -0
- tsp/dataloggers/test_files/Bin6786.csv +272 -0
- tsp/dataloggers/test_files/FG2_399.csv +9881 -9881
- tsp/dataloggers/test_files/GP5W.csv +1121 -1121
- tsp/dataloggers/test_files/GP5W_260.csv +1884 -1884
- tsp/dataloggers/test_files/GP5W_270.csv +2210 -2210
- tsp/dataloggers/test_files/H08-030-08_HOBOware.csv +998 -998
- tsp/dataloggers/test_files/Minilog-II-T_350763_20190711_1.csv +2075 -0
- tsp/dataloggers/test_files/Minilog-II-T_350769_20190921_1.csv +6384 -0
- tsp/dataloggers/test_files/Minilog-II-T_354284_20190921_1.csv +4712 -0
- tsp/dataloggers/test_files/Minilog-T_7943_20140920_1.csv +5826 -0
- tsp/dataloggers/test_files/Minilog-T_8979_20140806_1.csv +2954 -0
- tsp/dataloggers/test_files/Minilog-T_975_20110824_1.csv +4343 -0
- tsp/dataloggers/test_files/RBR_01.dat +1046 -1046
- tsp/dataloggers/test_files/RBR_02.dat +2426 -2426
- tsp/dataloggers/test_files/RI03b_062831_20240905_1801.rsk +0 -0
- tsp/dataloggers/test_files/RI03b_062831_20240905_1801.xlsx +0 -0
- tsp/dataloggers/test_files/RSTDT2055.csv +2152 -2152
- tsp/dataloggers/test_files/U23-001_HOBOware.csv +1001 -1001
- tsp/dataloggers/test_files/hobo-negative-2.txt +6396 -6396
- tsp/dataloggers/test_files/hobo-negative-3.txt +5593 -5593
- tsp/dataloggers/test_files/hobo-positive-number-1.txt +1000 -1000
- tsp/dataloggers/test_files/hobo-positive-number-2.csv +1003 -1003
- tsp/dataloggers/test_files/hobo-positive-number-3.csv +1133 -1133
- tsp/dataloggers/test_files/hobo-positive-number-4.csv +1209 -1209
- tsp/dataloggers/test_files/hobo2.csv +8702 -8702
- tsp/dataloggers/test_files/hobo_1_AB.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_Details.txt +133 -133
- tsp/dataloggers/test_files/hobo_1_AB_classic.csv +4373 -4373
- tsp/dataloggers/test_files/hobo_1_AB_defaults.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_minimal.txt +1358 -1358
- tsp/dataloggers/test_files/hobo_1_AB_var2.csv +3189 -3189
- tsp/dataloggers/test_files/hobo_1_AB_var3.csv +2458 -2458
- tsp/dataloggers/test_files/logR_ULogC16-32_1.csv +106 -106
- tsp/dataloggers/test_files/logR_ULogC16-32_2.csv +100 -100
- tsp/dataloggers/test_files/mon_3_Ta_2010-08-18_2013-02-08.txt +21724 -21724
- tsp/dataloggers/test_files/rbr_001.dat +1133 -1133
- tsp/dataloggers/test_files/rbr_001.hex +1139 -1139
- tsp/dataloggers/test_files/rbr_001_no_comment.dat +1132 -1132
- tsp/dataloggers/test_files/rbr_001_no_comment.hex +1138 -1138
- tsp/dataloggers/test_files/rbr_002.dat +1179 -1179
- tsp/dataloggers/test_files/rbr_002.hex +1185 -1185
- tsp/dataloggers/test_files/rbr_003.hex +1292 -1292
- tsp/dataloggers/test_files/rbr_xl_001.DAT +1105 -1105
- tsp/dataloggers/test_files/rbr_xl_002.DAT +1126 -1126
- tsp/dataloggers/test_files/rbr_xl_003.DAT +4622 -4622
- tsp/dataloggers/test_files/rbr_xl_003.HEX +3587 -3587
- tsp/gtnp.py +148 -148
- tsp/labels.py +3 -3
- tsp/misc.py +90 -90
- tsp/physics.py +101 -101
- tsp/plots/static.py +388 -374
- tsp/readers.py +829 -548
- tsp/standardization/__init__.py +0 -0
- tsp/standardization/metadata.py +95 -0
- tsp/standardization/metadata_ref.py +0 -0
- tsp/standardization/validator.py +535 -0
- tsp/time.py +45 -45
- tsp/tspwarnings.py +27 -15
- tsp/utils.py +131 -101
- tsp/version.py +1 -1
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/METADATA +95 -86
- tsp-1.10.2.dist-info/RECORD +132 -0
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/licenses/LICENSE +674 -674
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/top_level.txt +1 -0
- tsp-1.8.1.dist-info/RECORD +0 -94
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/WHEEL +0 -0
tsp/dataloggers/Vemco.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
*DESCRIPTION*
|
|
4
|
+
|
|
5
|
+
Author: rparker
|
|
6
|
+
Created: 2024-01-10
|
|
7
|
+
"""
|
|
8
|
+
import os
|
|
9
|
+
import pathlib
|
|
10
|
+
import pandas as pd
|
|
11
|
+
import datetime as dt
|
|
12
|
+
|
|
13
|
+
from .AbstractReader import AbstractReader
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Vemco(AbstractReader):
    """Reader for Vemco Minilog temperature-logger files.

    Two formats are supported: the legacy ``.000`` ASCII export (a header
    of ``key=value`` lines followed by comma-separated time/temperature
    rows) and the ``.csv`` export produced by Vemco's LoggerVUE software.
    """

    def __init__(self):
        # Header metadata (model, serial number, study id, dates, ...)
        # populated by read().
        self.META = {}

    def read(self, file_path: str):
        """Read a Vemco ``.000`` or ``.csv`` export.

        Parameters
        ----------
        file_path : str
            Path to the logger file; the extension selects the parser.

        Returns
        -------
        pandas.DataFrame
            Parsed data with ``TIME`` and ``TEMPERATURE`` columns.

        Raises
        ------
        IOError
            If a ``.000`` file cannot be decoded or is not the legacy
            ASCII layout (re-export from LoggerVUE as .csv instead).
        ValueError
            If the extension is neither ``.csv`` nor ``.000``.
        """
        file_extension = pathlib.Path(file_path).suffix.lower()
        if file_extension == ".000":
            try:
                with open(file_path, "r", encoding="cp1252") as file:
                    first_line = file.readline()
                # Legacy ASCII .000 exports begin with a '*' comment line.
                if first_line.startswith("*"):
                    self._read_old_000_logger_file(file_path)
                else:
                    raise self._unreadable_error(file_path)
            except UnicodeDecodeError as e:
                # Binary / non-cp1252 .000 files cannot be parsed here.
                raise self._unreadable_error(file_path) from e
        elif file_extension == ".csv":
            self._read_logger_vue_csv(file_path)
        else:
            raise ValueError("File is not a .csv or .000")
        return self.DATA

    @staticmethod
    def _unreadable_error(file_path) -> IOError:
        """Build the shared 'unreadable file' error."""
        return IOError(f"{os.path.basename(file_path)} unreadable. Try opening this file in Logger VUE and "
                       f"exporting it as a .csv")

    def _read_old_000_logger_file(self, file_path):
        """Parse a legacy ``.000`` ASCII export into META and DATA."""
        with open(file_path, "r", encoding="cp1252") as file:
            lines = file.readlines()
        # First six lines are 'key=value' headers; the rest are data rows.
        header_lines = lines[:6]
        data_lines = [l.strip().split(",") for l in lines[6:]]
        self.META["logger_model"] = header_lines[0].split("=")[-1].strip()
        self.META["logger_sn"] = header_lines[1].split("=")[-1].strip()
        self.META["study_id"] = header_lines[2].split("=")[-1].strip()
        self.META["logging_start"] = dt.datetime.strptime(header_lines[3].split("=")[-1].strip(), "%d/%m/%Y %H:%M:%S")
        self.META["download_date"] = dt.datetime.strptime(header_lines[4].split("=")[-1].strip(), "%d/%m/%Y %H:%M:%S")
        # The sample interval is stored as HH:MM:SS; convert to a timedelta.
        sample_interval = dt.datetime.strptime(header_lines[5].split("=")[-1].strip(), "%H:%M:%S")
        self.META["sample_interval"] = dt.timedelta(hours=sample_interval.hour, minutes=sample_interval.minute,
                                                    seconds=sample_interval.second)
        self.META['raw'] = "".join(header_lines)
        # data_lines[0] is skipped -- presumably a column-title row; confirm
        # against an actual .000 export.
        self.DATA = pd.DataFrame(data_lines[1:], columns=["TIME", "TEMPERATURE"])
        self.DATA["TIME"] = pd.to_datetime(self.DATA["TIME"], format="%d/%m/%Y %H:%M:%S")
        self.DATA["TEMPERATURE"] = pd.to_numeric(self.DATA["TEMPERATURE"], errors='coerce')

    def _read_logger_vue_csv(self, file_path):
        """Parse a LoggerVUE ``.csv`` export into META and DATA."""
        with open(file_path, "r", encoding="cp1252") as file:
            lines = file.readlines()

        header_lines = lines[:7]
        data_lines = [l.strip().split(",") for l in lines[7:]]
        # header_lines[1] holds '<model>-<serial>' after a fixed-width
        # prefix; the final '\n' is dropped by the [-1] slice.
        model_and_sn = header_lines[1][15:-1]
        self.META["logger_sn"] = model_and_sn.split("-")[-1]
        self.META["logger_model"] = "-".join(model_and_sn.split("-")[:-1])
        self.META["study_id"] = header_lines[2].split(":")[-1].strip()
        self.META["logging_start"] = dt.datetime.strptime(header_lines[4][18:-1], "%Y-%m-%d %H:%M:%S")
        self.META["download_date"] = dt.datetime.strptime(header_lines[5][17:-1], "%Y-%m-%d %H:%M:%S")
        # A trailing '(...)' on header line 3 carries the UTC offset.
        # readlines() keeps the trailing '\n', so test the stripped line:
        # a plain endswith(")") can never match a newline-terminated line
        # (the [45:-2] slice below confirms a trailing ')\n' is expected).
        if header_lines[3].rstrip().endswith(")"):
            data_tz = dt.timezone(dt.timedelta(hours=int(header_lines[3][45:-2])))
            self.META["logging_start"] = self.META["logging_start"].replace(tzinfo=data_tz)
            self.META["download_date"] = self.META["download_date"].replace(tzinfo=data_tz)
            self.META["utc_offset"] = data_tz
        sample_interval = dt.datetime.strptime(header_lines[6][17:-1], "%H:%M:%S")
        self.META["sample_interval"] = dt.timedelta(hours=sample_interval.hour, minutes=sample_interval.minute,
                                                    seconds=sample_interval.second)
        self.META['raw'] = "".join(header_lines)
        self.DATA = pd.DataFrame(data_lines[1:], columns=["date", "time", "TEMPERATURE"])
        self.DATA["TIME"] = pd.to_datetime(self.DATA["date"] + " " + self.DATA["time"], format="%Y-%m-%d %H:%M:%S")
        self.DATA["TEMPERATURE"] = pd.to_numeric(self.DATA["TEMPERATURE"], errors='coerce')
        self.DATA.drop(columns=["date", "time"], inplace=True)
|
tsp/dataloggers/__init__.py
CHANGED
|
@@ -1,15 +1,15 @@
|
|
|
1
|
-
from .HOBO import HOBO, HOBOProperties
|
|
2
|
-
from .FG2 import FG2
|
|
3
|
-
from .GP5W import GP5W
|
|
4
|
-
from .Geoprecision import detect_geoprecision_type
|
|
5
|
-
from .logr import LogR
|
|
6
|
-
|
|
7
|
-
HOBO.__module__ = __name__
|
|
8
|
-
HOBOProperties.__module__ = __name__
|
|
9
|
-
FG2.__module__ =__name__
|
|
10
|
-
GP5W.__module__ = __name__
|
|
11
|
-
LogR.__module__ = __name__
|
|
12
|
-
|
|
13
|
-
__all__ = ['HOBO','HOBOProperties',
|
|
14
|
-
'FG2','GP5W', 'detect_geoprecision_type',
|
|
15
|
-
'LogR']
|
|
1
|
+
"""Datalogger reader classes re-exported at the package level."""
from .HOBO import HOBO, HOBOProperties
from .FG2 import FG2
from .GP5W import GP5W
from .Geoprecision import detect_geoprecision_type
from .logr import LogR

# Present the imported classes as members of this package in reprs/docs.
HOBO.__module__ = __name__
HOBOProperties.__module__ = __name__
FG2.__module__ = __name__
GP5W.__module__ = __name__
LogR.__module__ = __name__

__all__ = ['HOBO', 'HOBOProperties',
           'FG2', 'GP5W', 'detect_geoprecision_type',
           'LogR']
|
tsp/dataloggers/logr.py
CHANGED
|
@@ -1,115 +1,196 @@
|
|
|
1
|
-
import pandas as pd
|
|
2
|
-
import regex as re
|
|
3
|
-
import numpy as np
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
SEP = ","
|
|
9
|
-
|
|
10
|
-
def __init__(self):
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
if
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
1
|
+
import pandas as pd
|
|
2
|
+
import regex as re
|
|
3
|
+
import numpy as np
|
|
4
|
+
import datetime as dt
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class LogR:
    """Reader for LogR datalogger exports (converted temperatures or raw counts)."""

    SEP = ","  # field separator used by LogR exports

    def __init__(self):
        self.DATA = None  # pandas.DataFrame of observations, set by read()
        self.META = None  # dict of file metadata, set by read()

    def read(self, file: str, cfg_txt: str = None):
        """Read a LogR data file, optionally with its configuration file.

        Parameters
        ----------
        file : str
            Path to a LogR CSV export. May hold converted temperatures or
            raw counts (identified by epoch-second timestamps).
        cfg_txt : str, optional
            Path to the matching configuration file. Required when ``file``
            holds raw data (it supplies the calibration coefficients).

        Returns
        -------
        pandas.DataFrame
            Data with a ``TIME`` column and one column per channel; also
            stored on ``self.DATA`` (metadata on ``self.META``).

        Raises
        ------
        ValueError
            If raw data is supplied without ``cfg_txt``, or the UTC offset
            in the configuration cannot be parsed.
        """
        raw = is_raw_data(file)
        if raw and cfg_txt is None:
            raise ValueError("cfg_txt must be specified if providing raw data.")

        config_params = read_cfg_file(cfg_txt) if cfg_txt is not None else None

        header_rows = read_logr_header(file)
        columns = [line.strip().split(',') for line in header_rows if is_columns_row(line)][0]
        labels = [line.strip().split(',') for line in header_rows if is_label_row(line)][0]
        data = pd.read_csv(file, header=len(header_rows) - 1,
                           names=["TIME" if c == 'timestamp' else c for c in columns])
        if raw:
            # Raw counts: calibrate to temperatures; TIME becomes UTC-aware.
            data = convert_raw_to_temperatures(data=data, channel_metadata=config_params["channel_metadata"])
        else:
            data['TIME'] = pd.to_datetime(data['TIME'], format=dateformat())

        if config_params is not None and "UTC Offset" in config_params:
            tz = self._parse_utc_offset(config_params["UTC Offset"])
            if data['TIME'].dt.tz is None:
                data['TIME'] = data['TIME'].dt.tz_localize(tz)
            else:
                # Raw timestamps are already UTC-aware; converting instead of
                # localizing avoids a "Already tz-aware" TypeError.
                data['TIME'] = data['TIME'].dt.tz_convert(tz)

        channels = pd.Series(data.columns).str.match("^CH")
        n_channels = int(channels.sum())

        self.DATA = data
        self.META = {'label': labels,
                     # Depth guesses for the channel columns only: the label
                     # row also covers the id/timestamp columns before them.
                     # (An unguarded [-0:] slice would return every label
                     # when no channel columns are present.)
                     'guessed_depths': guess_depths(labels)[-n_channels:] if n_channels else []}
        if config_params is not None:
            self.META = self.META | config_params

        return self.DATA

    @staticmethod
    def _parse_utc_offset(text: str) -> dt.timezone:
        """Parse a UTC offset such as ``-07:00`` or ``+08`` into a timezone.

        Raises ``ValueError`` if no offset-like token is found in *text*.
        """
        match = re.compile(r"-?\+?\d{2}:?\d{0,2}").search(text)
        if match is None:
            raise ValueError("Could not parse UTC offset")
        parts = [int(ele) for ele in match.group().split(":")]
        if len(parts) == 1:
            return dt.timezone(dt.timedelta(hours=parts[0]))
        if len(parts) == 2:
            hours, minutes = parts
            # Minutes inherit the sign of the hours: "-07:30" is -(7h30m),
            # not -7h + 30m.
            return dt.timezone(dt.timedelta(hours=hours,
                                            minutes=-minutes if hours < 0 else minutes))
        raise ValueError("Could not parse UTC offset")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def read_cfg_file(file_path: str):
    """Parse a LogR configuration file into a metadata dict.

    The file begins with ``Key: Value`` header lines; the block starting at
    the line beginning with ``ChannelID`` is a tab-separated table of
    per-channel calibration coefficients.

    Parameters
    ----------
    file_path : str
        Path to the configuration (.txt) file.

    Returns
    -------
    dict
        Header fields keyed by name (``Serial Number`` renamed to
        ``logger_sn``) plus ``channel_metadata``, a DataFrame indexed by
        ``ChannelID``.
    """
    metadata = dict()
    with open(file_path, "r") as f:
        # Scan at most 50 lines of header; stop at the channel table.
        for _ in range(50):
            line = f.readline()
            if line.startswith("ChannelID"):
                break
            if line != "\n":
                # Split on the FIRST colon only, so values that themselves
                # contain colons (e.g. "UTC Offset: -07:00") are kept whole.
                key, sep, value = line.partition(":")
                if sep:
                    metadata[key.strip()] = value.strip()
    if "Serial Number" in metadata:
        metadata["logger_sn"] = metadata.pop("Serial Number")
    # The header length doubles as the row offset of the channel table.
    metadata["channel_metadata"] = pd.read_csv(file_path, delimiter="\t", header=len(metadata.keys()),
                                               index_col="ChannelID")
    return metadata
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def convert_raw_to_temperatures(data: pd.DataFrame, channel_metadata: pd.DataFrame):
    """Convert raw channel readings to temperatures (degrees C) in place.

    ``TIME`` (epoch seconds) is converted to UTC-aware datetimes.  Each
    channel column present in ``channel_metadata`` is converted from a
    ratiometric voltage reading to a thermistor resistance, then to a
    temperature via a Steinhart-Hart-style polynomial using the
    per-channel coefficients A-D (column E is read but not used here).

    Parameters
    ----------
    data : pd.DataFrame
        Raw data with ``TIME`` in epoch seconds and one column per channel.
    channel_metadata : pd.DataFrame
        Calibration coefficients, indexed by channel id.

    Returns
    -------
    pd.DataFrame
        The same frame with TIME and channel columns converted.
    """
    data["TIME"] = pd.to_datetime(data["TIME"], unit="s", utc=True)

    v_ref = 2.5      # excitation/reference voltage
    r_series = 7500  # fixed series resistor
    r_adjust = 100   # fixed resistance correction

    for channel in channel_metadata.index:
        if channel not in data.columns:
            continue
        coef = {k: channel_metadata.loc[channel, k] for k in ["A", "B", "C", "D", "E"]}
        resistance = r_series / (v_ref / data[channel] - 1) - r_adjust
        ln_r = np.log(resistance)
        data[channel] = 1 / (coef["A"]
                             + coef["B"] * ln_r
                             + coef["C"] * np.power(ln_r, 3)
                             + coef["D"] * np.power(ln_r, 5)) - 273.15
    return data
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def read_logr_header(file: str) -> list:
    """ Read metadata / header lines from LogR file

    Collects every line that precedes the first row recognized as data,
    scanning no more than 50 lines.

    Parameters
    ----------
    file : str
        path to a LogR file

    Returns
    -------
    list
        list of lines in the header block

    Raises
    ------
    ValueError
        If no data row is found within the first 50 lines.
    """
    header_lines = list()
    with open(file) as f:
        for _ in range(50):
            line = f.readline()
            if is_data_row(line):
                # First data row reached: everything before it is header.
                return header_lines
            header_lines.append(line)
    raise ValueError("Could not find start of data")
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def guess_depths(labels: list[str]) -> list[float]:
    """Extract a numeric depth from each label; None where no number found."""
    number = re.compile(r"(-?[\d\.]+)")

    depths = []
    for label in labels:
        hit = number.search(label)
        depths.append(float(hit.group(1)) if hit else None)

    return depths
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def guessed_depths_ok(depths, n_channel) -> bool:
    """ Evaluate whether the guessed depths are valid

    Valid means strictly monotonic (increasing OR decreasing, by
    convention) with exactly ``n_channel`` parseable (non-NaN) entries.

    Parameters
    ----------
    depths : sequence
        Candidate depths, e.g. from guess_depths() (None becomes NaN).
    n_channel : int
        Number of data channels the depths must cover.

    Returns
    -------
    bool
    """
    d = np.array(depths, dtype='float64')
    diffs = np.diff(d)

    # monotonic (by convention) -- either direction is acceptable.
    # Parenthesized so `not` applies to the whole disjunction; without the
    # parentheses every strictly-decreasing sequence was rejected.
    if not ((diffs > 0).all() or (diffs < 0).all()):
        return False

    # number of parsed depths must equal the number of channels
    if not sum(~np.isnan(d)) == n_channel:
        return False

    return True
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def dateformat() -> str:
    """Return the strptime/strftime pattern of LogR timestamp fields."""
    return "%Y/%m/%d %H:%M:%S"
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def is_data_row(line: str) -> bool:
    """Return True if ``line`` looks like a LogR data row.

    A data row's second comma-separated field is a timestamp: an integer
    epoch-seconds value (raw exports) or a formatted date string
    (converted exports).

    Parameters
    ----------
    line : str
        One line of a LogR file ("" at EOF).

    Returns
    -------
    bool
    """
    fields = line.split(",")
    # Covers "" at EOF and header lines with no comma at all (indexing
    # [1] unconditionally raised IndexError on the latter).
    if len(fields) < 2:
        return False
    timestamp = fields[1]
    try:
        if timestamp.isnumeric():
            dt.datetime.fromtimestamp(float(timestamp))
        else:
            dt.datetime.strptime(timestamp, dateformat())
        return True
    except (ValueError, OverflowError, OSError):
        # Not parseable as either timestamp form -> not a data row.
        return False
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def is_raw_data(file: str) -> bool:
    """Determine whether a LogR file contains raw (uncalibrated) data.

    Scans up to the first 50 lines for a row whose second comma-separated
    field parses as a timestamp: an integer epoch-seconds value means raw
    data, a formatted date string means converted temperatures.

    Parameters
    ----------
    file : str
        Path to a LogR file.

    Returns
    -------
    bool
        True for raw (epoch-second) data, False for converted data.

    Raises
    ------
    RuntimeError
        If no parseable data row is found in the first 50 lines.
    """
    with open(file) as f:
        for _ in range(50):
            fields = f.readline().split(",")
            # Blank/EOF lines and comma-less header lines cannot be data
            # rows (indexing [1] unconditionally raised IndexError on
            # comma-less lines).
            if len(fields) < 2:
                continue
            candidate = fields[1]
            try:
                if candidate.isnumeric():
                    dt.datetime.fromtimestamp(float(candidate))
                    return True
                dt.datetime.strptime(candidate, dateformat())
                return False
            except (ValueError, OverflowError, OSError):
                # Header row -- keep scanning.
                continue
    raise RuntimeError("Could not determine if raw data")
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def is_columns_row(line: str) -> bool:
    """Return True if ``line`` is the column-name header row."""
    # Anchored regex match at position 0 is equivalent to a prefix test.
    return line.startswith("SensorId")
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def is_label_row(line: str) -> bool:
    """Return True if ``line`` is the channel-label header row."""
    # Anchored regex match at position 0 is equivalent to a prefix test.
    return line.startswith("Label")
|