tsp 1.7.7__py3-none-any.whl → 1.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tsp has been flagged as potentially problematic; see the registry's advisory page for details.
- tsp/__init__.py +11 -11
- tsp/__meta__.py +1 -1
- tsp/concatenation.py +153 -0
- tsp/core.py +1108 -1035
- tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv +4 -4
- tsp/data/2023-01-06_755-test.metadata.txt +208 -208
- tsp/data/NTGS_example_csv.csv +6 -6
- tsp/data/NTGS_example_slash_dates.csv +6 -6
- tsp/data/example_geotop.csv +5240 -5240
- tsp/data/example_gtnp.csv +1298 -1298
- tsp/data/example_permos.csv +7 -7
- tsp/data/test_geotop_has_space.txt +5 -5
- tsp/dataloggers/AbstractReader.py +43 -43
- tsp/dataloggers/FG2.py +110 -110
- tsp/dataloggers/GP5W.py +114 -114
- tsp/dataloggers/Geoprecision.py +34 -34
- tsp/dataloggers/HOBO.py +914 -914
- tsp/dataloggers/RBRXL800.py +190 -190
- tsp/dataloggers/RBRXR420.py +308 -308
- tsp/dataloggers/__init__.py +15 -15
- tsp/dataloggers/logr.py +115 -115
- tsp/dataloggers/test_files/004448.DAT +2543 -2543
- tsp/dataloggers/test_files/004531.DAT +17106 -17106
- tsp/dataloggers/test_files/004531.HEX +3587 -3587
- tsp/dataloggers/test_files/004534.HEX +3587 -3587
- tsp/dataloggers/test_files/010252.dat +1731 -1731
- tsp/dataloggers/test_files/010252.hex +1739 -1739
- tsp/dataloggers/test_files/010274.hex +1291 -1291
- tsp/dataloggers/test_files/010278.hex +3544 -3544
- tsp/dataloggers/test_files/012064.dat +1286 -1286
- tsp/dataloggers/test_files/012064.hex +1294 -1294
- tsp/dataloggers/test_files/012081.hex +3532 -3532
- tsp/dataloggers/test_files/07B1592.DAT +1483 -1483
- tsp/dataloggers/test_files/07B1592.HEX +1806 -1806
- tsp/dataloggers/test_files/07B4450.DAT +2234 -2234
- tsp/dataloggers/test_files/07B4450.HEX +2559 -2559
- tsp/dataloggers/test_files/FG2_399.csv +9881 -9881
- tsp/dataloggers/test_files/GP5W.csv +1121 -1121
- tsp/dataloggers/test_files/GP5W_260.csv +1884 -1884
- tsp/dataloggers/test_files/GP5W_270.csv +2210 -2210
- tsp/dataloggers/test_files/H08-030-08_HOBOware.csv +998 -998
- tsp/dataloggers/test_files/RBR_01.dat +1046 -1046
- tsp/dataloggers/test_files/RBR_02.dat +2426 -2426
- tsp/dataloggers/test_files/RSTDT2055.csv +2152 -2152
- tsp/dataloggers/test_files/U23-001_HOBOware.csv +1001 -1001
- tsp/dataloggers/test_files/hobo-negative-2.txt +6396 -6396
- tsp/dataloggers/test_files/hobo-negative-3.txt +5593 -5593
- tsp/dataloggers/test_files/hobo-positive-number-1.txt +1000 -1000
- tsp/dataloggers/test_files/hobo-positive-number-2.csv +1003 -1003
- tsp/dataloggers/test_files/hobo-positive-number-3.csv +1133 -1133
- tsp/dataloggers/test_files/hobo-positive-number-4.csv +1209 -1209
- tsp/dataloggers/test_files/hobo2.csv +8702 -8702
- tsp/dataloggers/test_files/hobo_1_AB.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_Details.txt +133 -133
- tsp/dataloggers/test_files/hobo_1_AB_classic.csv +4373 -4373
- tsp/dataloggers/test_files/hobo_1_AB_defaults.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_minimal.txt +1358 -1358
- tsp/dataloggers/test_files/hobo_1_AB_var2.csv +3189 -3189
- tsp/dataloggers/test_files/hobo_1_AB_var3.csv +2458 -2458
- tsp/dataloggers/test_files/logR_ULogC16-32_1.csv +106 -106
- tsp/dataloggers/test_files/logR_ULogC16-32_2.csv +100 -100
- tsp/dataloggers/test_files/mon_3_Ta_2010-08-18_2013-02-08.txt +21724 -21724
- tsp/dataloggers/test_files/rbr_001.dat +1133 -1133
- tsp/dataloggers/test_files/rbr_001.hex +1139 -1139
- tsp/dataloggers/test_files/rbr_001_no_comment.dat +1132 -1132
- tsp/dataloggers/test_files/rbr_001_no_comment.hex +1138 -1138
- tsp/dataloggers/test_files/rbr_002.dat +1179 -1179
- tsp/dataloggers/test_files/rbr_002.hex +1185 -1185
- tsp/dataloggers/test_files/rbr_003.hex +1292 -1292
- tsp/dataloggers/test_files/rbr_003.xls +0 -0
- tsp/dataloggers/test_files/rbr_xl_001.DAT +1105 -1105
- tsp/dataloggers/test_files/rbr_xl_002.DAT +1126 -1126
- tsp/dataloggers/test_files/rbr_xl_003.DAT +4622 -4622
- tsp/dataloggers/test_files/rbr_xl_003.HEX +3587 -3587
- tsp/gtnp.py +148 -148
- tsp/labels.py +3 -3
- tsp/misc.py +90 -90
- tsp/physics.py +101 -101
- tsp/plots/static.py +373 -373
- tsp/readers.py +548 -548
- tsp/time.py +45 -45
- tsp/tspwarnings.py +14 -14
- tsp/utils.py +101 -101
- tsp/version.py +1 -1
- {tsp-1.7.7.dist-info → tsp-1.8.0.dist-info}/METADATA +30 -23
- tsp-1.8.0.dist-info/RECORD +94 -0
- {tsp-1.7.7.dist-info → tsp-1.8.0.dist-info}/WHEEL +5 -5
- {tsp-1.7.7.dist-info → tsp-1.8.0.dist-info/licenses}/LICENSE +674 -674
- tsp/dataloggers/test_files/CSc_CR1000_1.dat +0 -295
- tsp/scratch.py +0 -6
- tsp-1.7.7.dist-info/RECORD +0 -95
- {tsp-1.7.7.dist-info → tsp-1.8.0.dist-info}/top_level.txt +0 -0
tsp/dataloggers/logr.py
CHANGED
|
@@ -1,115 +1,115 @@
|
|
|
1
|
-
import pandas as pd
|
|
2
|
-
import regex as re
|
|
3
|
-
import numpy as np
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
class LogR:
|
|
7
|
-
|
|
8
|
-
SEP = ","
|
|
9
|
-
|
|
10
|
-
def __init__(self):
|
|
11
|
-
pass
|
|
12
|
-
|
|
13
|
-
def read(self, file):
|
|
14
|
-
headers = read_logr_header(file)
|
|
15
|
-
|
|
16
|
-
columns = [line.strip().split(',') for line in headers if is_columns_row(line)][0]
|
|
17
|
-
labels = [line.strip().split(',') for line in headers if is_label_row(line)][0]
|
|
18
|
-
data = pd.read_csv(file, skiprows=len(headers))
|
|
19
|
-
|
|
20
|
-
data.columns = ["TIME" if c == 'timestamp' else c for c in columns]
|
|
21
|
-
data['TIME'] = pd.to_datetime(data['TIME'], format=dateformat())
|
|
22
|
-
|
|
23
|
-
channels = pd.Series(data.columns).str.match("^CH")
|
|
24
|
-
|
|
25
|
-
self.DATA = data
|
|
26
|
-
self.META = {
|
|
27
|
-
'label': labels,
|
|
28
|
-
'guessed_depths': guess_depths(labels)[-sum(channels):]
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
return self.DATA
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
def read_logr_header(file: str) -> "list":
|
|
35
|
-
""" Read metadata / header lines from LogR file
|
|
36
|
-
|
|
37
|
-
Parameters
|
|
38
|
-
----------
|
|
39
|
-
file : str
|
|
40
|
-
path to a LogR file
|
|
41
|
-
|
|
42
|
-
Returns
|
|
43
|
-
-------
|
|
44
|
-
list
|
|
45
|
-
list of lines in the header block
|
|
46
|
-
|
|
47
|
-
Raises
|
|
48
|
-
------
|
|
49
|
-
ValueError
|
|
50
|
-
_description_
|
|
51
|
-
"""
|
|
52
|
-
found_data = False
|
|
53
|
-
max_rows = 50
|
|
54
|
-
header_lines = []
|
|
55
|
-
|
|
56
|
-
with open(file) as f:
|
|
57
|
-
while not found_data and max_rows:
|
|
58
|
-
max_rows -= 1
|
|
59
|
-
|
|
60
|
-
line = f.readline()
|
|
61
|
-
|
|
62
|
-
if is_data_row(line):
|
|
63
|
-
found_data = True
|
|
64
|
-
break
|
|
65
|
-
|
|
66
|
-
else:
|
|
67
|
-
header_lines.append(line)
|
|
68
|
-
|
|
69
|
-
if not found_data:
|
|
70
|
-
raise ValueError("Could not find start of data")
|
|
71
|
-
|
|
72
|
-
return header_lines
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
def guess_depths(labels: "list[str]") -> "list[float]":
|
|
76
|
-
pattern = re.compile(r"(-?[\d\.]+)")
|
|
77
|
-
|
|
78
|
-
matches = [pattern.search(l) for l in labels]
|
|
79
|
-
depths = [float(d.group(1)) if d else None for d in matches]
|
|
80
|
-
|
|
81
|
-
return depths
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def guessed_depths_ok(depths, n_channel) -> bool:
|
|
85
|
-
""" Evaluate whether the guessed depths are valid """
|
|
86
|
-
d = np.array(depths, dtype='float64')
|
|
87
|
-
|
|
88
|
-
# monotonic (by convention)
|
|
89
|
-
if not (np.diff(d) > 0).all() or (np.diff(d) < 0).all():
|
|
90
|
-
return False
|
|
91
|
-
|
|
92
|
-
# equal to number of channels
|
|
93
|
-
if not sum(~np.isnan(d)) == n_channel:
|
|
94
|
-
return False
|
|
95
|
-
|
|
96
|
-
return True
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
def dateformat():
|
|
100
|
-
return "%Y/%m/%d %H:%M:%S"
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
def is_data_row(line: str) -> bool:
|
|
104
|
-
pattern = re.compile(r"^,\d{4}/\d{2}/\d{2}\s\d{2}:")
|
|
105
|
-
return bool(pattern.match(line))
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
def is_columns_row(line:str) -> bool:
|
|
109
|
-
pattern = re.compile(r"^SensorId")
|
|
110
|
-
return bool(pattern.match(line))
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
def is_label_row(line: str) -> bool:
|
|
114
|
-
pattern = re.compile(r"^Label")
|
|
115
|
-
return bool(pattern.match(line))
|
|
1
|
+
import pandas as pd
|
|
2
|
+
import regex as re
|
|
3
|
+
import numpy as np
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class LogR:
    """Reader for LogR datalogger CSV exports.

    After ``read()``, ``DATA`` holds the parsed table and ``META`` holds
    the label row plus depths guessed from the channel labels.
    """

    SEP = ","

    def __init__(self):
        pass

    def read(self, file):
        """Read a LogR export file.

        Parameters
        ----------
        file : str
            Path to a LogR csv file.

        Returns
        -------
        pandas.DataFrame
            The data table with a parsed ``TIME`` column.
        """
        headers = read_logr_header(file)

        # the header block contains a column-name row ("SensorId,...")
        # and a label row ("Label,..."); take the first of each
        columns = [line.strip().split(',') for line in headers if is_columns_row(line)][0]
        labels = [line.strip().split(',') for line in headers if is_label_row(line)][0]
        data = pd.read_csv(file, skiprows=len(headers))

        data.columns = ["TIME" if c == 'timestamp' else c for c in columns]
        data['TIME'] = pd.to_datetime(data['TIME'], format=dateformat())

        # boolean mask of measurement channels (column names beginning "CH")
        channels = pd.Series(data.columns).str.match("^CH")
        n_channels = int(channels.sum())

        self.DATA = data
        self.META = {
            'label': labels,
            # guard n_channels == 0: slicing with [-0:] would return the
            # whole label list instead of no depths
            'guessed_depths': guess_depths(labels)[-n_channels:] if n_channels else []
        }

        return self.DATA
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def read_logr_header(file: str) -> "list":
    """ Read metadata / header lines from LogR file

    Parameters
    ----------
    file : str
        path to a LogR file

    Returns
    -------
    list
        list of lines in the header block

    Raises
    ------
    ValueError
        If no data row is found within the first 50 lines of the file.
    """
    MAX_HEADER_LINES = 50  # sanity limit so a malformed file cannot scan forever
    header_lines = []

    with open(file) as f:
        for _ in range(MAX_HEADER_LINES):
            line = f.readline()
            # first data-looking row terminates the header block; it is
            # not itself part of the header
            if is_data_row(line):
                return header_lines
            header_lines.append(line)

    raise ValueError("Could not find start of data")
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def guess_depths(labels: "list[str]") -> "list[float]":
    """ Guess sensor depths from channel label strings.

    Parameters
    ----------
    labels : list[str]
        label strings, e.g. the cells of the "Label" header row

    Returns
    -------
    list[float]
        one entry per label: the first number found in the label,
        or None when the label contains no number
    """
    # require at least one digit in the match: the previous pattern
    # r"(-?[\d\.]+)" also matched digit-free runs of dots (e.g. "...")
    # and crashed in float()
    pattern = re.compile(r"(-?(?:\d+\.?\d*|\.\d+))")

    matches = [pattern.search(label) for label in labels]
    depths = [float(m.group(1)) if m else None for m in matches]

    return depths
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def guessed_depths_ok(depths, n_channel) -> bool:
    """ Evaluate whether the guessed depths are valid

    Valid means strictly monotonic (increasing or decreasing, by
    convention) with exactly ``n_channel`` non-NaN values.
    """
    d = np.array(depths, dtype='float64')

    # monotonic (by convention) in either direction.  Parenthesized so
    # `not` applies to the whole disjunction: the previous form
    # `not A.all() or B.all()` rejected strictly-decreasing sequences.
    increasing = bool((np.diff(d) > 0).all())
    decreasing = bool((np.diff(d) < 0).all())
    if not (increasing or decreasing):
        return False

    # number of guessed (non-NaN) depths must equal number of channels
    if not sum(~np.isnan(d)) == n_channel:
        return False

    return True
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def dateformat():
    """ Return the strftime/strptime pattern used by LogR timestamps """
    logr_timestamp_format = "%Y/%m/%d %H:%M:%S"
    return logr_timestamp_format
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def is_data_row(line: str) -> bool:
    """ True when `line` begins with an empty field followed by a
    YYYY/MM/DD HH: timestamp, i.e. looks like a data record """
    data_row_start = re.compile(r"^,\d{4}/\d{2}/\d{2}\s\d{2}:")
    return data_row_start.match(line) is not None
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def is_columns_row(line:str) -> bool:
    """ True when `line` is the column-name header row """
    # equivalent to matching the anchored pattern r"^SensorId"
    return line.startswith("SensorId")
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def is_label_row(line: str) -> bool:
    """ True when `line` is the channel-label header row """
    # equivalent to matching the anchored pattern r"^Label"
    return line.startswith("Label")
|