tsp 1.7.1__py3-none-any.whl → 1.7.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (91)
  1. tsp/__init__.py +11 -11
  2. tsp/__meta__.py +1 -1
  3. tsp/core.py +1035 -1010
  4. tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv +4 -4
  5. tsp/data/2023-01-06_755-test.metadata.txt +208 -208
  6. tsp/data/NTGS_example_csv.csv +6 -0
  7. tsp/data/NTGS_example_slash_dates.csv +6 -0
  8. tsp/data/example_geotop.csv +5240 -5240
  9. tsp/data/example_gtnp.csv +1298 -1298
  10. tsp/data/example_permos.csv +8 -0
  11. tsp/data/test_geotop_has_space.txt +5 -0
  12. tsp/dataloggers/AbstractReader.py +43 -43
  13. tsp/dataloggers/FG2.py +110 -110
  14. tsp/dataloggers/GP5W.py +114 -114
  15. tsp/dataloggers/Geoprecision.py +34 -34
  16. tsp/dataloggers/HOBO.py +914 -914
  17. tsp/dataloggers/RBRXL800.py +190 -190
  18. tsp/dataloggers/RBRXR420.py +308 -307
  19. tsp/dataloggers/__init__.py +15 -15
  20. tsp/dataloggers/logr.py +115 -115
  21. tsp/dataloggers/test_files/004448.DAT +2543 -2543
  22. tsp/dataloggers/test_files/004531.DAT +17106 -17106
  23. tsp/dataloggers/test_files/004531.HEX +3587 -3587
  24. tsp/dataloggers/test_files/004534.HEX +3587 -3587
  25. tsp/dataloggers/test_files/010252.dat +1731 -1731
  26. tsp/dataloggers/test_files/010252.hex +1739 -1739
  27. tsp/dataloggers/test_files/010274.hex +1291 -1291
  28. tsp/dataloggers/test_files/010278.hex +3544 -3544
  29. tsp/dataloggers/test_files/012064.dat +1286 -1286
  30. tsp/dataloggers/test_files/012064.hex +1294 -1294
  31. tsp/dataloggers/test_files/012081.hex +3532 -3532
  32. tsp/dataloggers/test_files/07B1592.DAT +1483 -1483
  33. tsp/dataloggers/test_files/07B1592.HEX +1806 -1806
  34. tsp/dataloggers/test_files/07B4450.DAT +2234 -2234
  35. tsp/dataloggers/test_files/07B4450.HEX +2559 -2559
  36. tsp/dataloggers/test_files/CSc_CR1000_1.dat +295 -0
  37. tsp/dataloggers/test_files/FG2_399.csv +9881 -9881
  38. tsp/dataloggers/test_files/GP5W.csv +1121 -1121
  39. tsp/dataloggers/test_files/GP5W_260.csv +1884 -1884
  40. tsp/dataloggers/test_files/GP5W_270.csv +2210 -2210
  41. tsp/dataloggers/test_files/H08-030-08_HOBOware.csv +998 -998
  42. tsp/dataloggers/test_files/RBR_01.dat +1046 -1046
  43. tsp/dataloggers/test_files/RBR_02.dat +2426 -2426
  44. tsp/dataloggers/test_files/RSTDT2055.csv +2152 -2152
  45. tsp/dataloggers/test_files/U23-001_HOBOware.csv +1001 -1001
  46. tsp/dataloggers/test_files/hobo-negative-2.txt +6396 -6396
  47. tsp/dataloggers/test_files/hobo-negative-3.txt +5593 -5593
  48. tsp/dataloggers/test_files/hobo-positive-number-1.txt +1000 -1000
  49. tsp/dataloggers/test_files/hobo-positive-number-2.csv +1003 -1003
  50. tsp/dataloggers/test_files/hobo-positive-number-3.csv +1133 -1133
  51. tsp/dataloggers/test_files/hobo-positive-number-4.csv +1209 -1209
  52. tsp/dataloggers/test_files/hobo2.csv +8702 -8702
  53. tsp/dataloggers/test_files/hobo_1_AB.csv +21732 -21732
  54. tsp/dataloggers/test_files/hobo_1_AB_Details.txt +133 -133
  55. tsp/dataloggers/test_files/hobo_1_AB_classic.csv +4373 -4373
  56. tsp/dataloggers/test_files/hobo_1_AB_defaults.csv +21732 -21732
  57. tsp/dataloggers/test_files/hobo_1_AB_minimal.txt +1358 -1358
  58. tsp/dataloggers/test_files/hobo_1_AB_var2.csv +3189 -3189
  59. tsp/dataloggers/test_files/hobo_1_AB_var3.csv +2458 -2458
  60. tsp/dataloggers/test_files/logR_ULogC16-32_1.csv +106 -106
  61. tsp/dataloggers/test_files/logR_ULogC16-32_2.csv +100 -100
  62. tsp/dataloggers/test_files/mon_3_Ta_2010-08-18_2013-02-08.txt +21724 -21724
  63. tsp/dataloggers/test_files/rbr_001.dat +1133 -1133
  64. tsp/dataloggers/test_files/rbr_001.hex +1139 -1139
  65. tsp/dataloggers/test_files/rbr_001_no_comment.dat +1132 -1132
  66. tsp/dataloggers/test_files/rbr_001_no_comment.hex +1138 -1138
  67. tsp/dataloggers/test_files/rbr_002.dat +1179 -1179
  68. tsp/dataloggers/test_files/rbr_002.hex +1185 -1185
  69. tsp/dataloggers/test_files/rbr_003.hex +1292 -1292
  70. tsp/dataloggers/test_files/rbr_003.xls +0 -0
  71. tsp/dataloggers/test_files/rbr_xl_001.DAT +1105 -1105
  72. tsp/dataloggers/test_files/rbr_xl_002.DAT +1126 -1126
  73. tsp/dataloggers/test_files/rbr_xl_003.DAT +4622 -4622
  74. tsp/dataloggers/test_files/rbr_xl_003.HEX +3587 -3587
  75. tsp/gtnp.py +148 -141
  76. tsp/labels.py +3 -3
  77. tsp/misc.py +90 -90
  78. tsp/physics.py +101 -101
  79. tsp/plots/static.py +374 -305
  80. tsp/readers.py +548 -536
  81. tsp/scratch.py +6 -0
  82. tsp/time.py +45 -45
  83. tsp/tspwarnings.py +15 -0
  84. tsp/utils.py +101 -101
  85. tsp/version.py +1 -1
  86. {tsp-1.7.1.dist-info → tsp-1.7.7.dist-info}/LICENSE +674 -674
  87. {tsp-1.7.1.dist-info → tsp-1.7.7.dist-info}/METADATA +10 -6
  88. tsp-1.7.7.dist-info/RECORD +95 -0
  89. {tsp-1.7.1.dist-info → tsp-1.7.7.dist-info}/WHEEL +5 -5
  90. tsp-1.7.1.dist-info/RECORD +0 -88
  91. {tsp-1.7.1.dist-info → tsp-1.7.7.dist-info}/top_level.txt +0 -0
tsp/scratch.py ADDED
@@ -0,0 +1,6 @@
+ from tsp import read_geotop
+
+ t = read_geotop(r"C:\Users\Nick\Desktop\soil0032.txt")
+ import pdb;pdb.set_trace()
+
+ print('ok')
tsp/time.py CHANGED
@@ -1,46 +1,46 @@
- import re
- from datetime import datetime, tzinfo
-
- from typing import Union
-
-
- def get_utc_offset(offset: "Union[str,int]") -> int:
-     """Get the UTC offset in seconds from a string or integer"""
-
-     if isinstance(offset, str):
-         if offset.lower() == "utc" or (offset.lower() == "z"):
-             return 0
-
-         pattern = re.compile(r"([+-]?)(\d{2}):(\d{2})")
-         match = pattern.match(offset)
-
-         if not match:
-             raise ValueError("Offset must be a string in the format '+HH:MM' or '-HH:MM'")
-
-         sign = match.group(1)
-         hours = int(match.group(2))
-         minutes = int(match.group(3))
-         utc_offset = (hours*60 + minutes)*60
-         if sign == "-":
-             utc_offset *= -1
-
-     elif isinstance(offset, int):
-         utc_offset = offset
-
-     else:
-         raise ValueError("Offset must be a string in the format '+HH:MM' or '-HH:MM' or an integer in seconds")
-
-     return utc_offset
-
-
- def format_utc_offset(offset: tzinfo) -> str:
-     """Format a UTC offset as a string in the format '+HH:MM' or '-HH:MM'"""
-     utc_offset = offset.utcoffset(datetime.now()).total_seconds()
-     sign = "-" if utc_offset < 0 else "+"
-     hours = int(abs(utc_offset)//3600)
-     minutes = int(abs(utc_offset)%3600/60)
-
-     if hours == 0 and minutes == 0:
-         return "UTC"
-
+ import re
+ from datetime import datetime, tzinfo
+
+ from typing import Union
+
+
+ def get_utc_offset(offset: "Union[str,int]") -> int:
+     """Get the UTC offset in seconds from a string or integer"""
+
+     if isinstance(offset, str):
+         if offset.lower() == "utc" or (offset.lower() == "z"):
+             return 0
+
+         pattern = re.compile(r"([+-]?)(\d{2}):(\d{2})")
+         match = pattern.match(offset)
+
+         if not match:
+             raise ValueError("Offset must be a string in the format '+HH:MM' or '-HH:MM'")
+
+         sign = match.group(1)
+         hours = int(match.group(2))
+         minutes = int(match.group(3))
+         utc_offset = (hours*60 + minutes)*60
+         if sign == "-":
+             utc_offset *= -1
+
+     elif isinstance(offset, int):
+         utc_offset = offset
+
+     else:
+         raise ValueError("Offset must be a string in the format '+HH:MM' or '-HH:MM' or an integer in seconds")
+
+     return utc_offset
+
+
+ def format_utc_offset(offset: tzinfo) -> str:
+     """Format a UTC offset as a string in the format '+HH:MM' or '-HH:MM'"""
+     utc_offset = offset.utcoffset(datetime.now()).total_seconds()
+     sign = "-" if utc_offset < 0 else "+"
+     hours = int(abs(utc_offset)//3600)
+     minutes = int(abs(utc_offset)%3600/60)
+
+     if hours == 0 and minutes == 0:
+         return "UTC"
+
  return f"{sign}{hours:02d}:{minutes:02d}"
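
For orientation, the parse/format behaviour of the two helpers can be checked against a few hand-computed values. This is a hypothetical usage sketch, not code from the package:

    from datetime import timedelta, timezone

    from tsp.time import get_utc_offset, format_utc_offset

    # "+05:30" parses to (5*60 + 30)*60 = 19800 seconds.
    assert get_utc_offset("+05:30") == 19800
    # "utc" and "z" (any case) are accepted as zero offset.
    assert get_utc_offset("UTC") == 0
    # Integers are taken as seconds and passed through unchanged.
    assert get_utc_offset(-25200) == -25200

    # A tzinfo formats back to "+HH:MM"/"-HH:MM", or "UTC" at zero offset.
    assert format_utc_offset(timezone(timedelta(hours=-7))) == "-07:00"
    assert format_utc_offset(timezone.utc) == "UTC"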
tsp/tspwarnings.py ADDED
@@ -0,0 +1,15 @@
+ import numpy as np
+
+
+ class DuplicateTimesWarning(UserWarning):
+     """For when duplicate times are found in a file."""
+     def __init__(self, times):
+         self.times = times
+
+     def _msg(self, times) -> str:
+         m = f"Duplicate timestamps found: {times[np.where(times.duplicated())[0]]}. That's bad."
+         return m
+
+     def __str__(self):
+         return self._msg(self.times)
+
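
The message formatting relies on times.duplicated() and numpy indexing, which implies a pandas Index or Series. A minimal sketch of how a file reader might raise the warning (the surrounding code is illustrative, not taken from the package):

    import warnings

    import pandas as pd

    from tsp.tspwarnings import DuplicateTimesWarning

    # Assumed input: a pandas DatetimeIndex parsed from a data file.
    times = pd.DatetimeIndex(["2023-01-01 00:00", "2023-01-01 00:00", "2023-01-01 01:00"])

    if times.duplicated().any():
        # warnings.warn accepts a Warning instance directly.
        warnings.warn(DuplicateTimesWarning(times))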
tsp/utils.py CHANGED
@@ -1,101 +1,101 @@
- import pandas as pd
- import numpy as np
-
- import tsp
- from tsp import TSP
-
-
- def resolve_duplicate_times(t: TSP, keep="first") -> TSP:
-     """Eliminate duplicate times in a TSP.
-
-     Parameters
-     ----------
-     tsp : TSP
-         TSP to resolve duplicate times in.
-     keep : str, optional
-         Method to resolve duplicate times. Chosen from "first", "average", "last", "strip"
-         by default "first"
-
-     Returns
-     -------
-     TSP
-         TSP with no duplicated times."""
-     resolver = _get_duplicate_resolver(keep)
-     return resolver(t)
-
-
- def _get_duplicate_resolver(keep: str):
-     if keep == "first":
-         return _first_duplicate_time
-     elif keep == "average":
-         return _average_duplicate_time
-     elif keep == "last":
-         return _last_duplicate_time
-     elif keep == "strip":
-         return _strip_duplicate_time
-     else:
-         raise ValueError(f"Unknown duplicate resolver method: {keep}")
-
-
- def _first_duplicate_time(t: TSP):
-     df = t.wide
-     df = df[~df.index.duplicated(keep="first")]
-
-     time = df.index
-     values = df.drop(['time'], axis=1).values
-     depths = df.drop(['time'], axis=1).columns
-
-     t_new = TSP(times=time, values=values, depths=depths,
-                 latitude=t.latitude, longitude=t.longitude,
-                 site_id=t.site_id, metadata=t.metadata)
-
-     return t_new
-
-
- def _last_duplicate_time(t: TSP):
-     df = t.wide
-     df = df[~df.index.duplicated(keep="last")]
-
-     time = df.index
-     values = df.drop(['time'], axis=1).values
-     depths = df.drop(['time'], axis=1).columns
-
-     t_new = TSP(times=time, values=values, depths=depths,
-                 latitude=t.latitude, longitude=t.longitude,
-                 site_id=t.site_id, metadata=t.metadata)
-
-     return t_new
-
-
- def _strip_duplicate_time(t: TSP):
-     df = t.wide
-     df = df[~df.index.duplicated(keep=False)]
-
-     time = df.index
-     values = df.drop(['time'], axis=1).values
-     depths = df.drop(['time'], axis=1).columns
-
-     t_new = TSP(times=time, values=values, depths=depths,
-                 latitude=t.latitude, longitude=t.longitude,
-                 site_id=t.site_id, metadata=t.metadata)
-
-     return t_new
-
-
- def _average_duplicate_time(t: TSP):
-     singleton = t.wide[~t.wide.index.duplicated(keep=False)]
-     duplicated = t.wide[t.wide.index.duplicated(keep=False)].drop(['time'], axis=1).reset_index()
-     averaged = duplicated.groupby(duplicated['index']).apply(lambda x: x[~x.isna()].mean(numeric_only=True))
-     averaged.insert(0, 'time',averaged.index)
-
-     df = pd.concat([singleton, averaged], ignore_index=False).sort_index()
-
-     time = df.index
-     values = df.drop(['time'], axis=1).values
-     depths = df.drop(['time'], axis=1).columns
-
-     t_new = TSP(times=time, values=values, depths=depths,
-                 latitude=t.latitude, longitude=t.longitude,
-                 site_id=t.site_id, metadata=t.metadata)
-
-     return t_new
+ import pandas as pd
+ import numpy as np
+
+ import tsp
+ from tsp import TSP
+
+
+ def resolve_duplicate_times(t: TSP, keep="first") -> TSP:
+     """Eliminate duplicate times in a TSP.
+
+     Parameters
+     ----------
+     tsp : TSP
+         TSP to resolve duplicate times in.
+     keep : str, optional
+         Method to resolve duplicate times. Chosen from "first", "average", "last", "strip"
+         by default "first"
+
+     Returns
+     -------
+     TSP
+         TSP with no duplicated times."""
+     resolver = _get_duplicate_resolver(keep)
+     return resolver(t)
+
+
+ def _get_duplicate_resolver(keep: str):
+     if keep == "first":
+         return _first_duplicate_time
+     elif keep == "average":
+         return _average_duplicate_time
+     elif keep == "last":
+         return _last_duplicate_time
+     elif keep == "strip":
+         return _strip_duplicate_time
+     else:
+         raise ValueError(f"Unknown duplicate resolver method: {keep}")
+
+
+ def _first_duplicate_time(t: TSP):
+     df = t.wide
+     df = df[~df.index.duplicated(keep="first")]
+
+     time = df.index
+     values = df.drop(['time'], axis=1).values
+     depths = df.drop(['time'], axis=1).columns
+
+     t_new = TSP(times=time, values=values, depths=depths,
+                 latitude=t.latitude, longitude=t.longitude,
+                 site_id=t.site_id, metadata=t.metadata)
+
+     return t_new
+
+
+ def _last_duplicate_time(t: TSP):
+     df = t.wide
+     df = df[~df.index.duplicated(keep="last")]
+
+     time = df.index
+     values = df.drop(['time'], axis=1).values
+     depths = df.drop(['time'], axis=1).columns
+
+     t_new = TSP(times=time, values=values, depths=depths,
+                 latitude=t.latitude, longitude=t.longitude,
+                 site_id=t.site_id, metadata=t.metadata)
+
+     return t_new
+
+
+ def _strip_duplicate_time(t: TSP):
+     df = t.wide
+     df = df[~df.index.duplicated(keep=False)]
+
+     time = df.index
+     values = df.drop(['time'], axis=1).values
+     depths = df.drop(['time'], axis=1).columns
+
+     t_new = TSP(times=time, values=values, depths=depths,
+                 latitude=t.latitude, longitude=t.longitude,
+                 site_id=t.site_id, metadata=t.metadata)
+
+     return t_new
+
+
+ def _average_duplicate_time(t: TSP):
+     singleton = t.wide[~t.wide.index.duplicated(keep=False)]
+     duplicated = t.wide[t.wide.index.duplicated(keep=False)].drop(['time'], axis=1).reset_index()
+     averaged = duplicated.groupby(duplicated['index']).apply(lambda x: x[~x.isna()].mean(numeric_only=True))
+     averaged.insert(0, 'time',averaged.index)
+
+     df = pd.concat([singleton, averaged], ignore_index=False).sort_index()
+
+     time = df.index
+     values = df.drop(['time'], axis=1).values
+     depths = df.drop(['time'], axis=1).columns
+
+     t_new = TSP(times=time, values=values, depths=depths,
+                 latitude=t.latitude, longitude=t.longitude,
+                 site_id=t.site_id, metadata=t.metadata)
+
+     return t_new
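
A hypothetical usage sketch for the resolver above. The TSP keyword arguments mirror the names used inside the module, but whether the constructor accepts only these three is an assumption; the timestamps, depths, and temperature values are invented for illustration:

    import numpy as np
    import pandas as pd

    from tsp import TSP
    from tsp.utils import resolve_duplicate_times

    # Two readings share the 00:00 timestamp; one column per sensor depth.
    times = pd.DatetimeIndex(["2023-01-01 00:00", "2023-01-01 00:00", "2023-01-01 01:00"])
    values = np.array([[-1.0, -2.0],
                       [-1.2, -2.2],
                       [-1.1, -2.1]])

    t = TSP(times=times, values=values, depths=[0.5, 1.0])

    # keep="average" collapses the duplicated 00:00 rows into their mean;
    # "first" and "last" keep one row, "strip" drops all duplicated times.
    deduped = resolve_duplicate_times(t, keep="average")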
tsp/version.py CHANGED
@@ -1 +1 @@
- version="1.7.1"
+ version="1.7.7"