tsp 1.7.3-py3-none-any.whl → 1.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


tsp/__meta__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  # Automatically created. Please do not edit.
2
- __version__ = '1.7.3'
2
+ __version__ = '1.8.0'
3
3
  __author__ = 'Nick Brown'
tsp/concatenation.py ADDED
@@ -0,0 +1,153 @@
1
+ from __future__ import annotations
2
+
3
+ import warnings
+
+ import numpy as np
4
+ import pandas as pd
5
+
6
+ from typing import Any, TYPE_CHECKING
7
+
8
+
9
+ if TYPE_CHECKING:
10
+ from tsp import TSP
11
+
12
+
13
+ def _tsp_concat(tsp_list: "list[TSP]", on_conflict="error", metadata='first') -> dict[str, Any]:
14
+ """ Helper for core.tsp_concat """
15
+ # Validate the TSPs in the list
16
+ _validate_tsp_list(tsp_list)
17
+
18
+ # Combine the TSPs
19
+ dfs = [t.wide for t in tsp_list]
20
+ combined_df = _concat_deduplicate(dfs, on_conflict=on_conflict)
21
+
22
+ # Combine metadata
23
+ if metadata == 'first':
24
+ metadata = {key:val for key, val in tsp_list[0].metadata.items()}
25
+ latitude = tsp_list[0].latitude
26
+ longitude = tsp_list[0].longitude
27
+ site_id = tsp_list[0].site_id
28
+
29
+ elif metadata == 'identical':
30
+ metadata = {key:val for key, val in tsp_list[0].metadata.items()}
31
+ for key, val in list(metadata.items()):
32
+ for t in tsp_list[1:]:
33
+ if key not in t.metadata or t.metadata[key] != val:
34
+ metadata.pop(key, None)
35
+ latitude = _none_if_not_identical([t.latitude for t in tsp_list])
36
+ longitude = _none_if_not_identical([t.longitude for t in tsp_list])
37
+ site_id = _none_if_not_identical([t.site_id for t in tsp_list])
38
+
39
+ elif metadata == 'none':
40
+ metadata = None
41
+ latitude, longitude, site_id = None, None, None
42
+
43
+ else:
44
+ raise ValueError(f"Unknown metadata method: {metadata}")
45
+
46
+ #final_tsp = TSP(times=combined_df.index, values=combined_df.values, depths=combined_df.columns,
47
+ # latitude=latitude, longitude=longitude,
48
+ # site_id=site_id, metadata=metadata)
49
+ try:
50
+ combined_df.drop('time', axis=1, inplace=True)
51
+ except KeyError:
52
+ Warning("Deprecation Error: The 'time' column is no longer used in TSP objects. Please update your code to avoid this warning.")
53
+
54
+ tsp_dict = {
55
+ 'times': combined_df.index,
56
+ 'values': combined_df.values,
57
+ 'depths': combined_df.columns,
58
+ 'latitude': latitude,
59
+ 'longitude': longitude,
60
+ 'site_id': site_id,
61
+ 'metadata': metadata
62
+ }
63
+ return tsp_dict
64
+
65
+
66
+ def _none_if_not_identical(items):
67
+ """Check if all elements in the list are identical. If they are, return the first element; otherwise, return None."""
68
+ first = items[0]
69
+ for item in items[1:]:
70
+ if item != first:
71
+ return None
72
+ return first
73
+
74
+
75
+ def _validate_tsp_list(tsp_list: "list[TSP]"):
76
+ """Check that all TSPs in the list have the same depths."""
77
+ depths0 = tsp_list[0].depths
78
+ for t in tsp_list[1:]:
79
+ if not np.array_equal(depths0, t.depths):
80
+ raise ValueError("All TSPs must have the same depths.")
81
+
82
+
83
+ def _concat_deduplicate(df_list, on_conflict='error'):
84
+ """
85
+ Concatenates a list of DataFrames, handling duplicate indices based on row values.
86
+
87
+ Args:
88
+ df_list (list): A list of pandas DataFrames. Assumes they have identical
89
+ column names.
90
+ on_conflict (str): Specifies how to handle duplicate indices with
91
+ unequal row values.
92
+ - 'error': Raise a ValueError (default).
93
+ - 'keep_first': Keep the row corresponding to the first
94
+ DataFrame in the list where the index appeared.
95
+
96
+ Returns:
97
+ pandas.DataFrame: The concatenated DataFrame with duplicates handled
98
+ according to the specified rules.
99
+
100
+ Raises:
101
+ ValueError: If df_list is empty.
102
+ ValueError: If on_conflict is not 'error' or 'keep_first'.
103
+ ValueError: If on_conflict='error' and duplicate indices with
104
+ non-identical row values are found.
105
+ """
106
+ if not df_list:
107
+ raise ValueError("Input DataFrame list cannot be empty.")
108
+
109
+ if on_conflict not in ['error', 'keep_first']:
110
+ raise ValueError("on_conflict must be either 'error' or 'keep_first'")
111
+
112
+ # Store original index name if it exists
113
+ original_index_name = df_list[0].index.name
114
+
115
+ # Concatenate all DataFrames. The order is preserved.
116
+ combined_df = pd.concat(df_list, ignore_index=False) # Keep original indices
117
+
118
+ temp_index_col = "__temp_index__"
119
+ combined_reset = combined_df.reset_index(names=temp_index_col)
120
+
121
+ # Drop rows that are duplicates based on *all* columns
122
+ deduplicated_reset = combined_reset.drop_duplicates(keep='first')
123
+
124
+ # Check for remaining duplicates *only* in the original index column.
125
+ remaining_duplicates_mask = deduplicated_reset.duplicated(subset=temp_index_col, keep=False)
126
+
127
+ if remaining_duplicates_mask.any():
128
+ # We have indices that appeared multiple times with different values.
129
+ if on_conflict == 'error':
130
+ conflicting_indices = deduplicated_reset.loc[remaining_duplicates_mask, temp_index_col].unique()
131
+ raise ValueError(
132
+ f"Duplicate indices with non-identical values found: "
133
+ f"{list(conflicting_indices)}. Use on_conflict='keep_first' to keep "
134
+ f"the first occurrence."
135
+ )
136
+ elif on_conflict == 'keep_first':
137
+ # Drop the later occurrences of these conflicting index values.
138
+ # Since 'deduplicated_reset' preserved the first unique (index, row_value)
139
+ # combination, dropping duplicates based solely on the index column
140
+ # while keeping the first achieves the desired outcome.
141
+ final_reset = deduplicated_reset.drop_duplicates(subset=temp_index_col, keep='first')
142
+ else:
143
+ pass
144
+ else:
145
+ # No conflicting duplicates (duplicate indices with different values) were found.
146
+ final_reset = deduplicated_reset
147
+
148
+ final_df = final_reset.set_index(temp_index_col)
149
+ final_df.index.name = original_index_name
150
+ # Sort by time, ascending
151
+ final_df.sort_index(inplace=True)
152
+
153
+ return final_df
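
For orientation, a minimal sketch (not part of the diff) of what the deduplication rules in _concat_deduplicate mean in practice. It assumes tsp 1.8.0 and pandas >= 1.5 are installed; the depths and values are made up, and since _concat_deduplicate is a private helper, the public entry points tsp_concat and TSP.__add__ (added in core.py below) are the supported interface.

    import pandas as pd
    from tsp.concatenation import _concat_deduplicate  # private helper added in 1.8.0

    # Two overlapping daily records for one hypothetical 0.5 m sensor.
    a = pd.DataFrame({0.5: [-1.0, -1.1, -1.2]},
                     index=pd.date_range("2020-01-01", periods=3, freq="D"))
    b = pd.DataFrame({0.5: [-1.2, -1.3, -1.4]},
                     index=pd.date_range("2020-01-03", periods=3, freq="D"))

    # The 2020-01-03 rows are identical, so they collapse silently: 5 unique times remain.
    merged = _concat_deduplicate([a, b], on_conflict="error")

    # Had the overlapping rows disagreed, on_conflict="error" would raise ValueError,
    # while on_conflict="keep_first" would keep the row from `a`.
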
tsp/core.py CHANGED
@@ -19,15 +19,19 @@ except ModuleNotFoundError:
19
19
  warnings.warn("Missing netCDF4 library. Some functionality will be limited.", stacklevel=2)
20
20
 
21
21
  from typing import Union, Optional
22
+ from numpy.typing import NDArray
22
23
  from datetime import datetime, tzinfo, timezone, timedelta
23
24
 
24
25
  import tsp
25
26
  import tsp.labels as lbl
27
+ import tsp.tspwarnings as tw
28
+
26
29
  from tsp.physics import analytical_fourier
27
- from tsp.plots.static import trumpet_curve, colour_contour, time_series
30
+ from tsp.plots.static import trumpet_curve, colour_contour, time_series, profile_evolution
28
31
  from tsp.time import format_utc_offset
29
32
  from tsp.time import get_utc_offset
30
33
  from tsp.misc import completeness
34
+ from tsp.concatenation import _tsp_concat
31
35
 
32
36
  from matplotlib.figure import Figure
33
37
 
@@ -74,6 +78,28 @@ class TSP:
74
78
 
75
79
  def __str__(self) -> str:
76
80
  return str(self.wide)
81
+
82
+ def __add__(self, other: "TSP") -> "TSP":
83
+ """ Concatenate two TSP objects along the time axis.
84
+ The two TSP objects must have the same depths and the same UTC offset.
85
+
86
+ Parameters
87
+ ----------
88
+ other : TSP
89
+ Another TSP object to concatenate with this one
90
+
91
+ Returns
92
+ -------
93
+ TSP
94
+ A new TSP object with the concatenated data
95
+ """
96
+ if not isinstance(other, TSP):
97
+ raise TypeError("Can only concatenate TSP objects.")
98
+
99
+ if self.utc_offset != other.utc_offset:
100
+ raise ValueError("UTC offsets must be the same to concatenate.")
101
+
102
+ return tsp_concat([self, other])
77
103
 
78
104
  def __init__(self, times, depths, values,
79
105
  latitude: Optional[float]=None,
@@ -83,7 +109,7 @@ class TSP:
83
109
 
84
110
  self._times = handle_incoming_times(times)
85
111
  if self._times.duplicated().any():
86
- warnings.warn(f"Duplicate timestamps found: {self._times[np.where(self._times.duplicated())[0]]}. That's bad.", stacklevel=2)
112
+ warnings.warn(tw.DuplicateTimesWarning(self._times), stacklevel=2)
87
113
 
88
114
  if self.utc_offset:
89
115
  self._output_utc_offset = self.utc_offset
@@ -101,6 +127,8 @@ class TSP:
101
127
  self._freq = None
102
128
  self._completeness = None
103
129
 
130
+ self._export_precision = 3
131
+
104
132
  @property
105
133
  def freq(self) -> Optional[int]:
106
134
  """ Measurement frequency [s] """
@@ -216,12 +244,15 @@ class TSP:
216
244
  return t
217
245
 
218
246
  @classmethod
219
- def synthetic(cls, depths: "np.ndarray", start="2000-01-01", end="2003-01-01",
220
- Q:"Optional[float]"=0.2,
221
- c:"Optional[float]"=1.6e6,
222
- k:"Optional[float]"=2.5,
223
- A:"Optional[float]"=6,
224
- MAGST:"Optional[float]"=-0.5) -> "TSP":
247
+ def synthetic(cls, depths: NDArray[np.number],
248
+ start:str ="2000-01-01",
249
+ end:str ="2003-01-01",
250
+ freq: "str"="D",
251
+ Q:float=0.2,
252
+ c:float=1.6e6,
253
+ k:float=2.5,
254
+ A:float=6,
255
+ MAGST:float=-0.5) -> "TSP":
225
256
  """
226
257
  Create a 'synthetic' temperature time series using the analytical solution to the heat conduction equation.
227
258
  Suitable for testing
@@ -229,9 +260,13 @@ class TSP:
229
260
  Parameters
230
261
  ----------
231
262
  depths : np.ndarray
232
- array of depths in m
263
+ array of depths in metres
233
264
  start : str
234
- array of times in seconds
265
+ start date for the time series, in the format "YYYY-MM-DD"
266
+ end : str
267
+ end date for the time series, in the format "YYYY-MM-DD"
268
+ freq : str
269
+ pandas frequency string, e.g. "D" for daily, "H" for hourly, etc.
235
270
  Q : Optional[float], optional
236
271
  Ground heat flux [W m-2], by default 0.2
237
272
  c : Optional[float], optional
@@ -248,10 +283,16 @@ class TSP:
248
283
  TSP
249
284
  A timeseries profile (TSP) object
250
285
  """
251
- times = pd.date_range(start=start, end=end).to_pydatetime()
286
+ times = pd.date_range(start=start, end=end, freq=freq).to_pydatetime()
252
287
  t_sec = np.array([(t-times[0]).total_seconds() for t in times])
253
288
 
254
- values = analytical_fourier(depths=depths, times=t_sec, Q=Q, c=c, k=k, A=A, MAGST=MAGST)
289
+ values = analytical_fourier(depths=depths,
290
+ times=t_sec,
291
+ Q=Q,
292
+ c=c,
293
+ k=k,
294
+ A=A,
295
+ MAGST=MAGST)
255
296
 
256
297
  this = cls(depths=depths, times=times, values=values)
257
298
 
@@ -550,7 +591,7 @@ class TSP:
550
591
  return t
551
592
 
552
593
  @property
553
- def depths(self) -> "np.ndarray":
594
+ def depths(self) -> NDArray[np.number]:
554
595
  """ Return the depth values in the profile
555
596
 
556
597
  Returns
@@ -602,9 +643,9 @@ class TSP:
602
643
  filename : str
603
644
  Path to the file to write to
604
645
  """
605
- df = self.wide.rename(columns={'time': 'Date/Depth'})
646
+ df = self.wide.round(self._export_precision).rename(columns={'time': 'Date/Depth'})
606
647
  df['Date/Depth'] = df['Date/Depth'].dt.strftime("%Y-%m-%d %H:%M:%S")
607
-
648
+
608
649
  df.to_csv(filename, index=False, na_rep="-999")
609
650
 
610
651
  def to_ntgs(self, filename:str, project_name:str="", site_id:"Optional[str]" = None, latitude:"Optional[float]"=None, longitude:"Optional[float]"=None) -> None:
@@ -650,7 +691,7 @@ class TSP:
650
691
  headers = [str(d) + "_m" for d in self.depths]
651
692
 
652
693
  for i, h in enumerate(headers):
653
- df[h] = data[:, i]
694
+ df[h] = data[:, i].round(self._export_precision)
654
695
 
655
696
  df.to_csv(filename, index=False)
656
697
 
@@ -690,8 +731,27 @@ class TSP:
690
731
  f.write(self._to_json())
691
732
 
692
733
  def _to_json(self) -> str:
693
- return self.wide.to_json()
734
+ return self.wide.round(self._export_precision).to_json()
694
735
 
736
+ def plot_profiles(self, P:int=100, n:int=10) -> Figure:
737
+ """ Create a plot of the temperature profiles at different times
738
+
739
+ Parameters
740
+ ----------
741
+ P : int
742
+ Percentage of time range to plot
743
+ n : int
744
+ Number of evenly-spaced profiles to plot
745
+
746
+ Returns
747
+ -------
748
+ Figure
749
+ matplotlib `Figure` object
750
+ """
751
+ fig = profile_evolution(depths=self.depths, times=self.times, values=self._values, P=P, n=n)
752
+ fig.show()
753
+ return fig
754
+
695
755
  def plot_trumpet(self,
696
756
  year: Optional[int]=None,
697
757
  begin: Optional[datetime]=None,
@@ -708,6 +768,10 @@ class TSP:
708
768
  If 'end' also provided, the earliest measurement to include in the averaging for the plot
709
769
  end : datetime, optional
710
770
  If 'begin' also provided, the latest measurement to include in the averaging for the plot
771
+ min_completeness : float, optional
772
+ If provided, the minimum completeness (fractional, 0 to 1) required for a point to be
773
+ included in the temperature envelope; points below this threshold are plotted as
774
+ unconnected, slightly transparent dots. By default None
711
775
  **kwargs : dict, optional
712
776
  Extra arguments to the plotting function: refer to the documentation for :func:`~tsp.plots.static.trumpet_curve` for a
713
777
  list of all possible arguments.
@@ -1008,3 +1072,37 @@ def handle_incoming_times(times: "Union[np.ndarray, pd.DatetimeIndex, pd.Series,
1008
1072
 
1009
1073
  else:
1010
1074
  raise ValueError(invalid_msg)
1075
+
1076
+ def tsp_concat(tsp_list, on_conflict='error', metadata='first') -> TSP:
1077
+ """Combine multiple TSPs into a single TSP.
1078
+
1079
+ Parameters
1080
+ ----------
1081
+ tsp_list : list[TSP]
1082
+ List of TSPs to combine. They must have the same depths
1083
+ on_conflict : str, optional
1084
+ Method to resolve duplicate times with different values. Chosen from "error" or "keep_first", by default "error"
1085
+ - "error": Raise an error if duplicate times with different values are found.
1086
+ - "keep": Keep the first occurrence of the duplicate time.
1087
+ metadata : str, optional
1088
+ Method to select metadata from the TSPs. Chosen from "first", "identical", or "none", by default "first"
1089
+ - "first": Use the metadata from the first TSP in the list.
1090
+ - "identical": Only keep metadata records that are identical across TSPs.
1091
+ - "none": Ignore metadata and set it to None.
1092
+ Returns
1093
+ -------
1094
+ TSP
1095
+ Combined TSP.
1096
+
1097
+ Description
1098
+ -----------
1099
+ This function combines multiple TSPs into a single TSP. The TSPs must have the same depths.
1100
+ """
1101
+ tsp_dict = _tsp_concat(tsp_list=tsp_list, on_conflict=on_conflict, metadata=metadata)
1102
+ times = tsp_dict.pop('times')
1103
+ depths = tsp_dict.pop('depths')
1104
+ values = tsp_dict.pop('values')
1105
+
1106
+ t = TSP(times, depths, values, **tsp_dict)
1107
+
1108
+ return t
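
For the new concatenation API above, a short usage sketch (assuming tsp 1.8.0; the GTN-P file names are hypothetical):

    import tsp.readers as readers
    from tsp.core import tsp_concat

    t1 = readers.read_gtnp("site_A_2019.csv")   # hypothetical export
    t2 = readers.read_gtnp("site_A_2020.csv")   # hypothetical export

    # The operator form requires matching depths and UTC offsets.
    combined = t1 + t2

    # The function form exposes the conflict- and metadata-handling options.
    combined = tsp_concat([t1, t2], on_conflict="keep_first", metadata="identical")
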
tsp/data/NTGS_example_csv.csv ADDED
@@ -0,0 +1,6 @@
1
+ project_name,site_id,latitude,longitude,date_YYYY-MM-DD,time_HH:MM:SS,0.05_m,0.1_m,0.2_m,0.5_m,0.75_m
2
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015-04-14,03:00:00,-3.510,-2.411,-1.871,-1.037,-0.814
3
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015-04-14,04:00:00,-3.469,-2.382,-1.856,-1.046,-0.818
4
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015-04-14,05:00:00,-3.457,-2.353,-1.835,-1.037,-0.828
5
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015-04-14,06:00:00,-3.466,-2.336,-1.825,-1.039,-0.831
6
+
tsp/data/NTGS_example_slash_dates.csv ADDED
@@ -0,0 +1,6 @@
1
+ project_name,site_id,latitude,longitude,date_YYYY-MM-DD,time_HH:MM:SS,0.05_m,0.1_m,0.2_m,0.5_m,0.75_m
2
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015/04/14,03:00:00,-3.510,-2.411,-1.871,-1.037,-0.814
3
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015/04/14,04:00:00,-3.469,-2.382,-1.856,-1.046,-0.818
4
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015/04/14,05:00:00,-3.457,-2.353,-1.835,-1.037,-0.828
5
+ Your Great Project,West Whale Lake,63.390429,-109.364898,2015/04/14,06:00:00,-3.466,-2.336,-1.825,-1.039,-0.831
6
+
tsp/data/example_permos.csv ADDED
@@ -0,0 +1,8 @@
1
+ time,0,0.5,1,1.5,2.3,3.6,5.09,7.08,9.57,12.56,20.04
2
+ 2002-11-18 00:00:00,NA,-0.38333,-0.18833,-0.27,-0.25167,-0.45667,-0.68167,-0.67333,-0.76333,-0.67667,-0.53167
3
+ 2002-11-19 00:00:00,NA,-0.53667,-0.1925,-0.2575,-0.24667,-0.45667,-0.6825,-0.685,-0.75083,-0.68083,-0.5375
4
+ 2002-11-20 00:00:00,NA,-0.61083,-0.21167,-0.26333,-0.24333,-0.46417,-0.67833,-0.66833,-0.7525,-0.6825,-0.54
5
+ 2002-11-21 00:00:00,NA,-0.46333,-0.2,-0.26333,-0.245,-0.45917,-0.66917,-0.67333,-0.74833,-0.68167,-0.54083
6
+ 2002-11-22 00:00:00,NA,-0.35583,-0.195,-0.2625,-0.2425,-0.46333,-0.66417,-0.67333,-0.75,-0.68417,-0.54
7
+ 2002-11-23 00:00:00,NA,-0.33333,-0.19833,-0.265,-0.2425,-0.46333,-0.66,-0.66833,-0.75,-0.68667,-0.53917
8
+ 2002-11-24 00:00:00,NA,-0.32333,-0.20083,-0.2675,-0.2425,-0.46667,-0.65833,-0.66167,-0.7475,-0.68417,-0.54
tsp/gtnp.py CHANGED
@@ -3,6 +3,7 @@ from typing import Optional
3
3
  from collections import OrderedDict
4
4
 
5
5
  import re
6
+ import warnings
6
7
 
7
8
  from tsp.time import get_utc_offset
8
9
 
@@ -22,8 +23,14 @@ class GtnpMetadata:
22
23
  self._parse()
23
24
 
24
25
  def _read(self):
25
- with open(self.filepath, 'r') as f:
26
- self._raw = f.readlines()
26
+ try:
27
+ with open(self.filepath, 'r') as f:
28
+ self._raw = f.readlines()
29
+
30
+ except UnicodeDecodeError:
31
+ warnings.warn("Couldn't read file with utf-8 encoding. Metadata might be corrupted.")
32
+ with open(self.filepath, 'r', errors='ignore') as f:
33
+ self._raw = f.readlines()
27
34
 
28
35
  @property
29
36
  def raw(self) -> 'list[str]':
tsp/plots/static.py CHANGED
@@ -2,8 +2,10 @@ import numpy as np
2
2
  import warnings
3
3
 
4
4
  import matplotlib.dates as mdates
5
+ import matplotlib.cm as cm
5
6
  from matplotlib import pyplot as plt
6
7
  from matplotlib.figure import Figure
8
+ from matplotlib.colors import ListedColormap
7
9
  from typing import Optional
8
10
 
9
11
  try:
@@ -123,8 +125,9 @@ def colour_contour(depths, times, values, title="", colours: "Union[str, list]"=
123
125
  A title for the figure, by default ""
124
126
  colours : Union[str, list], optional
125
127
  Either a list of colours to be used for the colour bar, or one of:
126
- * **symmetric**:
127
- * **dynamic**:
128
+ * **symmetric**: ensure colour switch is centered at 0C
129
+ * **dynamic**: Maximize dynamic range
130
+ * **basic**: distinguish unfrozen (>0C), warm (-2C to 0C), and cold (<-2C) ground
128
131
  ,by default 'symmetric'
129
132
  contour : list, optional
130
133
  A list of float values. If provided, draw contours at each of those values, by default []
@@ -172,7 +175,14 @@ def colour_contour(depths, times, values, title="", colours: "Union[str, list]"=
172
175
  clev = contour_levels(Z, colours, step=1)
173
176
 
174
177
  # Add data
175
- cs = ax1.contourf(X, Y, Z, levels=clev, cmap=plt.cm.coolwarm)
178
+ if colours == 'basic':
179
+ co = ["darkblue", "lightblue", "lightgreen", "lightred"]
180
+ cmap = None
181
+ else:
182
+ co = None
183
+ cmap = plt.cm.coolwarm
184
+
185
+ cs = ax1.contourf(X, Y, Z, levels=clev, cmap=cmap, colors=co)
176
186
  fig.colorbar(cs, ticks = np.arange(-25,25,5))
177
187
 
178
188
  if len(contour) > 0:
@@ -240,12 +250,16 @@ def time_series(depths, times, values, title='', d_units='m', t_units=u'\N{DEGRE
240
250
  leg = ax.legend(loc='center left', bbox_to_anchor=(1.04, 0.5), fancybox=True, shadow=True)
241
251
 
242
252
  lined = {} # Will map legend lines to original lines.
243
- for legline, origline in zip(leg.get_lines(), lines):
253
+ for legline, label, origline in zip(leg.get_lines(), leg.get_texts(), lines):
244
254
  legline.set_picker(True) # Enable picking on the legend line.
245
- lined[legline] = origline
255
+ label.set_picker(True) # Enable picking on the legend label.
256
+ lined[legline] = [origline, [label]]
257
+ lined[label] = [origline, [legline]]
246
258
 
247
259
  on_pick = create_legend_picker(fig, lined)
260
+ on_tilde = create_tilde_toggle(fig, lined)
248
261
  fig.canvas.mpl_connect('pick_event', on_pick)
262
+ fig.canvas.mpl_connect('key_press_event', on_tilde)
249
263
 
250
264
  zero = ax.hlines(y=0.0, xmin=min(times), xmax=max(times), linewidth=0.5, linestyles='dotted', color='grey')
251
265
  # Set axis properties
@@ -258,6 +272,42 @@ def time_series(depths, times, values, title='', d_units='m', t_units=u'\N{DEGRE
258
272
  return fig
259
273
 
260
274
 
275
+ def profile_evolution(depths, times, values, P:int=100, n:int=10):
276
+ """ Plot sample of temperature profiles over time
277
+
278
+ Parameters
279
+ ----------
280
+ depths : array-like
281
+ Depths of the temperature profile
282
+ times : array-like
283
+ Times of the temperature profile
284
+ values : array-like
285
+ Temperature values of the temperature profile
286
+ P : int, optional
287
+ Percentage of the time series to plot, starting from the end, by default 100
288
+ n : int, optional
289
+ Number of profiles to plot, evenly spaced over time period to plot, by default 10
290
+ """
291
+ cmap = cm.get_cmap('winter')
292
+ clist = cmap(np.linspace(0, 1, n, endpoint=False))
293
+
294
+ fig, ax = plt.subplots()
295
+
296
+ p = 100 - P
297
+ lastP = (p*(len(times) // 100))
298
+
299
+ true_depths = -np.abs(depths)
300
+ plot_times = times[lastP:][::len(times[lastP:]) // n][:n]
301
+ plot_temps = values[lastP:,][::len(times[lastP:]) // n, :][:n,]
302
+
303
+ for i in range(n):
304
+ ax.plot(plot_temps[i,:], true_depths, color=clist[i],
305
+ alpha=0.5, label=f"{plot_times[i].year}")
306
+
307
+ ax.legend(fontsize="8")
308
+ ax.vlines(0, ymin=min(true_depths), ymax=max(true_depths), linewidth=0.5, color='black')
309
+
310
+ return fig
261
311
 
262
312
 
263
313
  def alpha(rgb, alpha):
@@ -269,13 +319,15 @@ def alpha(rgb, alpha):
269
319
  return rgba
270
320
 
271
321
 
272
- def contour_levels(data, levels: "Union[str,list]", step=1) -> "np.ndarray":
322
+ def contour_levels(data, levels: "Union[str,list]", step=1) -> np.ndarray:
273
323
  if levels == "dynamic":
274
324
  return np.arange(np.nanmin(data), np.nanmax(data), step)
275
325
 
276
326
  elif levels == "symmetric":
277
327
  return np.arange(min(np.nanmin(data), -np.nanmax(data) + 1),
278
328
  max(-np.nanmin(data) - 1, np.nanmax(data)), step)
329
+ elif levels == 'basic':
330
+ return np.array([min(-5, np.nanmin(data)), -2, 0, max(1, np.nanmax(data))])
279
331
  else:
280
332
  try:
281
333
  lev = np.array(levels, dtype='float')
@@ -290,16 +342,33 @@ def create_legend_picker(fig, lined) -> object:
290
342
  # On the pick event, find the original line corresponding to the legend
291
343
  # proxy line, and toggle its visibility.
292
344
  legline = event.artist
293
- origline = lined[legline]
345
+ origline = lined[legline][0]
346
+ other_toggles = lined[legline][1]
294
347
  visible = not origline.get_visible()
295
348
  origline.set_visible(visible)
296
349
  # Change the alpha on the line in the legend so we can see what lines
297
350
  # have been toggled.
298
351
  legline.set_alpha(1.0 if visible else 0.2)
352
+ for t in other_toggles:
353
+ t.set_alpha(1.0 if visible else 0.2)
299
354
  fig.canvas.draw()
300
355
 
301
356
  return on_pick
302
357
 
303
-
304
-
305
-
358
+ def create_tilde_toggle(fig, lined) -> object:
359
+
360
+ def on_click(event):
361
+ # on the "`" keypress, toggle lines off if any are on.
362
+ # on the "`" keypress, toggle lines on if all are off.
363
+ if event.key == '`':
364
+ visible = False
365
+ for togglable, [origline, other_toggles] in lined.items():
366
+ visible = visible or origline.get_visible()
367
+ for togglable, [origline, other_toggles] in lined.items():
368
+ origline.set_visible(not visible)
369
+ togglable.set_alpha(1.0 if not visible else 0.2)
370
+ for t in other_toggles:
371
+ t.set_alpha(1.0 if not visible else 0.2)
372
+ fig.canvas.draw()
373
+
374
+ return on_click
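
A small sketch of the new profile plot, driven through the TSP.plot_profiles wrapper added in core.py (assuming tsp 1.8.0 with matplotlib; the depths and output path are arbitrary):

    import numpy as np
    from tsp.core import TSP

    t = TSP.synthetic(depths=np.array([0.5, 1.0, 2.0, 5.0, 10.0]), freq="D")
    fig = t.plot_profiles(P=50, n=5)   # 5 profiles from the most recent half of the record
    fig.savefig("profiles.png")
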
tsp/readers.py CHANGED
@@ -17,11 +17,71 @@ from tsp.dataloggers.HOBO import HOBO, HOBOProperties
17
17
  from tsp.dataloggers.logr import LogR, guessed_depths_ok
18
18
  from tsp.dataloggers.RBRXL800 import RBRXL800
19
19
  from tsp.dataloggers.RBRXR420 import RBRXR420
20
+ import tsp.tspwarnings as tw
20
21
 
21
22
  from tsp.core import TSP, IndexedTSP
22
23
  from tsp.misc import _is_depth_column
23
24
  from tsp.gtnp import GtnpMetadata
24
25
 
26
+
27
+ def read_classic(filepath: str, init_file: "Optional[str]"=None) -> TSP:
28
+ """Read output from CLASSIC land surface model
29
+
30
+ Depth values, if provided, represent the midpoint of the model cells.
31
+
32
+ Parameters
33
+ ----------
34
+ filepath : str
35
+ Path to an output file
36
+ init_file : str
37
+ Path to a classic init file. If provided, depth values will be calculated. Otherwise an :py:class:`~tsp.core.IndexedTSP` is returned
38
+
39
+ Returns
40
+ -------
41
+ TSP
42
+ An IndexedTSP. Use :py:meth:`~tsp.core.IndexedTSP.set_depths` to provide depth information if init_file is not provided.
43
+ """
44
+ try:
45
+ nc
46
+ except NameError:
47
+ warnings.warn("netCDF4 library must be installed.")
48
+
49
+ # tbaracc_d / tbaracc_m / tbaracc_y
50
+ with nc.Dataset(filepath, 'r') as ncdf:
51
+ lat = ncdf['lat'][:]
52
+ lon = ncdf['lon'][:]
53
+ temp = ncdf['tsl'][:] # t, z
54
+
55
+ try:
56
+ time = nc.num2date(ncdf['time'][:], ncdf['time'].units, ncdf['time'].calendar,
57
+ only_use_cftime_datetimes=False,
58
+ only_use_python_datetimes=True)
59
+ except ValueError:
60
+ cf_time = nc.num2date(ncdf['time'][:], ncdf['time'].units, ncdf['time'].calendar)
61
+ time = np.array([datetime.datetime.fromisoformat(t.isoformat()) for t in cf_time])
62
+
63
+ if init_file:
64
+ with nc.Dataset(init_file, 'r') as init:
65
+ delz = init["DELZ"][:]
66
+ depths = np.round(np.cumsum(delz) - np.multiply(delz, 0.5), 7) # delz precision is lower so we get some very small offsets
67
+
68
+ if len(lat) > 1:
69
+ warnings.warn("Multiple points in file. Returning the first one found.")
70
+ # TODO: return Ensemble if multiple points
71
+ lat = lat[0]
72
+ lon = lon[0]
73
+ temp = temp[:,:,0,0]
74
+ else:
75
+ temp = temp[:,:,0,0]
76
+
77
+ t = IndexedTSP(times=time, values=temp, latitude=lat, longitude=lon)
78
+
79
+ if init_file:
80
+ t.set_depths(depths)
81
+
82
+ return t
83
+
84
+
25
85
  def read_csv(filepath: str,
26
86
  datecol: "Union[str, int]",
27
87
  datefmt: str = "%Y-%m-%d %H:%M:%S",
@@ -83,6 +143,85 @@ def read_csv(filepath: str,
83
143
  return t
84
144
 
85
145
 
146
+ def read_geoprecision(filepath: str) -> IndexedTSP:
147
+ """Read a Geoprecision datalogger export (text file)
148
+
149
+ Reads GP5W- and FG2-style files from geoprecision.
150
+
151
+ Parameters
152
+ ----------
153
+ filepath : str
154
+ Path to file.
155
+
156
+ Returns
157
+ -------
158
+ IndexedTSP
159
+ An IndexedTSP
160
+ """
161
+ Reader = detect_geoprecision_type(filepath)
162
+
163
+ if Reader is None:
164
+ raise RuntimeError("Could not detect type of geoprecision file (GP5W or FG2 missing from header")
165
+ reader = Reader()
166
+
167
+ data = reader.read(filepath)
168
+ t = IndexedTSP(times=data['TIME'].dt.to_pydatetime(),
169
+ values=data.drop("TIME", axis=1).values)
170
+
171
+ t.metadata = reader.META
172
+ return t
173
+
174
+
175
+ def read_geotop(file: str) -> TSP:
176
+ """Read a GEOtop soil temperature output file
177
+
178
+ Parameters
179
+ ----------
180
+ file : str
181
+ Path to file.
182
+
183
+ Returns
184
+ -------
185
+ TSP
186
+ A TSP
187
+
188
+ Description
189
+ -----------
190
+ Only the last run of the last simulation period is returned. This is because GEOtop outputs
191
+ all runs of all simulation periods in the same file. This function will only return the last
192
+ run of the last simulation period.
193
+ """
194
+ with warnings.catch_warnings():
195
+ warnings.filterwarnings("ignore", category=tw.DuplicateTimesWarning)
196
+
197
+ t = read_csv(file,
198
+ na_values=[-9999.0],
199
+ datecol="^(Date.*)",
200
+ datefmt=r"%d/%m/%Y %H:%M",
201
+ depth_pattern=r"^(-?[0-9\.]+\s*)$")
202
+
203
+ t._depths *= 0.001 # Convert to [m]
204
+
205
+ # Only use last simulation period
206
+ # TODO: this could be improved
207
+ raw = pd.read_csv(file)
208
+
209
+ is_max_sim_period = raw['Simulation_Period'] == max( raw['Simulation_Period'])
210
+ is_last_run_in_max_sim_period = raw['Run'] == raw['Run'][is_max_sim_period].max()
211
+ last_run = np.logical_and(is_max_sim_period, is_last_run_in_max_sim_period)
212
+
213
+ last = TSP(times = t.times[last_run],
214
+ depths = t.depths,
215
+ values = t.values[last_run, :],
216
+ metadata={"Simulation_Period": max(raw['Simulation_Period']),
217
+ "Run": max( raw['Run'] )
218
+ }
219
+ )
220
+
221
+ return last
222
+
223
+
224
+
86
225
  def read_gtnp(filename: str,
87
226
  metadata_filepath=None,
88
227
  autodetect_metadata=True) -> TSP:
@@ -141,45 +280,6 @@ def read_gtnp(filename: str,
141
280
  return t
142
281
 
143
282
 
144
- def read_geotop(file: str) -> TSP:
145
- """Read a GEOtop soil temperature output file
146
-
147
- Parameters
148
- ----------
149
- file : str
150
- Path to file.
151
-
152
- Returns
153
- -------
154
- TSP
155
- A TSP
156
- """
157
-
158
- t = read_csv(file,
159
- na_values=[-9999.0],
160
- datecol="^(Date.*)",
161
- datefmt=r"%d/%m/%Y %H:%M",
162
- depth_pattern=r"^(-?[0-9\.]+\s*)$")
163
-
164
- t._depths *= 0.001 # Convert to [m]
165
-
166
- # Only use last simulation period
167
- # TODO: this could be improved
168
- raw = pd.read_csv(file)
169
- last_run = np.logical_and(raw['Simulation_Period'] == max( raw['Simulation_Period'] ),
170
- raw['Run'] == max( raw['Run'] ))
171
-
172
- last = TSP(times = t.times[last_run],
173
- depths = t.depths,
174
- values = t.values[last_run, :],
175
- metadata={"Simulation_Period": max(raw['Simulation_Period']),
176
- "Run": max( raw['Run'] )
177
- }
178
- )
179
-
180
- return last
181
-
182
-
183
283
  def read_gtpem(file: str) -> "list[TSP]":
184
284
  output = list()
185
285
  try:
@@ -195,6 +295,70 @@ def read_gtpem(file: str) -> "list[TSP]":
195
295
  return output
196
296
 
197
297
 
298
+ def read_hoboware(filepath: str, hoboware_config: Optional[HOBOProperties]=None) -> IndexedTSP:
299
+ """Read Onset HoboWare datalogger exports
300
+
301
+ Parameters
302
+ ----------
303
+ filepath : str
304
+ Path to a file
305
+ hoboware_config : HOBOProperties, optional
306
+ A HOBOProperties object with information about how the file is configured. If not
307
+ provided, the configuration will be automatically detected if possible, by default None
308
+
309
+ Returns
310
+ -------
311
+ IndexedTSP
312
+ An IndexedTSP. Use the `set_depths` method to provide depth information
313
+ """
314
+ reader = HOBO(properties=hoboware_config)
315
+ data = reader.read(filepath)
316
+
317
+ t = IndexedTSP(times=data['TIME'],
318
+ values=data.drop("TIME", axis=1).values)
319
+
320
+ return t
321
+
322
+
323
+ def read_logr(filepath: str) -> "Union[IndexedTSP,TSP]":
324
+ """Read a LogR datalogger export (text file)
325
+
326
+ Reads LogR ULogC16-32 files.
327
+
328
+ Parameters
329
+ ----------
330
+ filepath : str
331
+ Path to file.
332
+
333
+ Returns
334
+ -------
335
+ IndexedTSP, TSP
336
+ An IndexedTSP or TSP, depending on whether the depth labels are sensible
337
+ """
338
+ r = LogR()
339
+ data = r.read(filepath)
340
+
341
+ times = data['TIME'].dt.to_pydatetime()
342
+ channels = pd.Series(data.columns).str.match("^CH")
343
+ values = data.loc[:, channels.to_numpy()]
344
+
345
+ if guessed_depths_ok(r.META['guessed_depths'], sum(channels)):
346
+ t = TSP(times=times,
347
+ depths=r.META['guessed_depths'][-sum(channels):],
348
+ values=values.values,)
349
+
350
+ else:
351
+ warnings.warn(f"Could not convert all channel labels into numeric depths."
352
+ "Use the set_depths() method to specify observation depths."
353
+ "Guessed depths can be accessed from .metadata['guessed_depths'].")
354
+
355
+ t = IndexedTSP(times=times,
356
+ values=values.values,
357
+ metadata = r.META)
358
+
359
+ return t
360
+
361
+
198
362
  def read_netcdf(file:str, standard_name='temperature_in_ground') -> TSP:
199
363
  """Read a CF-compliant netCDF file
200
364
 
@@ -282,8 +446,7 @@ def read_ntgs(filename: str) -> TSP:
282
446
  try:
283
447
  raw = pd.read_csv(filename,
284
448
  keep_default_na=False,na_values=[''],
285
- parse_dates={"time": ["date_YYYY-MM-DD","time_HH:MM:SS"]},
286
- date_parser=__nt_date_parser)
449
+ parse_dates={"time": ["date_YYYY-MM-DD","time_HH:MM:SS"]})
287
450
  except IndexError:
288
451
  raise IndexError("There are insufficient columns, the file format is invalid.")
289
452
  elif Path(filename).suffix in [".xls", ".xlsx"]:
@@ -316,185 +479,6 @@ def read_ntgs(filename: str) -> TSP:
316
479
  return t
317
480
 
318
481
 
319
- def __nt_date_parser(date, time) -> datetime.datetime:
320
- if isinstance(date, str):
321
- # Case from CSV files where the date is string
322
- try:
323
- year, month, day = [int(dateVal) for dateVal in date.split("-")]
324
- except ValueError:
325
- raise ValueError(f"The date {date} was unable to be parsed. The format required is YYYY-MM-DD.")
326
- elif isinstance(date, datetime.datetime):
327
- # Case XLSX files - are "timestamp" objects
328
- year, month, day = date.year, date.month, date.day
329
- else:
330
- raise ValueError(f"The date {date} was unable to be parsed.")
331
-
332
- if isinstance(time, str):
333
- try:
334
- h, m, s = [int(timeVal) for timeVal in time.split(":")]
335
- except ValueError:
336
- raise ValueError(f"The time {time} was unable to be parsed. The format required is (H)H:MM:SS.")
337
-
338
- elif isinstance(time, datetime.time):
339
- h, m, s = int(time.hour), time.minute, time.second
340
-
341
- else:
342
- raise ValueError(f"The time {time} was unable to be parsed.")
343
-
344
- return datetime.datetime(year, month, day, hour=h, minute=m, second=s)
345
-
346
-
347
- def read_geoprecision(filepath: str) -> IndexedTSP:
348
- """Read a Geoprecision datalogger export (text file)
349
-
350
- Reads GP5W- and FG2-style files from geoprecision.
351
-
352
- Parameters
353
- ----------
354
- filepath : str
355
- Path to file.
356
-
357
- Returns
358
- -------
359
- IndexedTSP
360
- An IndexedTSP
361
- """
362
- Reader = detect_geoprecision_type(filepath)
363
-
364
- if Reader is None:
365
- raise RuntimeError("Could not detect type of geoprecision file (GP5W or FG2 missing from header")
366
- reader = Reader()
367
-
368
- data = reader.read(filepath)
369
- t = IndexedTSP(times=data['TIME'].dt.to_pydatetime(),
370
- values=data.drop("TIME", axis=1).values)
371
-
372
- t.metadata = reader.META
373
- return t
374
-
375
-
376
- def read_logr(filepath: str) -> "Union[IndexedTSP,TSP]":
377
- """Read a LogR datalogger export (text file)
378
-
379
- Reads LogR ULogC16-32 files.
380
-
381
- Parameters
382
- ----------
383
- filepath : str
384
- Path to file.
385
-
386
- Returns
387
- -------
388
- IndexedTSP, TSP
389
- An IndexedTSP or TSP, depending on whether the depth labels are sensible
390
- """
391
- r = LogR()
392
- data = r.read(filepath)
393
-
394
- times = data['TIME'].dt.to_pydatetime()
395
- channels = pd.Series(data.columns).str.match("^CH")
396
- values = data.loc[:, channels.to_numpy()]
397
-
398
- if guessed_depths_ok(r.META['guessed_depths'], sum(channels)):
399
- t = TSP(times=times,
400
- depths=r.META['guessed_depths'][-sum(channels):],
401
- values=values.values,)
402
-
403
- else:
404
- warnings.warn(f"Could not convert all channel labels into numeric depths."
405
- "Use the set_depths() method to specify observation depths."
406
- "Guessed depths can be accessed from .metadata['guessed_depths'].")
407
-
408
- t = IndexedTSP(times=times,
409
- values=values.values,
410
- metadata = r.META)
411
-
412
- return t
413
-
414
-
415
- def read_hoboware(filepath: str, hoboware_config: Optional[HOBOProperties]=None) -> IndexedTSP:
416
- """Read Onset HoboWare datalogger exports
417
-
418
- Parameters
419
- ----------
420
- filepath : str
421
- Path to a file
422
- hoboware_config : HOBOProperties, optional
423
- A HOBOProperties object with information about how the file is configured. If not
424
- provided, the configuration will be automatically detected if possible, by default None
425
-
426
- Returns
427
- -------
428
- IndexedTSP
429
- An IndexedTSP. Use the `set_depths` method to provide depth information
430
- """
431
- reader = HOBO(properties=hoboware_config)
432
- data = reader.read(filepath)
433
-
434
- t = IndexedTSP(times=data['TIME'],
435
- values=data.drop("TIME", axis=1).values)
436
-
437
- return t
438
-
439
-
440
- def read_classic(filepath: str, init_file: "Optional[str]"=None) -> TSP:
441
- """Read output from CLASSIC land surface model
442
-
443
- Depth values, if provided, represent the midpoint of the model cells.
444
-
445
- Parameters
446
- ----------
447
- filepath : str
448
- Path to an output file
449
- init_file : str
450
- Path to a classic init file. If provided, depth values will be calculated. Otherwise an :py:class:`~tsp.core.IndexedTSP` is returned
451
-
452
- Returns
453
- -------
454
- TSP
455
- An IndexedTSP. Use :py:meth:`~tsp.core.IndexedTSP.set_depths` to provide depth information if init_file is not provided.
456
- """
457
- try:
458
- nc
459
- except NameError:
460
- warnings.warn("netCDF4 library must be installed.")
461
-
462
- # tbaracc_d / tbaracc_m / tbaracc_y
463
- with nc.Dataset(filepath, 'r') as ncdf:
464
- lat = ncdf['lat'][:]
465
- lon = ncdf['lon'][:]
466
- temp = ncdf['tsl'][:] # t, z
467
-
468
- try:
469
- time = nc.num2date(ncdf['time'][:], ncdf['time'].units, ncdf['time'].calendar,
470
- only_use_cftime_datetimes=False,
471
- only_use_python_datetimes=True)
472
- except ValueError:
473
- cf_time = nc.num2date(ncdf['time'][:], ncdf['time'].units, ncdf['time'].calendar)
474
- time = np.array([datetime.datetime.fromisoformat(t.isoformat()) for t in cf_time])
475
-
476
- if init_file:
477
- with nc.Dataset(init_file, 'r') as init:
478
- delz = init["DELZ"][:]
479
- depths = np.round(np.cumsum(delz) - np.multiply(delz, 0.5), 7) # delz precision is lower so we get some very small offsets
480
-
481
- if len(lat) > 1:
482
- warnings.warn("Multiple points in file. Returning the first one found.")
483
- # TODO: return Ensemble if multiple points
484
- lat = lat[0]
485
- lon = lon[0]
486
- temp = temp[:,:,0,0]
487
- else:
488
- temp = temp[:,:,0,0]
489
-
490
- t = IndexedTSP(times=time, values=temp, latitude=lat, longitude=lon)
491
-
492
- if init_file:
493
- t.set_depths(depths)
494
-
495
- return t
496
-
497
-
498
482
  def read_rbr(file_path: str) -> IndexedTSP:
499
483
  """
500
484
 
@@ -534,3 +518,31 @@ def read_rbr(file_path: str) -> IndexedTSP:
534
518
 
535
519
  return t
536
520
 
521
+
522
+ def read_permos(filepath:str) -> TSP:
523
+ """Read file from PERMOS database export
524
+
525
+ Parameters
526
+ ----------
527
+ filepath : str
528
+ Path to file.
529
+
530
+ Returns
531
+ -------
532
+ TSP
533
+ A TSP
534
+
535
+ Used for data obtained from PERMOS (permos.ch/data-portal/permafrost-temperature-and-active-layer)
536
+ """
537
+ try:
538
+ raw = pd.read_csv(filepath,
539
+ index_col=0,
540
+ parse_dates=True)
541
+ except IndexError:
542
+ raise IndexError("There are insufficient columns, the file format is invalid.")
543
+
544
+ t = TSP(times=raw.index,
545
+ depths=[float(C) for C in raw.columns],
546
+ values=raw.values)
547
+
548
+ return t
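
For the new PERMOS reader, a minimal usage sketch (the export file name is hypothetical; the file is expected to look like the bundled tsp/data/example_permos.csv, with a leading time column and one column per depth):

    from tsp.readers import read_permos

    t = read_permos("permos_borehole_export.csv")   # hypothetical file
    print(t.depths)   # depth values parsed from the column headers
    print(t.wide)     # wide-format table of ground temperatures
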
tsp/tspwarnings.py ADDED
@@ -0,0 +1,15 @@
1
+ import numpy as np
2
+
3
+
4
+ class DuplicateTimesWarning(UserWarning):
5
+ """For when duplicate times are found in a file."""
6
+ def __init__(self, times):
7
+ self.times = times
8
+
9
+ def _msg(self, times) -> str:
10
+ m = f"Duplicate timestamps found: {times[np.where(times.duplicated())[0]]}. That's bad."
11
+ return m
12
+
13
+ def __str__(self):
14
+ return self._msg(self.times)
15
+
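
The new warning class lets callers silence duplicate-timestamp reports selectively, which is how read_geotop now uses it. A sketch (assuming tsp 1.8.0; the file name and datecol value are hypothetical):

    import warnings

    import tsp.tspwarnings as tw
    from tsp.readers import read_csv

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=tw.DuplicateTimesWarning)
        t = read_csv("logger_export.csv", datecol="time")
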
tsp/version.py CHANGED
@@ -1 +1 @@
1
- version="1.7.3"
1
+ version="1.8.0"
tsp-1.7.3.dist-info/METADATA → tsp-1.8.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: tsp
3
- Version: 1.7.3
3
+ Version: 1.8.0
4
4
  Summary: Making permafrost data effortless
5
5
  Home-page: https://gitlab.com/permafrostnet/teaspoon
6
6
  Author: Nick Brown
@@ -17,26 +17,37 @@ Requires-Dist: pandas
17
17
  Requires-Dist: numpy
18
18
  Requires-Dist: regex
19
19
  Requires-Dist: matplotlib
20
- Provides-Extra: dev
21
- Requires-Dist: manuel ; extra == 'dev'
22
- Requires-Dist: pytest ; extra == 'dev'
23
- Requires-Dist: pytest-cov ; extra == 'dev'
24
- Requires-Dist: coverage ; extra == 'dev'
25
- Requires-Dist: mock ; extra == 'dev'
26
- Provides-Extra: full
27
- Requires-Dist: pfit ==0.2.1 ; extra == 'full'
28
- Requires-Dist: pyrsktools ; extra == 'full'
29
- Requires-Dist: scipy ; extra == 'full'
30
- Requires-Dist: openpyxl ; extra == 'full'
31
- Requires-Dist: netCDF4 ; extra == 'full'
20
+ Requires-Dist: setuptools
32
21
  Provides-Extra: nc
33
- Requires-Dist: netCDF4 ; extra == 'nc'
34
- Requires-Dist: pfit ==0.2.1 ; extra == 'nc'
22
+ Requires-Dist: netCDF4; extra == "nc"
23
+ Requires-Dist: pfit==0.2.1; extra == "nc"
35
24
  Provides-Extra: plotting
36
- Requires-Dist: scipy ; extra == 'plotting'
25
+ Requires-Dist: scipy; extra == "plotting"
37
26
  Provides-Extra: rbr
38
- Requires-Dist: pyrsktools ; extra == 'rbr'
39
- Requires-Dist: openpyxl ; extra == 'rbr'
27
+ Requires-Dist: pyrsktools; extra == "rbr"
28
+ Requires-Dist: openpyxl; extra == "rbr"
29
+ Provides-Extra: full
30
+ Requires-Dist: netCDF4; extra == "full"
31
+ Requires-Dist: pyrsktools; extra == "full"
32
+ Requires-Dist: scipy; extra == "full"
33
+ Requires-Dist: pfit==0.2.1; extra == "full"
34
+ Requires-Dist: openpyxl; extra == "full"
35
+ Provides-Extra: dev
36
+ Requires-Dist: manuel; extra == "dev"
37
+ Requires-Dist: pytest; extra == "dev"
38
+ Requires-Dist: pytest-cov; extra == "dev"
39
+ Requires-Dist: coverage; extra == "dev"
40
+ Requires-Dist: mock; extra == "dev"
41
+ Dynamic: author
42
+ Dynamic: author-email
43
+ Dynamic: classifier
44
+ Dynamic: description
45
+ Dynamic: description-content-type
46
+ Dynamic: home-page
47
+ Dynamic: license-file
48
+ Dynamic: provides-extra
49
+ Dynamic: requires-dist
50
+ Dynamic: summary
40
51
 
41
52
  # Teaspoon
42
53
 
tsp-1.7.3.dist-info/RECORD → tsp-1.8.0.dist-info/RECORD CHANGED
@@ -1,18 +1,23 @@
1
1
  tsp/__init__.py,sha256=V_rDl4XxSRtbFWK7KFZJkKqwjqwLREvlnVc7BCGjHaw,427
2
- tsp/__meta__.py,sha256=s5i_2brSLEf6ifjRKvtw2H4-2LFzGGtR7ynEWoQUzPk,93
3
- tsp/core.py,sha256=PW4J_TDURHV8AaOppD7GZfyTWdlChzeeWMjQEhLUZxA,34323
4
- tsp/gtnp.py,sha256=kIxhLq_ve_cxU9v4_nDebc8jy6l7KQ87MOg8vK8lih4,3991
2
+ tsp/__meta__.py,sha256=HPbBgUW82WNWV430sfTj9LXxF17fdFaQrKi-NL5mOo0,93
3
+ tsp/concatenation.py,sha256=XK-5ec_IrlIVShp9eqd5QVg1yDgk3ysPFzeJpo2G2gc,5990
4
+ tsp/core.py,sha256=gXGyY08_EHo2rttH2nqKMD3hUnVBrPQbjlNm-2ZXMME,37899
5
+ tsp/gtnp.py,sha256=cFFSOVzudEkxqtXSMnsVE7clVoXFH0Gb8ES-Cx_iwUU,4269
5
6
  tsp/labels.py,sha256=URV4zxfur6aYojdra7KGTipFdiN9pAOCEVvkcc2pj-Q,78
6
7
  tsp/misc.py,sha256=y0NvW5jOlli5s220QnaLYTOadSlj3Lyu-EE-RsoSbok,2614
7
8
  tsp/physics.py,sha256=hgVOGU0Bj1g-gxBNhLEl7Gm3VXJKIHHu35uPvgVMOxE,2699
8
- tsp/readers.py,sha256=5rEmC12dqiid22YGQD3xQdoPAA5eRfyHmmoEJMXlpds,18540
9
+ tsp/readers.py,sha256=1rPcS9eRpkGPWU6MsSrgiMh2QCKvAAY5_KiTQMMcsxk,18464
9
10
  tsp/time.py,sha256=82h7nxM-iXs2XwetF-alLtNvUm0qRtAA111gTMp5SY4,1379
11
+ tsp/tspwarnings.py,sha256=AGCmSlwM3JbnrEZSbyBKLnyQkKygsKU_YYbte4iZsK8,399
10
12
  tsp/utils.py,sha256=sOJSZLmfv7sh4X971_gNgtZvXtS8ZwGmUdqnUybcVE4,2932
11
- tsp/version.py,sha256=jVVJ1XqyGriV5CDoioX5vjYZp75DKC3MfzBS5a39Sw4,15
13
+ tsp/version.py,sha256=Xzb-lP-C0B-Xa7NdEdQP8ANIyFtMIaeq0GhSeSluzrk,15
12
14
  tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv,sha256=Q3Ssnoo_kiSn9_orZHjtMxQ02YbrjCAEQKs5sHPFJCg,171
13
15
  tsp/data/2023-01-06_755-test.metadata.txt,sha256=Ux1YGqmAmRQmMIqqK8-OloQXflg4Y45FRwdT-WgCg8c,5686
16
+ tsp/data/NTGS_example_csv.csv,sha256=OirFmLRqj7TUYfAUHqKbZFSqQ-g3I13QwUfUzgiJ-h4,554
17
+ tsp/data/NTGS_example_slash_dates.csv,sha256=RzeYXsGnOnmLqgrpeCr9pGi-ncaJSJKrsYvI5sf6xqo,554
14
18
  tsp/data/example_geotop.csv,sha256=rgVP7_tGEvUtn1K_KI98VVgm275D7qt8YegKMe3Vjw4,262289
15
19
  tsp/data/example_gtnp.csv,sha256=E5km06-cWlWMwzF-Qo7v0ZrlAvCTpyWIKY6hpzln4sc,191812
20
+ tsp/data/example_permos.csv,sha256=tz-Z4A9x3Uc8Dym65Roto95X_bgSUySC9wJrUnGVBPE,797
16
21
  tsp/data/test_geotop_has_space.txt,sha256=BaloXQVxyPD7LLeI6YCnHsR3pjA_BZsHYH9OeW57cC4,203
17
22
  tsp/dataloggers/AbstractReader.py,sha256=YsmESWrmH2jdL-Oli9pwjaFmPCEfJxjm4wx16FoRxpY,1777
18
23
  tsp/dataloggers/FG2.py,sha256=kvfjMQtoSs5ZzV7hc1lJ_SaDuSOOTONfI_nw2VaihO8,3281
@@ -81,9 +86,9 @@ tsp/dataloggers/test_files/rbr_xl_002.DAT,sha256=JQNHL6X9z4Rp6IDqDBYx8vWsUU6oigl
81
86
  tsp/dataloggers/test_files/rbr_xl_003.DAT,sha256=ZEKheCvB1CiubY2kMngigY0NNhWTYAiC_hmQhzODPYw,221656
82
87
  tsp/dataloggers/test_files/rbr_xl_003.HEX,sha256=sunCD5C1t8l5y4p1b9iiHNsZUznYIuBLz4uwoGkZh3E,118459
83
88
  tsp/plots/__init__.py,sha256=i5AhpNwyuT6il7uk2Vtc4YBjVnZ0ifvHNXw19rvDtsM,71
84
- tsp/plots/static.py,sha256=hUGMtx-AQ8eHPVKurEK1T9BHitnvfC3gQLB5h5ykeic,11039
85
- tsp-1.7.3.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
86
- tsp-1.7.3.dist-info/METADATA,sha256=c7Cc2iyrUtMm3bgTouMjpvdANGEJ1jWvidZnaufwfWo,3044
87
- tsp-1.7.3.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
88
- tsp-1.7.3.dist-info/top_level.txt,sha256=7tOR6y7BarphfWD2D7QFi_3F1jxagUZpHG8zwJIw4ck,30
89
- tsp-1.7.3.dist-info/RECORD,,
89
+ tsp/plots/static.py,sha256=pvLFM3_HuRgnPMe7smxj9v65zcM3JLsCvhhwGgdt5Uk,13836
90
+ tsp-1.8.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
91
+ tsp-1.8.0.dist-info/METADATA,sha256=rvZdIOFdzkkQe3Euae8ESNPAL2p6HukH2BRj-r2Auys,3271
92
+ tsp-1.8.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
93
+ tsp-1.8.0.dist-info/top_level.txt,sha256=7tOR6y7BarphfWD2D7QFi_3F1jxagUZpHG8zwJIw4ck,30
94
+ tsp-1.8.0.dist-info/RECORD,,
tsp-1.7.3.dist-info/WHEEL → tsp-1.8.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.41.2)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5