tsp 1.8.1__py3-none-any.whl → 1.10.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tsp/__init__.py +11 -11
- tsp/__meta__.py +1 -1
- tsp/concatenation.py +159 -153
- tsp/core.py +1306 -1162
- tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv +4 -4
- tsp/data/2023-01-06_755-test.metadata.txt +208 -208
- tsp/data/NTGS_example_csv.csv +6 -6
- tsp/data/NTGS_example_slash_dates.csv +6 -6
- tsp/data/NTGS_gtr_example_excel.xlsx +0 -0
- tsp/data/example_geotop.csv +5240 -5240
- tsp/data/example_gtnp.csv +1298 -1298
- tsp/data/example_permos.csv +7 -7
- tsp/data/ntgs-db-multi.txt +3872 -0
- tsp/data/ntgs-db-single.txt +2251 -0
- tsp/data/test_geotop_has_space.txt +5 -5
- tsp/data/tsp_format_long.csv +10 -0
- tsp/data/tsp_format_wide_1.csv +7 -0
- tsp/data/tsp_format_wide_2.csv +7 -0
- tsp/dataloggers/AbstractReader.py +43 -43
- tsp/dataloggers/FG2.py +110 -110
- tsp/dataloggers/GP5W.py +114 -114
- tsp/dataloggers/Geoprecision.py +34 -34
- tsp/dataloggers/HOBO.py +930 -914
- tsp/dataloggers/RBRXL800.py +190 -190
- tsp/dataloggers/RBRXR420.py +371 -308
- tsp/dataloggers/Vemco.py +84 -0
- tsp/dataloggers/__init__.py +15 -15
- tsp/dataloggers/logr.py +196 -115
- tsp/dataloggers/test_files/004448.DAT +2543 -2543
- tsp/dataloggers/test_files/004531.DAT +17106 -17106
- tsp/dataloggers/test_files/004531.HEX +3587 -3587
- tsp/dataloggers/test_files/004534.HEX +3587 -3587
- tsp/dataloggers/test_files/010252.dat +1731 -1731
- tsp/dataloggers/test_files/010252.hex +1739 -1739
- tsp/dataloggers/test_files/010274.hex +1291 -1291
- tsp/dataloggers/test_files/010278.hex +3544 -3544
- tsp/dataloggers/test_files/012064.dat +1286 -1286
- tsp/dataloggers/test_files/012064.hex +1294 -1294
- tsp/dataloggers/test_files/012064_modified_start.hex +1294 -0
- tsp/dataloggers/test_files/012081.hex +3532 -3532
- tsp/dataloggers/test_files/013138_recovery_stamp.hex +1123 -0
- tsp/dataloggers/test_files/014037-2007.hex +95 -0
- tsp/dataloggers/test_files/019360_20160918_1146_SlumpIslandTopofHill.hex +11253 -0
- tsp/dataloggers/test_files/019360_20160918_1146_SlumpIslandTopofHill.xls +0 -0
- tsp/dataloggers/test_files/07B1592.DAT +1483 -1483
- tsp/dataloggers/test_files/07B1592.HEX +1806 -1806
- tsp/dataloggers/test_files/07B4450.DAT +2234 -2234
- tsp/dataloggers/test_files/07B4450.HEX +2559 -2559
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16.txt +36 -0
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16_raw.csv +2074 -0
- tsp/dataloggers/test_files/2022018_2025-09-18T22-16-16_temp.csv +2074 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_cfg.txt +30 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_raw.csv +35 -0
- tsp/dataloggers/test_files/2025004_2025-12-02T17-07-28_temp.csv +35 -0
- tsp/dataloggers/test_files/204087.xlsx +0 -0
- tsp/dataloggers/test_files/Asc-1455As02.000 +2982 -0
- tsp/dataloggers/test_files/Asc-1456As02.000 +2992 -0
- tsp/dataloggers/test_files/Asc-1457As02.000 +2917 -0
- tsp/dataloggers/test_files/BGC_BH15_019362_20140610_1253.hex +1729 -0
- tsp/dataloggers/test_files/Bin2944.csv +759 -0
- tsp/dataloggers/test_files/Bin5494.csv +2972 -0
- tsp/dataloggers/test_files/Bin6786.csv +272 -0
- tsp/dataloggers/test_files/FG2_399.csv +9881 -9881
- tsp/dataloggers/test_files/GP5W.csv +1121 -1121
- tsp/dataloggers/test_files/GP5W_260.csv +1884 -1884
- tsp/dataloggers/test_files/GP5W_270.csv +2210 -2210
- tsp/dataloggers/test_files/H08-030-08_HOBOware.csv +998 -998
- tsp/dataloggers/test_files/Minilog-II-T_350763_20190711_1.csv +2075 -0
- tsp/dataloggers/test_files/Minilog-II-T_350769_20190921_1.csv +6384 -0
- tsp/dataloggers/test_files/Minilog-II-T_354284_20190921_1.csv +4712 -0
- tsp/dataloggers/test_files/Minilog-T_7943_20140920_1.csv +5826 -0
- tsp/dataloggers/test_files/Minilog-T_8979_20140806_1.csv +2954 -0
- tsp/dataloggers/test_files/Minilog-T_975_20110824_1.csv +4343 -0
- tsp/dataloggers/test_files/RBR_01.dat +1046 -1046
- tsp/dataloggers/test_files/RBR_02.dat +2426 -2426
- tsp/dataloggers/test_files/RI03b_062831_20240905_1801.rsk +0 -0
- tsp/dataloggers/test_files/RI03b_062831_20240905_1801.xlsx +0 -0
- tsp/dataloggers/test_files/RSTDT2055.csv +2152 -2152
- tsp/dataloggers/test_files/U23-001_HOBOware.csv +1001 -1001
- tsp/dataloggers/test_files/hobo-negative-2.txt +6396 -6396
- tsp/dataloggers/test_files/hobo-negative-3.txt +5593 -5593
- tsp/dataloggers/test_files/hobo-positive-number-1.txt +1000 -1000
- tsp/dataloggers/test_files/hobo-positive-number-2.csv +1003 -1003
- tsp/dataloggers/test_files/hobo-positive-number-3.csv +1133 -1133
- tsp/dataloggers/test_files/hobo-positive-number-4.csv +1209 -1209
- tsp/dataloggers/test_files/hobo2.csv +8702 -8702
- tsp/dataloggers/test_files/hobo_1_AB.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_Details.txt +133 -133
- tsp/dataloggers/test_files/hobo_1_AB_classic.csv +4373 -4373
- tsp/dataloggers/test_files/hobo_1_AB_defaults.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_minimal.txt +1358 -1358
- tsp/dataloggers/test_files/hobo_1_AB_var2.csv +3189 -3189
- tsp/dataloggers/test_files/hobo_1_AB_var3.csv +2458 -2458
- tsp/dataloggers/test_files/logR_ULogC16-32_1.csv +106 -106
- tsp/dataloggers/test_files/logR_ULogC16-32_2.csv +100 -100
- tsp/dataloggers/test_files/mon_3_Ta_2010-08-18_2013-02-08.txt +21724 -21724
- tsp/dataloggers/test_files/rbr_001.dat +1133 -1133
- tsp/dataloggers/test_files/rbr_001.hex +1139 -1139
- tsp/dataloggers/test_files/rbr_001_no_comment.dat +1132 -1132
- tsp/dataloggers/test_files/rbr_001_no_comment.hex +1138 -1138
- tsp/dataloggers/test_files/rbr_002.dat +1179 -1179
- tsp/dataloggers/test_files/rbr_002.hex +1185 -1185
- tsp/dataloggers/test_files/rbr_003.hex +1292 -1292
- tsp/dataloggers/test_files/rbr_xl_001.DAT +1105 -1105
- tsp/dataloggers/test_files/rbr_xl_002.DAT +1126 -1126
- tsp/dataloggers/test_files/rbr_xl_003.DAT +4622 -4622
- tsp/dataloggers/test_files/rbr_xl_003.HEX +3587 -3587
- tsp/gtnp.py +148 -148
- tsp/labels.py +3 -3
- tsp/misc.py +90 -90
- tsp/physics.py +101 -101
- tsp/plots/static.py +388 -374
- tsp/readers.py +829 -548
- tsp/standardization/__init__.py +0 -0
- tsp/standardization/metadata.py +95 -0
- tsp/standardization/metadata_ref.py +0 -0
- tsp/standardization/validator.py +535 -0
- tsp/time.py +45 -45
- tsp/tspwarnings.py +27 -15
- tsp/utils.py +131 -101
- tsp/version.py +1 -1
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/METADATA +95 -86
- tsp-1.10.2.dist-info/RECORD +132 -0
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/licenses/LICENSE +674 -674
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/top_level.txt +1 -0
- tsp-1.8.1.dist-info/RECORD +0 -94
- {tsp-1.8.1.dist-info → tsp-1.10.2.dist-info}/WHEEL +0 -0
tsp/__init__.py
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
|
-
from tsp.core import TSP, IndexedTSP
|
|
2
|
-
from tsp.misc import _is_depth_column
|
|
3
|
-
|
|
4
|
-
from tsp.plots.static import trumpet_curve, time_series, colour_contour
|
|
5
|
-
from tsp.readers import read_gtnp, read_geotop, read_geoprecision, read_hoboware, read_ntgs, read_logr, read_csv, read_rbr
|
|
6
|
-
from tsp.utils import resolve_duplicate_times
|
|
7
|
-
from tsp.version import version as __version__
|
|
8
|
-
|
|
9
|
-
#TSP.__module__ = "teaspoon"
|
|
10
|
-
|
|
11
|
-
__all__ = ["TSP", "IndexedTSP"]
|
|
1
|
+
from tsp.core import TSP, IndexedTSP
|
|
2
|
+
from tsp.misc import _is_depth_column
|
|
3
|
+
|
|
4
|
+
from tsp.plots.static import trumpet_curve, time_series, colour_contour
|
|
5
|
+
from tsp.readers import read_gtnp, read_geotop, read_geoprecision, read_hoboware, read_ntgs, read_logr, read_csv, read_rbr
|
|
6
|
+
from tsp.utils import resolve_duplicate_times
|
|
7
|
+
from tsp.version import version as __version__
|
|
8
|
+
|
|
9
|
+
#TSP.__module__ = "teaspoon"
|
|
10
|
+
|
|
11
|
+
__all__ = ["TSP", "IndexedTSP"]
|
tsp/__meta__.py
CHANGED
tsp/concatenation.py
CHANGED
|
@@ -1,153 +1,159 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import numpy as np
|
|
4
|
-
import pandas as pd
|
|
5
|
-
|
|
6
|
-
from typing import Any, TYPE_CHECKING
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
if TYPE_CHECKING:
|
|
10
|
-
from tsp import TSP
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
def _tsp_concat(tsp_list: "list[TSP]", on_conflict="error", metadata='first') -> dict[str, Any]:
|
|
14
|
-
""" Helper for core.tsp_concat """
|
|
15
|
-
# Validate the TSPs in the list
|
|
16
|
-
_validate_tsp_list(tsp_list)
|
|
17
|
-
|
|
18
|
-
# Combine the TSPs
|
|
19
|
-
dfs = [t.wide for t in tsp_list]
|
|
20
|
-
combined_df = _concat_deduplicate(dfs, on_conflict=on_conflict)
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
for key, val in metadata.items()
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
#
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
'
|
|
57
|
-
'
|
|
58
|
-
'
|
|
59
|
-
'
|
|
60
|
-
'
|
|
61
|
-
'
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
import pandas as pd
|
|
5
|
+
|
|
6
|
+
from typing import Any, TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from tsp import TSP
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _tsp_concat(tsp_list: "list[TSP]", on_conflict: str = "error", metadata: str = 'first') -> dict[str, Any]:
    """ Helper for core.tsp_concat

    Combine a list of TSP objects into a dict of keyword arguments
    suitable for constructing a new, merged TSP.

    Parameters
    ----------
    tsp_list : list[TSP]
        TSPs to combine. All must have identical depths.
    on_conflict : str
        How duplicate timestamps with differing values are resolved;
        one of 'error', 'keep_first', 'keep_last' (passed through to
        _concat_deduplicate).
    metadata : str
        How metadata, coordinates and site_id are combined:
        'first' (take from the first TSP), 'identical' (keep only entries
        shared by every TSP), or 'none' (discard).

    Returns
    -------
    dict[str, Any]
        Keys: times, values, depths, latitude, longitude, site_id,
        metadata, counts.

    Raises
    ------
    ValueError
        If the TSPs have mismatched depths or `metadata` is unrecognized.
    """
    # Validate the TSPs in the list
    _validate_tsp_list(tsp_list)

    # Combine the TSPs
    dfs = [t.wide for t in tsp_list]
    combined_df = _concat_deduplicate(dfs, on_conflict=on_conflict)
    combined_counts = _concat_deduplicate(
        [pd.DataFrame(t.counts, columns=t.depths, index=t.wide.index) for t in tsp_list],
        on_conflict=on_conflict).values

    # Combine metadata
    if metadata == 'first':
        metadata = {key: val for key, val in tsp_list[0].metadata.items()}
        latitude = tsp_list[0].latitude
        longitude = tsp_list[0].longitude
        site_id = tsp_list[0].site_id

    elif metadata == 'identical':
        metadata = {key: val for key, val in tsp_list[0].metadata.items()}
        # Iterate over a snapshot: the original popped from `metadata`
        # while iterating metadata.items() directly, which raises
        # RuntimeError ("dictionary changed size during iteration") on the
        # first removed key, and could pop the same key twice (KeyError).
        for key, val in list(metadata.items()):
            for t in tsp_list[1:]:
                if key not in t.metadata or t.metadata[key] != val:
                    _ = metadata.pop(key)
                    break  # key already removed; don't inspect/pop it again
        latitude = _none_if_not_identical([t.latitude for t in tsp_list])
        longitude = _none_if_not_identical([t.longitude for t in tsp_list])
        site_id = _none_if_not_identical([t.site_id for t in tsp_list])

    elif metadata == 'none':
        metadata = None
        latitude, longitude, site_id = None, None, None

    else:
        raise ValueError(f"Unknown metadata method: {metadata}")

    # Drop the legacy 'time' column if it survived concatenation.
    try:
        combined_df.drop('time', axis=1, inplace=True)
    except KeyError:
        # NOTE(review): this bare `Warning(...)` only constructs an exception
        # object; nothing is raised or emitted (a no-op). Intent is unclear --
        # warning on the *absence* of 'time' would fire for every modern TSP,
        # so the no-op is preserved as-is. Confirm intended behavior
        # (probably warnings.warn(...) in the *success* path was meant).
        Warning("Deprecation Error: The 'time' column is no longer used in TSP objects. Please update your code to avoid this warning.")

    tsp_dict = {
        'times': combined_df.index,
        'values': combined_df.values,
        'depths': combined_df.columns,
        'latitude': latitude,
        'longitude': longitude,
        'site_id': site_id,
        'metadata': metadata,
        'counts': combined_counts
    }
    return tsp_dict
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def _none_if_not_identical(list):
|
|
69
|
+
"""Check if all elements in the list are identical. If they are, return the first element; otherwise, return None."""
|
|
70
|
+
first = list[0]
|
|
71
|
+
for item in list[1:]:
|
|
72
|
+
if item != first:
|
|
73
|
+
return None
|
|
74
|
+
return first
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _validate_tsp_list(tsp_list: "list[TSP]"):
|
|
78
|
+
"""Check that all TSPs in the list have the same depths."""
|
|
79
|
+
depths0 = tsp_list[0].depths
|
|
80
|
+
for t in tsp_list[1:]:
|
|
81
|
+
if not np.array_equal(depths0, t.depths):
|
|
82
|
+
raise ValueError("All TSPs must have the same depths.")
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _concat_deduplicate(df_list, on_conflict='error'):
|
|
86
|
+
"""
|
|
87
|
+
Concatenates a list of DataFrames, handling duplicate indices based on row values.
|
|
88
|
+
|
|
89
|
+
Args:
|
|
90
|
+
df_list (list): A list of pandas DataFrames. Assumes they have identical
|
|
91
|
+
column names.
|
|
92
|
+
on_conflict (str): Specifies how to handle duplicate indices with
|
|
93
|
+
unequal row values.
|
|
94
|
+
- 'error': Raise a ValueError (default).
|
|
95
|
+
- 'keep_first': Keep the row corresponding to the first
|
|
96
|
+
DataFrame in the list where the index appeared.
|
|
97
|
+
- 'keep_last': Keep the row corresponding to the last
|
|
98
|
+
DataFrame in the list where the index appeared.
|
|
99
|
+
|
|
100
|
+
Returns:
|
|
101
|
+
pandas.DataFrame: The concatenated DataFrame with duplicates handled
|
|
102
|
+
according to the specified rules.
|
|
103
|
+
|
|
104
|
+
Raises:
|
|
105
|
+
ValueError: If df_list is empty.
|
|
106
|
+
ValueError: If on_conflict is not 'error', 'keep_first', or 'keep_last'.
|
|
107
|
+
ValueError: If on_conflict='error' and duplicate indices with
|
|
108
|
+
non-identical row values are found.
|
|
109
|
+
"""
|
|
110
|
+
if not df_list:
|
|
111
|
+
raise ValueError("Input DataFrame list cannot be empty.")
|
|
112
|
+
|
|
113
|
+
if on_conflict not in ['error', 'keep_first', 'keep_last']:
|
|
114
|
+
raise ValueError("on_conflict must be either 'error', 'keep_first', or 'keep_last'")
|
|
115
|
+
|
|
116
|
+
# Store original index name if it exists
|
|
117
|
+
original_index_name = df_list[0].index.name
|
|
118
|
+
|
|
119
|
+
# Concatenate all DataFrames. The order is preserved.
|
|
120
|
+
combined_df = pd.concat(df_list, ignore_index=False) # Keep original indices
|
|
121
|
+
|
|
122
|
+
temp_index_col = "__temp_index__"
|
|
123
|
+
combined_reset = combined_df.reset_index(names=temp_index_col)
|
|
124
|
+
|
|
125
|
+
# Drop rows that are duplicates based on *all* columns
|
|
126
|
+
deduplicated_reset = combined_reset.drop_duplicates(keep='first')
|
|
127
|
+
|
|
128
|
+
# Check for remaining duplicates *only* in the original index column.
|
|
129
|
+
remaining_duplicates_mask = deduplicated_reset.duplicated(subset=temp_index_col, keep=False)
|
|
130
|
+
|
|
131
|
+
if remaining_duplicates_mask.any():
|
|
132
|
+
# We have indices that appeared multiple times with different values.
|
|
133
|
+
if on_conflict == 'error':
|
|
134
|
+
conflicting_indices = deduplicated_reset.loc[remaining_duplicates_mask, temp_index_col].unique()
|
|
135
|
+
raise ValueError(
|
|
136
|
+
f"Duplicate indices with non-identical values found: "
|
|
137
|
+
f"{list(conflicting_indices)}. Use on_conflict='keep_first' to keep "
|
|
138
|
+
f"the first occurrence."
|
|
139
|
+
)
|
|
140
|
+
elif on_conflict == 'keep_first':
|
|
141
|
+
# Drop the later occurrences of these conflicting index values.
|
|
142
|
+
# Since 'deduplicated_reset' preserved the first unique (index, row_value)
|
|
143
|
+
# combination, dropping duplicates based solely on the index column
|
|
144
|
+
# while keeping the first achieves the desired outcome.
|
|
145
|
+
final_reset = deduplicated_reset.drop_duplicates(subset=temp_index_col, keep='first')
|
|
146
|
+
elif on_conflict == 'keep_last':
|
|
147
|
+
final_reset = deduplicated_reset.drop_duplicates(subset=temp_index_col, keep='last')
|
|
148
|
+
else:
|
|
149
|
+
pass
|
|
150
|
+
else:
|
|
151
|
+
# No conflicting duplicates (duplicate indices with different values) were found.
|
|
152
|
+
final_reset = deduplicated_reset
|
|
153
|
+
|
|
154
|
+
final_df = final_reset.set_index(temp_index_col)
|
|
155
|
+
final_df.index.name = original_index_name
|
|
156
|
+
# Sort by time, ascending
|
|
157
|
+
final_df.sort_index(inplace=True)
|
|
158
|
+
|
|
159
|
+
return final_df
|