ticoi-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ticoi might be problematic.
- ticoi/__about__.py +1 -0
- ticoi/__init__.py +0 -0
- ticoi/core.py +1500 -0
- ticoi/cube_data_classxr.py +2204 -0
- ticoi/cube_writer.py +741 -0
- ticoi/example.py +81 -0
- ticoi/filtering_functions.py +676 -0
- ticoi/interpolation_functions.py +236 -0
- ticoi/inversion_functions.py +1015 -0
- ticoi/mjd2date.py +31 -0
- ticoi/optimize_coefficient_functions.py +264 -0
- ticoi/pixel_class.py +1830 -0
- ticoi/seasonality_functions.py +209 -0
- ticoi/utils.py +725 -0
- ticoi-0.0.1.dist-info/METADATA +152 -0
- ticoi-0.0.1.dist-info/RECORD +18 -0
- ticoi-0.0.1.dist-info/WHEEL +4 -0
- ticoi-0.0.1.dist-info/licenses/LICENSE +165 -0
ticoi/interpolation_functions.py

@@ -0,0 +1,236 @@

"""
Auxiliary functions to process the temporal interpolation.

Author : Laurane Charrier, Lei Guo, Nathan Lioret
Reference:
Charrier, L., Yan, Y., Koeniguer, E. C., Leinss, S., & Trouvé, E. (2021). Extraction of velocity time series with an optimal temporal sampling from displacement
observation networks. IEEE Transactions on Geoscience and Remote Sensing.
Charrier, L., Yan, Y., Colin Koeniguer, E., Mouginot, J., Millan, R., & Trouvé, E. (2022). Fusion of multi-temporal and multi-sensor ice velocity observations.
ISPRS annals of the photogrammetry, remote sensing and spatial information sciences, 3, 311-318.
"""

from typing import List, Optional

import numpy as np
import pandas as pd
import scipy.ndimage as ndi
from scipy import interpolate

from ticoi.pixel_class import PixelClass

def reconstruct_common_ref(
    result: pd.DataFrame,
    second_date_list: List[np.datetime64] | None = None,
) -> pd.DataFrame:
    """
    Build the Cumulative Displacement (CD) time series with a Common Reference (CR) from a Leap Frog time series.

    :param result: [pd dataframe] --- Leap frog displacements for the x- and y-components
    :param second_date_list: [list | None] --- List of dates onto which the leap frog displacements will be reindexed

    :return data: [pd dataframe] --- Cumulative displacement time series for the x- and y-components
    """

    if result.empty:
        length = 1 if second_date_list is None else len(second_date_list)
        nan_list = np.full(length, np.nan)
        second_dates = [np.nan] if second_date_list is None else second_date_list
        return pd.DataFrame(
            {
                "Ref_date": nan_list,
                "Second_date": second_dates,
                "dx": nan_list,
                "dy": nan_list,
                "xcount_x": nan_list,
                "xcount_y": nan_list,
            }
        )

    # Common Reference: every displacement is expressed relative to the first date
    data = pd.DataFrame(
        {
            "Ref_date": result["date1"][0],
            "Second_date": result["date2"],
        }
    )

    for var in result.columns.difference(["date1", "date2"]):
        if var in ["result_dx", "result_dy", "xcount_x", "xcount_y", "error_x", "error_y", "xcount_z"]:
            data[var] = result[var].values.cumsum()
    data = data.rename(columns={"result_dx": "dx", "result_dy": "dy"})

    if second_date_list is not None:
        # Reindex onto the full list of second dates, leaving NaN where no estimation is available
        tmp = pd.DataFrame(
            {
                "Ref_date": pd.NaT,
                "Second_date": second_date_list,
                **{var: np.nan for var in data.columns.difference(["Ref_date", "Second_date"])},
            }
        )

        positions = np.searchsorted(second_date_list, data["Second_date"].values)
        tmp.iloc[positions] = data.values

        return tmp
    return data

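As a quick illustration of how the leap-frog series is turned into a common-reference series, here is a minimal, hypothetical usage sketch (not part of the released file): the column names (date1, date2, result_dx, result_dy, xcount_x, xcount_y) follow the function body above, but the values are invented.

```python
import pandas as pd

from ticoi.interpolation_functions import reconstruct_common_ref

# Leap-frog displacements between consecutive dates (invented values)
leap_frog = pd.DataFrame(
    {
        "date1": pd.to_datetime(["2020-01-01", "2020-01-13", "2020-01-25"]),
        "date2": pd.to_datetime(["2020-01-13", "2020-01-25", "2020-02-06"]),
        "result_dx": [1.0, 2.0, 1.5],
        "result_dy": [0.5, 0.5, 1.0],
        "xcount_x": [3, 4, 2],
        "xcount_y": [3, 4, 2],
    }
)

# Cumulative displacements, all referenced to the first date (2020-01-01):
# dx becomes [1.0, 3.0, 4.5] and dy becomes [0.5, 1.0, 2.0]
common_ref = reconstruct_common_ref(leap_frog)
print(common_ref[["Ref_date", "Second_date", "dx", "dy"]])
```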
def set_function_for_interpolation(
    option_interpol: str, x: np.ndarray, dataf: pd.DataFrame, result_quality: list | None
) -> (
    interpolate.interp1d | interpolate.UnivariateSpline,
    interpolate.interp1d | interpolate.UnivariateSpline,
    interpolate.interp1d | interpolate.UnivariateSpline | None,
    interpolate.interp1d | interpolate.UnivariateSpline | None,
    interpolate.interp1d | interpolate.UnivariateSpline | None,
    interpolate.interp1d | interpolate.UnivariateSpline | None,
):  # type: ignore
    """
    Get the functions used to interpolate each of the time series.

    :param option_interpol: [str] --- Type of interpolation, it can be 'spline', 'spline_smooth' or 'nearest'
    :param x: [np array] --- Integers corresponding to the times at which the displacements have been estimated
    :param dataf: [pd dataframe] --- Data to interpolate
    :param result_quality: [list | None] [default is None] --- List which can contain 'Norm_residual' to determine the L2 norm of the residuals from the last inversion, 'X_contribution' to determine the number of Y observations which have contributed to estimate each value in X (it corresponds to A.dot(weight)), and 'Error_propagation' to interpolate the propagated errors

    :return fdx, fdy: [functions] --- The functions used to interpolate dx and dy
    :return fdx_xcount, fdy_xcount: [functions | None] --- The functions used to interpolate the X contributions
    :return fdx_error, fdy_error: [functions | None] --- The functions used to interpolate the propagated errors
    """

    assert type(option_interpol) is str and option_interpol in [
        "spline_smooth",
        "spline",
        "nearest",
    ], "option_interpol must be a string among: 'spline_smooth', 'spline', 'nearest'."

    # Define the interpolation functions according to the interpolation option
    interpolation_functions = {
        "spline_smooth": lambda x, y: interpolate.UnivariateSpline(x, y, k=3),
        "spline": lambda x, y: interpolate.interp1d(x, y, kind="cubic"),
        "nearest": lambda x, y: interpolate.interp1d(x, y, kind="nearest"),
    }

    # Build the interpolators used for the displacement time series
    interpolation_func = interpolation_functions[option_interpol]

    fdx = interpolation_func(x, dataf["dx"])
    fdy = interpolation_func(x, dataf["dy"])

    fdx_xcount, fdy_xcount, fdx_error, fdy_error = None, None, None, None
    if result_quality is not None:
        if "X_contribution" in result_quality:
            fdx_xcount = interpolation_func(x, dataf["xcount_x"])
            fdy_xcount = interpolation_func(x, dataf["xcount_y"])
        if "Error_propagation" in result_quality:
            fdx_error = interpolation_func(x, dataf["error_x"])
            fdy_error = interpolation_func(x, dataf["error_y"])

    return fdx, fdy, fdx_xcount, fdy_xcount, fdx_error, fdy_error

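A minimal sketch of how these interpolators can be used to resample a common-reference series onto a regular grid. It assumes x is a numeric axis such as days elapsed since the reference date (an assumption for illustration; any monotonically increasing numeric axis behaves the same), and the dx/dy values are invented.

```python
import numpy as np
import pandas as pd

from ticoi.interpolation_functions import set_function_for_interpolation

x = np.array([0, 12, 24, 36, 48, 60])  # e.g. days since the reference date
dataf = pd.DataFrame(
    {
        "dx": [0.0, 1.0, 3.0, 4.5, 5.0, 6.2],
        "dy": [0.0, 0.5, 1.0, 2.0, 2.4, 3.0],
    }
)

# With result_quality=None only the dx/dy interpolators are built
fdx, fdy, *_ = set_function_for_interpolation("spline", x, dataf, result_quality=None)

x_regular = np.arange(0, 61, 6)  # resample every 6 days
dx_regular, dy_regular = fdx(x_regular), fdy(x_regular)
print(dx_regular.round(2), dy_regular.round(2))
```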
def full_with_nan(dataf_lp: pd.DataFrame, first_date: pd.Series, second_date: pd.Series) -> pd.DataFrame:
    """
    Pad the interpolated results with NaN rows for the date pairs of the entire cube that have no estimation.

    :param dataf_lp: [pd dataframe] --- Interpolated results
    :param first_date: [pd series] --- List of first dates of the entire cube
    :param second_date: [pd series] --- List of second dates of the entire cube

    :return dataf_lp: [pd dataframe] --- Interpolated results with rows of NaN wherever an estimation is missing in comparison with the entire cube
    """

    nul_df = pd.DataFrame(
        {
            "date1": first_date,
            "date2": second_date,
            "vx": np.full(len(first_date), np.nan),
            "vy": np.full(len(first_date), np.nan),
        }
    )

    if "xcount_x" in dataf_lp.columns:
        nul_df["xcount_x"] = np.full(len(first_date), np.nan)
        nul_df["xcount_y"] = np.full(len(first_date), np.nan)
    if "error_x" in dataf_lp.columns:
        nul_df["error_x"] = np.full(len(first_date), np.nan)
        nul_df["error_y"] = np.full(len(first_date), np.nan)
    dataf_lp = pd.concat([nul_df, dataf_lp], ignore_index=True)

    return dataf_lp

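A short hypothetical sketch of the padding behaviour (the dates and velocities are invented): the date pairs of the entire cube are prepended as NaN rows ahead of the rows that do have an estimation.

```python
import pandas as pd

from ticoi.interpolation_functions import full_with_nan

# One interpolated estimation for this pixel (invented values)
interp = pd.DataFrame(
    {
        "date1": pd.to_datetime(["2020-01-01"]),
        "date2": pd.to_datetime(["2020-01-13"]),
        "vx": [120.0],
        "vy": [35.0],
    }
)

# Date pairs of the entire cube that have no estimation for this pixel
first_date = pd.Series(pd.to_datetime(["2020-01-13", "2020-01-25"]))
second_date = pd.Series(pd.to_datetime(["2020-01-25", "2020-02-06"]))

padded = full_with_nan(interp, first_date, second_date)
print(padded)  # 3 rows: two NaN rows for the missing pairs, then the estimated one
```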
def smooth_results(result: np.ndarray, window_size: int = 3) -> np.ndarray:
    r"""
    Spatially smooth the data by averaging (applying a convolution filter to) each pixel with its neighborhood.
    /!\ This method only works with cubes where both starting and ending dates exactly correspond for each pixel (i.e. TICOI results)

    :param result: [np array] --- Results for one variable (pandas dataframe of TICOI results transformed to an array, as in cube_data_class.write_result_ticoi)
    :param window_size: [int] [default is 3] --- Size of the window for mean filtering

    :return result: [np array] --- Smoothed results
    """

    # Uniform (mean) kernel of shape (window_size, window_size)
    filt = np.full((window_size, window_size), 1 / window_size**2)

    # Filter the data at each date
    for t in range(result.shape[-1]):
        result[:, :, t] = ndi.correlate(result[:, :, t], filt, mode="nearest")

    return result

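To make the filtering step concrete: with the default 3x3 kernel each pixel is replaced, date by date, by the mean of its 3x3 neighbourhood, with edges replicated (mode="nearest"). A minimal sketch on random data, which for a uniform kernel should agree with scipy.ndimage.uniform_filter applied slice by slice.

```python
import numpy as np
import scipy.ndimage as ndi

from ticoi.interpolation_functions import smooth_results

rng = np.random.default_rng(0)
cube = rng.normal(size=(5, 5, 4))  # (y, x, time), invented data

# Copy so the in-place smoothing does not clobber the reference input
smoothed = smooth_results(cube.copy(), window_size=3)

# Same mean filter expressed with uniform_filter, one date at a time
reference = np.stack(
    [ndi.uniform_filter(cube[:, :, t], size=3, mode="nearest") for t in range(cube.shape[-1])],
    axis=-1,
)
print(np.allclose(smoothed, reference))  # expected: True
```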
def visualisation_interpolation(
    list_dataf: pd.DataFrame,
    option_visual: List = [
        "interp_xy_overlaid",
        "interp_xy_overlaid_zoom",
        "invertvv_overlaid",
        "invertvv_overlaid_zoom",
        "direction_overlaid",
        "quality_metrics",
    ],
    save: bool = False,
    show: bool = True,
    path_save: Optional[str] = None,
    colors: List[str] = ["blueviolet", "orange"],
    figsize: tuple[int, int] = (10, 6),
    vminmax: Optional[List[int]] = None,
):
    """
    Plot some relevant information about TICOI results.

    :param list_dataf: [pd dataframe] --- Results after the interpolation in TICOI processing
    :param option_visual: [list] --- List of the plots to prepare (each plot shows a different piece of information)
    :param save: [bool] [default is False] --- If True, save the figures to path_save (if not None)
    :param show: [bool] [default is True] --- If True, plot the figures
    :param path_save: [str | None] [default is None] --- Path where the figures must be saved if save is True
    :param colors: [List[str]] [default is ["blueviolet", "orange"]] --- Colors for the plots
    :param figsize: [tuple] [default is (10, 6)] --- Size of the figures
    :param vminmax: [List[int] | None] [default is None] --- Min and max values for the y-axis of the plots
    """

    pixel_object = PixelClass()
    pixel_object.load(
        list_dataf, save=save, show=show, path_save=path_save, type_data=["obs", "interp"], figsize=figsize
    )

    # Map each visualisation option to the corresponding plotting method
    # (keys aligned with the names accepted in option_visual)
    dico_visual = {
        "interp_xy_overlaid": (
            lambda pix: pix.plot_vx_vy_overlaid(type_data="interp", colors=colors, zoom_on_results=False)
        ),
        "interp_xy_overlaid_zoom": (
            lambda pix: pix.plot_vx_vy_overlaid(type_data="interp", colors=colors, zoom_on_results=True)
        ),
        "invertvv_overlaid": (
            lambda pix: pix.plot_vv_overlaid(type_data="interp", colors=colors, zoom_on_results=False, vminmax=vminmax)
        ),
        "invertvv": (lambda pix: pix.plot_vv(type_data="interp", color=colors[1], vminmax=vminmax)),
        "invertvv_overlaid_zoom": (
            lambda pix: pix.plot_vv_overlaid(type_data="interp", colors=colors, zoom_on_results=True, vminmax=vminmax)
        ),
        "direction_overlaid": (lambda pix: pix.plot_direction_overlaid(type_data="interp")),
        "quality_metrics": (lambda pix: pix.plot_quality_metrics()),
    }

    for option in option_visual:
        if option in dico_visual.keys():
            dico_visual[option](pixel_object)
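A call sketch, not a self-contained example: list_dataf is assumed to be the per-pixel result produced by the earlier TICOI inversion and interpolation steps (see ticoi/core.py in this release) and is not constructed here; the option names come from the defaults above, while the output directory "plots/" and the vminmax range are hypothetical.

```python
from ticoi.interpolation_functions import visualisation_interpolation

# list_dataf: interpolated result for one pixel, produced by the TICOI processing chain
visualisation_interpolation(
    list_dataf,
    option_visual=["interp_xy_overlaid", "invertvv_overlaid"],
    save=True,
    show=False,
    path_save="plots/",   # hypothetical output directory
    vminmax=[0, 300],     # hypothetical clipping of the velocity axis
)
```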