ticoi-0.0.1-py3-none-any.whl → ticoi-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ticoi might be problematic.

ticoi/__about__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.0.1"
+ __version__ = "0.1.0"
ticoi/cube_data_classxr.py CHANGED
@@ -37,13 +37,20 @@ from ticoi.filtering_functions import dask_filt_warpper, dask_smooth_wrapper
  from ticoi.inversion_functions import construction_dates_range_np
  from ticoi.mjd2date import mjd2date
 
+
+ from typing import Literal
+
+ MethodInterp = Literal["linear", "nearest", "zero", "slinear", "quadratic", "cubic"]
+ ReturnAs = Literal["dataframe", "cube"]
+
+
  # %% ======================================================================== #
  #                              CUBE DATA CLASS                                #
  # =========================================================================%% #
 
 
  class CubeDataClass:
-     def __init__(self, cube=None, ds=None):
+     def __init__(self, cube=None, ds: xr.Dataset = None):
          """
          Initialisation of the main attributes, or copy cube's attributes and ds dataset if given.
 
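The two `Literal` aliases added above narrow `method_interp` and `return_as` from plain `str` to a fixed set of accepted values. A minimal sketch (hypothetical function, not from the package) of what a static type checker then catches:

```python
from typing import Literal

MethodInterp = Literal["linear", "nearest", "zero", "slinear", "quadratic", "cubic"]

def pick_interp(method_interp: MethodInterp = "linear") -> str:
    # Literal is not enforced at runtime; it only guides static checkers.
    return method_interp

pick_interp("cubic")    # accepted
# pick_interp("qubic")  # mypy: incompatible type "str"; expected a valid Literal
```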
@@ -267,12 +274,6 @@ class CubeDataClass:
 
          self.update_dimension()  # Update self.nx,self.ny,self.nz
 
-         if conf:
-             minconfx = np.nanmin(self.ds["vx_error"].values[:])
-             maxconfx = np.nanmax(self.ds["vx_error"].values[:])
-             minconfy = np.nanmin(self.ds["vy_error"].values[:])
-             maxconfy = np.nanmax(self.ds["vy_error"].values[:])
-
          date1 = np.array([np.datetime64(date_str, "D") for date_str in self.ds["acquisition_date_img1"].values])
          date2 = np.array([np.datetime64(date_str, "D") for date_str in self.ds["acquisition_date_img2"].values])
 
@@ -295,6 +296,10 @@ class CubeDataClass:
          sensor[np.isin(sensor, ["S2A", "S2B"])] = "Sentinel-2"
 
          if conf:  # Normalize the error between 0 and 1, and convert error in confidence
+             minconfx = np.nanmin(self.ds["vx_error"].values[:])
+             maxconfx = np.nanmax(self.ds["vx_error"].values[:])
+             minconfy = np.nanmin(self.ds["vy_error"].values[:])
+             maxconfy = np.nanmax(self.ds["vy_error"].values[:])
              errorx = 1 - (self.ds["vx_error"].values - minconfx) / (maxconfx - minconfx)
              errory = 1 - (self.ds["vy_error"].values - minconfy) / (maxconfy - minconfy)
          else:
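For reference, a standalone NumPy sketch of the min-max inversion this relocated block performs (synthetic values; behavior inferred from the diff): the largest error maps to confidence 0 and the smallest to confidence 1.

```python
import numpy as np

vx_error = np.array([0.5, 1.0, 2.0, 4.0])
conf_x = 1 - (vx_error - np.nanmin(vx_error)) / (np.nanmax(vx_error) - np.nanmin(vx_error))
print(conf_x)  # [1.         0.85714286 0.57142857 0.        ]
```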
@@ -687,7 +692,7 @@ class CubeDataClass:
          filepath: list | str,
          chunks: dict | str | int = {},
          conf: bool = False,
-         subset: str | None = None,
+         subset: list[np.array] | None = None,
          buffer: str | None = None,
          pick_date: str | None = None,
          pick_sensor: str | None = None,
@@ -1044,7 +1049,7 @@ class CubeDataClass:
          :params i, j: [int | float] --- Coordinates to be converted
          :param unit: [int] [default is 365] --- 1 for m/d, 365 for m/y
          :param regu: [int | str] [default is '1accelnotnull'] --- Type of regularization
-         :param coef: [int] [default is 100] --- Coef of Tikhonov regularisation
+         :param coef: [int] [default is 100] --- Coef of Tikhonov regularization
          :param flag: [xr dataset | None] [default is None] --- If not None, the values of the coefficient used for stable areas, surge glacier and non surge glacier
          :param solver: [str] [default is 'LSMR_ini'] --- Solver of the inversion: 'LSMR', 'LSMR_ini', 'LS', 'LS_bounded', 'LSQR'
          :param interp: [str] [default is 'nearest'] --- Interpolation method used to load the pixel when it is not in the dataset ('nearest' or 'linear')
@@ -1056,8 +1061,8 @@ class CubeDataClass:
          :return data: [list | None] --- A list 2 elements : the first one is np.ndarray with the observed
          :return mean: [list | None] --- A list with average vx and vy if solver=LSMR_ini, but the regularization do not require an apriori on the acceleration
          :return dates_range: [list | None] --- Dates between which the displacements will be inverted
-         :return regu: [np array | Nothing] --- If flag is not None, regularisation method to be used for each pixel
-         :return coef: [np array | Nothing] --- If flag is not None, regularisation coefficient to be used for each pixel
+         :return regu: [np array | Nothing] --- If flag is not None, regularization method to be used for each pixel
+         :return coef: [np array | Nothing] --- If flag is not None, regularization coefficient to be used for each pixel
          """
 
          # Variables to keep
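Background for the `coef` parameter documented above: a generic Tikhonov-regularized least-squares sketch (not TICOI's actual system matrix or solver). The coefficient weights a penalty term in min_x ||Ax − b||² + coef·||Γx||², solved here via the stacked-matrix trick with NumPy's `lstsq`.

```python
import numpy as np

rng = np.random.default_rng(0)
A = rng.normal(size=(20, 10))  # design matrix (placeholder)
b = rng.normal(size=20)        # observations (placeholder)
coef = 100.0                   # regularization weight, cf. the `coef` default
gamma = np.eye(10)             # identity penalty for this sketch

# min ||Ax - b||^2 + coef * ||gamma x||^2  ==  lstsq on the stacked system
A_aug = np.vstack([A, np.sqrt(coef) * gamma])
b_aug = np.concatenate([b, np.zeros(10)])
x, *_ = np.linalg.lstsq(A_aug, b_aug, rcond=None)
print(x.shape)  # (10,)
```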
@@ -1148,7 +1153,7 @@ class CubeDataClass:
          Delete outliers according to a certain criterium.
 
          :param delete_outliers: [str | float] --- If float delete all velocities which a quality indicator higher than delete_outliers, if median_filter delete outliers that an angle 45° away from the average vector
-         :param flag: [xr dataset | None] [default is None] --- If not None, the values of the coefficient used for stable areas, surge glacier and non surge glacier
+         :param flag: [xr dataset | None] [default is None] --- If not None, the values of the coefficient used for static areas, surge glacier and non surge glacier
          """
 
          if isinstance(delete_outliers, int) or isinstance(delete_outliers, str):
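A hedged illustration of the `median_filter` criterion described in the docstring (the helper is illustrative, not the package's implementation): a velocity vector counts as an outlier when it points more than 45° away from the average vector.

```python
import numpy as np

def is_angle_outlier(vx, vy, vx_mean, vy_mean, max_angle_deg=45.0):
    # Angle between (vx, vy) and the mean vector, from the normalized dot product.
    dot = vx * vx_mean + vy * vy_mean
    norms = np.hypot(vx, vy) * np.hypot(vx_mean, vy_mean)
    angle = np.degrees(np.arccos(np.clip(dot / norms, -1.0, 1.0)))
    return angle > max_angle_deg

print(is_angle_outlier(1.0, 0.0, 1.0, 1.0))   # exactly 45.0° -> False
print(is_angle_outlier(-1.0, 0.2, 1.0, 0.0))  # ~169° -> True
```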
@@ -1350,7 +1355,7 @@ class CubeDataClass:
          elif flag.split(".")[-1] in ["shp", "gpkg"]:  # If flag is a shape file
              flag = geopandas.read_file(flag).to_crs(self.ds.proj4).clip(self.ds.rio.bounds())
 
-             # surge-type glacier: 2, other glacier: 1, stable area: 0
+             # surge-type glacier: 2, other glacier: 1, static area: 0
              if field_name is None:
                  if "surge_type" in flag.columns:  # RGI inventory, surge-type glacier: 2, other glacier: 0
                      default_value = 0
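A self-contained sketch of the read/reproject/clip chain used above, with a synthetic GeoDataFrame standing in for a `.shp`/`.gpkg` file (`clip` also accepts a bounds tuple, as shown):

```python
import geopandas as gpd
from shapely.geometry import box

# Stand-in for geopandas.read_file("glaciers.shp")
gdf = gpd.GeoDataFrame({"surge_type": [0]}, geometry=[box(0, 0, 1, 1)], crs="EPSG:4326")

# Reproject (a no-op here) and clip to raster bounds, mirroring
# read_file(flag).to_crs(self.ds.proj4).clip(self.ds.rio.bounds())
clipped = gdf.to_crs("EPSG:4326").clip((0.25, 0.25, 0.75, 0.75))
print(clipped.total_bounds)  # [0.25 0.25 0.75 0.75]
```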
@@ -1417,7 +1422,7 @@ class CubeDataClass:
          velo_or_disp: str = "velo",
          select_baseline: int | None = 180,
          verbose: bool = False,
-     ) -> xr.Dataset:
+     ) -> xr.Dataset | None:
          """
          Filter the original data before the inversion:
          -delete outliers according to the provided criterion
@@ -1432,7 +1437,7 @@ class CubeDataClass:
          :param order: [int] [default is 3] --- Order of the smoothing function
          :param unit: [int] [default is 365] --- 365 if the unit is m/y, 1 if the unit is m/d
          :param delete_outliers: [str | float | None] [default is None] --- If float delete all velocities which a quality indicator higher than delete_outliers
-         :param flag: [xr dataset | None] [default is None] --- If not None, the values of the coefficient used for stable areas, surge glacier and non surge glacier
+         :param flag: [xr dataset | None] [default is None] --- If not None, the values of the coefficient used for static areas, surge glacier and non surge glacier
          :param regu: [int | str] [default is "1accelnotnull"] --- Regularisation of the solver
          :param solver: [str] [default is 'LSMR_ini'] --- Solver used to invert the system
          :param proj: [str] [default is 'EPSG:4326'] --- EPSG of i,j projection
@@ -1443,7 +1448,9 @@ class CubeDataClass:
          :return obs_filt: [xr dataset | None] --- Filtered dataset
          """
 
-         def loop_rolling(da_arr: xr.Dataset, select_baseline: int | None = 180) -> (np.ndarray, np.ndarray):  # type: ignore
+         def loop_rolling(
+             da_arr: xr.Dataset | xr.DataArray, select_baseline: int | None = 180
+         ) -> (np.ndarray, np.ndarray):  # type: ignore
              """
              A function to calculate spatial mean, resample data, and calculate smoothed velocity.
 
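An aside on the `# type: ignore` that survives the reformat: a parenthesized pair is not a valid return annotation, which is why the comment is needed. The checker-friendly spelling, shown on a hypothetical stub, would be `tuple[np.ndarray, np.ndarray]`:

```python
import numpy as np

def loop_rolling_stub() -> tuple[np.ndarray, np.ndarray]:
    # Stub with the same return shape as loop_rolling's contract.
    mean = np.zeros(3)
    smoothed = np.zeros(3)
    return mean, smoothed
```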
@@ -1532,7 +1539,7 @@ class CubeDataClass:
          self.ds["vx"] = self.ds["vx"] / self.ds["temporal_baseline"] * unit
          self.ds["vy"] = self.ds["vy"] / self.ds["temporal_baseline"] * unit
 
-         if flag is not None:  # create a flag, to identify stable areas, and eventually surges
+         if flag is not None:  # create a flag, to identify static areas, and eventually surges
              flag = self.create_flag(flag)
              flag.load()
 
@@ -1927,7 +1934,7 @@ class CubeDataClass:
          self,
          points_heatmap: pd.DataFrame,
          variable: str = "vv",
-         method_interp: str = "linear",
+         method_interp: MethodInterp = "linear",
          verbose: bool = False,
          freq: str = "MS",
          method: str = "mean",
@@ -2049,56 +2056,36 @@ class CubeDataClass:
              )
          ).reshape(self.nx, self.ny)
 
-     def compute_med_stable_areas(
-         self, shapefile_path, return_as="dataframe", stat_name="med", var_list=["vx", "vy"], invert=True
+     def compute_med_static_areas(
+         self,
+         shapefile_path: str,
+         return_as: ReturnAs = "dataframe",
+         var_list: list[str] = ["vx", "vy"],
+         invert: bool = True,
      ):
          """
-         Compute MAD per time step using Dask and apply_ufunc over a shapefile-defined area.
-
-         Parameters:
+         Compute median values in static areas
 
-             shapefile_path (str): Path to shapefile.
-             return_as (str): 'dataframe' or 'cube'.
-             stat_name (str): Base variable name for new data.
-             invert (bool): Whether to invert the shapefile mask.
+         :param shapefile_path: Path to shapefile.
+         :param var_list: List of variable names.
+         :param invert: Whether to invert the shapefile mask.
 
-         Returns:
-             pd.DataFrame or xr.Dataset
+         :return: pd.DataFrame or xr.Dataset
          """
-         # Ensure data has Dask chunks
-         # self.ds = self.ds.chunk({'y': -1, 'x': -1, 'mid_date': 10})
-         print(var_list)
+
          # Clip with shapefile
          gdf = gpd.read_file(shapefile_path)
          gdf = gdf.to_crs(self.ds.rio.crs)
          masked = self.ds.rio.clip(gdf.geometry, gdf.crs, drop=False, all_touched=True, invert=invert)
 
-         print("Clipped")
+         mad_results = masked[var_list].median(dim=["x", "y"]).compute()
 
-         # Return as DataFrame
          if return_as == "dataframe":
-             df_vx = (
-                 masked["vx"]
-                 .median(dim=["x", "y"])
-                 .compute()
-                 .to_dataframe(name=f"{stat_name}_vx")
-                 .reset_index()[["mid_date", f"{stat_name}_vx"]]
-             )
-             df_vy = (
-                 masked["vy"]
-                 .median(dim=["x", "y"])
-                 .compute()
-                 .to_dataframe(name=f"{stat_name}_vy")
-                 .reset_index()[["mid_date", f"{stat_name}_vy"]]
-             )
-             if len(var_list) == 3:
-                 df_v = (
-                     masked[var_list[2]]
-                     .median(dim=["x", "y"])
-                     .compute()
-                     .to_dataframe(name=f"{stat_name}_v")
-                     .reset_index()[["mid_date", f"{stat_name}_v"]]
-                 )
+             # Return as DataFrame
+             df_vx = mad_results["vx"].to_dataframe(name="med_vx").reset_index()[["mid_date", "med_vx"]]
+             df_vy = mad_results["vy"].to_dataframe(name="med_vy").reset_index()[["mid_date", "med_vy"]]
+             if len(var_list) == 3:  # for vv
+                 df_v = mad_results[var_list[2]].to_dataframe(name="med_v").reset_index()[["mid_date", "med_v"]]
 
              # Merge on time coordinate (e.g., 'mid_date')
              if len(var_list) == 3:
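The refactor above replaces three per-variable median/compute chains with one vectorized call. A minimal, self-contained sketch on synthetic data (variable and dimension names follow the diff):

```python
import numpy as np
import pandas as pd
import xarray as xr

rng = np.random.default_rng(0)
ds = xr.Dataset(
    {v: (("mid_date", "y", "x"), rng.normal(size=(4, 5, 5))) for v in ("vx", "vy")},
    coords={"mid_date": pd.date_range("2020-01-01", periods=4)},
)

med = ds[["vx", "vy"]].median(dim=["x", "y"])  # one median per time step and variable
df_vx = med["vx"].to_dataframe(name="med_vx").reset_index()[["mid_date", "med_vx"]]
print(df_vx)
```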
@@ -2110,81 +2097,57 @@ class CubeDataClass:
 
              return merged_df
 
-         # # Return as cube
-         # elif return_as == 'cube':
-         #     return self.assign({f'{stat_name}_vx': mad_results['vx'], f'{stat_name}_vy': mad_results['vy']})
+         # Return as cube
+         elif return_as == "cube":
+             return self.assign({"nmad_vx": mad_results["vx"], "nmad_vy": mad_results["vy"]})
 
          else:
              raise ValueError("return_as must be 'dataframe' or 'cube'")
 
-     def compute_mad(self, shapefile_path, return_as="dataframe", stat_name="mad", var_list=["vx", "vy"], invert=True):
+     def compute_nmad(
+         self,
+         shapefile_path: str,
+         return_as: ReturnAs = "dataframe",
+         var_list: list[str] = ["vx", "vy"],
+         invert: bool = True,
+     ):
          """
-         Compute MAD per time step using Dask and apply_ufunc over a shapefile-defined area.
-
-         Parameters:
 
-             shapefile_path (str): Path to shapefile.
-             return_as (str): 'dataframe' or 'cube'.
-             stat_name (str): Base variable name for new data.
-             invert (bool): Whether to invert the shapefile mask.
+         :param shapefile_path: Path to shapefile.
+         :param return_as: 'dataframe' or 'cube'.
+         :param var_list:
+         :param invert: Whether to invert the shapefile mask.
 
-         Returns:
-             pd.DataFrame or xr.Dataset
+         :return: pd.DataFrame or xr.Dataset
          """
-         # Ensure data has Dask chunks
-         self.ds = self.ds.chunk({"y": -1, "x": -1, "mid_date": 10})
-         print(var_list)
+
          # Clip with shapefile
          gdf = gpd.read_file(shapefile_path)
          gdf = gdf.to_crs(self.ds.rio.crs)
-         masked = self.ds.rio.clip(gdf.geometry, gdf.crs, drop=False, all_touched=True, invert=invert)
+         masked_gpd = self.ds.rio.clip(gdf.geometry, gdf.crs, drop=False, all_touched=True, invert=invert)
 
-         print("Clipped")
+         masked = masked_gpd.dropna(dim="x", how="all").dropna(dim="y", how="all").dropna(dim="mid_date", how="all")
 
          # Define MAD function
          def mad_2d(arr):
              median = np.nanmedian(arr)
             return 1.483 * np.nanmedian(np.abs(arr - median))
 
-         mad_results = {}  # Store MAD DataArrays
-
-         for var in var_list:
-             data = masked[var]
-
-             mad = xr.apply_ufunc(
-                 mad_2d,
-                 data,
-                 input_core_dims=[["y", "x"]],
-                 output_core_dims=[[]],
-                 vectorize=True,
-                 dask="parallelized",
-                 output_dtypes=[data.dtype],
-             )
-
-             mad.name = f"{stat_name}_{var}"
-             mad_results[var] = mad
+         mad_results = xr.apply_ufunc(
+             mad_2d,
+             masked[var_list],
+             input_core_dims=[["y", "x"]],
+             output_core_dims=[[]],
+             vectorize=True,
+             dask="parallelized",
+         ).compute()
 
          # Return as DataFrame
          if return_as == "dataframe":
-             df_vx = (
-                 mad_results["vx"]
-                 .compute()
-                 .to_dataframe(name=f"{stat_name}_vx")
-                 .reset_index()[["mid_date", f"{stat_name}_vx"]]
-             )
-             df_vy = (
-                 mad_results["vy"]
-                 .compute()
-                 .to_dataframe(name=f"{stat_name}_vy")
-                 .reset_index()[["mid_date", f"{stat_name}_vy"]]
-             )
+             df_vx = mad_results["vx"].to_dataframe(name="nmad_vx").reset_index()[["mid_date", "nmad_vx"]]
+             df_vy = mad_results["vy"].to_dataframe(name="nmad_vy").reset_index()[["mid_date", "nmad_vy"]]
              if len(var_list) == 3:
-                 df_v = (
-                     mad_results[var_list[2]]
-                     .compute()
-                     .to_dataframe(name=f"{stat_name}_v")
-                     .reset_index()[["mid_date", f"{stat_name}_v"]]
-                 )
+                 df_v = mad_results[var_list[2]].to_dataframe(name="nmad_v").reset_index()[["mid_date", "nmad_v"]]
 
              # Merge on time coordinate (e.g., 'mid_date')
              if len(var_list) == 3:
@@ -2198,7 +2161,7 @@ class CubeDataClass:
 
          # Return as cube
          elif return_as == "cube":
-             return self.assign({f"{stat_name}_vx": mad_results["vx"], f"{stat_name}_vy": mad_results["vy"]})
+             return self.assign({"nmad_vx": mad_results["vx"], "nmad_vy": mad_results["vy"]})
 
          else:
              raise ValueError("return_as must be 'dataframe' or 'cube'")
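For context on `mad_2d`: the 1.483 factor is (assuming the usual convention) the normal-consistency constant ≈ 1.4826, which scales the median absolute deviation into an estimate of the standard deviation under Gaussian noise, hence the `compute_nmad` name. A standalone check:

```python
import numpy as np

def nmad(arr):
    median = np.nanmedian(arr)
    return 1.483 * np.nanmedian(np.abs(arr - median))

x = np.random.default_rng(0).normal(loc=0.0, scale=2.0, size=100_000)
print(nmad(x))  # ~2.0, close to the true standard deviation
```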
ticoi-0.1.0.dist-info/METADATA CHANGED
@@ -1,9 +1,9 @@
  Metadata-Version: 2.4
  Name: ticoi
- Version: 0.0.1
+ Version: 0.1.0
  Summary: A package to run Temporal Inversion using linear Combinations of Observations, and Interpolation (TICOI)
  Project-URL: Homepage, https://github.com/ticoi/ticoi.git
- Project-URL: Issues, https://github.com/pypa/sampleproject/issues
+ Project-URL: Issues, https://github.com/ticoi/ticoi/issues
  Author: Lei Guo
  Author-email: Laurane Charrier <laurane.charrier@outlook.com>
  License: GPL-3.0-only
@@ -81,18 +81,14 @@ Clone the git repo and create a `mamba` environment (see how to install `mamba`
  the [mamba documentation](https://mamba.readthedocs.io/en/latest/)):
 
  ```bash
- git clone git@github.com:ticoi/ticoi.git
- cd ticoi
  mamba env create -f environment.yml # Add '-n custom_name' if you want.
- mamba activate environment # Or any other name specified above
+ mamba install -c conda-forge ticoi
  ```
 
  ### With `pip`
 
  ```bash
- python3.10 -m venv ticoi-env
- source ticoi-env/bin/activate
- pip install git+https://github.com/ticoi/ticoi.git
+ pip install ticoi
  ```
 
  ## TUTORIALS
@@ -114,6 +110,7 @@ pip install git+https://github.com/ticoi/ticoi.git
  * [How to process one ITS_LIVE cube directly from the cloud](/examples/advanced/cube_ticoi_demo_its_live.py)
  * [How to format several geotiff files into a netCDF file](examples/advanced/cube_prep_from_geotiff.py)
  * [How to apply GLAFT on TICOI results](examples/advanced/glaft_for_ticoi_results.py)
+ * [How to compute statistics in static areas](examples/advanced/stats_in_static_areas.py)
 
  ## TO USE YOUR OWN DATASET
 
ticoi-0.1.0.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
- ticoi/__about__.py,sha256=sXLh7g3KC4QCFxcZGBTpG2scR7hmmBsMjq6LqRptkRg,22
+ ticoi/__about__.py,sha256=kUR5RAFc7HCeiqdlX36dZOHkUI5wI6V_43RpEcD8b-0,22
  ticoi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ticoi/core.py,sha256=uW-t8g46sKEuJ2e_Xh--MXHRNwbyruOm84a3N6qRAT0,67652
- ticoi/cube_data_classxr.py,sha256=HSSVZmnfR3eDe_TnagmBHEPO1uAgqq_Jfgmy-sdZPik,103777
+ ticoi/cube_data_classxr.py,sha256=noOkdbecfRY8iT-ZB6gHtD8mchxXFV5ScA7ConU6edo,102605
  ticoi/cube_writer.py,sha256=mGz79Drme9Ub983t_pS1-VIRHpSxRwU21GAoznkqYq4,32806
  ticoi/example.py,sha256=YqUPXH7YcW75O1w3VFkcgyOFChnn1OR0syxVop-lWM8,3300
  ticoi/filtering_functions.py,sha256=IYEZTllbrfmZ_EqtXFYyWaNFGaZ7akjmJ7HkYdcKIjU,26633
@@ -12,7 +12,7 @@ ticoi/optimize_coefficient_functions.py,sha256=IRoumcK_KAspz4qeYEEaYDLsO_nvY3lke
  ticoi/pixel_class.py,sha256=xv29Z3w7xTgdRQN7Qe6zeARwT-KdRMW8thIoJCj5Sw8,81771
  ticoi/seasonality_functions.py,sha256=4oMsHxgD1jlkJSsIDGFHmb311b48IkrT2xUx3Z4X-1g,9153
  ticoi/utils.py,sha256=tfmP2g5iF1MEq_Z1CBNCSz0KsMPRHI_lUgGEz_nGi3o,29403
- ticoi-0.0.1.dist-info/METADATA,sha256=fLQzemVjfHH9dJ9eYCt5wNrm9JIIdw055fejxdS84eM,6441
- ticoi-0.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ticoi-0.0.1.dist-info/licenses/LICENSE,sha256=2n6rt7r999OuXp8iOqW9we7ORaxWncIbOwN1ILRGR2g,7651
- ticoi-0.0.1.dist-info/RECORD,,
+ ticoi-0.1.0.dist-info/METADATA,sha256=Wg1MZUYTbOxBZKNL5XukCQyoXVnXiqDg3Lm57TslsBg,6353
+ ticoi-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ticoi-0.1.0.dist-info/licenses/LICENSE,sha256=2n6rt7r999OuXp8iOqW9we7ORaxWncIbOwN1ILRGR2g,7651
+ ticoi-0.1.0.dist-info/RECORD,,