oafuncs 0.0.98.13__py3-none-any.whl → 0.0.98.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,123 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ """
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2025-04-25 16:22:52
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2025-04-25 16:22:52
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\_script\\data_interp.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.12
14
+ """
15
+
16
+ from typing import List, Union
17
+
18
+ import numpy as np
19
+ from scipy.interpolate import griddata
20
+
21
+ from oafuncs.oa_tool import PEx
22
+
23
+
24
+ def _interp_single_worker(*args):
25
+ """
26
+ 用于PEx并行的单slice插值worker,参数为(t, z, source_data, origin_points, target_points, interpolation_method, target_shape)
27
+ """
28
+ data_slice, origin_points, target_points, interpolation_method, target_shape = args
29
+
30
+ # 过滤掉包含 NaN 的点
31
+ valid_mask = ~np.isnan(data_slice.ravel())
32
+ valid_data = data_slice.ravel()[valid_mask]
33
+ valid_points = origin_points[valid_mask]
34
+
35
+ if len(valid_data) < 10: # 如果有效数据太少,用均值填充
36
+ return np.full(target_shape, np.nanmean(data_slice))
37
+
38
+ # 使用有效数据进行插值
39
+ result = griddata(valid_points, valid_data, target_points, method=interpolation_method)
40
+ result = result.reshape(target_shape)
41
+
42
+ # 检查插值结果中是否仍有 NaN,如果有,用最近邻插值填充
43
+ if np.any(np.isnan(result)):
44
+ # 使用最近邻方法填充剩余的 NaN 值
45
+ nan_mask = np.isnan(result)
46
+ result_nn = griddata(valid_points, valid_data, target_points[nan_mask.ravel()], method="nearest")
47
+ result.ravel()[nan_mask.ravel()] = result_nn
48
+
49
+ return result
50
+
51
+
52
def interp_2d_func(
    target_x_coordinates: Union[np.ndarray, List[float]],
    target_y_coordinates: Union[np.ndarray, List[float]],
    source_x_coordinates: Union[np.ndarray, List[float]],
    source_y_coordinates: Union[np.ndarray, List[float]],
    source_data: np.ndarray,
    interpolation_method: str = "cubic",
) -> np.ndarray:
    """
    Perform 2D interpolation on the last two dimensions of a multi-dimensional array.

    Args:
        target_x_coordinates (Union[np.ndarray, List[float]]): Target grid's x-coordinates (1D or 2D).
        target_y_coordinates (Union[np.ndarray, List[float]]): Target grid's y-coordinates (1D or 2D).
        source_x_coordinates (Union[np.ndarray, List[float]]): Original grid's x-coordinates (1D or 2D).
        source_y_coordinates (Union[np.ndarray, List[float]]): Original grid's y-coordinates (1D or 2D).
        source_data (np.ndarray): Array with 2 to 4 dimensions; the last two are spatial (y, x).
        interpolation_method (str, optional): Interpolation method. Defaults to "cubic".
            >>> optional: 'linear', 'nearest', 'cubic'

    Returns:
        np.ndarray: Interpolated data array.

    Raises:
        ValueError: If coordinate shapes do not match source_data's spatial shape,
            or if source_data has fewer than 2 or more than 4 dimensions.

    Examples:
        >>> target_x_coordinates = np.array([1, 2, 3])
        >>> target_y_coordinates = np.array([4, 5, 6])
        >>> source_x_coordinates = np.array([7, 8, 9])
        >>> source_y_coordinates = np.array([10, 11, 12])
        >>> source_data = np.random.rand(3, 3)
        >>> result = interp_2d_func(target_x_coordinates, target_y_coordinates, source_x_coordinates, source_y_coordinates, source_data)
        >>> print(result.shape)  # Expected output: (3, 3)
    """
    # Coerce list inputs to arrays so .shape / .ravel work as the type hints promise.
    target_x_coordinates = np.asarray(target_x_coordinates)
    target_y_coordinates = np.asarray(target_y_coordinates)
    source_x_coordinates = np.asarray(source_x_coordinates)
    source_y_coordinates = np.asarray(source_y_coordinates)
    source_data = np.asarray(source_data)

    # Expand 1-D coordinate vectors into full 2-D grids.
    if target_y_coordinates.ndim == 1:
        target_x_coordinates, target_y_coordinates = np.meshgrid(target_x_coordinates, target_y_coordinates)
    if source_y_coordinates.ndim == 1:
        source_x_coordinates, source_y_coordinates = np.meshgrid(source_x_coordinates, source_y_coordinates)

    if source_x_coordinates.shape != source_data.shape[-2:] or source_y_coordinates.shape != source_data.shape[-2:]:
        raise ValueError("[red]Shape of source_data does not match shape of source_x_coordinates or source_y_coordinates.[/red]")

    # (x, y) point lists consumed by scipy.interpolate.griddata in the worker.
    target_points = np.column_stack((target_x_coordinates.ravel(), target_y_coordinates.ravel()))
    origin_points = np.column_stack((source_x_coordinates.ravel(), source_y_coordinates.ravel()))

    data_dims = source_data.ndim
    # Ensure source_data is 4D for consistent processing (t, z, y, x)
    if data_dims < 2:
        raise ValueError(f"[red]Source data must have at least 2 dimensions, but got {data_dims}.[/red]")
    elif data_dims > 4:
        raise ValueError(f"[red]Source data has {data_dims} dimensions, but this function currently supports only up to 4.[/red]")

    # Pad with leading singleton dimensions so processing is uniformly (t, z, y, x).
    num_dims_to_add = 4 - data_dims
    new_src_data = source_data.reshape((1,) * num_dims_to_add + source_data.shape)
    t, z, _, _ = new_src_data.shape

    # One task per (t, z) slice; each worker interpolates a single 2-D field.
    target_shape = target_y_coordinates.shape
    params = [
        (new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape)
        for t_index in range(t)
        for z_index in range(z)
    ]

    with PEx() as executor:
        result = executor.run(_interp_single_worker, params)

    # Restore (t, z) leading dimensions, then drop the padded singletons.
    return np.squeeze(np.array(result).reshape(t, z, *target_shape))
@@ -1,3 +1,18 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ """
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2025-03-30 11:16:29
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2025-04-25 14:23:10
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\_script\\netcdf_merge.py
9
+ Description
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.12
14
+ """
15
+
1
16
  import os
2
17
  from typing import List, Optional, Union
3
18
 
@@ -5,6 +20,7 @@ import numpy as np
5
20
  import xarray as xr
6
21
 
7
22
  from oafuncs import pbar
23
+ import logging
8
24
 
9
25
 
10
26
  def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, List[str]]] = None, dim_name: Optional[str] = None, target_filename: Optional[str] = None) -> None:
@@ -74,13 +90,11 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
74
90
  merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(0)
75
91
 
76
92
  if os.path.exists(target_filename):
77
- print("Warning: The target file already exists. Removing it ...")
93
+ # print("Warning: The target file already exists. Removing it ...")
94
+ logging.warning("The target file already exists. Removing it ...")
78
95
  os.remove(target_filename)
79
-
80
- save_to_nc(target_filename, merged_data)
81
-
82
- print(f'\nFile "{target_filename}" has been successfully created.')
83
96
 
97
+ save_to_nc(target_filename, xr.Dataset(merged_data))
84
98
 
85
99
 
86
100
  # Example usage
oafuncs/oa_data.py CHANGED
@@ -20,9 +20,8 @@ import numpy as np
20
20
  import salem
21
21
  import xarray as xr
22
22
  from rich import print
23
- from scipy.interpolate import griddata, interp1d
23
+ from scipy.interpolate import interp1d
24
24
 
25
- from oafuncs.oa_tool import PEx
26
25
 
27
26
  __all__ = ["interp_along_dim", "interp_2d", "ensure_list", "mask_shapefile"]
28
27
 
@@ -115,32 +114,7 @@ def interp_along_dim(
115
114
  return np.apply_along_axis(apply_interp_extrap, interpolation_axis, source_data)
116
115
 
117
116
 
118
- def _interp_single_worker(*args):
119
- """
120
- 用于PEx并行的单slice插值worker,参数为(t, z, source_data, origin_points, target_points, interpolation_method, target_shape)
121
- """
122
- data_slice, origin_points, target_points, interpolation_method, target_shape = args
123
-
124
- # 过滤掉包含 NaN 的点
125
- valid_mask = ~np.isnan(data_slice.ravel())
126
- valid_data = data_slice.ravel()[valid_mask]
127
- valid_points = origin_points[valid_mask]
128
-
129
- if len(valid_data) < 10: # 如果有效数据太少,用均值填充
130
- return np.full(target_shape, np.nanmean(data_slice))
131
117
 
132
- # 使用有效数据进行插值
133
- result = griddata(valid_points, valid_data, target_points, method=interpolation_method)
134
- result = result.reshape(target_shape)
135
-
136
- # 检查插值结果中是否仍有 NaN,如果有,用最近邻插值填充
137
- if np.any(np.isnan(result)):
138
- # 使用最近邻方法填充剩余的 NaN 值
139
- nan_mask = np.isnan(result)
140
- result_nn = griddata(valid_points, valid_data, target_points[nan_mask.ravel()], method="nearest")
141
- result.ravel()[nan_mask.ravel()] = result_nn
142
-
143
- return result
144
118
 
145
119
 
146
120
  def interp_2d(
@@ -149,7 +123,7 @@ def interp_2d(
149
123
  source_x_coordinates: Union[np.ndarray, List[float]],
150
124
  source_y_coordinates: Union[np.ndarray, List[float]],
151
125
  source_data: np.ndarray,
152
- interpolation_method: str = "cubic",
126
+ interpolation_method: str = "linear",
153
127
  ) -> np.ndarray:
154
128
  """
155
129
  Perform 2D interpolation on the last two dimensions of a multi-dimensional array.
@@ -160,7 +134,8 @@ def interp_2d(
160
134
  source_x_coordinates (Union[np.ndarray, List[float]]): Original grid's x-coordinates.
161
135
  source_y_coordinates (Union[np.ndarray, List[float]]): Original grid's y-coordinates.
162
136
  source_data (np.ndarray): Multi-dimensional array with the last two dimensions as spatial.
163
- interpolation_method (str, optional): Interpolation method. Defaults to "cubic".
137
+ >>> must be [y, x] or [*, y, x] or [*, *, y, x]
138
+ interpolation_method (str, optional): Interpolation method. Defaults to "linear".
164
139
  >>> optional: 'linear', 'nearest', 'cubic', 'quintic', etc.
165
140
  use_parallel (bool, optional): Enable parallel processing. Defaults to True.
166
141
 
@@ -179,42 +154,16 @@ def interp_2d(
179
154
  >>> result = interp_2d(target_x_coordinates, target_y_coordinates, source_x_coordinates, source_y_coordinates, source_data)
180
155
  >>> print(result.shape) # Expected output: (3, 3)
181
156
  """
182
- if len(target_y_coordinates.shape) == 1:
183
- target_x_coordinates, target_y_coordinates = np.meshgrid(target_x_coordinates, target_y_coordinates)
184
- if len(source_y_coordinates.shape) == 1:
185
- source_x_coordinates, source_y_coordinates = np.meshgrid(source_x_coordinates, source_y_coordinates)
186
-
187
- if source_x_coordinates.shape != source_data.shape[-2:] or source_y_coordinates.shape != source_data.shape[-2:]:
188
- raise ValueError("[red]Shape of source_data does not match shape of source_x_coordinates or source_y_coordinates.[/red]")
189
-
190
- target_points = np.column_stack((np.array(target_y_coordinates).ravel(), np.array(target_x_coordinates).ravel()))
191
- origin_points = np.column_stack((np.array(source_y_coordinates).ravel(), np.array(source_x_coordinates).ravel()))
192
-
193
- data_dims = len(source_data.shape)
194
- # Ensure source_data is 4D for consistent processing (t, z, y, x)
195
- if data_dims < 2:
196
- raise ValueError(f"[red]Source data must have at least 2 dimensions, but got {data_dims}.[/red]")
197
- elif data_dims > 4:
198
- # Or handle cases with more than 4 dimensions if necessary
199
- raise ValueError(f"[red]Source data has {data_dims} dimensions, but this function currently supports only up to 4.[/red]")
200
-
201
- # Reshape to 4D by adding leading dimensions of size 1 if needed
202
- num_dims_to_add = 4 - data_dims
203
- new_shape = (1,) * num_dims_to_add + source_data.shape
204
- new_src_data = source_data.reshape(new_shape)
205
-
206
- t, z, _, _ = new_src_data.shape
207
-
208
- paras = []
209
- target_shape = target_y_coordinates.shape
210
- for t_index in range(t):
211
- for z_index in range(z):
212
- paras.append((new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape))
213
-
214
- with PEx() as excutor:
215
- result = excutor.run(_interp_single_worker, paras)
216
-
217
- return np.squeeze(np.array(result).reshape(t, z, *target_shape))
157
+ from ._script.data_interp import interp_2d_func
158
+
159
+ return interp_2d_func(
160
+ target_x_coordinates=target_x_coordinates,
161
+ target_y_coordinates=target_y_coordinates,
162
+ source_x_coordinates=source_x_coordinates,
163
+ source_y_coordinates=source_y_coordinates,
164
+ source_data=source_data,
165
+ interpolation_method=interpolation_method,
166
+ )
218
167
 
219
168
 
220
169
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: oafuncs
3
- Version: 0.0.98.13
3
+ Version: 0.0.98.15
4
4
  Summary: Oceanic and Atmospheric Functions
5
5
  Home-page: https://github.com/Industry-Pays/OAFuncs
6
6
  Author: Kun Liu
@@ -1,6 +1,6 @@
1
1
  oafuncs/__init__.py,sha256=T_-VtnWWllV3Q91twT5Yt2sUapeA051QbPNnBxmg9nw,1456
2
2
  oafuncs/oa_cmap.py,sha256=DimWT4Bg7uE5Lx8hSw1REp7whpsR2pFRStAwk1cowEM,11494
3
- oafuncs/oa_data.py,sha256=PXn4EpSbLPHhYmNJXEStd8vIMwInl3a9O9sW2c9z-g0,11152
3
+ oafuncs/oa_data.py,sha256=hngaxAi_r6PsHXzSeT3DMY_QdChWjuBMPOZNFvWU388,8442
4
4
  oafuncs/oa_date.py,sha256=WhM6cyD4G3IeghjLTHhAMtlvJbA7kwQG2sHnxdTgyso,6303
5
5
  oafuncs/oa_draw.py,sha256=Wj2QBgyIPpV_dxaDrH10jqj_puK9ZM9rd-si-3VrsrE,17631
6
6
  oafuncs/oa_file.py,sha256=j9gXJgPOJsliu4IOUc4bc-luW4yBvQyNCEmMyDVjUwQ,16404
@@ -11,8 +11,9 @@ oafuncs/oa_tool.py,sha256=rpPkLqWhqMmqlCc5wjL8qMTg3gThCkSrYJckbX_0iJc,8631
11
11
  oafuncs/_data/hycom.png,sha256=MadKs6Gyj5n9-TOu7L4atQfTXtF9dvN9w-tdU9IfygI,10945710
12
12
  oafuncs/_data/oafuncs.png,sha256=o3VD7wm-kwDea5E98JqxXl04_78cBX7VcdUt7uQXGiU,3679898
13
13
  oafuncs/_script/cprogressbar.py,sha256=UIgGcLFs-6IgWlITuBLaQqrpt4OAK3Mst5RlCiNfZdQ,15772
14
+ oafuncs/_script/data_interp.py,sha256=70U-Jsoxd5g-7dEQt4IaDRuRV-M_1lEKMGOci86vSVE,5431
14
15
  oafuncs/_script/email.py,sha256=lL4HGKrr524-g0xLlgs-4u7x4-u7DtgNoD9AL8XJKj4,3058
15
- oafuncs/_script/netcdf_merge.py,sha256=vWvrFUmzwZgTCn2E_9iBKmPLf93ekvD3Ud2NOQgJxGQ,3829
16
+ oafuncs/_script/netcdf_merge.py,sha256=9hCyxfeUHnBzs50_0v0jzVfxpMxTX4dNTo0pmsp_T6g,4226
16
17
  oafuncs/_script/netcdf_modify.py,sha256=sGRUYNhfGgf9JV70rnBzw3bzuTRSXzBTL_RMDnDPeLQ,4552
17
18
  oafuncs/_script/netcdf_write.py,sha256=iO1Qv9bp6RLiw1D8Nrv7tX_8X-diUZaX3Nxhk6pJ5Nw,8556
18
19
  oafuncs/_script/parallel.py,sha256=T9Aie-e4LcbKlFTLZ0l4lhEN3SBVa84jRcrAsIm8s0I,8767
@@ -37,8 +38,8 @@ oafuncs/oa_sign/__init__.py,sha256=QKqTFrJDFK40C5uvk48GlRRbGFzO40rgkYwu6dYxatM,5
37
38
  oafuncs/oa_sign/meteorological.py,sha256=8091SHo2L8kl4dCFmmSH5NGVHDku5i5lSiLEG5DLnOQ,6489
38
39
  oafuncs/oa_sign/ocean.py,sha256=xrW-rWD7xBWsB5PuCyEwQ1Q_RDKq2KCLz-LOONHgldU,5932
39
40
  oafuncs/oa_sign/scientific.py,sha256=a4JxOBgm9vzNZKpJ_GQIQf7cokkraV5nh23HGbmTYKw,5064
40
- oafuncs-0.0.98.13.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
41
- oafuncs-0.0.98.13.dist-info/METADATA,sha256=S7BtUR9AaROiwbgyU3-wFsbVo8ySvRNUR3M9x383uA8,4273
42
- oafuncs-0.0.98.13.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
43
- oafuncs-0.0.98.13.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
44
- oafuncs-0.0.98.13.dist-info/RECORD,,
41
+ oafuncs-0.0.98.15.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
42
+ oafuncs-0.0.98.15.dist-info/METADATA,sha256=jTAHHAY0xOxy2z13wYD4x4JOOSfsUxrz1WVyzXNfi9o,4273
43
+ oafuncs-0.0.98.15.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
44
+ oafuncs-0.0.98.15.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
45
+ oafuncs-0.0.98.15.dist-info/RECORD,,