oafuncs 0.0.98.21__py3-none-any.whl → 0.0.98.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
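oafuncs/_script/data_interp_geo.py CHANGED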
@@ -1,148 +1,214 @@
  from typing import List, Union
 
  import numpy as np
- from scipy.interpolate import NearestNDInterpolator, griddata
+ import importlib.util
 
  from oafuncs.oa_tool import PEx
 
+ # Check whether pyinterp is available
+ pyinterp_available = importlib.util.find_spec("pyinterp") is not None
 
- def _normalize_lon(lon, ref_lon):
-     """
-     Normalize the longitude array lon to the same convention as ref_lon ([-180, 180] or [0, 360]),
-     widening automatically near the longitude seam (e.g. 180/-180, 0/360) to avoid interpolation breaks.
-     """
-     lon = np.asarray(lon)
-     ref_lon = np.asarray(ref_lon)
-     if np.nanmax(ref_lon) > 180:
-         lon = np.where(lon < 0, lon + 360, lon)
-     else:
-         lon = np.where(lon > 180, lon - 360, lon)
-     return lon
-
-
- def _expand_lonlat_for_dateline(points, values):
-     """
-     Duplicate data near the longitude seam (e.g. 180/-180, 0/360) to avoid interpolation breaks.
-     points: (N, 2) [lon, lat]
-     values: (N,)
-     Returns the widened points, values.
-     """
-     lon = points[:, 0]
-     lat = points[:, 1]
-     expanded_points = [points]
-     expanded_values = [values]
-     if (np.nanmax(lon) > 170) and (np.nanmin(lon) < -170):
-         expanded_points.append(np.column_stack((lon + 360, lat)))
-         expanded_points.append(np.column_stack((lon - 360, lat)))
-         expanded_values.append(values)
-         expanded_values.append(values)
-     if (np.nanmax(lon) > 350) and (np.nanmin(lon) < 10):
-         expanded_points.append(np.column_stack((lon - 360, lat)))
-         expanded_points.append(np.column_stack((lon + 360, lat)))
-         expanded_values.append(values)
-         expanded_values.append(values)
-     points_new = np.vstack(expanded_points)
-     values_new = np.concatenate(expanded_values)
-     return points_new, values_new
+ # Import the pyinterp modules only when the package is available
+ if pyinterp_available:
+     import pyinterp
+     from pyinterp.interpolator import RegularGridInterpolator, RTree
 
 
  def _interp_single_worker(*args):
      """
      Single-slice interpolation worker used by the PEx parallel executor.
-     Arguments: data_slice, origin_points, target_points, interpolation_method, target_shape
-     Spherical interpolation: convert lon/lat to spherical coordinates before interpolating.
+     Arguments: data_slice, origin_points, target_points, interpolation_method, target_shape, source_xy_shape
+     Performs geographic interpolation with pyinterp.
      """
-     data_slice, origin_points, target_points, interpolation_method, target_shape = args
+     # Make sure pyinterp is available
+     if not pyinterp_available:
+         raise ImportError("pyinterp package is required for geographic interpolation")
+
+     data_slice, origin_points, target_points, interpolation_method, target_shape, source_xy_shape = args
 
-     # Normalize the longitudes
-     origin_points = origin_points.copy()
-     target_points = target_points.copy()
-     origin_points[:, 0] = _normalize_lon(origin_points[:, 0], target_points[:, 0])
-     target_points[:, 0] = _normalize_lon(target_points[:, 0], origin_points[:, 0])
-
-     def lonlat2xyz(lon, lat):
-         lon_rad = np.deg2rad(lon)
-         lat_rad = np.deg2rad(lat)
-         x = np.cos(lat_rad) * np.cos(lon_rad)
-         y = np.cos(lat_rad) * np.sin(lon_rad)
-         z = np.sin(lat_rad)
-         return np.stack([x, y, z], axis=-1)
-
-     # Filter out points that contain NaN
+     # Handle invalid data points
      valid_mask = ~np.isnan(data_slice.ravel())
-     valid_data = data_slice.ravel()[valid_mask]
-     valid_points = origin_points[valid_mask]
-
-     if len(valid_data) < 10:
+     if np.count_nonzero(valid_mask) < 10:
          return np.full(target_shape, np.nanmean(data_slice))
 
-     # Widen across the longitude seam to avoid breaks such as 179/-181
-     valid_points_exp, valid_data_exp = _expand_lonlat_for_dateline(valid_points, valid_data)
-
-     valid_xyz = lonlat2xyz(valid_points_exp[:, 0], valid_points_exp[:, 1])
-     target_xyz = lonlat2xyz(target_points[:, 0], target_points[:, 1])
-
-     # Use griddata's cubic interpolation for a smoother result
-     result = griddata(valid_xyz, valid_data_exp, target_xyz, method=interpolation_method).reshape(target_shape)
+     # Prepare the valid data points
+     valid_data = data_slice.ravel()[valid_mask]
+     valid_points = origin_points[valid_mask]
 
-     # Fill any remaining NaNs with nearest-neighbour values
-     if np.isnan(result).any():
-         nn_interp = NearestNDInterpolator(valid_xyz, valid_data_exp)
-         nn = nn_interp(target_xyz).reshape(target_shape)
-         result[np.isnan(result)] = nn[np.isnan(result)]
+     # Choose a strategy suited to the interpolation method
+     if origin_points.shape[0] == source_xy_shape[0] * source_xy_shape[1]:  # regular grid
+         try:
+             # Try regular-grid interpolation first
+             y_size, x_size = source_xy_shape
+             lons = origin_points[:, 0].reshape(y_size, x_size)[0, :]
+             lats = origin_points[:, 1].reshape(y_size, x_size)[:, 0]
+
+             # Check the validity of the gridded data
+             grid_data = data_slice.reshape(source_xy_shape)
+             nan_ratio = np.isnan(grid_data).sum() / grid_data.size
+             if nan_ratio > 0.5:  # skip regular-grid interpolation when more than 50% of the grid is NaN
+                 raise ValueError("Too many NaN values in grid data")
+
+             # Build the pyinterp grid, marking the longitude axis as circular for global grids
+             # (the grid wraps the full circle when its span plus one grid step is 360 degrees;
+             # assumes uniform spacing)
+             lon_step = lons[1] - lons[0]
+             is_global = np.isclose(((lons[-1] - lons[0]) + lon_step) % 360, 0, atol=1e-6)
+             grid = pyinterp.Grid2D(
+                 x=pyinterp.Axis(lons, is_circle=is_global),  # decide from the data whether the grid is global
+                 y=pyinterp.Axis(lats),
+                 array=grid_data,
+                 increasing_axes=(-2, -1),  # make sure the axes run in the expected direction
+             )
+
+             # Build the interpolator and interpolate
+             method_map = {"bilinear": "bilinear", "linear": "bilinear", "cubic": "bicubic", "nearest": "nearest"}
+             interpolator = RegularGridInterpolator(grid, method=method_map.get(interpolation_method, "bilinear"))
+
+             # Interpolate on geodetic coordinates so the Earth's curvature is handled correctly
+             coords = pyinterp.geodetic.Coordinates(target_points[:, 0], target_points[:, 1], pyinterp.geodetic.System.WGS84)
+
+             result = interpolator.interpolate(coords).reshape(target_shape)
+
+             # Return directly when regular-grid interpolation produced few NaN values
+             if np.isnan(result).sum() / result.size < 0.05:
+                 return result
+
+         except Exception:  # noqa
+             # Fall back to RTree interpolation on failure
+             pass
+
+     # Use an RTree for scattered data, or to fill NaNs left by the regular-grid pass
+     try:
+         # Build the RTree interpolator
+         mesh = RTree(pyinterp.geodetic.Coordinates(valid_points[:, 0], valid_points[:, 1], pyinterp.geodetic.System.WGS84), valid_data)
+
+         # Pick a strategy based on the method and the number of valid points
+         coords = pyinterp.geodetic.Coordinates(target_points[:, 0], target_points[:, 1], pyinterp.geodetic.System.WGS84)
+
+         if interpolation_method in ["cubic", "quintic"] and len(valid_data) > 100:
+             # With enough points, use radial basis functions for the high-order methods
+             result = mesh.radial_basis_function(
+                 coords,
+                 function="thin_plate",  # thin-plate splines suit geographic data
+                 epsilon=0.1,  # smoothing parameter
+                 norm="geodetic",  # use geodetic distances
+                 within=False,  # allow extrapolation
+             ).reshape(target_shape)
+         else:
+             # Use IDW with a dynamically adjusted k
+             k_value = max(min(int(np.sqrt(len(valid_data))), 16), 4)  # adaptive number of neighbours
+             result, _ = mesh.inverse_distance_weighting(
+                 coords,
+                 k=k_value,
+                 p=2.0,  # inverse-square weighting
+                 within=False,  # allow extrapolation
+             )
+             result = result.reshape(target_shape)  # reshape after unpacking the (values, neighbours) tuple
+
+         # Check the result; fill any NaNs with nearest-neighbour values
+         if np.isnan(result).any():
+             nan_mask = np.isnan(result)
+             nan_coords = pyinterp.geodetic.Coordinates(target_points[nan_mask.ravel(), 0], target_points[nan_mask.ravel(), 1], pyinterp.geodetic.System.WGS84)
+             nn_values, _ = mesh.k_nearest(nan_coords, k=1)
+             result[nan_mask] = nn_values
+
+     except Exception:
+         # When every sophisticated method fails, fall back to plain nearest neighbour
+         try:
+             # Build a fresh RTree to avoid any problem left over from the previous attempt
+             simple_mesh = RTree(pyinterp.geodetic.Coordinates(valid_points[:, 0], valid_points[:, 1], pyinterp.geodetic.System.WGS84), valid_data)
+
+             simple_coords = pyinterp.geodetic.Coordinates(target_points[:, 0], target_points[:, 1], pyinterp.geodetic.System.WGS84)
+
+             result, _ = simple_mesh.k_nearest(simple_coords, k=1)
+             result = result.reshape(target_shape)  # reshape after unpacking, not before
+         except Exception:
+             # In the extreme case, fill with the mean value
+             result = np.full(target_shape, np.nanmean(valid_data))
 
      return result
 
 
  def interp_2d_func_geo(target_x_coordinates: Union[np.ndarray, List[float]], target_y_coordinates: Union[np.ndarray, List[float]], source_x_coordinates: Union[np.ndarray, List[float]], source_y_coordinates: Union[np.ndarray, List[float]], source_data: np.ndarray, interpolation_method: str = "cubic") -> np.ndarray:
      """
-     Perform 2D interpolation on the last two dimensions of a multi-dimensional array (spherical coordinates).
-     Interpolates in spherical coordinates; suitable for global-scale geographic data and handles longitudes that cross the dateline correctly.
+     Geographic interpolation with pyinterp, suitable for both global-scale and regional geographic data.
+
+     Features:
+     - handles longitudes that cross the dateline correctly
+     - picks the best interpolation strategy automatically
+     - works with both regular grids and scattered data
+     - supports parallel processing of multi-dimensional data
 
      Args:
-         target_x_coordinates (Union[np.ndarray, List[float]]): Target grid's longitude (-180 to 180 or 0 to 360).
-         target_y_coordinates (Union[np.ndarray, List[float]]): Target grid's latitude (-90 to 90).
-         source_x_coordinates (Union[np.ndarray, List[float]]): Original grid's longitude (-180 to 180 or 0 to 360).
-         source_y_coordinates (Union[np.ndarray, List[float]]): Original grid's latitude (-90 to 90).
-         source_data (np.ndarray): Multi-dimensional array with the last two dimensions as spatial.
-         interpolation_method (str, optional): Interpolation method. Defaults to "cubic".
-             >>> optional: 'linear', 'nearest', 'cubic', 'quintic', etc.
+         target_x_coordinates: target longitudes (-180 to 180 or 0 to 360)
+         target_y_coordinates: target latitudes (-90 to 90)
+         source_x_coordinates: source longitudes (-180 to 180 or 0 to 360)
+         source_y_coordinates: source latitudes (-90 to 90)
+         source_data: multi-dimensional array whose last two dimensions are spatial
+         interpolation_method: interpolation method:
+             - 'nearest': nearest-neighbour interpolation
+             - 'linear'/'bilinear': bilinear interpolation
+             - 'cubic': cubic spline interpolation
+             - 'quintic': quintic spline interpolation
 
      Returns:
-         np.ndarray: Interpolated data array.
-
-     Raises:
-         ValueError: If input shapes are invalid.
+         np.ndarray: the interpolated data array
 
      Examples:
-         >>> # Example on a global grid
-         >>> target_lon = np.arange(-180, 181, 1)  # 1-degree target grid
+         >>> # Global interpolation example
+         >>> target_lon = np.arange(-180, 181, 1)
          >>> target_lat = np.arange(-90, 91, 1)
-         >>> source_lon = np.arange(-180, 181, 5)  # 5-degree source grid
+         >>> source_lon = np.arange(-180, 181, 5)
          >>> source_lat = np.arange(-90, 91, 5)
-         >>> # Build a simple data field (e.g. a temperature field)
          >>> source_data = np.cos(np.deg2rad(source_lat.reshape(-1, 1))) * np.cos(np.deg2rad(source_lon))
-         >>> # Interpolate onto the high-resolution grid
-         >>> result = interp_2d_geo(target_lon, target_lat, source_lon, source_lat, source_data)
-         >>> print(result.shape)  # Expected output: (181, 361)
+         >>> result = interp_2d_func_geo(target_lon, target_lat, source_lon, source_lat, source_data)
      """
+     # Make sure pyinterp is available
+     if not pyinterp_available:
+         raise ImportError("pyinterp package is required for geographic interpolation")
+
      # Validate the input ranges
      if np.nanmin(target_y_coordinates) < -90 or np.nanmax(target_y_coordinates) > 90:
          raise ValueError("[red]Target latitude must be in range [-90, 90].[/red]")
      if np.nanmin(source_y_coordinates) < -90 or np.nanmax(source_y_coordinates) > 90:
          raise ValueError("[red]Source latitude must be in range [-90, 90].[/red]")
 
+     # Convert to gridded coordinates
      if len(target_y_coordinates.shape) == 1:
          target_x_coordinates, target_y_coordinates = np.meshgrid(target_x_coordinates, target_y_coordinates)
      if len(source_y_coordinates.shape) == 1:
          source_x_coordinates, source_y_coordinates = np.meshgrid(source_x_coordinates, source_y_coordinates)
 
+     # Validate the source data shape
      if source_x_coordinates.shape != source_data.shape[-2:] or source_y_coordinates.shape != source_data.shape[-2:]:
          raise ValueError("[red]Shape of source_data does not match shape of source_x_coordinates or source_y_coordinates.[/red]")
 
+     # Assemble the coordinate points and unify the longitude convention
      target_points = np.column_stack((np.array(target_x_coordinates).ravel(), np.array(target_y_coordinates).ravel()))
      origin_points = np.column_stack((np.array(source_x_coordinates).ravel(), np.array(source_y_coordinates).ravel()))
+     source_xy_shape = source_x_coordinates.shape
+
+     # Unify the longitude convention
+     origin_points = origin_points.copy()
+     target_points = target_points.copy()
 
+     # Detect the longitude conventions in use and make them consistent
+     src_lon_range = np.nanmax(origin_points[:, 0]) - np.nanmin(origin_points[:, 0])
+     tgt_lon_range = np.nanmax(target_points[:, 0]) - np.nanmin(target_points[:, 0])
+
+     # When the data is near-global and the conventions differ, unify them
+     if (src_lon_range > 300 or tgt_lon_range > 300) and ((np.nanmax(target_points[:, 0]) > 180 and np.nanmin(origin_points[:, 0]) < 0) or (np.nanmax(origin_points[:, 0]) > 180 and np.nanmin(target_points[:, 0]) < 0)):
+         # Prefer the [0, 360] convention because it does not break at the dateline
+         if np.nanmax(target_points[:, 0]) > 180 or np.nanmax(origin_points[:, 0]) > 180:
+             # Convert to the [0, 360] convention
+             if np.nanmin(origin_points[:, 0]) < 0:
+                 origin_points[:, 0] = np.where(origin_points[:, 0] < 0, origin_points[:, 0] + 360, origin_points[:, 0])
+             if np.nanmin(target_points[:, 0]) < 0:
+                 target_points[:, 0] = np.where(target_points[:, 0] < 0, target_points[:, 0] + 360, target_points[:, 0])
+         else:
+             # Convert to the [-180, 180] convention
+             if np.nanmax(origin_points[:, 0]) > 180:
+                 origin_points[:, 0] = np.where(origin_points[:, 0] > 180, origin_points[:, 0] - 360, origin_points[:, 0])
+             if np.nanmax(target_points[:, 0]) > 180:
+                 target_points[:, 0] = np.where(target_points[:, 0] > 180, target_points[:, 0] - 360, target_points[:, 0])
+
+     # Handle multi-dimensional data
      data_dims = len(source_data.shape)
      if data_dims < 2:
          raise ValueError(f"[red]Source data must have at least 2 dimensions, but got {data_dims}.[/red]")
@@ -155,12 +221,14 @@ def interp_2d_func_geo(target_x_coordinates: Union[np.ndarray, List[float]], tar
 
      t, z, y, x = new_src_data.shape
 
+     # Assemble the parameters for parallel processing
      params = []
      target_shape = target_y_coordinates.shape
      for t_index in range(t):
          for z_index in range(z):
-             params.append((new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape))
+             params.append((new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape, source_xy_shape))
 
+     # Run in parallel
      with PEx() as excutor:
          result = excutor.run(_interp_single_worker, params)
 
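oafuncs/_script/netcdf_merge.py CHANGED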
@@ -82,8 +82,8 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
 
      for var in pbar(merged_data, "Merging variables"):
          if isinstance(merged_data[var], list):
-             # Use compat='override' so attributes do not conflict during the merge
-             merged_data[var] = xr.concat(merged_data[var], dim=dim_name, compat="override")
+             # Use coords='minimal' instead of the default, and drop the potentially conflicting compat='override'
+             merged_data[var] = xr.concat(merged_data[var], dim=dim_name, coords="minimal")
              # Restore the original fill-value and missing-value attributes
              if var in fill_values:
                  merged_data[var].attrs["_FillValue"] = fill_values[var]
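The switch from `compat="override"` to `coords="minimal"` is the behavioural core of this hunk. A self-contained sketch of the call on toy data (assumes only that xarray is installed):

    import numpy as np
    import xarray as xr

    # Two time slices of the same variable, sharing the lon coordinate.
    da1 = xr.DataArray(np.zeros((1, 2)), dims=("time", "lon"), coords={"time": [0], "lon": [10.0, 20.0]})
    da2 = xr.DataArray(np.ones((1, 2)), dims=("time", "lon"), coords={"time": [1], "lon": [10.0, 20.0]})

    # coords="minimal": only coordinates without the concat dimension that the
    # variables actually share are carried along, rather than being overridden.
    merged = xr.concat([da1, da2], dim="time", coords="minimal")
    print(merged.sizes)  # {'time': 2, 'lon': 2}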
@@ -100,19 +100,29 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
          if coord not in merged_ds.coords:
              merged_ds = merged_ds.assign_coords({coord: ds0[coord]})
 
-     # If the merge dimension is a coordinate, check that it is consistent across all files
+     """ # Reworked merge-dimension validation: check the compatibility of the dimension across all files
      if dim_name in merged_ds.coords and len(file_list) > 1:
-         logging.info(f"Validating the consistency of merge dimension {dim_name} ...")
-         for file in file_list[1:]:
+         logging.info(f"Checking the validity of merge dimension {dim_name} ...")
+
+         # Collect this dimension's values from every file
+         all_dim_values = []
+         for file in file_list:
              with xr.open_dataset(file) as ds:
-                 if dim_name in ds.coords and not ds[dim_name].equals(merged_ds[dim_name]):
-                     logging.warning(f"The {dim_name} coordinate of file {file} differs from the merged data and may distort it")
+                 if dim_name in ds.coords:
+                     all_dim_values.append(ds[dim_name].values)
+
+         # Warn only when two or more distinct value sets exist
+         unique_values_count = len({tuple(vals.tolist()) if hasattr(vals, "tolist") else tuple(vals) for vals in all_dim_values})
+         if unique_values_count > 1:
+             logging.warning(f"Detected {unique_values_count} distinct sets of {dim_name} coordinate values; merging may rearrange the data")
+         else:
+             logging.info(f"The {dim_name} coordinate values agree across all files; merging will keep the original order") """
 
      if os.path.exists(target_filename):
          logging.warning("The target file already exists. Removing it ...")
          os.remove(target_filename)
 
-     merged_ds.to_netcdf(target_filename,mode='w')
+     merged_ds.to_netcdf(target_filename, mode="w")
 
 
  # Example usage
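oafuncs/_script/plot_dataset.py CHANGED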
@@ -263,10 +263,45 @@ def process_variable(var: str, data: xr.DataArray, dims: int, dims_name: Tuple[s
              print(f"Error processing {var}_{dims_name[0]}-{i}_{dims_name[1]}-{j}: {e}")
 
 
- def func_plot_dataset(ds_in: Union[xr.Dataset, xr.DataArray], output_dir: str, xyzt_dims: Tuple[str, str, str, str] = ("longitude", "latitude", "level", "time"), plot_type: str = "contourf", fixed_colorscale: bool = False) -> None:
+ def get_xyzt_names(ds_in, xyzt_dims):
+     dims_dict = {
+         "x": ["longitude", "lon", "x", "lon_rho", "lon_u", "lon_v", "xi_rho", "xi_u", "xi_v",
+               "xc", "x_rho", "xlon", "nlon", "east_west", "i", "xh", "xq", "nav_lon"],
+         "y": ["latitude", "lat", "y", "lat_rho", "lat_u", "lat_v", "eta_rho", "eta_u", "eta_v",
+               "yc", "y_rho", "ylat", "nlat", "north_south", "j", "yh", "yq", "nav_lat"],
+         "z": ["level", "lev", "z", "depth", "height", "pressure", "s_rho", "s_w",
+               "altitude", "plev", "isobaric", "vertical", "k", "sigma", "hybrid", "theta",
+               "pres", "sigma_level", "z_rho", "z_w", "layers", "deptht", "nav_lev"],
+         "t": ["time", "t", "ocean_time", "bry_time", "frc_time",
+               "time_counter", "Time", "Julian_day", "forecast_time", "clim_time", "model_time"],
+     }
+     if xyzt_dims is not None:
+         x_dim, y_dim, z_dim, t_dim = xyzt_dims
+         return x_dim, y_dim, z_dim, t_dim
+     data_dim_names = ds_in.dims
+     x_dim, y_dim, z_dim, t_dim = None, None, None, None
+     for dim in dims_dict["x"]:
+         if dim in data_dim_names:
+             x_dim = dim
+             break
+     for dim in dims_dict["y"]:
+         if dim in data_dim_names:
+             y_dim = dim
+             break
+     for dim in dims_dict["z"]:
+         if dim in data_dim_names:
+             z_dim = dim
+             break
+     for dim in dims_dict["t"]:
+         if dim in data_dim_names:
+             t_dim = dim
+             break
+     return x_dim, y_dim, z_dim, t_dim
+
+
+ def func_plot_dataset(ds_in: Union[xr.Dataset, xr.DataArray], output_dir: str, xyzt_dims: Tuple[str, str, str, str] = None, plot_type: str = "contourf", fixed_colorscale: bool = False) -> None:
      """Plot variables from a NetCDF file and save the plots to the specified directory."""
      os.makedirs(output_dir, exist_ok=True)
-     x_dim, y_dim, z_dim, t_dim = xyzt_dims
 
      # Main processing function
      try:
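get_xyzt_names passes explicit dimension names straight through and otherwise scans the variable's dims against the alias tables above. A short sketch of both paths (toy DataArray; assumes get_xyzt_names is in scope as defined in this hunk):

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.zeros((2, 3, 4)), dims=("ocean_time", "lat_rho", "lon_rho"))

    # Explicit names are returned unchanged:
    print(get_xyzt_names(da, ("lon_rho", "lat_rho", None, "ocean_time")))
    # With xyzt_dims=None, the aliases match lon_rho / lat_rho / ocean_time:
    print(get_xyzt_names(da, None))  # ('lon_rho', 'lat_rho', None, 'ocean_time')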
@@ -277,9 +312,11 @@ def func_plot_dataset(ds_in: Union[xr.Dataset, xr.DataArray], output_dir: str, x
          var = ds_in.name if ds_in.name is not None else "unnamed_variable"
          print("=" * 120)
          print(f"Processing: {var}")
+
          try:
              dims = len(ds_in.shape)
              dims_name = ds_in.dims
+             x_dim, y_dim, z_dim, t_dim = get_xyzt_names(ds_in, xyzt_dims)
              process_variable(var, ds_in, dims, dims_name, output_dir, x_dim, y_dim, z_dim, t_dim, fixed_colorscale, plot_type)
          except Exception as e:
              print(f"Error processing variable {var}: {e}")
@@ -295,6 +332,7 @@ def func_plot_dataset(ds_in: Union[xr.Dataset, xr.DataArray], output_dir: str, x
              data = ds[var]
              dims = len(data.shape)
              dims_name = data.dims
+             x_dim, y_dim, z_dim, t_dim = get_xyzt_names(data, xyzt_dims)
              try:
                  process_variable(var, data, dims, dims_name, output_dir, x_dim, y_dim, z_dim, t_dim, fixed_colorscale, plot_type)
              except Exception as e:
oafuncs/oa_cmap.py CHANGED
@@ -8,7 +8,9 @@ __all__ = ["show", "to_color", "create", "get"]
 
 
  # ** Visualize a cmap as a filled plot (function adapted from the matplotlib docs)
- def show(colormaps: Union[str, mpl.colors.Colormap, List[Union[str, mpl.colors.Colormap]]]) -> None:
+ def show(
+     colormaps: Union[str, mpl.colors.Colormap, List[Union[str, mpl.colors.Colormap]]],
+ ) -> None:
      """Helper function to plot data with associated colormap.
 
      This function creates a visualization of one or more colormaps by applying them
@@ -97,7 +99,14 @@ def to_color(colormap_name: str, num_colors: int = 256) -> List[tuple]:
 
 
  # ** Build a custom cmap: multiple colors, with optional positions
- def create(color_list: Optional[List[Union[str, tuple]]] = None, rgb_file: Optional[str] = None, color_positions: Optional[List[float]] = None, below_range_color: Optional[Union[str, tuple]] = None, above_range_color: Optional[Union[str, tuple]] = None, value_delimiter: str = ",") -> mpl.colors.Colormap:
+ def create(
+     color_list: Optional[List[Union[str, tuple]]] = None,
+     rgb_file: Optional[str] = None,
+     color_positions: Optional[List[float]] = None,
+     below_range_color: Optional[Union[str, tuple]] = None,
+     above_range_color: Optional[Union[str, tuple]] = None,
+     value_delimiter: str = ",",
+ ) -> mpl.colors.Colormap:
      """Create a custom colormap from a list of colors or an RGB txt document.
 
      Args:
@@ -144,7 +153,7 @@ def create(color_list: Optional[List[Union[str, tuple]]] = None, rgb_file: Optio
 
      if rgb_file:
          try:
-             print(f"Reading RGB data from {rgb_file}...")
+             # print(f"Reading RGB data from {rgb_file}...")
 
              with open(rgb_file) as fid:
                  data = [line.strip() for line in fid if line.strip() and not line.strip().startswith("#")]
@@ -178,7 +187,7 @@ def create(color_list: Optional[List[Union[str, tuple]]] = None, rgb_file: Optio
              if max_rgb > 2:
                  rgb = rgb / 255.0
              cmap_color = mpl.colors.ListedColormap(rgb, name="my_color")
-             print(f"Successfully created colormap from {rgb_file}")
+             # print(f"Successfully created colormap from {rgb_file}")
          except FileNotFoundError:
              error_msg = f"RGB file not found: {rgb_file}"
              print(error_msg)
@@ -189,15 +198,15 @@ def create(color_list: Optional[List[Union[str, tuple]]] = None, rgb_file: Optio
              cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", color_list)
          else:
              cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", list(zip(color_positions, color_list)))
-         print(f"Successfully created colormap from {len(color_list)} colors")
+         # print(f"Successfully created colormap from {len(color_list)} colors")
 
      # Set below/above range colors if provided
      if below_range_color is not None:
          cmap_color.set_under(below_range_color)
-         print(f"Set below-range color to {below_range_color}")
+         # print(f"Set below-range color to {below_range_color}")
      if above_range_color is not None:
          cmap_color.set_over(above_range_color)
-         print(f"Set above-range color to {above_range_color}")
+         # print(f"Set above-range color to {above_range_color}")
 
      return cmap_color
 
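For reference, the reflowed create() signature is exercised like this (a sketch based only on the parameters shown above; assumes oafuncs and matplotlib are installed):

    import matplotlib.pyplot as plt
    import numpy as np
    from oafuncs.oa_cmap import create

    # Blue-white-red colormap with pinned positions and out-of-range colors.
    cmap = create(
        color_list=["blue", "white", "red"],
        color_positions=[0.0, 0.5, 1.0],
        below_range_color="navy",
        above_range_color="darkred",
    )
    data = np.random.default_rng(0).normal(size=(32, 32))
    plt.imshow(data, cmap=cmap, vmin=-2, vmax=2)
    plt.colorbar(extend="both")  # shows the under/over colors
    plt.show()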
oafuncs/oa_data.py CHANGED
@@ -164,45 +164,68 @@ def interp_2d(
 
  def interp_2d_geo(target_x_coordinates: Union[np.ndarray, List[float]], target_y_coordinates: Union[np.ndarray, List[float]], source_x_coordinates: Union[np.ndarray, List[float]], source_y_coordinates: Union[np.ndarray, List[float]], source_data: np.ndarray, interpolation_method: str = "cubic") -> np.ndarray:
      """
-     Perform 2D interpolation on the last two dimensions of a multi-dimensional array (spherical coordinates).
-     Interpolates in spherical coordinates; suitable for global-scale geographic data and handles longitudes that cross the dateline correctly.
+     Geographic interpolation with pyinterp, suitable for both global-scale and regional geographic data.
+
+     Features:
+     - handles longitudes that cross the dateline correctly
+     - picks the best interpolation strategy automatically
+     - works with both regular grids and scattered data
+     - supports parallel processing of multi-dimensional data
 
      Args:
-         target_x_coordinates (Union[np.ndarray, List[float]]): Target grid's longitude (-180 to 180 or 0 to 360).
-         target_y_coordinates (Union[np.ndarray, List[float]]): Target grid's latitude (-90 to 90).
-         source_x_coordinates (Union[np.ndarray, List[float]]): Original grid's longitude (-180 to 180 or 0 to 360).
-         source_y_coordinates (Union[np.ndarray, List[float]]): Original grid's latitude (-90 to 90).
-         source_data (np.ndarray): Multi-dimensional array with the last two dimensions as spatial.
-         interpolation_method (str, optional): Interpolation method. Defaults to "cubic".
-             >>> optional: 'linear', 'nearest', 'cubic', 'quintic', etc.
+         target_x_coordinates: target longitudes (-180 to 180 or 0 to 360)
+         target_y_coordinates: target latitudes (-90 to 90)
+         source_x_coordinates: source longitudes (-180 to 180 or 0 to 360)
+         source_y_coordinates: source latitudes (-90 to 90)
+         source_data: multi-dimensional array whose last two dimensions are spatial
+         interpolation_method: interpolation method:
+             - 'nearest': nearest-neighbour interpolation
+             - 'linear'/'bilinear': bilinear interpolation
+             - 'cubic': cubic spline interpolation
+             - 'quintic': quintic spline interpolation
 
      Returns:
-         np.ndarray: Interpolated data array.
-
-     Raises:
-         ValueError: If input shapes are invalid.
+         np.ndarray: the interpolated data array
 
      Examples:
-         >>> # Example on a global grid
-         >>> target_lon = np.arange(-180, 181, 1)  # 1-degree target grid
+         >>> # Global interpolation example
+         >>> target_lon = np.arange(-180, 181, 1)
          >>> target_lat = np.arange(-90, 91, 1)
-         >>> source_lon = np.arange(-180, 181, 5)  # 5-degree source grid
+         >>> source_lon = np.arange(-180, 181, 5)
          >>> source_lat = np.arange(-90, 91, 5)
-         >>> # Build a simple data field (e.g. a temperature field)
          >>> source_data = np.cos(np.deg2rad(source_lat.reshape(-1, 1))) * np.cos(np.deg2rad(source_lon))
-         >>> # Interpolate onto the high-resolution grid
          >>> result = interp_2d_geo(target_lon, target_lat, source_lon, source_lat, source_data)
-         >>> print(result.shape)  # Expected output: (181, 361)
      """
-     from ._script.data_interp_geo import interp_2d_func_geo
-     interp_2d_func_geo(
-         target_x_coordinates=target_x_coordinates,
-         target_y_coordinates=target_y_coordinates,
-         source_x_coordinates=source_x_coordinates,
-         source_y_coordinates=source_y_coordinates,
-         source_data=source_data,
-         interpolation_method=interpolation_method,
-     )
+     # Use importlib to check whether pyinterp is available, avoiding the warning a direct import would trigger
+     import importlib.util
+     pyinterp_available = importlib.util.find_spec("pyinterp") is not None
+
+     if pyinterp_available:
+         # Import the module only when pyinterp is available
+         from ._script.data_interp_geo import interp_2d_func_geo
+
+         return interp_2d_func_geo(
+             target_x_coordinates=target_x_coordinates,
+             target_y_coordinates=target_y_coordinates,
+             source_x_coordinates=source_x_coordinates,
+             source_y_coordinates=source_y_coordinates,
+             source_data=source_data,
+             interpolation_method=interpolation_method,
+         )
+     else:
+         print("[yellow]Warning: the pyinterp module is not installed, so spherical interpolation is unavailable; trying planar interpolation as a fallback.[/yellow]")
+         print("[yellow]Run `pip install pyinterp` to get more accurate interpolation of geographic data.[/yellow]")
+         try:
+             return interp_2d(
+                 target_x_coordinates=target_x_coordinates,
+                 target_y_coordinates=target_y_coordinates,
+                 source_x_coordinates=source_x_coordinates,
+                 source_y_coordinates=source_y_coordinates,
+                 source_data=source_data,
+                 interpolation_method=interpolation_method,
+             )
+         except Exception as e:
+             raise ImportError(f"pyinterp is unavailable and the fallback interpolation also failed: {e}")
 
  def mask_shapefile(
      data_array: np.ndarray,
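The guarded import added here is the usual optional-dependency pattern: probe for the module with importlib.util.find_spec and only import it when the probe succeeds. In isolation (nothing package-specific beyond the module name):

    import importlib.util

    def has_module(name: str) -> bool:
        """Return True when `name` is importable, without actually importing it."""
        return importlib.util.find_spec(name) is not None

    if has_module("pyinterp"):
        import pyinterp  # defer the heavy import until the probe succeeds
    else:
        pyinterp = None  # callers then fall back to the planar interp_2d path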
oafuncs/oa_nc.py CHANGED
@@ -246,7 +246,7 @@ def draw(
      output_directory: Optional[str] = None,
      dataset: Optional[xr.Dataset] = None,
      file_path: Optional[str] = None,
-     dimensions: Union[List[str], Tuple[str, str, str, str]] = ("longitude", "latitude", "level", "time"),
+     dims_xyzt: Union[List[str], Tuple[str, str, str, str]] = None,
      plot_style: str = "contourf",
      use_fixed_colorscale: bool = False,
  ) -> None:
@@ -257,7 +257,7 @@ def draw(
          output_directory (Optional[str]): Path of the output directory.
          dataset (Optional[xr.Dataset]): Xarray dataset to plot.
          file_path (Optional[str]): Path to the NetCDF file.
-         dimensions (Union[List[str], Tuple[str, str, str, str]]): Dimensions for plotting.
+         dims_xyzt (Union[List[str], Tuple[str, str, str, str]]): Dimensions for plotting, in (x, y, z, t) order.
          plot_style (str): Type of the plot, e.g., "contourf" or "contour". Default is "contourf".
          use_fixed_colorscale (bool): Whether to use a fixed colorscale. Default is False.
 
@@ -268,15 +268,15 @@ def draw(
 
      if output_directory is None:
          output_directory = os.getcwd()
-     if not isinstance(dimensions, (list, tuple)):
+     if dims_xyzt is not None and not isinstance(dims_xyzt, (list, tuple)):  # None means auto-detect
          raise ValueError("dimensions must be a list or tuple")
 
      if dataset is not None:
-         func_plot_dataset(dataset, output_directory, tuple(dimensions), plot_style, use_fixed_colorscale)
+         func_plot_dataset(dataset, output_directory, tuple(dims_xyzt) if dims_xyzt is not None else None, plot_style, use_fixed_colorscale)
      elif file_path is not None:
          if check(file_path):
              ds = xr.open_dataset(file_path)
-             func_plot_dataset(ds, output_directory, tuple(dimensions), plot_style, use_fixed_colorscale)
+             func_plot_dataset(ds, output_directory, tuple(dims_xyzt) if dims_xyzt is not None else None, plot_style, use_fixed_colorscale)
          else:
              print(f"[red]Invalid file: {file_path}[/red]")
      else:
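Under the renamed parameter, a typical call now lets the dimension names be auto-detected (a sketch; the file names are hypothetical and this assumes the None-aware guard shown above):

    from oafuncs.oa_nc import draw

    # dims_xyzt=None lets get_xyzt_names() detect lon/lat/level/time aliases;
    # pass an explicit tuple only for non-standard dimension names.
    draw(output_directory="./plots", file_path="sample.nc", dims_xyzt=None)
    draw(output_directory="./plots", file_path="roms.nc", dims_xyzt=("xi_rho", "eta_rho", "s_rho", "ocean_time"))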
@@ -288,10 +288,11 @@ def compress(src_path, dst_path=None,convert_dtype='int16'):
      Compress a NetCDF file, packing the data with scale_factor/add_offset.
      If dst_path is omitted, a new file name is generated automatically; after writing, the original file is deleted and the new file is renamed to the original name.
      """
+     src_path = str(src_path)
      # Decide whether to replace the original file
      delete_orig = dst_path is None
      if delete_orig:
-         dst_path = src_path.replace(".nc", "_compress.nc")
+         dst_path = src_path.replace(".nc", "_compress_temp.nc")
 
      ds = xr.open_dataset(src_path)
      save(dst_path, ds, convert_dtype=convert_dtype, use_scale_offset=True, use_compression=True)
@@ -313,10 +314,11 @@ def unscale(src_path, dst_path=None, compression_level=4):
      dst_path: target file path; None replaces the original file
      compression_level: compression level (1-9); higher compresses more but is slower
      """
+     src_path = str(src_path)
      # Decide whether to replace the original file
      delete_orig = dst_path is None
      if delete_orig:
-         dst_path = src_path.replace(".nc", "_unpacked.nc")
+         dst_path = src_path.replace(".nc", "_unpacked_temp.nc")
 
      # Open the original file and get its size
      orig_size = os.path.getsize(src_path) / (1024 * 1024)  # MB
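Both helpers now write to a *_temp.nc name first, so an in-place update never touches the source until the new file is complete; the docstring's delete-then-rename step finishes the swap. The pattern in isolation (stdlib-only sketch; produce is a hypothetical callback):

    import os

    def replace_in_place(src_path, produce):
        """Write a new version of src_path via a temporary file, then swap it in."""
        src_path = str(src_path)              # tolerate pathlib.Path, as the diff now does
        tmp_path = src_path.replace(".nc", "_temp.nc")
        produce(src_path, tmp_path)           # e.g. read src, write the compressed copy to tmp
        os.remove(src_path)
        os.rename(tmp_path, src_path)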
oafuncs/oa_tool.py CHANGED
@@ -135,7 +135,7 @@ def email(title: str = "Title", content: Optional[str] = None, send_to: str = "1
 
  def pbar(
      iterable: Iterable = range(100),
-     description: str = "Working",
+     description: str = None,
      total: Optional[float] = None,
      completed: float = 0,
      color: Any = "None",
@@ -162,21 +162,9 @@ def pbar(
 
      Returns:
          Any: An instance of ColorProgressBar.
-
-     Example:
-         >>> for i in pbar(range(10), description="Processing"):
-         ...     time.sleep(0.1)
-         >>> for i in pbar(range(10), description="Processing", color="green"):
-         ...     time.sleep(0.1)
-         >>> for i in pbar(range(10), description="Processing", cmap=["red", "green"]):
-         ...     time.sleep(0.1)
-         >>> for i in pbar(range(10), description="Processing", cmap="viridis"):
-         ...     time.sleep(0.1)
      """
      from ._script.cprogressbar import ColorProgressBar
      import random
-
-     # number = random.randint(1, 999)
 
      def _generate_random_color_hex():
          """Generate a random color in hexadecimal format."""
@@ -188,11 +176,10 @@ def pbar(
      if color == 'None' and cmap is None:
          color = _generate_random_color_hex()
 
-     style = f"bold {color if color != 'None' else 'green'}"
-     # print(f"[{style}]~*^* {description} *^*~ -> {number:03d}[/{style}]")
-     print(f"[{style}]~*^* {description} *^*~[/{style}]")
+     if description is not None:
+         style = f"bold {color if color != 'None' else 'green'}"
+         print(f"[{style}]~*^* {description} *^*~[/{style}]")
 
-     # description=f'{number:03d}'
      description = ""
 
      return ColorProgressBar(
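oafuncs-0.0.98.21.dist-info/METADATA → oafuncs-0.0.98.23.dist-info/METADATA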
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: oafuncs
- Version: 0.0.98.21
+ Version: 0.0.98.23
  Summary: Oceanic and Atmospheric Functions
  Home-page: https://github.com/Industry-Pays/OAFuncs
  Author: Kun Liu
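oafuncs-0.0.98.21.dist-info/RECORD → oafuncs-0.0.98.23.dist-info/RECORD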
@@ -1,25 +1,25 @@
  oafuncs/__init__.py,sha256=T_-VtnWWllV3Q91twT5Yt2sUapeA051QbPNnBxmg9nw,1456
- oafuncs/oa_cmap.py,sha256=NVKwEkmMUKXh9L1svx_WHqemLOx3evgX3_-UVDxc9ko,11498
- oafuncs/oa_data.py,sha256=Aat9ktxxRGevaqQya3IJWfXeoEs-FCXGUcNE2pKnzfU,10931
+ oafuncs/oa_cmap.py,sha256=pUFAGzbIg0WLxObBP2t_--ZIg00Dxdojx0y7OjTeqEo,11551
+ oafuncs/oa_data.py,sha256=klKi3wD-hrtr5tUTD_5sHy306KPbVV7yQYwzcVNKJbg,11684
  oafuncs/oa_date.py,sha256=WhM6cyD4G3IeghjLTHhAMtlvJbA7kwQG2sHnxdTgyso,6303
  oafuncs/oa_draw.py,sha256=IaBGDx-EOxyMM2IuJ4zLZt6ruHHV5qFStPItmUOXoWk,17635
  oafuncs/oa_file.py,sha256=j9gXJgPOJsliu4IOUc4bc-luW4yBvQyNCEmMyDVjUwQ,16404
  oafuncs/oa_help.py,sha256=_4AZgRDq5Or0vauNvq5IDDHIBoBfdOQtzak-mG1wwAw,4537
- oafuncs/oa_nc.py,sha256=UUXnBg2cO5XiJ8w0jNqCZJg83FVKqxlEHxOJG5o08Z8,15201
+ oafuncs/oa_nc.py,sha256=pxTyR8f2mlu1Zkz4PJ5ImOyhrFD_mgytXHJjt9ohnUw,15233
  oafuncs/oa_python.py,sha256=NkopwkYFGSEuVljnTBvXCl6o2CeyRNBqRXSsUl3euEE,5192
- oafuncs/oa_tool.py,sha256=QBjJh3pf54yXVuOmu97rW6Tsr6uNMyZ5KqZbR4VQFTc,8628
+ oafuncs/oa_tool.py,sha256=Zuaoa92wll0YqXGRf0oF_c7wlATtl7bvjCuLt9VLXp0,8046
  oafuncs/_data/hycom.png,sha256=MadKs6Gyj5n9-TOu7L4atQfTXtF9dvN9w-tdU9IfygI,10945710
  oafuncs/_data/oafuncs.png,sha256=o3VD7wm-kwDea5E98JqxXl04_78cBX7VcdUt7uQXGiU,3679898
  oafuncs/_script/cprogressbar.py,sha256=UIgGcLFs-6IgWlITuBLaQqrpt4OAK3Mst5RlCiNfZdQ,15772
  oafuncs/_script/data_interp.py,sha256=EiZbt6n5BEaRKcng88UgX7TFPhKE6TLVZniS01awXjg,5146
- oafuncs/_script/data_interp_geo.py,sha256=ZRFb3fKRiYQViZNHd19eW20C9i38BsiIU8w0fG5mbqM,7789
+ oafuncs/_script/data_interp_geo.py,sha256=MFp4ADqVFr_g-5bj6104V8U4f5-rAiQm8cfP4XceMY0,11532
  oafuncs/_script/email.py,sha256=lL4HGKrr524-g0xLlgs-4u7x4-u7DtgNoD9AL8XJKj4,3058
- oafuncs/_script/netcdf_merge.py,sha256=4mZLMcxBL4Rehi_eW2EX6vqbMJgZBOL4_ceaMzcuzio,5565
+ oafuncs/_script/netcdf_merge.py,sha256=tM9ePqLiEsE7eIsNM5XjEYeXwxjYOdNz5ejnEuI7xKw,6066
  oafuncs/_script/netcdf_modify.py,sha256=sGRUYNhfGgf9JV70rnBzw3bzuTRSXzBTL_RMDnDPeLQ,4552
  oafuncs/_script/netcdf_write.py,sha256=GvyUyUhzMonzSp3y4pT8ZAfbQrsh5J3dLnmINYJKhuE,21422
  oafuncs/_script/parallel.py,sha256=07-BJVHxXJNlrOrhrSGt7qCZiKWq6dBvNDBA1AANYnI,8861
  oafuncs/_script/parallel_test.py,sha256=0GBqZOX7IaCOKF2t1y8N8YYu53GJ33OkfsWgpvZNqM4,372
- oafuncs/_script/plot_dataset.py,sha256=zkSEnO_-biyagorwWXPoihts_cwuvripzEt-l9bHJ2E,13989
+ oafuncs/_script/plot_dataset.py,sha256=Hr4X0BHJ1qmf2YHT40Vu3nF8JS_4MlZ2MK6yeJCSHOg,15642
  oafuncs/_script/replace_file_content.py,sha256=eCFZjnZcwyRvy6b4mmIfBna-kylSZTyJRfgXd6DdCjk,5982
  oafuncs/oa_down/User_Agent-list.txt,sha256=pHaMlElMvZ8TG4vf4BqkZYKqe0JIGkr4kCN0lM1Y9FQ,514295
  oafuncs/oa_down/__init__.py,sha256=kRX5eTUCbAiz3zTaQM1501paOYS_3fizDN4Pa0mtNUA,585
@@ -39,8 +39,8 @@ oafuncs/oa_sign/__init__.py,sha256=QKqTFrJDFK40C5uvk48GlRRbGFzO40rgkYwu6dYxatM,5
  oafuncs/oa_sign/meteorological.py,sha256=8091SHo2L8kl4dCFmmSH5NGVHDku5i5lSiLEG5DLnOQ,6489
  oafuncs/oa_sign/ocean.py,sha256=xrW-rWD7xBWsB5PuCyEwQ1Q_RDKq2KCLz-LOONHgldU,5932
  oafuncs/oa_sign/scientific.py,sha256=a4JxOBgm9vzNZKpJ_GQIQf7cokkraV5nh23HGbmTYKw,5064
- oafuncs-0.0.98.21.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
- oafuncs-0.0.98.21.dist-info/METADATA,sha256=Lk9y2XVdDKb9HB5PP_Pm-_aII0tnGmaladuGW1Y9otM,4273
- oafuncs-0.0.98.21.dist-info/WHEEL,sha256=ooBFpIzZCPdw3uqIQsOo4qqbA4ZRPxHnOH7peeONza0,91
- oafuncs-0.0.98.21.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
- oafuncs-0.0.98.21.dist-info/RECORD,,
+ oafuncs-0.0.98.23.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
+ oafuncs-0.0.98.23.dist-info/METADATA,sha256=uLPiMYNdGE_FAuzYqOVV2s3LM-gFmGUBvEyf6t4Fni8,4273
+ oafuncs-0.0.98.23.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+ oafuncs-0.0.98.23.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
+ oafuncs-0.0.98.23.dist-info/RECORD,,
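oafuncs-0.0.98.21.dist-info/WHEEL → oafuncs-0.0.98.23.dist-info/WHEEL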
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.0.1)
+ Generator: setuptools (80.7.1)
  Root-Is-Purelib: true
  Tag: py3-none-any