oafuncs 0.0.98.7__py3-none-any.whl → 0.0.98.9__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
oafuncs/_script/netcdf_merge.py CHANGED
@@ -1,9 +1,12 @@
  import os
  from typing import List, Optional, Union
- from dask.diagnostics import ProgressBar
+
+ import numpy as np
  import xarray as xr
+ from dask.diagnostics import ProgressBar
  from oafuncs import pbar

+
  def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, List[str]]] = None, dim_name: Optional[str] = None, target_filename: Optional[str] = None) -> None:
      """
      Description:
@@ -22,7 +25,7 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
      merge(file_list, var_name=['u', 'v'], dim_name='time', target_filename='merged.nc')
      merge(file_list, var_name=None, dim_name='time', target_filename='merged.nc')
      """
-
+
      if target_filename is None:
          target_filename = "merged.nc"

@@ -55,11 +58,19 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
              if dim_name in data_var.dims:
                  merged_data.setdefault(var, []).append(data_var)
              elif var not in merged_data:
-                 merged_data[var] = data_var.fillna(0)  # fill NaN values with 0
+                 # Check the dtype; fill datetime variables with NaT
+                 if np.issubdtype(data_var.dtype, np.datetime64):
+                     merged_data[var] = data_var.fillna(np.datetime64("NaT"))
+                 else:
+                     merged_data[var] = data_var.fillna(0)

      for var in pbar(merged_data, description="Merging variables", color="#9b45d1"):
          if isinstance(merged_data[var], list):
-             merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(0)
+             # Check the dtype; fill datetime variables with NaT
+             if np.issubdtype(merged_data[var][0].dtype, np.datetime64):
+                 merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(np.datetime64("NaT"))
+             else:
+                 merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(0)
          # print(f"Variable '{var}' merged: min={merged_data[var].min().values:.3f}, max={merged_data[var].max().values:.3f}, mean={merged_data[var].mean().values:.3f}")

      # Data writing below supports compression and sets the offset and scale factor
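The change above makes the fill value depend on the variable's dtype: the previous unconditional fillna(0) only makes sense for numeric variables (NumPy cannot promote datetime64 and integer types), so datetime64 variables are now filled with NaT instead. A minimal standalone sketch of the same check, using made-up toy variables rather than anything from the package:

    import numpy as np
    import xarray as xr

    # Toy variables: one float field and one datetime64 field, both with gaps.
    flt = xr.DataArray([1.0, np.nan, 3.0], dims="x")
    tim = xr.DataArray(np.array(["2024-01-01", "NaT", "2024-01-03"], dtype="datetime64[ns]"), dims="x")

    for var in (flt, tim):
        if np.issubdtype(var.dtype, np.datetime64):
            filled = var.fillna(np.datetime64("NaT"))  # keep missing timestamps as NaT
        else:
            filled = var.fillna(0)  # numeric gaps become 0
        print(filled.values)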
oafuncs/oa_data.py CHANGED
@@ -13,9 +13,7 @@ SystemInfo: Windows 11
  Python Version: 3.11
  """

- import itertools
- import multiprocessing as mp
- from concurrent.futures import ThreadPoolExecutor
+
  from typing import Any, List, Union

  import numpy as np
@@ -24,6 +22,8 @@ import xarray as xr
  from rich import print
  from scipy.interpolate import griddata, interp1d

+ from oafuncs.oa_tool import PEx
+
  __all__ = ["interp_along_dim", "interp_2d", "ensure_list", "mask_shapefile"]


@@ -115,6 +115,15 @@ def interp_along_dim(
      return np.apply_along_axis(apply_interp_extrap, interpolation_axis, source_data)


+ def _interp_single_worker(*args):
+     """
+     Single-slice interpolation worker for PEx-based parallelism; args are (data_slice, origin_points, target_points, interpolation_method, target_shape).
+     """
+     data_slice, origin_points, target_points, interpolation_method, target_shape = args
+
+     return griddata(origin_points, data_slice.ravel(), target_points, method=interpolation_method).reshape(target_shape)
+
+
  def interp_2d(
      target_x_coordinates: Union[np.ndarray, List[float]],
      target_y_coordinates: Union[np.ndarray, List[float]],
@@ -122,7 +131,6 @@ def interp_2d(
      source_y_coordinates: Union[np.ndarray, List[float]],
      source_data: np.ndarray,
      interpolation_method: str = "linear",
-     use_parallel: bool = True,
  ) -> np.ndarray:
      """
      Perform 2D interpolation on the last two dimensions of a multi-dimensional array.
@@ -151,10 +159,6 @@ def interp_2d(
      >>> result = interp_2d(target_x_coordinates, target_y_coordinates, source_x_coordinates, source_y_coordinates, source_data)
      >>> print(result.shape) # Expected output: (3, 3)
      """
-
-     def interp_single(data_slice: np.ndarray, target_points: np.ndarray, origin_points: np.ndarray, method: str) -> np.ndarray:
-         return griddata(origin_points, data_slice.ravel(), target_points, method=method).reshape(target_y_coordinates.shape)
-
      if len(target_y_coordinates.shape) == 1:
          target_x_coordinates, target_y_coordinates = np.meshgrid(target_x_coordinates, target_y_coordinates)
      if len(source_y_coordinates.shape) == 1:
@@ -166,25 +170,31 @@ def interp_2d(
      target_points = np.column_stack((np.array(target_y_coordinates).ravel(), np.array(target_x_coordinates).ravel()))
      origin_points = np.column_stack((np.array(source_y_coordinates).ravel(), np.array(source_x_coordinates).ravel()))

-     if use_parallel:
-         with ThreadPoolExecutor(max_workers=mp.cpu_count() - 2) as executor:
-             if len(source_data.shape) == 2:
-                 interpolated_data = list(executor.map(interp_single, [source_data], [target_points], [origin_points], [interpolation_method]))
-             elif len(source_data.shape) == 3:
-                 interpolated_data = list(executor.map(interp_single, [source_data[i] for i in range(source_data.shape[0])], [target_points] * source_data.shape[0], [origin_points] * source_data.shape[0], [interpolation_method] * source_data.shape[0]))
-             elif len(source_data.shape) == 4:
-                 index_combinations = list(itertools.product(range(source_data.shape[0]), range(source_data.shape[1])))
-                 interpolated_data = list(executor.map(interp_single, [source_data[i, j] for i, j in index_combinations], [target_points] * len(index_combinations), [origin_points] * len(index_combinations), [interpolation_method] * len(index_combinations)))
-                 interpolated_data = np.array(interpolated_data).reshape(source_data.shape[0], source_data.shape[1], *target_y_coordinates.shape)
-     else:
-         if len(source_data.shape) == 2:
-             interpolated_data = interp_single(source_data, target_points, origin_points, interpolation_method)
-         elif len(source_data.shape) == 3:
-             interpolated_data = np.stack([interp_single(source_data[i], target_points, origin_points, interpolation_method) for i in range(source_data.shape[0])])
-         elif len(source_data.shape) == 4:
-             interpolated_data = np.stack([np.stack([interp_single(source_data[i, j], target_points, origin_points, interpolation_method) for j in range(source_data.shape[1])]) for i in range(source_data.shape[0])])
-
-     return np.squeeze(np.array(interpolated_data))
+     data_dims = len(source_data.shape)
+     # Ensure source_data is 4D for consistent processing (t, z, y, x)
+     if data_dims < 2:
+         raise ValueError(f"[red]Source data must have at least 2 dimensions, but got {data_dims}.[/red]")
+     elif data_dims > 4:
+         # Or handle cases with more than 4 dimensions if necessary
+         raise ValueError(f"[red]Source data has {data_dims} dimensions, but this function currently supports only up to 4.[/red]")
+
+     # Reshape to 4D by adding leading dimensions of size 1 if needed
+     num_dims_to_add = 4 - data_dims
+     new_shape = (1,) * num_dims_to_add + source_data.shape
+     new_src_data = source_data.reshape(new_shape)
+
+     t, z, _, _ = new_src_data.shape
+
+     paras = []
+     target_shape = target_y_coordinates.shape
+     for t_index in range(t):
+         for z_index in range(z):
+             paras.append((new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape))
+
+     with PEx() as excutor:
+         result = excutor.run(_interp_single_worker, paras)
+
+     return np.squeeze(np.array(result))


  def mask_shapefile(
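interp_2d no longer branches on use_parallel and the array rank: it normalizes the input to a 4-D (t, z, y, x) array, builds one parameter tuple per (t, z) slice, and hands the list to the package's PEx executor, which applies _interp_single_worker to each slice. The sketch below reproduces the per-slice computation serially with plain NumPy/SciPy (no PEx); the grids and data are invented for illustration:

    import numpy as np
    from scipy.interpolate import griddata

    # A stack of three 2-D fields on a coarse 11x11 source grid, interpolated to a 21x21 grid.
    src_x, src_y = np.meshgrid(np.linspace(0, 10, 11), np.linspace(0, 10, 11))
    tgt_x, tgt_y = np.meshgrid(np.linspace(0, 10, 21), np.linspace(0, 10, 21))
    source_data = np.random.rand(3, 11, 11)

    origin_points = np.column_stack((src_y.ravel(), src_x.ravel()))
    target_points = np.column_stack((tgt_y.ravel(), tgt_x.ravel()))

    # Pad to 4-D (t, z, y, x) with leading size-1 axes, as interp_2d does, then interpolate slice by slice.
    data4d = source_data.reshape((1,) * (4 - source_data.ndim) + source_data.shape)
    out = np.empty(data4d.shape[:2] + tgt_y.shape)
    for t in range(data4d.shape[0]):
        for z in range(data4d.shape[1]):
            out[t, z] = griddata(origin_points, data4d[t, z].ravel(), target_points, method="linear").reshape(tgt_y.shape)

    print(np.squeeze(out).shape)  # (3, 21, 21), matching np.squeeze(np.array(result)) in the parallel path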
oafuncs/oa_date.py CHANGED
@@ -64,7 +64,6 @@ def hour_range(start_time: str, end_time: str, hour_interval: int = 6) -> List[s
          date_s += datetime.timedelta(hours=hour_interval)
      return date_list

-
  def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", output_format: Optional[str] = None) -> str:
      """
      Adjust a given base time by adding a specified time delta.
@@ -89,7 +88,12 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
      """
      # Normalize the input time to "yyyymmddHHMMSS" format
      time_format = "%Y%m%d%H%M%S"
+     if len(base_time) == 4:
+         base_time += "0101"
+     elif len(base_time) == 6:
+         base_time += "01"
      base_time = base_time.ljust(14, "0")
+
      time_obj = datetime.datetime.strptime(base_time, time_format)

      # Add the specified amount of time
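With the new length checks, a year-only or year-month string is completed to a full date before the existing 14-character padding, so strptime with "%Y%m%d%H%M%S" always sees a valid month and day ("2024" used to pad to "20240000000000", which strptime rejects). A standalone sketch of just the normalization step:

    import datetime

    time_format = "%Y%m%d%H%M%S"
    for base_time in ("2024", "202406", "20240615", "2024061512"):
        s = base_time
        if len(s) == 4:    # "YYYY" -> assume January 1st
            s += "0101"
        elif len(s) == 6:  # "YYYYMM" -> assume the 1st of that month
            s += "01"
        s = s.ljust(14, "0")  # pad the missing HHMMSS with zeros
        print(base_time, "->", datetime.datetime.strptime(s, time_format))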
@@ -101,8 +105,19 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
          time_obj += datetime.timedelta(hours=time_delta)
      elif delta_unit == "days":
          time_obj += datetime.timedelta(days=time_delta)
+     elif delta_unit == "months":
+         # Handle month addition separately
+         month = time_obj.month - 1 + time_delta
+         year = time_obj.year + month // 12
+         month = month % 12 + 1
+         day = min(time_obj.day, month_days(year, month))
+         time_obj = time_obj.replace(year=year, month=month, day=day)
+     elif delta_unit == "years":
+         # Handle year addition separately
+         year = time_obj.year + time_delta
+         time_obj = time_obj.replace(year=year)
      else:
-         raise ValueError("Invalid time unit. Use 'seconds', 'minutes', 'hours', or 'days'.")
+         raise ValueError("Invalid time unit. Use 'seconds', 'minutes', 'hours', 'days', 'months', or 'years'.")

      # Determine the output format
      if output_format:
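The months branch uses the usual zero-based rollover: months are counted from 0 so the year absorbs any overflow via floor division, and the day is clamped with month_days (a helper presumably defined elsewhere in oa_date.py) so that, e.g., Jan 31 plus one month lands on Feb 28/29 instead of raising. A standalone check of the arithmetic, with calendar.monthrange standing in for month_days:

    import calendar
    import datetime

    def add_months(time_obj: datetime.datetime, time_delta: int) -> datetime.datetime:
        month = time_obj.month - 1 + time_delta  # zero-based month index
        year = time_obj.year + month // 12       # whole years absorbed by the year
        month = month % 12 + 1                   # back to 1..12
        day = min(time_obj.day, calendar.monthrange(year, month)[1])  # clamp to month length
        return time_obj.replace(year=year, month=month, day=day)

    print(add_months(datetime.datetime(2024, 1, 31), 1))   # 2024-02-29 (leap-year clamp)
    print(add_months(datetime.datetime(2023, 11, 15), 3))  # 2024-02-15 (year rollover)
    print(add_months(datetime.datetime(2024, 3, 31), -1))  # 2024-02-29 (negative delta)

Note that the years branch calls replace(year=...) without clamping the day, so a base time of Feb 29 shifted to a non-leap year would raise ValueError.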
@@ -116,6 +131,10 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
          default_format = "%Y%m%d%H"
      elif delta_unit == "days":
          default_format = "%Y%m%d"
+     elif delta_unit == "months":
+         default_format = "%Y%m"
+     elif delta_unit == "years":
+         default_format = "%Y"
      return time_obj.strftime(default_format)

 
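Together with the new default output formats, the added units round-trip the input precision: a "YYYYMM" input adjusted by months comes back as "YYYYMM" unless output_format says otherwise. A hedged usage sketch, assuming adjust_time is importable from oafuncs.oa_date as the file path above suggests and that output_format, when given, overrides the unit-based default:

    from oafuncs.oa_date import adjust_time

    print(adjust_time("202312", 3, "months"))              # "202403"
    print(adjust_time("2020", 5, "years"))                 # "2025"
    print(adjust_time("2024013112", -12, "hours"))         # "2024013100"
    print(adjust_time("202403", 1, "months", "%Y-%m-%d"))  # explicit output_format overrides the unit default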
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: oafuncs
- Version: 0.0.98.7
+ Version: 0.0.98.9
  Summary: Oceanic and Atmospheric Functions
  Home-page: https://github.com/Industry-Pays/OAFuncs
  Author: Kun Liu
@@ -1,7 +1,7 @@
  oafuncs/__init__.py,sha256=T_-VtnWWllV3Q91twT5Yt2sUapeA051QbPNnBxmg9nw,1456
  oafuncs/oa_cmap.py,sha256=DimWT4Bg7uE5Lx8hSw1REp7whpsR2pFRStAwk1cowEM,11494
- oafuncs/oa_data.py,sha256=0AbQ8_7vf9ecZaui6hmUjubkWRJxs4TGcdhJaPdbmP8,10958
- oafuncs/oa_date.py,sha256=KqU-bHtC74hYsf6VgiA3i2vI__q_toOVR-whFy4cYP8,5523
+ oafuncs/oa_data.py,sha256=SqvG2mRVSTmr5j7H3Cn-ILgDo8iwSAl6W5NLpVpBx3c,10288
+ oafuncs/oa_date.py,sha256=WhM6cyD4G3IeghjLTHhAMtlvJbA7kwQG2sHnxdTgyso,6303
  oafuncs/oa_draw.py,sha256=Wj2QBgyIPpV_dxaDrH10jqj_puK9ZM9rd-si-3VrsrE,17631
  oafuncs/oa_file.py,sha256=goF5iRXJFFCIKhIjlkCnYYt0EYlJb_4r8AeYNZ0-SOk,16209
  oafuncs/oa_help.py,sha256=_4AZgRDq5Or0vauNvq5IDDHIBoBfdOQtzak-mG1wwAw,4537
@@ -12,7 +12,7 @@ oafuncs/_data/hycom.png,sha256=MadKs6Gyj5n9-TOu7L4atQfTXtF9dvN9w-tdU9IfygI,10945
  oafuncs/_data/oafuncs.png,sha256=o3VD7wm-kwDea5E98JqxXl04_78cBX7VcdUt7uQXGiU,3679898
  oafuncs/_script/cprogressbar.py,sha256=wRU3SFPFtMI7ER26tTzg223kVKNo5RDWE9CzdIgUsuE,15771
  oafuncs/_script/email.py,sha256=lL4HGKrr524-g0xLlgs-4u7x4-u7DtgNoD9AL8XJKj4,3058
- oafuncs/_script/netcdf_merge.py,sha256=_EPF9Xj4HOVC9sZpi1lt62-Aq6pMlgsgwaajEBLhW6g,5092
+ oafuncs/_script/netcdf_merge.py,sha256=ktmTOgGfLHBNdS4HBc6xFDfO8B7E4DT7d1e6Dtare9Y,5596
  oafuncs/_script/netcdf_modify.py,sha256=sGRUYNhfGgf9JV70rnBzw3bzuTRSXzBTL_RMDnDPeLQ,4552
  oafuncs/_script/netcdf_write.py,sha256=iO1Qv9bp6RLiw1D8Nrv7tX_8X-diUZaX3Nxhk6pJ5Nw,8556
  oafuncs/_script/parallel.py,sha256=dRT7w_rBnR3mZkUlO6v6j05SwBTQpTccOna5CXI5Msg,8196
@@ -37,8 +37,8 @@ oafuncs/oa_sign/__init__.py,sha256=QKqTFrJDFK40C5uvk48GlRRbGFzO40rgkYwu6dYxatM,5
  oafuncs/oa_sign/meteorological.py,sha256=8091SHo2L8kl4dCFmmSH5NGVHDku5i5lSiLEG5DLnOQ,6489
  oafuncs/oa_sign/ocean.py,sha256=xrW-rWD7xBWsB5PuCyEwQ1Q_RDKq2KCLz-LOONHgldU,5932
  oafuncs/oa_sign/scientific.py,sha256=a4JxOBgm9vzNZKpJ_GQIQf7cokkraV5nh23HGbmTYKw,5064
- oafuncs-0.0.98.7.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
- oafuncs-0.0.98.7.dist-info/METADATA,sha256=HotYRm4-mVHvJqg_pe71FgQpZ7mYKjd6WGr2PBbIeJY,4272
- oafuncs-0.0.98.7.dist-info/WHEEL,sha256=lTU6B6eIfYoiQJTZNc-fyaR6BpL6ehTzU3xGYxn2n8k,91
- oafuncs-0.0.98.7.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
- oafuncs-0.0.98.7.dist-info/RECORD,,
+ oafuncs-0.0.98.9.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
+ oafuncs-0.0.98.9.dist-info/METADATA,sha256=Ns5ahXYSJiK2OmggCSbzoFC5XHMnLsFdC_KMnVrAnGA,4272
+ oafuncs-0.0.98.9.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+ oafuncs-0.0.98.9.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
+ oafuncs-0.0.98.9.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (78.1.1)
2
+ Generator: setuptools (79.0.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5