oafuncs 0.0.98.8__py3-none-any.whl → 0.0.98.10__py3-none-any.whl

This diff shows the content of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -148,7 +148,7 @@ class ColorProgressBar:
  description: str = "Working ...",
  total: Optional[float] = None,
  completed: float = 0,
- color: Any = "cyan",
+ color: Any = "green",
  cmap: Union[str, List[str]] = None,
  update_interval: float = 0.1,
  bar_length: int = None,
@@ -1,9 +1,12 @@
  import os
  from typing import List, Optional, Union
- from dask.diagnostics import ProgressBar
+
+ import numpy as np
  import xarray as xr
+ from dask.diagnostics import ProgressBar
  from oafuncs import pbar
 
+
  def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, List[str]]] = None, dim_name: Optional[str] = None, target_filename: Optional[str] = None) -> None:
  """
  Description:
@@ -22,7 +25,7 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
  merge(file_list, var_name=['u', 'v'], dim_name='time', target_filename='merged.nc')
  merge(file_list, var_name=None, dim_name='time', target_filename='merged.nc')
  """
-
+
  if target_filename is None:
  target_filename = "merged.nc"
 
@@ -55,11 +58,19 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
  if dim_name in data_var.dims:
  merged_data.setdefault(var, []).append(data_var)
  elif var not in merged_data:
- merged_data[var] = data_var.fillna(0)  # fill NaN values with 0
+ # Check the dtype; datetime variables are filled with NaT
+ if np.issubdtype(data_var.dtype, np.datetime64):
+ merged_data[var] = data_var.fillna(np.datetime64("NaT"))
+ else:
+ merged_data[var] = data_var.fillna(0)
 
  for var in pbar(merged_data, description="Merging variables", color="#9b45d1"):
  if isinstance(merged_data[var], list):
- merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(0)
+ # Check the dtype; datetime variables are filled with NaT
+ if np.issubdtype(merged_data[var][0].dtype, np.datetime64):
+ merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(np.datetime64("NaT"))
+ else:
+ merged_data[var] = xr.concat(merged_data[var], dim=dim_name).fillna(0)
  # print(f"Variable '{var}' merged: min={merged_data[var].min().values:.3f}, max={merged_data[var].max().values:.3f}, mean={merged_data[var].mean().values:.3f}")
 
  # Modified the write section to support compression and to set add_offset and scale_factor
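The hunk above fills missing values according to dtype, because datetime64 variables cannot take the numeric fill value 0. A minimal standalone sketch of that check (illustrative only, not the package code):

    import numpy as np
    import xarray as xr

    # One datetime64 variable and one float variable, each with a missing value.
    times = xr.DataArray(np.array(["2024-01-01", "NaT"], dtype="datetime64[ns]"))
    values = xr.DataArray(np.array([1.5, np.nan]))

    for da in (times, values):
        if np.issubdtype(da.dtype, np.datetime64):
            # Datetime variables keep NaT as their missing-value marker.
            print(da.fillna(np.datetime64("NaT")).values)
        else:
            # Numeric variables are filled with 0, as in the merge logic above.
            print(da.fillna(0).values)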
@@ -1,4 +1,3 @@
- import atexit
  import logging
  import multiprocessing as mp
  import platform
@@ -30,6 +29,7 @@ class ParallelExecutor:
  self.running = True
  self.task_history = []
  self._executor = None
+ self._shutdown_called = False
 
  self.mode, default_workers = self._determine_optimal_settings()
  self.max_workers = max_workers or default_workers
@@ -37,7 +37,6 @@ class ParallelExecutor:
 
  self._init_platform_settings()
  self._start_resource_monitor()
- atexit.register(self.shutdown)
 
  logging.info(f"Initialized {self.__class__.__name__} on {self.platform} (mode={self.mode}, workers={self.max_workers})")
 
@@ -99,16 +98,26 @@ class ParallelExecutor:
 
  def run(self, func: Callable, params: List[Tuple], chunk_size: Optional[int] = None) -> List[Any]:
  chunk_size = chunk_size or self.chunk_size
- for retry in range(self.max_retries + 1):
- try:
- start_time = time.monotonic()
- results = self._execute_batch(func, params, chunk_size)
- self._update_settings(time.monotonic() - start_time, len(params))
- return results
- except Exception as e:
- logging.error(f"Attempt {retry + 1} failed: {e}")
- self._handle_failure()
- raise RuntimeError(f"Failed after {self.max_retries} retries")
+ try:
+ for retry in range(self.max_retries + 1):
+ try:
+ start_time = time.monotonic()
+ results = self._execute_batch(func, params, chunk_size)
+ self._update_settings(time.monotonic() - start_time, len(params))
+ return results
+ except Exception as e:
+ logging.error(f"Attempt {retry + 1} failed: {e}")
+ self._handle_failure()
+ raise RuntimeError(f"Failed after {self.max_retries} retries")
+ finally:
+ # Only shut down the current executor; keep the resource monitor and other running state
+ if self._executor:
+ try:
+ self._executor.shutdown(wait=True)
+ except Exception as e:
+ logging.error(f"Executor shutdown error: {e}")
+ finally:
+ self._executor = None
 
  def _execute_batch(self, func: Callable, params: List[Tuple], chunk_size: int) -> List[Any]:
  if not params:
@@ -168,10 +177,14 @@ class ParallelExecutor:
  self._restart_executor()
 
  def shutdown(self):
+ if self._shutdown_called:
+ return
+ self._shutdown_called = True
  self.running = False
+ # The base class no longer logs here; subclasses handle that uniformly
  if self._executor:
  try:
- self._executor.shutdown(wait=False)
+ self._executor.shutdown(wait=True)
  except Exception as e:
  logging.error(f"Shutdown error: {e}")
  finally:
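Taken together, run() now releases its executor in a finally block after every batch, and shutdown() is guarded so that repeated calls (for example a context-manager exit followed by an explicit call) do the work only once. A simplified standalone sketch of that guard, with assumed names rather than the package's classes:

    import logging
    from concurrent.futures import ThreadPoolExecutor

    class _MiniExecutor:
        def __init__(self):
            self._executor = ThreadPoolExecutor(max_workers=2)
            self._shutdown_called = False

        def shutdown(self):
            if self._shutdown_called:  # later calls return immediately
                return
            self._shutdown_called = True
            try:
                self._executor.shutdown(wait=True)  # wait=True replaces the old wait=False
            except Exception as e:
                logging.error(f"Shutdown error: {e}")

    ex = _MiniExecutor()
    ex.shutdown()
    ex.shutdown()  # no-op thanks to the guard flag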
oafuncs/oa_data.py CHANGED
@@ -190,8 +190,10 @@ def interp_2d(
  for t_index in range(t):
  for z_index in range(z):
  paras.append((new_src_data[t_index, z_index], origin_points, target_points, interpolation_method, target_shape))
- excutor = PEx()
- result = excutor.run(_interp_single_worker, paras)
+
+ with PEx() as excutor:
+ result = excutor.run(_interp_single_worker, paras)
+ excutor.shutdown()
 
  return np.squeeze(np.array(result))
 
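interp_2d now drives PEx through a context manager, so the worker pool is released even if interpolation fails; the trailing shutdown() call is harmless because shutdown is now idempotent. A hedged usage sketch (the import path and the tuple-unpacking behaviour of run() are assumptions based on the diff, not confirmed API):

    from oafuncs.oa_tool import PEx  # assumed import path for the executor wrapper

    def _square(x):
        return x * x

    # Assumes run() unpacks each params tuple into the worker's arguments,
    # as interp_2d does above with its 5-element tuples.
    with PEx() as executor:
        results = executor.run(_square, [(i,) for i in range(8)])
    print(results)  # expected: [0, 1, 4, 9, 16, 25, 36, 49]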
oafuncs/oa_date.py CHANGED
@@ -64,7 +64,6 @@ def hour_range(start_time: str, end_time: str, hour_interval: int = 6) -> List[s
  date_s += datetime.timedelta(hours=hour_interval)
  return date_list
 
-
  def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", output_format: Optional[str] = None) -> str:
  """
  Adjust a given base time by adding a specified time delta.
@@ -89,7 +88,12 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
  """
  # Normalize the input time to "yyyymmddHHMMSS" format
  time_format = "%Y%m%d%H%M%S"
+ if len(base_time) == 4:
+ base_time += "0101"
+ elif len(base_time) == 6:
+ base_time += "01"
  base_time = base_time.ljust(14, "0")
+
  time_obj = datetime.datetime.strptime(base_time, time_format)
 
  # Add the specified amount of time
@@ -101,8 +105,19 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
  time_obj += datetime.timedelta(hours=time_delta)
  elif delta_unit == "days":
  time_obj += datetime.timedelta(days=time_delta)
+ elif delta_unit == "months":
+ # Handle month addition separately
+ month = time_obj.month - 1 + time_delta
+ year = time_obj.year + month // 12
+ month = month % 12 + 1
+ day = min(time_obj.day, month_days(year, month))
+ time_obj = time_obj.replace(year=year, month=month, day=day)
+ elif delta_unit == "years":
+ # Handle year addition separately
+ year = time_obj.year + time_delta
+ time_obj = time_obj.replace(year=year)
  else:
- raise ValueError("Invalid time unit. Use 'seconds', 'minutes', 'hours', or 'days'.")
+ raise ValueError("Invalid time unit. Use 'seconds', 'minutes', 'hours', 'days', 'months', or 'years'.")
 
  # Determine the output format
  if output_format:
@@ -116,6 +131,10 @@ def adjust_time(base_time: str, time_delta: int, delta_unit: str = "hours", outp
  default_format = "%Y%m%d%H"
  elif delta_unit == "days":
  default_format = "%Y%m%d"
+ elif delta_unit == "months":
+ default_format = "%Y%m"
+ elif delta_unit == "years":
+ default_format = "%Y"
  return time_obj.strftime(default_format)
 
 
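The new "months" branch does the calendar arithmetic by hand and clamps the day to the length of the target month via the package's month_days helper. A standalone sketch of the same arithmetic, substituting calendar.monthrange for month_days (assumed equivalent here):

    import calendar
    import datetime

    def add_months(time_obj: datetime.datetime, time_delta: int) -> datetime.datetime:
        # Same arithmetic as the diff: zero-based month index, carry into years,
        # then clamp the day so e.g. Jan 31 + 1 month lands on the month's last day.
        month = time_obj.month - 1 + time_delta
        year = time_obj.year + month // 12
        month = month % 12 + 1
        day = min(time_obj.day, calendar.monthrange(year, month)[1])
        return time_obj.replace(year=year, month=month, day=day)

    print(add_months(datetime.datetime(2024, 1, 31), 1))   # 2024-02-29 (leap year, day clamped)
    print(add_months(datetime.datetime(2024, 11, 15), 3))  # 2025-02-15 (carries into next year)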
oafuncs/oa_file.py CHANGED
@@ -287,7 +287,11 @@ def remove(target_pattern: str) -> None:
  (shutil.rmtree if os.path.isdir(file_path) else os.remove)(file_path)
  print(f"[green]Successfully deleted:[/green] [bold]{file_path}[/bold]")
  else:
- print(f"[yellow]Skipping unknown file type:[/yellow] [bold]{file_path}[/bold]")
+ if not os.path.exists(file_path):
+ # print(f"[yellow]File not found:[/yellow] [bold]{file_path}[/bold]")
+ pass
+ else:
+ print(f"[yellow]Skipping unknown file type:[/yellow] [bold]{file_path}[/bold]")
  except Exception as e:
  print(f"[red]Failed to delete:[/red] [bold]{file_path}[/bold]. Error: {e}")
 
oafuncs/oa_nc.py CHANGED
@@ -6,7 +6,7 @@ import numpy as np
  import xarray as xr
  from rich import print
 
- __all__ = ["save", "merge", "modify", "rename", "check", "convert_longitude", "isel", "draw"]
+ __all__ = ["save", "merge", "modify", "rename", "check", "convert_longitude", "isel", "draw", "unpack_netcdf"]
 
 
  def save(
@@ -278,6 +278,28 @@ def draw(
  print("[red]No dataset or file provided.[/red]")
 
 
+ def unpack_netcdf(src_path, dst_path=None):
+ """Decode a NetCDF file, remove scale_factor/add_offset, and write out the real values.
+ If dst_path is omitted, a new filename is generated automatically; after writing, the original file is deleted and the new file is renamed back to the original name.
+ """
+ # Decide whether to replace the original file
+ delete_orig = dst_path is None
+ if delete_orig:
+ dst_path = src_path.replace(".nc", "_unpacked.nc")
+
+ ds = xr.open_dataset(src_path, decode_cf=True)
+ for var in ds.data_vars:
+ ds[var].attrs.pop("scale_factor", None)
+ ds[var].attrs.pop("add_offset", None)
+ ds[var].encoding.clear()
+ ds.to_netcdf(dst_path, mode="w", format="NETCDF4", engine="netcdf4")
+ ds.close()
+
+ if delete_orig:
+ os.remove(src_path)
+ os.rename(dst_path, src_path)
+
+
  if __name__ == "__main__":
  data = np.random.rand(100, 50)
  save(r"test.nc", data, "data", {"time": np.linspace(0, 120, 100), "lev": np.linspace(0, 120, 50)}, "a")
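The new unpack_netcdf helper is also exported via __all__, so it can be imported directly. A hedged usage sketch (file paths are examples only):

    from oafuncs.oa_nc import unpack_netcdf

    # Write decoded (real-valued) data to a separate file, keeping the original:
    unpack_netcdf("hycom_packed.nc", "hycom_unpacked.nc")

    # Decode in place: a temporary *_unpacked.nc is written, then the original
    # is removed and the new file is renamed back to the original name:
    unpack_netcdf("hycom_packed.nc")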
oafuncs/oa_tool.py CHANGED
@@ -137,7 +137,7 @@ def pbar(
  description: str = "Working...",
  total: Optional[float] = None,
  completed: float = 0,
- color: Any = "cyan",
+ color: Any = "None",
  cmap: Union[str, List[str], None] = None,
  update_interval: float = 0.1,
  bar_length: Optional[int] = None,
@@ -173,8 +173,23 @@ def pbar(
  ... time.sleep(0.1)
  """
  from ._script.cprogressbar import ColorProgressBar
+ import random
 
- print(f"[blue]{description}[/blue]")
+ def _generate_random_color_hex():
+ """Generate a random color in hexadecimal format."""
+ r = random.randint(0, 255)
+ g = random.randint(0, 255)
+ b = random.randint(0, 255)
+ return '#{r:02x}{g:02x}{b:02x}'.format(r=r, g=g, b=b)
+
+ if color == 'None' and cmap is None:
+ color = _generate_random_color_hex()
+
+ style = f"bold {color if color != 'None' else 'green'}"
+ print(f"[{style}]~*^* {description} *^*~[/{style}]")
+
+ description=''
+
  return ColorProgressBar(
  iterable=iterable,
  description=description,
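With the new defaults (color="None", cmap=None), each pbar call now picks a random hex colour and prints a styled header line before iterating. A hedged usage sketch:

    import time
    from oafuncs import pbar

    # Default call: a random colour is chosen for this bar's header and styling.
    for _ in pbar(range(50), description="Demo"):
        time.sleep(0.01)

    # Passing an explicit colour (or a cmap) keeps the styling deterministic.
    for _ in pbar(range(50), description="Demo", color="cyan"):
        time.sleep(0.01)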
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: oafuncs
- Version: 0.0.98.8
+ Version: 0.0.98.10
  Summary: Oceanic and Atmospheric Functions
  Home-page: https://github.com/Industry-Pays/OAFuncs
  Author: Kun Liu
@@ -1,21 +1,21 @@
  oafuncs/__init__.py,sha256=T_-VtnWWllV3Q91twT5Yt2sUapeA051QbPNnBxmg9nw,1456
  oafuncs/oa_cmap.py,sha256=DimWT4Bg7uE5Lx8hSw1REp7whpsR2pFRStAwk1cowEM,11494
- oafuncs/oa_data.py,sha256=7heyoFOBt_xqe0YSiUdO6tOpmySm0FuG1fHSoAO1NJI,10271
- oafuncs/oa_date.py,sha256=KqU-bHtC74hYsf6VgiA3i2vI__q_toOVR-whFy4cYP8,5523
+ oafuncs/oa_data.py,sha256=F0IR7T-BoWZho5aoDI_mWWUuuOvifohkkNLFm9Wlsqs,10312
+ oafuncs/oa_date.py,sha256=WhM6cyD4G3IeghjLTHhAMtlvJbA7kwQG2sHnxdTgyso,6303
  oafuncs/oa_draw.py,sha256=Wj2QBgyIPpV_dxaDrH10jqj_puK9ZM9rd-si-3VrsrE,17631
- oafuncs/oa_file.py,sha256=goF5iRXJFFCIKhIjlkCnYYt0EYlJb_4r8AeYNZ0-SOk,16209
+ oafuncs/oa_file.py,sha256=j9gXJgPOJsliu4IOUc4bc-luW4yBvQyNCEmMyDVjUwQ,16404
  oafuncs/oa_help.py,sha256=_4AZgRDq5Or0vauNvq5IDDHIBoBfdOQtzak-mG1wwAw,4537
- oafuncs/oa_nc.py,sha256=L1gqXxg93kIDsMOa87M0o-53KVmdqCipnXeF9XfzfY8,10513
+ oafuncs/oa_nc.py,sha256=S23QL_GfIaENPr9p7oEeFT34nqJ2-7fiCcFQu72CmjI,11327
  oafuncs/oa_python.py,sha256=NkopwkYFGSEuVljnTBvXCl6o2CeyRNBqRXSsUl3euEE,5192
- oafuncs/oa_tool.py,sha256=UNdiXKWfk860eb01vSGH8XN_Bd3CP7ihaspRrudday0,7983
+ oafuncs/oa_tool.py,sha256=EqOlGPq3Rx2ohqVnGuCZhMvr2o9_XgglrETMbAdEifM,8471
  oafuncs/_data/hycom.png,sha256=MadKs6Gyj5n9-TOu7L4atQfTXtF9dvN9w-tdU9IfygI,10945710
  oafuncs/_data/oafuncs.png,sha256=o3VD7wm-kwDea5E98JqxXl04_78cBX7VcdUt7uQXGiU,3679898
- oafuncs/_script/cprogressbar.py,sha256=wRU3SFPFtMI7ER26tTzg223kVKNo5RDWE9CzdIgUsuE,15771
+ oafuncs/_script/cprogressbar.py,sha256=UIgGcLFs-6IgWlITuBLaQqrpt4OAK3Mst5RlCiNfZdQ,15772
  oafuncs/_script/email.py,sha256=lL4HGKrr524-g0xLlgs-4u7x4-u7DtgNoD9AL8XJKj4,3058
- oafuncs/_script/netcdf_merge.py,sha256=_EPF9Xj4HOVC9sZpi1lt62-Aq6pMlgsgwaajEBLhW6g,5092
+ oafuncs/_script/netcdf_merge.py,sha256=ktmTOgGfLHBNdS4HBc6xFDfO8B7E4DT7d1e6Dtare9Y,5596
  oafuncs/_script/netcdf_modify.py,sha256=sGRUYNhfGgf9JV70rnBzw3bzuTRSXzBTL_RMDnDPeLQ,4552
  oafuncs/_script/netcdf_write.py,sha256=iO1Qv9bp6RLiw1D8Nrv7tX_8X-diUZaX3Nxhk6pJ5Nw,8556
- oafuncs/_script/parallel.py,sha256=dRT7w_rBnR3mZkUlO6v6j05SwBTQpTccOna5CXI5Msg,8196
+ oafuncs/_script/parallel.py,sha256=T9Aie-e4LcbKlFTLZ0l4lhEN3SBVa84jRcrAsIm8s0I,8767
  oafuncs/_script/parallel_test.py,sha256=0GBqZOX7IaCOKF2t1y8N8YYu53GJ33OkfsWgpvZNqM4,372
  oafuncs/_script/plot_dataset.py,sha256=zkSEnO_-biyagorwWXPoihts_cwuvripzEt-l9bHJ2E,13989
  oafuncs/_script/replace_file_content.py,sha256=eCFZjnZcwyRvy6b4mmIfBna-kylSZTyJRfgXd6DdCjk,5982
@@ -37,8 +37,8 @@ oafuncs/oa_sign/__init__.py,sha256=QKqTFrJDFK40C5uvk48GlRRbGFzO40rgkYwu6dYxatM,5
  oafuncs/oa_sign/meteorological.py,sha256=8091SHo2L8kl4dCFmmSH5NGVHDku5i5lSiLEG5DLnOQ,6489
  oafuncs/oa_sign/ocean.py,sha256=xrW-rWD7xBWsB5PuCyEwQ1Q_RDKq2KCLz-LOONHgldU,5932
  oafuncs/oa_sign/scientific.py,sha256=a4JxOBgm9vzNZKpJ_GQIQf7cokkraV5nh23HGbmTYKw,5064
- oafuncs-0.0.98.8.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
- oafuncs-0.0.98.8.dist-info/METADATA,sha256=Le6ieydYuvZciK8CaFHPBUy47T3geyA48Z6nzSbLMwQ,4272
- oafuncs-0.0.98.8.dist-info/WHEEL,sha256=lTU6B6eIfYoiQJTZNc-fyaR6BpL6ehTzU3xGYxn2n8k,91
- oafuncs-0.0.98.8.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
- oafuncs-0.0.98.8.dist-info/RECORD,,
+ oafuncs-0.0.98.10.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
+ oafuncs-0.0.98.10.dist-info/METADATA,sha256=iyv12KtjFAqYtbJxBTq3RhgQ55iG5_yfCx4IzVWfJHw,4273
+ oafuncs-0.0.98.10.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+ oafuncs-0.0.98.10.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
+ oafuncs-0.0.98.10.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (78.1.1)
+ Generator: setuptools (79.0.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 