oafuncs 0.0.97.14__tar.gz → 0.0.97.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. {oafuncs-0.0.97.14/oafuncs.egg-info → oafuncs-0.0.97.15}/PKG-INFO +1 -1
  2. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/netcdf_merge.py +1 -1
  3. oafuncs-0.0.97.15/oafuncs/_script/netcdf_modify.py +106 -0
  4. oafuncs-0.0.97.15/oafuncs/_script/netcdf_write.py +125 -0
  5. oafuncs-0.0.97.15/oafuncs/oa_cmap.py +162 -0
  6. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_date.py +30 -16
  7. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/hycom_3hourly.py +4 -53
  8. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_draw.py +11 -132
  9. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_file.py +1 -23
  10. oafuncs-0.0.97.15/oafuncs/oa_nc.py +250 -0
  11. oafuncs-0.0.97.15/oafuncs/oa_python.py +98 -0
  12. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_sign/meteorological.py +3 -3
  13. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15/oafuncs.egg-info}/PKG-INFO +1 -1
  14. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs.egg-info/SOURCES.txt +2 -0
  15. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/setup.py +1 -1
  16. oafuncs-0.0.97.14/oafuncs/oa_cmap.py +0 -215
  17. oafuncs-0.0.97.14/oafuncs/oa_nc.py +0 -469
  18. oafuncs-0.0.97.14/oafuncs/oa_python.py +0 -108
  19. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/LICENSE.txt +0 -0
  20. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/MANIFEST.in +0 -0
  21. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/README.md +0 -0
  22. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/__init__.py +0 -0
  23. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_data/OAFuncs.png +0 -0
  24. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_data/hycom_3hourly.png +0 -0
  25. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/cprogressbar.py +0 -0
  26. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/email.py +0 -0
  27. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/parallel.py +0 -0
  28. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/parallel_example_usage.py +0 -0
  29. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/plot_dataset.py +0 -0
  30. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/_script/replace_file_concent.py +0 -0
  31. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_data.py +0 -0
  32. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/User_Agent-list.txt +0 -0
  33. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/__init__.py +0 -0
  34. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/idm.py +0 -0
  35. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/literature.py +0 -0
  36. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/test_ua.py +0 -0
  37. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_down/user_agent.py +0 -0
  38. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_help.py +0 -0
  39. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_model/__init__.py +0 -0
  40. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_model/roms/__init__.py +0 -0
  41. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_model/roms/test.py +0 -0
  42. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_model/wrf/__init__.py +0 -0
  43. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_model/wrf/little_r.py +0 -0
  44. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_sign/__init__.py +0 -0
  45. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_sign/ocean.py +0 -0
  46. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_sign/scientific.py +0 -0
  47. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs/oa_tool.py +0 -0
  48. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs.egg-info/dependency_links.txt +0 -0
  49. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs.egg-info/requires.txt +0 -0
  50. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/oafuncs.egg-info/top_level.txt +0 -0
  51. {oafuncs-0.0.97.14 → oafuncs-0.0.97.15}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: oafuncs
3
- Version: 0.0.97.14
3
+ Version: 0.0.97.15
4
4
  Summary: Oceanic and Atmospheric Functions
5
5
  Home-page: https://github.com/Industry-Pays/OAFuncs
6
6
  Author: Kun Liu
@@ -48,7 +48,7 @@ def merge_nc(file_list: Union[str, List[str]], var_name: Optional[Union[str, Lis
48
48
  # 初始化合并数据字典
49
49
  merged_data = {}
50
50
 
51
- for i, file in pbar(enumerate(file_list),description="Reading files", color="green",total=len(file_list)):
51
+ for i, file in pbar(enumerate(file_list), description="Reading files", color="#f8bbd0", total=len(file_list)):
52
52
  with xr.open_dataset(file) as ds:
53
53
  for var in var_names:
54
54
  data_var = ds[var]
@@ -0,0 +1,106 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ """
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2025-04-05 14:00:50
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2025-04-05 14:00:50
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\_script\\netcdf_modify.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.12
14
+ """
15
+
16
+ import os
17
+
18
+ import netCDF4 as nc
19
+ import numpy as np
20
+ from rich import print
21
+
22
+
23
+ def _is_valid_netcdf_file(file_path):
24
+ """
25
+ Check if the file is a valid NetCDF file.
26
+ """
27
+ try:
28
+ with nc.Dataset(file_path, "r") as _:
29
+ pass
30
+ return True
31
+ except Exception:
32
+ return False
33
+
34
+
35
+ def _modify_var(nc_file_path, variable_name, new_value):
36
+ """
37
+ Modify the value of a variable in a NetCDF file.
38
+ """
39
+ if not os.path.exists(nc_file_path):
40
+ raise FileNotFoundError(f"NetCDF file '{nc_file_path}' does not exist.")
41
+ if not _is_valid_netcdf_file(nc_file_path):
42
+ raise ValueError(f"File '{nc_file_path}' is not a valid NetCDF file.")
43
+ if not variable_name:
44
+ raise ValueError("Variable name cannot be empty or None.")
45
+ if not isinstance(new_value, np.ndarray):
46
+ raise TypeError("New value must be a numpy.ndarray.")
47
+
48
+ try:
49
+ with nc.Dataset(nc_file_path, "r+") as dataset:
50
+ if variable_name not in dataset.variables:
51
+ raise ValueError(f"Variable '{variable_name}' not found in the NetCDF file.")
52
+ variable = dataset.variables[variable_name]
53
+ if variable.shape != new_value.shape:
54
+ raise ValueError(f"Shape mismatch: Variable '{variable_name}' has shape {variable.shape}, but new value has shape {new_value.shape}.")
55
+ variable[:] = new_value
56
+ print(f"[green]Successfully modified variable '{variable_name}' in '{nc_file_path}'.[/green]")
57
+ return True
58
+ except (FileNotFoundError, ValueError, TypeError) as e:
59
+ print(f"[red]Error:[/red] {e}")
60
+ return False
61
+ except Exception as e:
62
+ print(f"[red]Unexpected Error:[/red] Failed to modify variable '{variable_name}' in '{nc_file_path}'. [bold]Details:[/bold] {e}")
63
+ return False
64
+
65
+
66
+ def _modify_attr(nc_file_path, variable_name, attribute_name, attribute_value):
67
+ """
68
+ Add or modify an attribute of a variable in a NetCDF file.
69
+ """
70
+ if not os.path.exists(nc_file_path):
71
+ raise FileNotFoundError(f"NetCDF file '{nc_file_path}' does not exist.")
72
+ if not _is_valid_netcdf_file(nc_file_path):
73
+ raise ValueError(f"File '{nc_file_path}' is not a valid NetCDF file.")
74
+ if not variable_name:
75
+ raise ValueError("Variable name cannot be empty or None.")
76
+ if not attribute_name:
77
+ raise ValueError("Attribute name cannot be empty or None.")
78
+
79
+ try:
80
+ with nc.Dataset(nc_file_path, "r+") as ds:
81
+ if variable_name not in ds.variables:
82
+ raise ValueError(f"Variable '{variable_name}' not found in the NetCDF file.")
83
+ variable = ds.variables[variable_name]
84
+ variable.setncattr(attribute_name, attribute_value)
85
+ print(f"[green]Successfully modified attribute '{attribute_name}' of variable '{variable_name}' in '{nc_file_path}'.[/green]")
86
+ return True
87
+ except (FileNotFoundError, ValueError) as e:
88
+ print(f"[red]Error:[/red] {e}")
89
+ return False
90
+ except Exception as e:
91
+ print(f"[red]Unexpected Error:[/red] Failed to modify attribute '{attribute_name}' of variable '{variable_name}' in file '{nc_file_path}'. [bold]Details:[/bold] {e}")
92
+ return False
93
+
94
+
95
def modify_nc(nc_file, var_name, attr_name=None, new_value=None):
    """
    Modify a variable's values or one of its attributes in a NetCDF file.

    Parameters:
        nc_file: str, path to the NetCDF file
        var_name: str, name of the variable to modify
        attr_name: str or None; when None the variable data itself is
            replaced by new_value, otherwise the named attribute is set
        new_value: numpy.ndarray (variable data) or an attribute value

    Returns:
        bool: True on success, False on any failure (errors are printed).
    """
    try:
        if attr_name is None:
            return _modify_var(nc_file, var_name, new_value)
        return _modify_attr(nc_file, var_name, attr_name, new_value)
    except Exception as e:
        print(f"[red]Error:[/red] An error occurred while modifying '{var_name}' in '{nc_file}'. [bold]Details:[/bold] {e}")
        return False
@@ -0,0 +1,125 @@
1
+ import numpy as np
2
+ import os
3
+ import netCDF4 as nc
4
+ import xarray as xr
5
+
6
+
7
+ def _numpy_to_nc_type(numpy_type):
8
+ """将NumPy数据类型映射到NetCDF数据类型"""
9
+ numpy_to_nc = {
10
+ "float32": "f4",
11
+ "float64": "f8",
12
+ "int8": "i1",
13
+ "int16": "i2",
14
+ "int32": "i4",
15
+ "int64": "i8",
16
+ "uint8": "u1",
17
+ "uint16": "u2",
18
+ "uint32": "u4",
19
+ "uint64": "u8",
20
+ }
21
+ # 确保传入的是字符串类型,如果不是,则转换为字符串
22
+ numpy_type_str = str(numpy_type) if not isinstance(numpy_type, str) else numpy_type
23
+ return numpy_to_nc.get(numpy_type_str, "f4") # 默认使用 'float32'
24
+
25
+
26
+ def _calculate_scale_and_offset(data, n=16):
27
+ if not isinstance(data, np.ndarray):
28
+ raise ValueError("Input data must be a NumPy array.")
29
+
30
+ # 使用 nan_to_num 来避免 NaN 值对 min 和 max 的影响
31
+ data_min = np.nanmin(data)
32
+ data_max = np.nanmax(data)
33
+
34
+ if np.isnan(data_min) or np.isnan(data_max):
35
+ raise ValueError("Input data contains NaN values, which are not allowed.")
36
+
37
+ scale_factor = (data_max - data_min) / (2**n - 1)
38
+ add_offset = data_min + 2 ** (n - 1) * scale_factor
39
+
40
+ return scale_factor, add_offset
41
+
42
+
43
def save_to_nc(file, data, varname=None, coords=None, mode="w", scale_offset_switch=True, compile_switch=True):
    """
    Description:
        Write data to NetCDF file
    Parameters:
        file: str, file path
        data: numpy array / xr.DataArray (requires varname and coords), or
              xr.Dataset (varname and coords omitted)
        varname: str, variable name
        coords: dict, coordinates, key is the dimension name, value is the coordinate data
        mode: str, write mode, 'w' for write, 'a' for append
        scale_offset_switch: bool, whether to use scale_factor and add_offset, default is True
        compile_switch: bool, whether to use compression parameters, default is True
    Example:
        save_to_nc(r'test.nc', data, 'u', {'time': np.linspace(0, 120, 100), 'lev': np.linspace(0, 120, 50)}, 'a')
    """
    # Compression settings passed to createVariable.
    kwargs = {"zlib": True, "complevel": 4} if compile_switch else {}

    # Decide what to do with an existing file.
    if mode == "w" and os.path.exists(file):
        os.remove(file)
    elif mode == "a" and not os.path.exists(file):
        mode = "w"

    # Whole-Dataset path. Bug fixes relative to the original:
    #   * the original tested isinstance(data, xr.DataArray) but iterated
    #     data.data_vars, an attribute only xr.Dataset has;
    #   * this branch must run BEFORE nc.Dataset opens the target file,
    #     otherwise to_netcdf writes to a path that is already held open.
    if varname is None and coords is None and isinstance(data, xr.Dataset):
        encoding = {}
        for var in data.data_vars:
            scale_factor, add_offset = _calculate_scale_and_offset(data[var].values)
            encoding[var] = {
                "zlib": True,
                "complevel": 4,
                "dtype": "int16",
                "scale_factor": scale_factor,
                "add_offset": add_offset,
                "_FillValue": -32767,
            }
        data.to_netcdf(file, mode=mode, encoding=encoding)
        return

    if coords is None:
        # Fail early with a clear message instead of an AttributeError below.
        raise ValueError("coords must be provided unless data is an xarray.Dataset.")

    with nc.Dataset(file, mode, format="NETCDF4") as ncfile:
        # Create (or validate) the coordinate variables.
        for dim, coord_data in coords.items():
            if dim in ncfile.dimensions:
                if len(coord_data) != len(ncfile.dimensions[dim]):
                    raise ValueError(f"Length of coordinate '{dim}' does not match the dimension length.")
                else:
                    ncfile.variables[dim][:] = np.array(coord_data)
            else:
                ncfile.createDimension(dim, len(coord_data))
                var = ncfile.createVariable(dim, _numpy_to_nc_type(coord_data.dtype), (dim,), **kwargs)
                var[:] = np.array(coord_data)

                # Copy coordinate attributes when the coordinate is a DataArray.
                if isinstance(coord_data, xr.DataArray) and coord_data.attrs:
                    for attr_name, attr_value in coord_data.attrs.items():
                        var.setncattr(attr_name, attr_value)

        # Add or update the data variable.
        if varname in ncfile.variables:
            if data.shape != ncfile.variables[varname].shape:
                raise ValueError(f"Shape of data does not match the variable shape for '{varname}'.")
            # Bug fix: bind 'var' to the target variable so the attribute
            # loop below does not write to a stale coordinate variable.
            var = ncfile.variables[varname]
            var[:] = np.array(data)
        else:
            dim_names = tuple(coords.keys())
            if scale_offset_switch:
                # Pack to int16 using a linear scale/offset transform.
                scale_factor, add_offset = _calculate_scale_and_offset(np.array(data))
                dtype = "i2"
                var = ncfile.createVariable(varname, dtype, dim_names, fill_value=-32767, **kwargs)
                var.setncattr("scale_factor", scale_factor)
                var.setncattr("add_offset", add_offset)
            else:
                dtype = _numpy_to_nc_type(data.dtype)
                var = ncfile.createVariable(varname, dtype, dim_names, **kwargs)
            var[:] = np.array(data)

        # Copy variable attributes (skip packing attrs when packing is on).
        if isinstance(data, xr.DataArray) and data.attrs:
            for key, value in data.attrs.items():
                if key not in ["scale_factor", "add_offset", "_FillValue", "missing_value"] or not scale_offset_switch:
                    var.setncattr(key, value)
@@ -0,0 +1,162 @@
1
+ from typing import List, Optional, Union
2
+
3
+ import matplotlib as mpl
4
+ import matplotlib.pyplot as plt
5
+ import numpy as np
6
+ from rich import print
7
+
8
+ __all__ = ["show", "to_color", "create", "get"]
9
+
10
+
11
# ** Visualize colormaps as filled plots (adapted from the matplotlib docs)
def show(colormaps: Union[str, mpl.colors.Colormap, List[Union[str, mpl.colors.Colormap]]]) -> None:
    """
    Description:
        Helper function to plot data with associated colormap.
    Parameters:
        colormaps : list of colormaps, or a single colormap; can be a string or a colormap object.
    Example:
        cmap = ListedColormap(["darkorange", "gold", "lawngreen", "lightseagreen"])
        show([cmap]); show("viridis"); show(["viridis", "cividis"])
    """
    cmap_list = colormaps if isinstance(colormaps, list) else [colormaps]
    # Fixed seed so every call renders the same sample field.
    np.random.seed(19680801)
    sample = np.random.randn(30, 30)
    count = len(cmap_list)
    fig, axs = plt.subplots(1, count, figsize=(count * 2 + 2, 3), constrained_layout=True, squeeze=False)
    for ax, cmap in zip(axs.flat, cmap_list):
        mesh = ax.pcolormesh(sample, cmap=cmap, rasterized=True, vmin=-4, vmax=4)
        fig.colorbar(mesh, ax=ax)
    plt.show()
32
+
33
+
34
# ** Convert a colormap into a list of discrete colors
def to_color(cmap_name: str, n: int = 256) -> List[tuple]:
    """
    Description:
        Convert a colormap to a list of colors
    Parameters:
        cmap_name : str; the name of the colormap
        n : int, optional; the number of colors
    Return:
        out_colors : list of colors
    Example:
        out_colors = to_color('viridis', 256)
    """
    colormap = mpl.colormaps.get_cmap(cmap_name)
    sample_points = np.linspace(0, 1, n)
    return [colormap(point) for point in sample_points]
49
+
50
+
51
# ** Build a custom colormap, multi-color, optionally with stop positions
def create(color_list: Optional[List[Union[str, tuple]]] = None, rgb_file_path: Optional[str] = None, positions: Optional[List[float]] = None, under_color: Optional[Union[str, tuple]] = None, over_color: Optional[Union[str, tuple]] = None, delimiter: str = ",") -> mpl.colors.Colormap:
    """
    Description:
        Create a custom colormap from a list of colors or an RGB txt document.
    Parameters:
        color_list : list of colors (optional, required if rgb_file_path is None)
        rgb_file_path : str, the path of txt file (optional, required if color_list is None)
        positions : list of positions (optional, for color_list)
        under_color : color (optional)
        over_color : color (optional)
        delimiter : str, optional, default is ','; the delimiter of RGB values in txt file
    Return:
        cmap : colormap
    Example:
        cmap = create(color_list=['#C2B7F3','#B3BBF2','#B0CBF1','#ACDCF0','#A8EEED'])
        cmap = create(color_list=['aliceblue','skyblue','deepskyblue'], positions=[0.0,0.5,1.0])
        cmap = create(rgb_file_path='path/to/file.txt', delimiter=',')
    """
    if rgb_file_path:
        with open(rgb_file_path) as fid:
            lines = fid.readlines()
        rgb = np.zeros((len(lines), 3))
        for idx, line in enumerate(lines):
            fields = line.split(delimiter)
            rgb[idx][0] = float(fields[0])
            rgb[idx][1] = float(fields[1])
            rgb[idx][2] = float(fields[2])
        # Values above 2 are assumed to be on a 0-255 scale; normalize to 0-1.
        if np.max(rgb) > 2:
            rgb = rgb / 255.0
        cmap_color = mpl.colors.ListedColormap(rgb, name="my_color")
    elif color_list:
        if positions is None:
            # Spread the colors evenly across [0, 1].
            cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", color_list)
        else:
            # Place each color at its caller-supplied position.
            cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", list(zip(positions, color_list)))
    else:
        raise ValueError("Either 'color_list' or 'rgb_file_path' must be provided.")

    if under_color is not None:
        cmap_color.set_under(under_color)
    if over_color is not None:
        cmap_color.set_over(over_color)
    return cmap_color
96
+
97
+
98
# ** Select a colormap by name
def get(cmap_name: Optional[str] = None, query: bool = False) -> Optional[mpl.colors.Colormap]:
    """
    Description:
        Choosing a colormap from the list of available colormaps or a custom colormap
    Parameters:
        cmap_name : str, optional; the name of the colormap
        query : bool, optional; whether to query the available colormap names
    Return:
        cmap : colormap (None when query is True or cmap_name is None)
    Example:
        cmap = get('viridis')
        cmap = get('diverging_1')
        cmap = get('cool_1')
        cmap = get('warm_1')
        cmap = get('colorful_1')
    """
    # Color specs only; the requested colormap is built on demand so a
    # built-in lookup no longer pays for constructing every custom map
    # (the original called create() four times on each invocation).
    my_cmap_specs = {
        "diverging_1": ["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"],
        "cool_1": ["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC"],
        "warm_1": ["#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"],
        "colorful_1": ["#6d00db", "#9800cb", "#F2003C", "#ff4500", "#ff7f00", "#FE28A2", "#FFC0CB", "#DDA0DD", "#40E0D0", "#1a66f2", "#00f7fb", "#8fff88", "#E3FF00"],
    }

    if query:
        print("Available cmap names:")
        print("-" * 20)
        print("Defined by myself:")
        print("\n".join(my_cmap_specs.keys()))
        print("-" * 20)
        print("Matplotlib built-in:")
        print("\n".join(mpl.colormaps.keys()))
        print("-" * 20)
        return None

    if cmap_name is None:
        return None

    if cmap_name in my_cmap_specs:
        return create(my_cmap_specs[cmap_name])
    try:
        return mpl.colormaps.get_cmap(cmap_name)
    except ValueError:
        print(f"Unknown cmap name: {cmap_name}\nNow return 'rainbow' as default.")
        return mpl.colormaps.get_cmap("rainbow")  # fall back to 'rainbow'
144
+
145
+
146
if __name__ == "__main__":
    # ** Test a custom cmap with explicit stop positions.
    colors = ["#C2B7F3", "#B3BBF2", "#B0CBF1", "#ACDCF0", "#A8EEED"]
    nodes = [0.0, 0.2, 0.4, 0.6, 1.0]
    # Bug fix: 'nodes' was passed positionally and landed on the
    # 'rgb_file_path' parameter, crashing in open(); positions must be
    # passed by keyword.
    c_map = create(colors, positions=nodes)
    show([c_map])

    # ** Test a custom diverging-style cmap.
    diverging_cmap = create(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"])
    show([diverging_cmap])

    # ** Test building a colormap from an RGB txt file.
    file_path = "E:/python/colorbar/test.txt"
    cmap_rgb = create(rgb_file_path=file_path)

    # ** Test converting a cmap to a list of colors.
    out_colors = to_color("viridis", 256)
@@ -13,40 +13,39 @@ SystemInfo: Windows 11
13
13
  Python Version: 3.12
14
14
  """
15
15
 
16
-
17
-
18
16
  import calendar
19
17
  import datetime
18
+ from typing import List, Optional
20
19
 
21
- __all__ = ["get_days_in_month", "generate_hour_list", "adjust_time", "timeit"]
20
+ __all__ = ["month_days", "hour_range", "adjust_time", "timeit"]
22
21
 
23
22
 
24
- def get_days_in_month(year, month):
23
def month_days(year: int, month: int) -> int:
    """Return the number of days in the given month, accounting for leap years."""
    _, day_count = calendar.monthrange(year, month)
    return day_count
26
25
 
27
26
 
28
- def generate_hour_list(start_date, end_date, interval_hours=6):
27
def hour_range(start: str, end: str, interval: int = 6) -> List[str]:
    """
    Generate a list of datetime strings with a specified interval in hours.

    Args:
        start (str): Start date in the format "%Y%m%d%H".
        end (str): End date in the format "%Y%m%d%H".
        interval (int): Interval in hours between each datetime.

    Returns:
        list: List of datetime strings in the format "%Y%m%d%H".
    """
    fmt = "%Y%m%d%H"
    current = datetime.datetime.strptime(start, fmt)
    stop = datetime.datetime.strptime(end, fmt)
    step = datetime.timedelta(hours=interval)
    stamps = []
    while current <= stop:
        stamps.append(current.strftime(fmt))
        current += step
    return stamps
47
46
 
48
47
 
49
- def adjust_time(initial_time, amount, time_unit="hours", output_format=None):
48
+ def adjust_time(initial_time: str, amount: int, time_unit: str = "hours", output_format: Optional[str] = None) -> str:
50
49
  """
51
50
  Adjust a given initial time by adding a specified amount of time.
52
51
 
@@ -91,22 +90,37 @@ def adjust_time(initial_time, amount, time_unit="hours", output_format=None):
91
90
  default_format = "%Y%m%d"
92
91
  return time_obj.strftime(default_format)
93
92
 
93
+
94
94
class timeit:
    """
    A decorator to measure the execution time of a function.

    Usage:
        @timeit                               # bare decorator
        def my_function():
            # Function code here

        @timeit(log=True, print_time=True)    # parameterized decorator
        def my_function():
            # Function code here

    Args:
        log (bool): Whether to log the execution time to a file. Defaults to False.
        print_time (bool): Whether to print the execution time to the console. Defaults to True.
    """

    def __init__(self, func=None, log: bool = False, print_time: bool = True):
        # Bug fix: 'func' was a required positional argument, so the
        # documented parameterized form @timeit(log=True) raised TypeError.
        # 'func' is None when used as @timeit(...); the decorated function
        # then arrives via the first __call__.
        self.func = func
        self.log = log
        self.print_time = print_time

    def __call__(self, *args, **kwargs):
        if self.func is None:
            # Parameterized form: this call receives the function to wrap.
            if len(args) != 1 or kwargs or not callable(args[0]):
                raise TypeError("timeit: expected a single callable to decorate.")
            return timeit(args[0], log=self.log, print_time=self.print_time)

        start_time = datetime.datetime.now()
        result = self.func(*args, **kwargs)
        end_time = datetime.datetime.now()
        elapsed_time = (end_time - start_time).total_seconds()

        if self.print_time:
            print(f"Function '{self.func.__name__}' executed in {elapsed_time:.2f} seconds.")

        if self.log:
            with open("execution_time.log", "a") as log_file:
                log_file.write(f"{datetime.datetime.now()} - Function '{self.func.__name__}' executed in {elapsed_time:.2f} seconds.\n")

        return result
@@ -40,7 +40,7 @@ from oafuncs.oa_nc import modify as modify_nc
40
40
 
41
41
  warnings.filterwarnings("ignore", category=RuntimeWarning, message="Engine '.*' loading failed:.*")
42
42
 
43
- __all__ = ["draw_time_range", "download", "how_to_use", "get_time_list"]
43
+ __all__ = ["draw_time_range", "download"]
44
44
 
45
45
 
46
46
  def _get_initial_data():
@@ -278,7 +278,7 @@ def draw_time_range(pic_save_folder=None):
278
278
  plt.close()
279
279
 
280
280
 
281
- def get_time_list(time_s, time_e, delta, interval_type="hour"):
281
+ def _get_time_list(time_s, time_e, delta, interval_type="hour"):
282
282
  """
283
283
  Description: get a list of time strings from time_s to time_e with a specified interval
284
284
  Args:
@@ -1029,7 +1029,7 @@ def _download_hourly_func(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_mi
1029
1029
  _prepare_url_to_download(var, lon_min, lon_max, lat_min, lat_max, ymdh_time_s, None, depth, level, store_path, dataset_name, version_name, check)
1030
1030
  elif int(ymdh_time_s) < int(ymdh_time_e):
1031
1031
  print("Downloading a series of files...")
1032
- time_list = get_time_list(ymdh_time_s, ymdh_time_e, interval_hour, "hour")
1032
+ time_list = _get_time_list(ymdh_time_s, ymdh_time_e, interval_hour, "hour")
1033
1033
  with Progress() as progress:
1034
1034
  task = progress.add_task(f"[cyan]{bar_desc}", total=len(time_list))
1035
1035
  if ftimes == 1:
@@ -1048,7 +1048,7 @@ def _download_hourly_func(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_mi
1048
1048
  _done_callback(feature, progress, task, len(time_list), counter_lock)
1049
1049
  else:
1050
1050
  # new_time_list = get_time_list(ymdh_time_s, ymdh_time_e, 3 * ftimes, "hour")
1051
- new_time_list = get_time_list(ymdh_time_s, ymdh_time_e, interval_hour * ftimes, "hour")
1051
+ new_time_list = _get_time_list(ymdh_time_s, ymdh_time_e, interval_hour * ftimes, "hour")
1052
1052
  total_num = len(new_time_list)
1053
1053
  if num_workers is None or num_workers <= 1:
1054
1054
  # 串行方式
@@ -1248,55 +1248,6 @@ def download(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, l
1248
1248
  print("[bold #ecdbfe]=" * mark_len)
1249
1249
 
1250
1250
 
1251
- def how_to_use():
1252
- print("""
1253
- # 1. Choose the dataset and version according to the time:
1254
- # 1.1 Use function to query
1255
- You can use the function check_time_in_dataset_and_version(time_input=20241101) to find the dataset and version according to the time.
1256
- Then, you can see the dataset and version in the output.
1257
- # 1.2 Draw a picture to see
1258
- You can draw a picture to see the time range of each dataset and version.
1259
- Using the function draw_time_range(pic_save_folder=None) to draw the picture.
1260
-
1261
- # 2. Get the base url according to the dataset, version, var and year:
1262
- # 2.1 Dataset and version were found in step 1
1263
- # 2.2 Var: u, v, temp, salt, ssh, u_b, v_b, temp_b, salt_b
1264
- # 2.3 Year: 1994-2024(current year)
1265
-
1266
- # 3. Get the query_dict according to the var, lon_min, lon_max, lat_min, lat_max, depth, level_num, time_str_ymdh:
1267
- # 3.1 Var: u, v, temp, salt, ssh, u_b, v_b, temp_b, salt_b
1268
- # 3.2 Lon_min, lon_max, lat_min, lat_max: float
1269
- # 3.3 Depth: 0-5000m, if you wanna get single depth data, you can set the depth
1270
- # 3.4 Level_num: 1-40, if you wanna get single level data, you can set the level_num
1271
- # 3.5 Time_str_ymdh: '2024110112', the hour normally is 00, 03, 06, 09, 12, 15, 18, 21, besides 1 hourly data
1272
- # 3.6 Use the function to get the query_dict
1273
- # 3.7 Note: If you wanna get the full depth or full level data, you can needn't set the depth or level_num
1274
-
1275
- # 4. Get the submit url according to the dataset, version, var, year, query_dict:
1276
- # 4.1 Use the function to get the submit url
1277
- # 4.2 You can use the submit url to download the data
1278
-
1279
- # 5. Download the data according to the submit url:
1280
- # 5.1 Use the function to download the data
1281
- # 5.2 You can download the data of single time or a series of time
1282
- # 5.3 Note: If you wanna download a series of data, you can set the ymdh_time_s and ymdh_time_e different
1283
- # 5.4 Note: The time resolution is 3 hours
1284
-
1285
- # 6. Direct download the data:
1286
- # 6.1 Use the function to direct download the data
1287
- # 6.2 You can set the dataset_name and version_name by yourself
1288
- # 6.3 Note: If you do not set the dataset_name and version_name, the dataset and version will be chosen according to the download_time
1289
- # 6.4 Note: If you set the dataset_name and version_name, please ensure the dataset_name and version_name are correct
1290
- # 6.5 Note: If you just set one of the dataset_name and version_name, both the dataset and version will be chosen according to the download_time
1291
-
1292
- # 7. Simple use:
1293
- # 7.1 You can use the function: download(var, ymdh_time_s, ymdh_time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None)
1294
- # 7.2 You can download the data of single time or a series of time
1295
- # 7.3 The parameters you must set are var, ymdh_time_s, ymdh_time_e
1296
- # 7.4 Example: download('u', '2024110112', '2024110212', lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None)
1297
- """)
1298
-
1299
-
1300
1251
  if __name__ == "__main__":
1301
1252
  download_dict = {
1302
1253
  "water_u": {"simple_name": "u", "download": 1},