oafuncs 0.0.96.tar.gz → 0.0.97.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. oafuncs-0.0.97.1/MANIFEST.in +4 -0
  2. {oafuncs-0.0.96/oafuncs.egg-info → oafuncs-0.0.97.1}/PKG-INFO +1 -2
  3. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/__init__.py +16 -6
  4. oafuncs-0.0.97.1/oafuncs/_script/__init__.py +27 -0
  5. oafuncs-0.0.97.1/oafuncs/_script/plot_dataset.py +299 -0
  6. oafuncs-0.0.97.1/oafuncs/data_store/hycom_3hourly.png +0 -0
  7. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_cmap.py +5 -3
  8. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_data.py +118 -6
  9. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_file.py +22 -18
  10. oafuncs-0.0.97.1/oafuncs/oa_model/__init__.py +19 -0
  11. oafuncs-0.0.97.1/oafuncs/oa_model/roms/__init__.py +20 -0
  12. oafuncs-0.0.97.1/oafuncs/oa_model/roms/test.py +19 -0
  13. oafuncs-0.0.97.1/oafuncs/oa_model/wrf/__init__.py +18 -0
  14. oafuncs-0.0.97.1/oafuncs/oa_model/wrf/little_r.py +186 -0
  15. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_nc.py +59 -18
  16. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_tool/__init__.py +1 -0
  17. oafuncs-0.0.97.1/oafuncs/oa_tool/time.py +22 -0
  18. {oafuncs-0.0.96 → oafuncs-0.0.97.1/oafuncs.egg-info}/PKG-INFO +1 -2
  19. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs.egg-info/SOURCES.txt +10 -1
  20. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/setup.py +29 -32
  21. oafuncs-0.0.96/MANIFEST.in +0 -2
  22. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/LICENSE.txt +0 -0
  23. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/README.md +0 -0
  24. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/data_store/OAFuncs.png +0 -0
  25. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/User_Agent-list.txt +0 -0
  26. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/__init__.py +0 -0
  27. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/hycom_3hourly.py +0 -0
  28. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/hycom_3hourly_20250129.py +0 -0
  29. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/idm.py +0 -0
  30. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/literature.py +0 -0
  31. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/test_ua.py +0 -0
  32. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_down/user_agent.py +0 -0
  33. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_draw.py +0 -0
  34. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_help.py +0 -0
  35. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_python.py +0 -0
  36. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_sign/__init__.py +0 -0
  37. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_sign/meteorological.py +0 -0
  38. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_sign/ocean.py +0 -0
  39. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_sign/scientific.py +0 -0
  40. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_tool/email.py +0 -0
  41. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs/oa_tool/parallel.py +0 -0
  42. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs.egg-info/dependency_links.txt +0 -0
  43. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs.egg-info/requires.txt +0 -0
  44. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/oafuncs.egg-info/top_level.txt +0 -0
  45. {oafuncs-0.0.96 → oafuncs-0.0.97.1}/setup.cfg +0 -0
@@ -0,0 +1,4 @@
+ include LICENSE.txt
+ include README.md
+ recursive-include oafuncs/data_store *
+ recursive-include oafuncs/oa_down *.txt
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: oafuncs
- Version: 0.0.96
+ Version: 0.0.97.1
  Summary: Oceanic and Atmospheric Functions
  Home-page: https://github.com/Industry-Pays/OAFuncs
  Author: Kun Liu
@@ -9,7 +9,6 @@ License: MIT
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
@@ -4,8 +4,8 @@
  Author: Liu Kun && 16031215@qq.com
  Date: 2024-09-17 16:09:20
  LastEditors: Liu Kun && 16031215@qq.com
- LastEditTime: 2024-12-13 12:31:06
- FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_s\\__init__.py
+ LastEditTime: 2025-03-09 16:28:01
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\__init__.py
  Description:
  EditPlatform: vscode
  ComputerInfo: XPS 15 9510
@@ -27,18 +27,28 @@ Python Version: 3.12
  # path: My_Funcs/OAFuncs/oafuncs/
  from .oa_cmap import *
  from .oa_data import *
+
+ # ------------------- 2024-12-13 12:31:06 -------------------
+ # path: My_Funcs/OAFuncs/oafuncs/oa_down/
+ from .oa_down import *
  from .oa_draw import *
  from .oa_file import *
  from .oa_help import *
+
+ # ------------------- 2024-12-13 12:31:06 -------------------
+ # path: My_Funcs/OAFuncs/oafuncs/oa_model/
+ from .oa_model import *
  from .oa_nc import *
  from .oa_python import *
- # ------------------- 2024-12-13 12:31:06 -------------------
- # path: My_Funcs/OAFuncs/oafuncs/oa_down/
- from .oa_down import *
+
  # ------------------- 2024-12-13 12:31:06 -------------------
  # path: My_Funcs/OAFuncs/oafuncs/oa_sign/
  from .oa_sign import *
+
  # ------------------- 2024-12-13 12:31:06 -------------------
  # path: My_Funcs/OAFuncs/oafuncs/oa_tool/
  from .oa_tool import *
- # ------------------- 2024-12-13 12:31:06 -------------------
+ # ------------------- 2025-03-09 16:28:01 -------------------
+ # path: My_Funcs/OAFuncs/oafuncs/_script/
+ from ._script import *
+ # ------------------- 2025-03-16 15:56:01 -------------------
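Taken together with the __all__ and re-export changes later in this diff, the new wildcard imports above appear to expose the release's new helpers at the package top level. A minimal sketch, assuming the release installs cleanly with its dependencies:

import oafuncs

# interp_along_dim is added to oa_data.__all__ and picked up via `from .oa_data import *`
print(oafuncs.interp_along_dim)
# func_plot_dataset is re-exported by oafuncs/_script/__init__.py and picked up via `from ._script import *`
print(oafuncs.func_plot_dataset)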
@@ -0,0 +1,27 @@
+ #!/usr/bin/env python
+ # coding=utf-8
+ """
+ Author: Liu Kun && 16031215@qq.com
+ Date: 2025-03-13 15:26:15
+ LastEditors: Liu Kun && 16031215@qq.com
+ LastEditTime: 2025-03-13 15:26:18
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_script\\__init__.py
+ Description:
+ EditPlatform: vscode
+ ComputerInfo: XPS 15 9510
+ SystemInfo: Windows 11
+ Python Version: 3.12
+ """
+
+
+
+ # This would make OAFuncs import all functions directly, which does not fit the modular design
+ # from oafuncs.oa_s.oa_cmap import *
+ # from oafuncs.oa_s.oa_data import *
+ # from oafuncs.oa_s.oa_draw import *
+ # from oafuncs.oa_s.oa_file import *
+ # from oafuncs.oa_s.oa_help import *
+ # from oafuncs.oa_s.oa_nc import *
+ # from oafuncs.oa_s.oa_python import *
+
+ from .plot_dataset import func_plot_dataset
@@ -0,0 +1,299 @@
+ import os
+ from typing import Optional, Tuple
+
+ import matplotlib as mpl
+
+ mpl.use("Agg") # Use non-interactive backend
+
+ import cftime
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from rich import print
+ import cartopy.crs as ccrs
+ import xarray as xr
+
+ import oafuncs
+
+
+ def plot_1d(data: xr.DataArray, output_path: str, x_dim: str, y_dim: str, z_dim: str, t_dim: str) -> None:
+ """Plot 1D data."""
+ plt.figure(figsize=(10, 6))
+
+ # Handle time dimension
+ if t_dim in data.dims and isinstance(data[t_dim].values[0], cftime.datetime):
+ try:
+ data[t_dim] = data.indexes[t_dim].to_datetimeindex()
+ except (AttributeError, ValueError, TypeError) as e:
+ print(f"Warning: Could not convert {t_dim} to datetime index: {e}")
+
+ # Determine X axis data
+ x, x_label = determine_x_axis(data, x_dim, y_dim, z_dim, t_dim)
+
+ y = data.values
+ plt.plot(x, y, linewidth=2)
+
+ # Add chart info
+ long_name = getattr(data, "long_name", "No long_name")
+ units = getattr(data, "units", "")
+ plt.title(f"{data.name} | {long_name}", fontsize=12)
+ plt.xlabel(x_label)
+ plt.ylabel(f"{data.name} ({units})" if units else data.name)
+
+ plt.grid(True, linestyle="--", alpha=0.7)
+ plt.tight_layout()
+
+ # Save image
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+ plt.savefig(output_path, bbox_inches="tight", dpi=600)
+ plt.clf()
+ plt.close()
+
+
+ def determine_x_axis(data: xr.DataArray, x_dim: str, y_dim: str, z_dim: str, t_dim: str) -> Tuple[np.ndarray, str]:
+ """Determine the X axis data and label."""
+ if x_dim in data.dims:
+ return data[x_dim].values, x_dim
+ elif y_dim in data.dims:
+ return data[y_dim].values, y_dim
+ elif z_dim in data.dims:
+ return data[z_dim].values, z_dim
+ elif t_dim in data.dims:
+ return data[t_dim].values, t_dim
+ else:
+ return np.arange(len(data)), "Index"
+
+
+ def plot_2d(data: xr.DataArray, output_path: str, data_range: Optional[Tuple[float, float]], x_dim: str, y_dim: str, t_dim: str, plot_type: str) -> bool:
+ """Plot 2D data."""
+ if x_dim in data.dims and y_dim in data.dims and x_dim.lower() in ["lon", "longitude"] and y_dim.lower() in ["lat", "latitude"]:
+ lon_range = data[x_dim].values
+ lat_range = data[y_dim].values
+ lon_lat_ratio = np.abs(np.max(lon_range) - np.min(lon_range)) / (np.max(lat_range) - np.min(lat_range))
+ figsize = (10, 10 / lon_lat_ratio)
+ fig, ax = plt.subplots(figsize=figsize, subplot_kw={"projection": ccrs.PlateCarree()})
+ oafuncs.oa_draw.add_cartopy(ax, lon_range, lat_range)
+ else:
+ fig, ax = plt.subplots(figsize=(10, 8))
+
+ # Handle time dimension
+ if t_dim in data.dims and isinstance(data[t_dim].values[0], cftime.datetime):
+ try:
+ data[t_dim] = data.indexes[t_dim].to_datetimeindex()
+ except (AttributeError, ValueError, TypeError) as e:
+ print(f"Warning: Could not convert {t_dim} to datetime index: {e}")
+
+ # Check for valid data
+ if np.all(np.isnan(data.values)) or data.size == 0:
+ print(f"Skipping {data.name}: All values are NaN or empty")
+ plt.close()
+ return False
+
+ data_range = calculate_data_range(data, data_range)
+
+ if data_range is None:
+ print(f"Skipping {data.name} due to all NaN values")
+ plt.close()
+ return False
+
+ # Select appropriate colormap and levels
+ cmap, norm, levels = select_colormap_and_levels(data_range, plot_type)
+
+ mappable = None
+ try:
+ if plot_type == "contourf":
+ if np.ptp(data.values) < 1e-10 and not np.all(np.isnan(data.values)):
+ print(f"Warning: {data.name} has very little variation. Using imshow instead.")
+ mappable = ax.imshow(data.values, cmap=cmap, aspect="auto", interpolation="none")
+ colorbar = plt.colorbar(mappable, ax=ax)
+ else:
+ mappable = ax.contourf(data[x_dim], data[y_dim], data.values, levels=levels, cmap=cmap, norm=norm)
+ colorbar = plt.colorbar(mappable, ax=ax)
+ elif plot_type == "contour":
+ if np.ptp(data.values) < 1e-10 and not np.all(np.isnan(data.values)):
+ print(f"Warning: {data.name} has very little variation. Using imshow instead.")
+ mappable = ax.imshow(data.values, cmap=cmap, aspect="auto", interpolation="none")
+ colorbar = plt.colorbar(mappable, ax=ax)
+ else:
+ mappable = ax.contour(data[x_dim], data[y_dim], data.values, levels=levels, cmap=cmap, norm=norm)
+ ax.clabel(mappable, inline=True, fontsize=8, fmt="%1.1f")
+ colorbar = plt.colorbar(mappable, ax=ax)
+ except (ValueError, TypeError) as e:
+ print(f"Warning: Could not plot with specified parameters: {e}. Trying simplified parameters.")
+ try:
+ mappable = data.plot(ax=ax, cmap=cmap, add_colorbar=False)
+ colorbar = plt.colorbar(mappable, ax=ax)
+ except Exception as e2:
+ print(f"Error plotting {data.name}: {e2}")
+ plt.figure(figsize=(10, 8))
+ mappable = ax.imshow(data.values, cmap="viridis", aspect="auto")
+ colorbar = plt.colorbar(mappable, ax=ax, label=getattr(data, "units", ""))
+ plt.title(f"{data.name} | {getattr(data, 'long_name', 'No long_name')} (basic plot)", fontsize=12)
+ plt.tight_layout()
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+ plt.savefig(output_path, bbox_inches="tight", dpi=600)
+ plt.close()
+ return True
+
+ plt.title(f"{data.name} | {getattr(data, 'long_name', 'No long_name')}", fontsize=12)
+ units = getattr(data, "units", "")
+ if units and colorbar:
+ colorbar.set_label(units)
+
+ plt.tight_layout()
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+ plt.savefig(output_path, bbox_inches="tight", dpi=600)
+ plt.close()
+ return True
+
+
+ def calculate_data_range(data: xr.DataArray, data_range: Optional[Tuple[float, float]]) -> Optional[Tuple[float, float]]:
+ """Calculate the data range, ignoring extreme outliers."""
+ if data_range is None:
+ flat_data = data.values.flatten()
+ if flat_data.size == 0:
+ return None
+ valid_data = flat_data[~np.isnan(flat_data)]
+ if len(valid_data) == 0:
+ return None
+ low, high = np.percentile(valid_data, [0.5, 99.5])
+ filtered_data = valid_data[(valid_data >= low) & (valid_data <= high)]
+ if len(filtered_data) > 0:
+ data_range = (np.min(filtered_data), np.max(filtered_data))
+ else:
+ data_range = (np.nanmin(valid_data), np.nanmax(valid_data))
+ if abs(data_range[1] - data_range[0]) < 1e-10:
+ mean = (data_range[0] + data_range[1]) / 2
+ data_range = (mean - 1e-10 if mean != 0 else -1e-10, mean + 1e-10 if mean != 0 else 1e-10)
+ return data_range
+
+
+ def select_colormap_and_levels(data_range: Tuple[float, float], plot_type: str) -> Tuple[mpl.colors.Colormap, mpl.colors.Normalize, np.ndarray]:
+ """Select colormap and levels based on data range."""
+ if plot_type == "contour":
+ # For contour plots, use fewer levels
+ num_levels = 10
+ else:
+ # For filled contour plots, use more levels
+ num_levels = 128
+
+ if data_range[0] * data_range[1] < 0:
+ cmap = oafuncs.oa_cmap.get("diverging_1")
+ bdy = max(abs(data_range[0]), abs(data_range[1]))
+ norm = mpl.colors.TwoSlopeNorm(vmin=-bdy, vcenter=0, vmax=bdy)
+ levels = np.linspace(-bdy, bdy, num_levels)
+ else:
+ cmap = oafuncs.oa_cmap.get("cool_1") if data_range[0] < 0 else oafuncs.oa_cmap.get("warm_1")
+ norm = mpl.colors.Normalize(vmin=data_range[0], vmax=data_range[1])
+ levels = np.linspace(data_range[0], data_range[1], num_levels)
+
+ if np.any(np.diff(levels) <= 0):
+ levels = np.linspace(data_range[0], data_range[1], 10)
+ return cmap, norm, levels
+
+
+ def process_variable(var: str, data: xr.DataArray, dims: int, dims_name: Tuple[str, ...], output_dir: str, x_dim: str, y_dim: str, z_dim: str, t_dim: str, fixed_colorscale: bool, plot_type: str) -> None:
+ """Process a single variable."""
+ valid_dims = {x_dim, y_dim, z_dim, t_dim}
+ if not set(dims_name).issubset(valid_dims):
+ print(f"Skipping {var} due to unsupported dimensions: {dims_name}")
+ return
+
+ # Process 1D data
+ if dims == 1:
+ if np.issubdtype(data.dtype, np.character):
+ print(f"Skipping {var} due to character data type")
+ return
+ plot_1d(data, os.path.join(output_dir, f"{var}.png"), x_dim, y_dim, z_dim, t_dim)
+ print(f"{var}.png")
+ return
+
+ # Compute global data range for fixed colorscale
+ global_data_range = None
+ if dims >= 2 and fixed_colorscale:
+ global_data_range = calculate_data_range(data, None)
+ if global_data_range is None:
+ print(f"Skipping {var} due to no valid data")
+ return
+ print(f"Fixed colorscale range: {global_data_range}")
+
+ # Process 2D data
+ if dims == 2:
+ success = plot_2d(data, os.path.join(output_dir, f"{var}.png"), global_data_range, x_dim, y_dim, t_dim, plot_type)
+ if success:
+ print(f"{var}.png")
+
+ # Process 3D data
+ if dims == 3:
+ for i in range(data.shape[0]):
+ for attempt in range(10):
+ try:
+ if data[i].values.size == 0:
+ print(f"Skipped {var}_{dims_name[0]}-{i} (empty data)")
+ break
+ success = plot_2d(data[i], os.path.join(output_dir, f"{var}_{dims_name[0]}-{i}.png"), global_data_range, x_dim, y_dim, t_dim, plot_type)
+ if success:
+ print(f"{var}_{dims_name[0]}-{i}.png")
+ else:
+ print(f"Skipped {var}_{dims_name[0]}-{i} (invalid data)")
+ break
+ except Exception as e:
+ if attempt < 9:
+ print(f"Retrying {var}_{dims_name[0]}-{i} (attempt {attempt + 1})")
+ else:
+ print(f"Error processing {var}_{dims_name[0]}-{i}: {e}")
+
+ # Process 4D data
+ if dims == 4:
+ for i in range(data.shape[0]):
+ for j in range(data.shape[1]):
+ for attempt in range(3):
+ try:
+ if data[i, j].values.size == 0:
+ print(f"Skipped {var}_{dims_name[0]}-{i}_{dims_name[1]}-{j} (empty data)")
+ break
+ success = plot_2d(data[i, j], os.path.join(output_dir, f"{var}_{dims_name[0]}-{i}_{dims_name[1]}-{j}.png"), global_data_range, x_dim, y_dim, t_dim, plot_type)
+ if success:
+ print(f"{var}_{dims_name[0]}-{i}_{dims_name[1]}-{j}.png")
+ else:
+ print(f"Skipped {var}_{dims_name[0]}-{i}_{dims_name[1]}-{j} (invalid data)")
+ break
+ except Exception as e:
+ if attempt < 2:
+ print(f"Retrying {var}_{dims_name[0]}-{i}_{dims_name[1]}-{j} (attempt {attempt + 1})")
+ else:
+ print(f"Error processing {var}_{dims_name[0]}-{i}_{dims_name[1]}-{j}: {e}")
+
+
+ def func_plot_dataset(ds_in: xr.Dataset, output_dir: str, xyzt_dims: Tuple[str, str, str, str] = ("longitude", "latitude", "level", "time"), plot_type: str = "contourf", fixed_colorscale: bool = False) -> None:
+ """Plot variables from a NetCDF file and save the plots to the specified directory."""
+ os.makedirs(output_dir, exist_ok=True)
+ x_dim, y_dim, z_dim, t_dim = xyzt_dims
+
+ # Main processing function
+ try:
+ ds = ds_in
+ varlist = list(ds.data_vars)
+ print(f"Found {len(varlist)} variables in dataset")
+
+ for var in varlist:
+ print("=" * 120)
+ print(f"Processing: {var}")
+ data = ds[var]
+ dims = len(data.shape)
+ dims_name = data.dims
+ try:
+ process_variable(var, data, dims, dims_name, output_dir, x_dim, y_dim, z_dim, t_dim, fixed_colorscale, plot_type)
+ except Exception as e:
+ print(f"Error processing variable {var}: {e}")
+
+ except Exception as e:
+ print(f"Error processing dataset: {e}")
+ finally:
+ if "ds" in locals():
+ ds.close()
+ print("Dataset closed")
+
+
+ if __name__ == "__main__":
+ pass
+ # func_plot_dataset(ds, output_dir, xyzt_dims=("longitude", "latitude", "level", "time"), plot_type="contourf", fixed_colorscale=False)
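A minimal usage sketch for the new plotting helper, based only on the func_plot_dataset signature shown above; the input file name and output directory are illustrative assumptions, not part of the package:

import xarray as xr
from oafuncs._script.plot_dataset import func_plot_dataset

ds = xr.open_dataset("sample.nc")  # hypothetical file with longitude/latitude/level/time dimensions
# Writes one PNG per variable (and per leading-dimension slice for 3D/4D variables) into ./figs
func_plot_dataset(ds, "figs", xyzt_dims=("longitude", "latitude", "level", "time"), plot_type="contourf", fixed_colorscale=False)

The commented-out call at the bottom of the module uses the same argument order.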
@@ -143,14 +143,14 @@ def get(cmap_name=None, query=False):
  Example:
  cmap = get('viridis')
  cmap = get('diverging_1')
- cmap = get('cold_1')
+ cmap = get('cool_1')
  cmap = get('warm_1')
  cmap = get('colorful_1')
  """

  my_cmap_dict = {
  "diverging_1": create(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
- "cold_1": create(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC"]),
+ "cool_1": create(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC"]),
  "warm_1": create(["#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
  # "land_1": create_custom(["#3E6436", "#678A59", "#91A176", "#B8A87D", "#D9CBB2"], under="#A6CEE3", over="#FFFFFF"),
  # "ocean_1": create_custom(["#126697", "#2D88B3", "#4EA1C9", "#78B9D8", "#A6CEE3"], under="#8470FF", over="#3E6436"),
@@ -191,7 +191,9 @@ def get(cmap_name=None, query=False):
  try:
  return mpl.colormaps.get_cmap(cmap_name)
  except ValueError:
- raise ValueError(f"Unknown cmap name: {cmap_name}")
+ # raise ValueError(f"Unknown cmap name: {cmap_name}")
+ print(f"Unknown cmap name: {cmap_name}\nNow return 'rainbow' as default.")
+ return mpl.colormaps.get_cmap("rainbow")


  if __name__ == "__main__":
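A short sketch of the behavior change above, assuming the surrounding oa_cmap.get code is otherwise unchanged: the 'cold_1' preset is renamed 'cool_1', and unknown names now fall back to matplotlib's 'rainbow' with a printed warning instead of raising ValueError.

import oafuncs

cmap_cool = oafuncs.oa_cmap.get("cool_1")     # renamed from 'cold_1' in 0.0.97.1
cmap_any = oafuncs.oa_cmap.get("not_a_cmap")  # prints a warning and returns the 'rainbow' colormap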
@@ -18,11 +18,12 @@ import multiprocessing as mp
  from concurrent.futures import ThreadPoolExecutor

  import numpy as np
+ import salem
  import xarray as xr
  from scipy.interpolate import griddata
- import salem
+ from scipy.interpolate import interp1d

- __all__ = ["interp_2d", "ensure_list", "mask_shapefile"]
+ __all__ = ["interp_along_dim", "interp_2d", "ensure_list", "mask_shapefile"]


  def ensure_list(input_data):
@@ -45,15 +46,126 @@ def ensure_list(input_data):
  return [str(input_data)]


+ def interp_along_dim(tgt_coords, src_coords, src_data, axis=-1, interp_method="linear", extrap_method="linear"):
+ """
+ Perform interpolation and extrapolation along a specified dimension.
+
+ Parameters:
+ -----------
+ tgt_coords: 1d array
+ Array of target coordinate points; must be one-dimensional.
+
+ src_coords: 1d or nd array
+ Array of source coordinate points. Either a 1D array (which will be broadcast to match the shape of src_data) or a multi-dimensional array with the same shape as src_data.
+
+ src_data: nd array
+ Source data array containing the values to be interpolated from src_coords to tgt_coords.
+
+ axis: int (default -1)
+ Axis of src_data along which to interpolate. Defaults to the last axis.
+
+ interp_method: str (default "linear")
+ Core interpolation method.
+ Available options:
+ - "linear": linear interpolation (default)
+ - "nearest": nearest-neighbour interpolation
+ - "zero": zero-order interpolation
+ - "slinear": first-order spline interpolation
+ - "quadratic": second-order interpolation
+ - "cubic": third-order interpolation
+ - "previous": use the value of the previous point
+ - "next": use the value of the next point
+ See the kind argument of scipy.interpolate.interp1d for more options.
+
+ extrap_method: str (default "linear")
+ Core extrapolation method, used for target coordinates that fall outside the range of the source coordinates.
+ Supports the same options as interp_method:
+ - "linear": linear extrapolation (default)
+ - "nearest": nearest-neighbour extrapolation
+ - "zero": zero-order extrapolation
+ - "slinear": first-order spline extrapolation
+ - "quadratic": second-order extrapolation
+ - "cubic": third-order extrapolation
+ - "previous": use the value of the nearest preceding point
+ - "next": use the value of the nearest following point
+
+ Returns:
+ --------
+ array
+ The interpolated data array, with the same shape as src_data except that the axis dimension has length len(tgt_coords).
+
+ Examples:
+ ---------
+ 1D interpolation example:
+ >>> tgt_coords = np.array([1, 2, 3, 4])
+ >>> src_coords = np.array([0, 1, 2, 3, 4, 5])
+ >>> src_data = np.array([0, 1, 4, 9, 16, 25])
+ >>> interp_along_dim(tgt_coords, src_coords, src_data)
+ array([ 1., 4., 9., 16.])
+
+ Multi-dimensional interpolation example:
+ >>> src_data = np.array([[0, 1, 4], [10, 20, 30]])
+ >>> interp_along_dim(np.array([0.5, 1.5]), np.array([0, 1, 2]), src_data, axis=1)
+ array([[ 0.5, 2.5],
+ [15. , 25. ]])
+ """
+ tgt_coords = np.asarray(tgt_coords)
+ if tgt_coords.ndim != 1:
+ raise ValueError("tgt_coords must be a 1d array.")
+
+ src_coords = np.asarray(src_coords)
+ src_data = np.asarray(src_data)
+
+ # Handle the simple 1D case
+ if src_data.ndim == 1 and src_coords.ndim == 1:
+ if len(src_coords) != len(src_data):
+ raise ValueError("For 1D data, src_coords and src_data must have the same length")
+
+ interpolator = interp1d(src_coords, src_data, kind=interp_method, fill_value="extrapolate", bounds_error=False)
+ return interpolator(tgt_coords)
+
+ # Handle the multi-dimensional case
+ if src_coords.ndim == 1:
+ # Expand src_coords to match src_data dimensions along the specified axis
+ shape = [1] * src_data.ndim
+ shape[axis] = src_coords.shape[0]
+ src_coords = np.reshape(src_coords, shape)
+ src_coords = np.broadcast_to(src_coords, src_data.shape)
+ elif src_coords.shape != src_data.shape:
+ raise ValueError("src_coords and src_data must have the same shape.")
+
+ def apply_interp_extrap(arr):
+ xp = np.moveaxis(src_coords, axis, 0)
+ # Pick the coordinate vector according to dimensionality
+ if xp.ndim > 1:
+ xp = xp[:, 0]  # multi-dimensional case
+ else:
+ xp = xp  # 1D case
+
+ arr = np.moveaxis(arr, axis, 0)
+ interpolator = interp1d(xp, arr, kind=interp_method, fill_value="extrapolate", bounds_error=False)
+ interpolated = interpolator(tgt_coords)
+ if extrap_method != interp_method:
+ mask_extrap = (tgt_coords < xp.min()) | (tgt_coords > xp.max())
+ if np.any(mask_extrap):
+ extrap_interpolator = interp1d(xp, arr, kind=extrap_method, fill_value="extrapolate", bounds_error=False)
+ interpolated[mask_extrap] = extrap_interpolator(tgt_coords[mask_extrap])
+ return np.moveaxis(interpolated, 0, axis)
+
+ result = np.apply_along_axis(apply_interp_extrap, axis, src_data)
+
+ return result
+
+
  def interp_2d(target_x, target_y, origin_x, origin_y, data, method="linear", parallel=True):
  """
  Perform 2D interpolation on the last two dimensions of a multi-dimensional array.

  Parameters:
- - target_x (array-like): 1D array of target grid's x-coordinates.
- - target_y (array-like): 1D array of target grid's y-coordinates.
- - origin_x (array-like): 1D array of original grid's x-coordinates.
- - origin_y (array-like): 1D array of original grid's y-coordinates.
+ - target_x (array-like): 1D or 2D array of target grid's x-coordinates.
+ - target_y (array-like): 1D or 2D array of target grid's y-coordinates.
+ - origin_x (array-like): 1D or 2D array of original grid's x-coordinates.
+ - origin_y (array-like): 1D or 2D array of original grid's y-coordinates.
  - data (numpy.ndarray): Multi-dimensional array where the last two dimensions correspond to the original grid.
  - method (str, optional): Interpolation method, default is 'linear'. Other options include 'nearest', 'cubic', etc.
  - parallel (bool, optional): Flag to enable parallel processing. Default is True.
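A usage sketch for the new interp_along_dim, lifted from the docstring examples above (the array values are the docstring's own):

import numpy as np
from oafuncs.oa_data import interp_along_dim

# 1D case: interpolate samples of y = x**2 onto new coordinates
tgt = np.array([1, 2, 3, 4])
src = np.array([0, 1, 2, 3, 4, 5])
vals = np.array([0, 1, 4, 9, 16, 25])
print(interp_along_dim(tgt, src, vals))  # -> [ 1.  4.  9. 16.]

# Multi-dimensional case: interpolate each row along axis=1
src_data = np.array([[0, 1, 4], [10, 20, 30]])
print(interp_along_dim(np.array([0.5, 1.5]), np.array([0, 1, 2]), src_data, axis=1))
# -> [[ 0.5  2.5]
#     [15.  25. ]]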