oafuncs 0.0.97.14__py3-none-any.whl → 0.0.97.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oafuncs/_script/cprogressbar.py +42 -20
- oafuncs/_script/netcdf_merge.py +1 -1
- oafuncs/_script/netcdf_modify.py +106 -0
- oafuncs/_script/netcdf_write.py +125 -0
- oafuncs/oa_cmap.py +59 -112
- oafuncs/oa_date.py +30 -16
- oafuncs/oa_down/hycom_3hourly.py +5 -54
- oafuncs/oa_draw.py +11 -132
- oafuncs/oa_file.py +1 -23
- oafuncs/oa_nc.py +51 -270
- oafuncs/oa_python.py +77 -87
- oafuncs/oa_sign/meteorological.py +3 -3
- oafuncs/oa_tool.py +31 -34
- {oafuncs-0.0.97.14.dist-info → oafuncs-0.0.97.16.dist-info}/METADATA +1 -1
- {oafuncs-0.0.97.14.dist-info → oafuncs-0.0.97.16.dist-info}/RECORD +18 -16
- {oafuncs-0.0.97.14.dist-info → oafuncs-0.0.97.16.dist-info}/WHEEL +0 -0
- {oafuncs-0.0.97.14.dist-info → oafuncs-0.0.97.16.dist-info}/licenses/LICENSE.txt +0 -0
- {oafuncs-0.0.97.14.dist-info → oafuncs-0.0.97.16.dist-info}/top_level.txt +0 -0
oafuncs/oa_nc.py
CHANGED
@@ -14,286 +14,74 @@ Python Version: 3.11
 """
 
 import os
-from typing import List, Optional, Union
+from typing import List, Optional, Union, Tuple
 
 import netCDF4 as nc
 import numpy as np
 import xarray as xr
 from rich import print
 
-__all__ = ["
+__all__ = ["save", "merge", "modify", "rename", "check", "convert_longitude", "isel", "draw"]
 
 
-def
-    """
-    Description:
-        Read variables from nc file
-    Parameters:
-        file: str, file path
-        *vars: str, variable name or variable names; should be in same size
-    Example:
-        datas = get_var(file_ecm, 'h', 't', 'u', 'v')
-    Return:
-        datas: list, variable data
-    """
-    ds = xr.open_dataset(file)
-    datas = []
-    for var in vars:
-        data = ds[var]
-        datas.append(data)
-    ds.close()
-    return datas
-
-
-def extract(file, varname, only_value=True):
-    """
-    Description:
-        Extract variables from nc file
-        Return the variable and coordinate dictionary
-    Parameters:
-        file: str, file path
-        varname: str, variable name
-        only_value: bool, whether to keep only the value of the variable and dimension
-    Example:
-        data, dimdict = extract('test.nc', 'h')
-    """
-    ds = xr.open_dataset(file)
-    vardata = ds[varname]
-    ds.close()
-    dims = vardata.dims
-    dimdict = {}
-    for dim in dims:
-        if only_value:
-            dimdict[dim] = vardata[dim].values
-        else:
-            dimdict[dim] = ds[dim]
-    if only_value:
-        vardata = np.array(vardata)
-    return vardata, dimdict
-
-
-def _numpy_to_nc_type(numpy_type):
-    """Map NumPy data types to NetCDF data types"""
-    numpy_to_nc = {
-        "float32": "f4",
-        "float64": "f8",
-        "int8": "i1",
-        "int16": "i2",
-        "int32": "i4",
-        "int64": "i8",
-        "uint8": "u1",
-        "uint16": "u2",
-        "uint32": "u4",
-        "uint64": "u8",
-    }
-    # Make sure the input is a string; if it is not, convert it
-    numpy_type_str = str(numpy_type) if not isinstance(numpy_type, str) else numpy_type
-    return numpy_to_nc.get(numpy_type_str, "f4")  # default to 'float32'
-
-
-def _calculate_scale_and_offset(data, n=16):
-    if not isinstance(data, np.ndarray):
-        raise ValueError("Input data must be a NumPy array.")
-
-    # Use nan-aware min/max so NaN values do not affect the result
-    data_min = np.nanmin(data)
-    data_max = np.nanmax(data)
-
-    if np.isnan(data_min) or np.isnan(data_max):
-        raise ValueError("Input data contains NaN values, which are not allowed.")
-
-    scale_factor = (data_max - data_min) / (2**n - 1)
-    add_offset = data_min + 2 ** (n - 1) * scale_factor
-
-    return scale_factor, add_offset
-
-
-def save(file, data, varname=None, coords=None, mode="w", scale_offset_switch=True, compile_switch=True):
+def save(file: str, data: Union[np.ndarray, xr.DataArray], varname: Optional[str] = None, coords: Optional[dict] = None, mode: str = "w", scale_offset_switch: bool = True, compile_switch: bool = True) -> None:
     """
     Description:
         Write data to NetCDF file
     Parameters:
         file: str, file path
-        data: data
-        varname: str, variable name
-        coords: dict, coordinates, key is the dimension name, value is the coordinate data
+        data: np.ndarray or xr.DataArray, data to be written
+        varname: Optional[str], variable name
+        coords: Optional[dict], coordinates, key is the dimension name, value is the coordinate data
         mode: str, write mode, 'w' for write, 'a' for append
         scale_offset_switch: bool, whether to use scale_factor and add_offset, default is True
         compile_switch: bool, whether to use compression parameters, default is True
     Example:
         save(r'test.nc', data, 'u', {'time': np.linspace(0, 120, 100), 'lev': np.linspace(0, 120, 50)}, 'a')
     """
-
-    kwargs = {"zlib": True, "complevel": 4} if compile_switch else {}
-
-    # Check whether the file exists and decide the action based on the mode
-    if mode == "w" and os.path.exists(file):
-        os.remove(file)
-    elif mode == "a" and not os.path.exists(file):
-        mode = "w"
-
-    # Open the NetCDF file
-    with nc.Dataset(file, mode, format="NETCDF4") as ncfile:
-        # If data is a DataArray and neither varname nor coords was provided
-        if varname is None and coords is None and isinstance(data, xr.DataArray):
-            encoding = {}
-            for var in data.data_vars:
-                scale_factor, add_offset = _calculate_scale_and_offset(data[var].values)
-                encoding[var] = {
-                    "zlib": True,
-                    "complevel": 4,
-                    "dtype": "int16",
-                    "scale_factor": scale_factor,
-                    "add_offset": add_offset,
-                    "_FillValue": -32767,
-                }
-            data.to_netcdf(file, mode=mode, encoding=encoding)
-            return
-
-        # Add coordinates
-        for dim, coord_data in coords.items():
-            if dim in ncfile.dimensions:
-                if len(coord_data) != len(ncfile.dimensions[dim]):
-                    raise ValueError(f"Length of coordinate '{dim}' does not match the dimension length.")
-                else:
-                    ncfile.variables[dim][:] = np.array(coord_data)
-            else:
-                ncfile.createDimension(dim, len(coord_data))
-                var = ncfile.createVariable(dim, _numpy_to_nc_type(coord_data.dtype), (dim,), **kwargs)
-                var[:] = np.array(coord_data)
-
-                # If the coordinate data has attributes, add them to the NetCDF variable
-                if isinstance(coord_data, xr.DataArray) and coord_data.attrs:
-                    for attr_name, attr_value in coord_data.attrs.items():
-                        var.setncattr(attr_name, attr_value)
-
-        # Add or update the variable
-        if varname in ncfile.variables:
-            if data.shape != ncfile.variables[varname].shape:
-                raise ValueError(f"Shape of data does not match the variable shape for '{varname}'.")
-            ncfile.variables[varname][:] = np.array(data)
-        else:
-            # Create the variable
-            dim_names = tuple(coords.keys())
-            if scale_offset_switch:
-                scale_factor, add_offset = _calculate_scale_and_offset(np.array(data))
-                dtype = "i2"
-                var = ncfile.createVariable(varname, dtype, dim_names, fill_value=-32767, **kwargs)
-                var.setncattr("scale_factor", scale_factor)
-                var.setncattr("add_offset", add_offset)
-            else:
-                dtype = _numpy_to_nc_type(data.dtype)
-                var = ncfile.createVariable(varname, dtype, dim_names, **kwargs)
-            var[:] = np.array(data)
+    from ._script.netcdf_write import save_to_nc
 
-
-            if isinstance(data, xr.DataArray) and data.attrs:
-                for key, value in data.attrs.items():
-                    if key not in ["scale_factor", "add_offset", "_FillValue", "missing_value"] or not scale_offset_switch:
-                        var.setncattr(key, value)
+    save_to_nc(file, data, varname, coords, mode, scale_offset_switch, compile_switch)
 
 
 def merge(file_list: Union[str, List[str]], var_name: Optional[Union[str, List[str]]] = None, dim_name: Optional[str] = None, target_filename: Optional[str] = None) -> None:
-    from ._script.netcdf_merge import merge_nc
-
-    merge_nc(file_list, var_name, dim_name, target_filename)
-
-
-def _modify_var(nc_file_path, variable_name, new_value):
     """
     Description:
-
-
+        Merge multiple NetCDF files into one.
     Parameters:
-
-
-
-
-    Example:
-        modify_var('test.nc', 'u', np.random.rand(100, 50))
+        file_list: Union[str, List[str]], list of file paths or a single file path
+        var_name: Optional[Union[str, List[str]]], variable names to merge
+        dim_name: Optional[str], dimension name to merge along
+        target_filename: Optional[str], output file name
     """
-
-    # Open the NetCDF file
-    with nc.Dataset(nc_file_path, "r+") as dataset:
-        # Check if the variable exists
-        if variable_name not in dataset.variables:
-            raise ValueError(f"Variable '{variable_name}' not found in the NetCDF file.")
-        # Get the variable to be modified
-        variable = dataset.variables[variable_name]
-        # Check if the shape of the new value matches the variable's shape
-        if variable.shape != new_value.shape:
-            raise ValueError(f"Shape mismatch: Variable '{variable_name}' has shape {variable.shape}, but new value has shape {new_value.shape}.")
-        # Modify the value of the variable
-        variable[:] = new_value
-        print(f"Successfully modified variable '{variable_name}' in '{nc_file_path}'.")
-    except Exception as e:
-        print(f"An error occurred while modifying variable '{variable_name}' in '{nc_file_path}': {e}")
-
-
-def _modify_attr(nc_file_path, variable_name, attribute_name, attribute_value):
-    """
-    Description:
-        Add or modify an attribute of a variable in a NetCDF file using the netCDF4 library.
-
-    Parameters:
-        nc_file_path (str): The path to the NetCDF file.
-        variable_name (str): The name of the variable to be modified.
-        attribute_name (str): The name of the attribute to be added or modified.
-        attribute_value (any): The value of the attribute.
+    from ._script.netcdf_merge import merge_nc
 
-
-        modify_attr('test.nc', 'temperature', 'long_name', 'Temperature in Celsius')
-    """
-    try:
-        with nc.Dataset(nc_file_path, "r+") as ds:
-            # Check if the variable exists
-            if variable_name not in ds.variables:
-                raise ValueError(f"Variable '{variable_name}' not found in the NetCDF file.")
-            # Get the variable
-            variable = ds.variables[variable_name]
-            # Add or modify the attribute
-            variable.setncattr(attribute_name, attribute_value)
-            print(f"Successfully modified attribute '{attribute_name}' of variable '{variable_name}' in '{nc_file_path}'.")
-    except Exception as e:
-        print(f"[red]Error:[/red] Failed to modify attribute '{attribute_name}' of variable '{variable_name}' in file '{nc_file_path}'. [bold]Details:[/bold] {e}")
+    merge_nc(file_list, var_name, dim_name, target_filename)
 
 
-def modify(nc_file, var_name, attr_name=None, new_value=None):
+def modify(nc_file: str, var_name: str, attr_name: Optional[str] = None, new_value: Optional[Union[str, float, int, np.ndarray]] = None) -> None:
     """
     Description:
         Modify the value of a variable or the value of an attribute in a NetCDF file.
-
     Parameters:
-        nc_file
-        var_name
-        attr_name
-        new_value
-
-    Example:
-        modify('test.nc', 'temperature', 'long_name', 'Temperature in Celsius')
-        modify('test.nc', 'temperature', None, np.random.rand(100, 50))
+        nc_file: str, the path to the NetCDF file
+        var_name: str, the name of the variable to be modified
+        attr_name: Optional[str], the name of the attribute to be modified. If None, the variable value will be modified
+        new_value: Optional[Union[str, float, int, np.ndarray]], the new value of the variable or attribute
     """
-
-
-
-            else:
-                _modify_attr(nc_file, var_name, attr_name, new_value)
-    except Exception as e:
-        print(f"An error occurred while modifying '{var_name}' in '{nc_file}': {e}")
+    from ._script.netcdf_modify import modify_nc
+
+    modify_nc(nc_file, var_name, attr_name, new_value)
 
 
-def rename(ncfile_path, old_name, new_name):
+def rename(ncfile_path: str, old_name: str, new_name: str) -> None:
     """
     Description:
         Rename a variable and/or dimension in a NetCDF file.
-
     Parameters:
-        ncfile_path
-        old_name
-        new_name
-
-    example:
-        rename('test.nc', 'temperature', 'temp')
+        ncfile_path: str, the path to the NetCDF file
+        old_name: str, the current name of the variable or dimension
+        new_name: str, the new name to assign to the variable or dimension
     """
     try:
         with nc.Dataset(ncfile_path, "r+") as dataset:
@@ -320,9 +108,14 @@ def rename(ncfile_path, old_name, new_name):
 
 def check(ncfile: str, delete_switch: bool = False, print_switch: bool = True) -> bool:
     """
-
-
-
+    Description:
+        Check if a NetCDF file is corrupted with enhanced error handling.
+    Parameters:
+        ncfile: str, the path to the NetCDF file
+        delete_switch: bool, whether to delete the file if it is corrupted
+        print_switch: bool, whether to print messages during the check
+    Returns:
+        bool: True if the file is valid, False otherwise
     """
     is_valid = False
 
@@ -377,18 +170,16 @@ def check(ncfile: str, delete_switch: bool = False, print_switch: bool = True) -> bool:
     return True
 
 
-def convert_longitude(ds, lon_name="longitude", convert=180):
+def convert_longitude(ds: xr.Dataset, lon_name: str = "longitude", convert: int = 180) -> xr.Dataset:
     """
     Description:
         Convert the longitude array to a specified range.
-
     Parameters:
-        ds
-        lon_name
-        convert
-
+        ds: xr.Dataset, the xarray dataset containing the longitude data
+        lon_name: str, the name of the longitude variable, default is "longitude"
+        convert: int, the target range to convert to, can be 180 or 360, default is 180
     Returns:
-
+        xr.Dataset: The xarray dataset with the converted longitude
     """
     to_which = int(convert)
     if to_which not in [180, 360]:
@@ -402,20 +193,16 @@ def convert_longitude(ds, lon_name="longitude", convert=180):
     return ds.sortby(lon_name)
 
 
-def isel(ncfile, dim_name, slice_list):
+def isel(ncfile: str, dim_name: str, slice_list: List[int]) -> xr.Dataset:
     """
     Description:
-        Choose the data by the index of the dimension
-
+        Choose the data by the index of the dimension.
     Parameters:
         ncfile: str, the path of the netCDF file
         dim_name: str, the name of the dimension
-        slice_list:
-
-
-        slice_list = [[y*12+m for m in range(11,14)] for y in range(84)]
-        slice_list = [y * 12 + m for y in range(84) for m in range(11, 14)]
-        isel(ncfile, 'time', slice_list)
+        slice_list: List[int], the indices of the dimension
+    Returns:
+        xr.Dataset: The subset dataset
     """
     ds = xr.open_dataset(ncfile)
     slice_list = np.array(slice_list).flatten()
@@ -425,23 +212,17 @@ def isel(ncfile, dim_name, slice_list):
     return ds_new
 
 
-def draw(output_dir=None, dataset=None, ncfile=None, xyzt_dims=("longitude", "latitude", "level", "time"), plot_type="contourf", fixed_colorscale=False):
+def draw(output_dir: Optional[str] = None, dataset: Optional[xr.Dataset] = None, ncfile: Optional[str] = None, xyzt_dims: Union[List[str], Tuple[str, str, str, str]] = ("longitude", "latitude", "level", "time"), plot_type: str = "contourf", fixed_colorscale: bool = False) -> None:
     """
     Description:
-        Draw the data in the netCDF file
-
+        Draw the data in the netCDF file.
     Parameters:
-
-
-
-
-        z_dim: str, the name of the z dimension
-        t_dim: str, the name of the t dimension
+        output_dir: Optional[str], the path of the output directory
+        dataset: Optional[xr.Dataset], the xarray dataset to plot
+        ncfile: Optional[str], the path of the netCDF file
+        xyzt_dims: Union[List[str], Tuple[str, str, str, str]], the dimensions for plotting
         plot_type: str, the type of the plot, default is "contourf" (contourf, contour)
         fixed_colorscale: bool, whether to use fixed colorscale, default is False
-
-    Example:
-        draw(ncfile, output_dir, x_dim="longitude", y_dim="latitude", z_dim="level", t_dim="time", fixed_colorscale=False)
     """
     from ._script.plot_dataset import func_plot_dataset
 
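After this refactor, oa_nc.py keeps only thin, typed wrappers (save, merge, modify, rename, check, convert_longitude, isel, draw) that delegate to the new _script modules. A hedged usage sketch, based only on the signatures and docstring examples visible in the diff and assuming the module is importable as oafuncs.oa_nc (file names and values are made up):

import numpy as np
from oafuncs import oa_nc  # assumed import path

# Write a 2-D variable with named coordinates (adapted from the save() docstring example).
data = np.random.rand(100, 50)
oa_nc.save(r"test.nc", data, "u", {"time": np.linspace(0, 120, 100), "lev": np.linspace(0, 120, 50)}, "w")

# Update an attribute on that variable, then sanity-check the file.
oa_nc.modify("test.nc", "u", "long_name", "zonal velocity (example)")
if oa_nc.check("test.nc", delete_switch=False):
    subset = oa_nc.isel("test.nc", "time", [0, 1, 2])  # returns the subset as an xr.Dataset

The wrapper parameters are unchanged apart from the added type hints, but get_var and extract are removed from the module (and from __all__), so callers of those two need to read the file with xarray directly or switch to the remaining wrappers.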
oafuncs/oa_python.py
CHANGED
@@ -1,108 +1,98 @@
 #!/usr/bin/env python
 # coding=utf-8
-
+"""
 Author: Liu Kun && 16031215@qq.com
-Date:
+Date: 2025-03-27 16:51:26
 LastEditors: Liu Kun && 16031215@qq.com
-LastEditTime:
+LastEditTime: 2025-04-05 14:17:07
 FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_python.py
-Description:
+Description:
 EditPlatform: vscode
 ComputerInfo: XPS 15 9510
 SystemInfo: Windows 11
-Python Version: 3.
-
+Python Version: 3.12
+"""
 
 import os
+
 from rich import print
 
-__all__ = [
+__all__ = ["install_packages", "upgrade_packages"]
+
+
+def install_packages(packages=None, python_executable="python", package_manager="pip"):
+    """
+    packages: list, libraries to be installed
+    python_executable: str, Python version; for example, on Windows, copy python.exe to python312.exe, then set python_executable='python312'
+    package_manager: str, the package manager to use ('pip' or 'conda')
+    """
+    if not isinstance(packages, (list, type(None))):
+        raise ValueError("The 'packages' parameter must be a list or None")
 
+    if package_manager not in ["pip", "conda"]:
+        raise ValueError("The 'package_manager' parameter must be either 'pip' or 'conda'")
 
-
-
-
-
-
-
-
-
-
-
-    "
-
-
-
-
-
-
-    "scipy",  # scientific computing
-    # "scikit-learn",  # machine learning
-    "matplotlib",  # plotting
-    # "seaborn",
-    "imageio",  # images
-    # "pylustrator",  # plotting
-    "Cartopy",  # plotting; cartopy now supports Python 3.11 and can be installed directly with pip
-    "seawater",  # ocean calculations
-    "cmaps",  # colormaps
-    "colorcet",  # colormaps
-    "cmasher",  # colormaps
-    "tqdm",  # progress bars
-    # "taichi",  # acceleration
-    "icecream",  # print debugging
-    # "pyperclip",  # system clipboard
-    "rich",  # rich terminal text
-    # "stratify",  # vertical interpolation of atmosphere/ocean data
-    "dask",  # parallel computing
-    "bs4",  # web pages
-    "pathlib",  # paths
-    "opencv-contrib-python",  # image processing
-    # "pydap",  # downloading remote data with xarray
-    "gsw",  # ocean calculations
-    "global_land_mask",  # land/ocean mask
-    # "cfgrib",  # GRIB files
-    # "ecmwflibs",  # GRIB files, both libraries are required
-    "geopandas",  # vector data, shp files
-    # "geopy",  # geolocation
-    # "flask",  # web pages
-    "cdsapi",  # downloading remote data (ERA5)
-    # The following are less important
-    "lxml",  # web pages
-    "keyboard",  # keyboard
-    "zhdate",  # Chinese lunar calendar
-    "python-pptx",  # ppt
-    "python-docx",  # word
-    "ipywidgets",  # progress-bar widget for Jupyter
-    "salem",  # map projections, can partly replace wrf-python
-    "meteva",  # meteorological data processing, developed by the China Meteorological Administration
-    "wget",  # downloads
-    "pyautogui",  # mouse/keyboard automation, needed for auto-click scripts
-]
+    if package_manager == "conda":
+        if not packages:
+            return
+        try:
+            package_count = len(packages)
+            for i, package in enumerate(packages):
+                os.system(f"conda install -c conda-forge {package} -y")
+                print("-" * 100)
+                print(f"Successfully installed {package} ({i + 1}/{package_count})")
+                print("-" * 100)
+        except Exception as e:
+            print(f"Installation failed: {str(e)}")
+        return
+
+    os.system(f"{python_executable} -m ensurepip")
+    os.system(f"{python_executable} -m pip install --upgrade pip")
+    if not packages:
+        return
     try:
-
-
-
-
-        if
-
+        installed_packages = os.popen(f"{python_executable} -m pip list --format=freeze").read().splitlines()
+        installed_packages = {pkg.split("==")[0].lower() for pkg in installed_packages}
+        package_count = len(packages)
+        for i, package in enumerate(packages):
+            # Check if the library is already installed, skip if installed
+            if package.lower() in installed_packages:
+                print(f"{package} is already installed")
                 continue
-
-
-
-
-            print('-'*100)
+            os.system(f"{python_executable} -m pip install {package}")
+            print("-" * 100)
+            print(f"Successfully installed {package} ({i + 1}/{package_count})")
+            print("-" * 100)
     except Exception as e:
-        print("
+        print(f"Installation failed: {str(e)}")
+
+
+def upgrade_packages(packages=None, python_executable="python", package_manager="pip"):
+    """
+    packages: list, libraries to be upgraded
+    python_executable: str, Python version; for example, on Windows, copy python.exe to python312.exe, then set python_executable='python312'
+    package_manager: str, the package manager to use ('pip' or 'conda')
+    """
+    if not isinstance(packages, (list, type(None))):
+        raise ValueError("The 'packages' parameter must be a list or None")
 
+    if package_manager not in ["pip", "conda"]:
+        raise ValueError("The 'package_manager' parameter must be either 'pip' or 'conda'")
 
-def upgrade_lib(libs=None, python_exe='python'):
-    if libs is None:
-        installed_libs = os.popen(python_exe + ' -m pip list').read()
-        libs = installed_libs
     try:
-
-
-
+        if package_manager == "conda":
+            if not packages:
+                installed_packages = os.popen("conda list --export").read().splitlines()
+                packages = [pkg.split("=")[0] for pkg in installed_packages if not pkg.startswith("#")]
+            for package in packages:
+                os.system(f"conda update -c conda-forge {package} -y")
+            print("Upgrade successful")
+        else:
+            if not packages:
+                installed_packages = os.popen(f"{python_executable} -m pip list --format=freeze").read().splitlines()
+                packages = [pkg.split("==")[0] for pkg in installed_packages]
+            for package in packages:
+                os.system(f"{python_executable} -m pip install --upgrade {package}")
+            print("Upgrade successful")
     except Exception as e:
-        print("
+        print(f"Upgrade failed: {str(e)}")
oafuncs/oa_sign/meteorological.py
CHANGED
@@ -4,8 +4,8 @@
 Author: Liu Kun && 16031215@qq.com
 Date: 2024-10-14 16:14:50
 LastEditors: Liu Kun && 16031215@qq.com
-LastEditTime:
-FilePath: \\Python\\My_Funcs\\OAFuncs\\
+LastEditTime: 2025-04-04 20:37:13
+FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_sign\\meteorological.py
 Description:
 EditPlatform: vscode
 ComputerInfo: XPS 15 9510
@@ -63,7 +63,7 @@ def sign_in_meteorological_home(email, password):
 
     def login():
         url = 'http://bbs.06climate.com/member.php?'
-        #
+        # The login password needs to be encoded
         mydata['password'] = hashlib.md5(mydata['password'].encode()).hexdigest()
         credentials = {
             'password': mydata['password'],
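The only code change here is the comment above the password step: the forum login MD5-hashes the password before it is posted. A minimal illustration of that one transform (the value is made up; the surrounding mydata/credentials handling is the package's own and not reproduced):

import hashlib

password = "example-password"                        # hypothetical value, not from the package
hashed = hashlib.md5(password.encode()).hexdigest()  # same transform applied to mydata['password']
print(hashed)                                        # 32-character hex digest sent as the 'password' field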
|