oafuncs 0.0.97.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
oafuncs/oa_nc.py ADDED
@@ -0,0 +1,523 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ """
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-09-17 14:58:50
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-12-06 14:16:56
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_nc.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ """
15
+
16
+ import os
17
+
18
+ import netCDF4 as nc
19
+ import numpy as np
20
+ import xarray as xr
21
+ from rich import print
22
+
23
+ from oafuncs._script.plot_dataset import func_plot_dataset
24
+
25
+ __all__ = ["get_var", "extract", "save", "merge", "modify", "rename", "check", "convert_longitude", "isel", "draw"]
26
+
27
+
28
def get_var(file, *vars):
    """
    Read one or more variables from a NetCDF file.

    Parameters:
        file: str, path to the NetCDF file
        *vars: str, one or more variable names to read

    Returns:
        datas: list of xarray.DataArray, one per requested variable,
               loaded into memory.

    Example:
        datas = get_var(file_ecm, 'h', 't', 'u', 'v')
    """
    # BUG FIX: the original closed the dataset and then returned lazily
    # backed DataArrays, so later data access could hit a closed file.
    # Load each variable into memory while the file is still open.
    with xr.open_dataset(file) as ds:
        datas = [ds[var].load() for var in vars]
    return datas
47
+
48
+
49
def extract(file, varname, only_value=True):
    """
    Extract a variable and its coordinate arrays from a NetCDF file.

    Parameters:
        file: str, path to the NetCDF file
        varname: str, variable name
        only_value: bool, if True return plain numpy values for the variable
            and its dimension coordinates; if False return the xarray
            objects (keeping attributes).

    Returns:
        (vardata, dimdict): the variable data and a dict mapping each
        dimension name to its coordinate data.

    Example:
        data, dimdict = extract('test.nc', 'h')
    """
    with xr.open_dataset(file) as ds:
        # Materialize before the file is closed.
        vardata = ds[varname].load()
        dimdict = {}
        for dim in vardata.dims:
            if only_value:
                dimdict[dim] = vardata[dim].values
            else:
                # BUG FIX: the original read ds[dim] AFTER ds.close();
                # keep the access inside the open context and load it.
                dimdict[dim] = ds[dim].load()
    if only_value:
        vardata = np.array(vardata)
    return vardata, dimdict
74
+
75
+
76
+ def _numpy_to_nc_type(numpy_type):
77
+ """将NumPy数据类型映射到NetCDF数据类型"""
78
+ numpy_to_nc = {
79
+ "float32": "f4",
80
+ "float64": "f8",
81
+ "int8": "i1",
82
+ "int16": "i2",
83
+ "int32": "i4",
84
+ "int64": "i8",
85
+ "uint8": "u1",
86
+ "uint16": "u2",
87
+ "uint32": "u4",
88
+ "uint64": "u8",
89
+ }
90
+ # 确保传入的是字符串类型,如果不是,则转换为字符串
91
+ numpy_type_str = str(numpy_type) if not isinstance(numpy_type, str) else numpy_type
92
+ return numpy_to_nc.get(numpy_type_str, "f4") # 默认使用 'float32'
93
+
94
+
95
+ def _calculate_scale_and_offset(data, n=16):
96
+ if not isinstance(data, np.ndarray):
97
+ raise ValueError("Input data must be a NumPy array.")
98
+
99
+ # 使用 nan_to_num 来避免 NaN 值对 min 和 max 的影响
100
+ data_min = np.nanmin(data)
101
+ data_max = np.nanmax(data)
102
+
103
+ if np.isnan(data_min) or np.isnan(data_max):
104
+ raise ValueError("Input data contains NaN values, which are not allowed.")
105
+
106
+ scale_factor = (data_max - data_min) / (2**n - 1)
107
+ add_offset = data_min + 2 ** (n - 1) * scale_factor
108
+
109
+ return scale_factor, add_offset
110
+
111
+
112
def save(file, data, varname=None, coords=None, mode="w", scale_offset_switch=True, compile_switch=True):
    """
    Write data to a NetCDF file.

    Parameters:
        file: str, output file path
        data: array-like or xarray.DataArray, the data to write
        varname: str, variable name (may be None only when data is a
            self-describing DataArray)
        coords: dict, {dimension name: coordinate data}
        mode: str, 'w' to (over)write, 'a' to append
        scale_offset_switch: bool, pack the variable as int16 with
            scale_factor/add_offset attributes (default True)
        compile_switch: bool, enable zlib compression (default True)

    Example:
        save(r'test.nc', data, 'u', {'time': np.linspace(0, 120, 100), 'lev': np.linspace(0, 120, 50)}, 'a')
    """
    # Shortcut: a self-describing DataArray can be written directly.
    # BUG FIX: this must run BEFORE nc.Dataset opens (and locks) the target
    # file — the original called to_netcdf on a file it already had open.
    if varname is None and coords is None and isinstance(data, xr.DataArray):
        if mode == "w" and os.path.exists(file):
            os.remove(file)
        data.to_netcdf(file, mode=mode)
        return

    if coords is None:
        raise ValueError("coords must be provided when data is not a self-describing DataArray.")

    # Compression options for createVariable.
    kwargs = {"zlib": True, "complevel": 4} if compile_switch else {}

    # Overwrite removes any stale file; append falls back to write when
    # the file does not exist yet.
    if mode == "w" and os.path.exists(file):
        os.remove(file)
    elif mode == "a" and not os.path.exists(file):
        mode = "w"

    with nc.Dataset(file, mode, format="NETCDF4") as ncfile:
        # Create or validate each coordinate/dimension.
        for dim, coord_data in coords.items():
            if dim in ncfile.dimensions:
                if len(coord_data) != len(ncfile.dimensions[dim]):
                    raise ValueError(f"Length of coordinate '{dim}' does not match the dimension length.")
                ncfile.variables[dim][:] = np.array(coord_data)
            else:
                ncfile.createDimension(dim, len(coord_data))
                # np.asarray handles plain lists, which have no .dtype.
                var = ncfile.createVariable(dim, _numpy_to_nc_type(np.asarray(coord_data).dtype), (dim,), **kwargs)
                var[:] = np.array(coord_data)
                # Copy coordinate attributes only for the variable we just
                # created (BUG FIX: the original could reference an
                # undefined/stale `var` when the dimension already existed).
                if isinstance(coord_data, xr.DataArray) and coord_data.attrs:
                    for attr_name, attr_value in coord_data.attrs.items():
                        var.setncattr(attr_name, attr_value)

        # Update an existing variable in place, or create a new one.
        if varname in ncfile.variables:
            if np.shape(data) != ncfile.variables[varname].shape:
                raise ValueError(f"Shape of data does not match the variable shape for '{varname}'.")
            ncfile.variables[varname][:] = np.array(data)
        else:
            dim_names = tuple(coords.keys())
            if scale_offset_switch:
                scale_factor, add_offset = _calculate_scale_and_offset(np.array(data))
                var = ncfile.createVariable(varname, "i2", dim_names, fill_value=-32767, **kwargs)
                # netCDF4 packs automatically when these attributes are set.
                var.setncattr("scale_factor", scale_factor)
                var.setncattr("add_offset", add_offset)
            else:
                var = ncfile.createVariable(varname, _numpy_to_nc_type(np.asarray(data).dtype), dim_names, **kwargs)
            var[:] = np.array(data)

            # Copy source attributes, never overriding our own packing attrs.
            if isinstance(data, xr.DataArray) and data.attrs:
                for key, value in data.attrs.items():
                    if key not in ["scale_factor", "add_offset", "_FillValue", "missing_value"] or not scale_offset_switch:
                        var.setncattr(key, value)
184
+
185
+
186
def merge(file_list, var_name=None, dim_name=None, target_filename=None):
    """
    Merge variables from multiple NetCDF files along one dimension.

    Variables that contain `dim_name` are concatenated across files;
    variables without it keep the value from the first file that has them.

    Parameters:
        file_list: str or list of str, NetCDF file path(s)
        var_name: None (merge all variables), str (single variable),
            or list of str (several variables)
        dim_name: str, dimension to concatenate along
        target_filename: str, output path (default 'merged.nc')

    Example:
        merge(file_list, var_name='u', dim_name='time', target_filename='merged.nc')
        merge(file_list, var_name=['u', 'v'], dim_name='time', target_filename='merged.nc')
        merge(file_list, var_name=None, dim_name='time', target_filename='merged.nc')
    """
    if target_filename is None:
        target_filename = "merged.nc"
    # BUG FIX: a bare filename has an empty dirname and os.makedirs("")
    # raises; only create the directory when there actually is one.
    target_dir = os.path.dirname(str(target_filename))
    if target_dir and not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if isinstance(file_list, str):
        file_list = [file_list]

    # Resolve which variables to merge.
    if var_name is None:
        with xr.open_dataset(file_list[0]) as first_ds:
            var_names = list(first_ds.variables.keys())
    elif isinstance(var_name, str):
        var_names = [var_name]
    elif isinstance(var_name, list):
        var_names = var_name
    else:
        raise ValueError("var_name must be a string, a list of strings, or None")

    merged_data = {}

    print("Reading file ...")
    for file in file_list:
        with xr.open_dataset(file) as ds:
            # NOTE: loop variable renamed — the original shadowed the
            # var_name parameter here.
            for name in var_names:
                # Load so the data survives closing the dataset.
                var = ds[name].load()
                if dim_name in var.dims:
                    merged_data.setdefault(name, []).append(var)
                elif name not in merged_data:
                    # No merge dimension: keep the first file's value only.
                    merged_data[name] = var

    print("\nMerging data ...")
    for name in merged_data:
        if isinstance(merged_data[name], list):
            merged_data[name] = xr.concat(merged_data[name], dim=dim_name)

    merged_data = xr.Dataset(merged_data)

    print("\nWriting data to file ...")
    if os.path.exists(target_filename):
        print("Warning: The target file already exists.")
        print("Removing existing file ...")
        os.remove(target_filename)
    merged_data.to_netcdf(target_filename)
    print(f'\nFile "{target_filename}" has been created.')
266
+
267
+
268
def _modify_var(nc_file_path, variable_name, new_value):
    """
    Overwrite the values of a variable in a NetCDF file in place.

    Parameters:
        nc_file_path (str): Path to the NetCDF file.
        variable_name (str): Name of the variable to modify.
        new_value (numpy.ndarray): Replacement values (shape must match).

    Example:
        _modify_var('test.nc', 'u', np.random.rand(100, 50))
    """
    try:
        # BUG FIX: use a context manager so the file handle is released
        # even when the assignment raises (the original leaked it).
        with nc.Dataset(nc_file_path, "r+") as dataset:
            dataset.variables[variable_name][:] = new_value
        print(f"Successfully modified variable {variable_name} in {nc_file_path}.")
    except Exception as e:
        print(f"An error occurred while modifying variable {variable_name} in {nc_file_path}: {e}")
292
+
293
+
294
def _modify_attr(nc_file_path, variable_name, attribute_name, attribute_value):
    """
    Add or replace an attribute on a variable in a NetCDF file.

    Parameters:
        nc_file_path (str): Path to the NetCDF file.
        variable_name (str): Name of the target variable.
        attribute_name (str): Attribute to add or replace.
        attribute_value (any): New attribute value.

    Raises:
        RuntimeError: wrapping any underlying failure (missing variable,
            I/O error, ...).

    Example:
        _modify_attr('test.nc', 'temperature', 'long_name', 'Temperature in Celsius')
    """
    try:
        # BUG FIX: use a context manager so the dataset is closed even when
        # an error is raised (the original left it open on exception).
        with nc.Dataset(nc_file_path, "r+") as ds:
            if variable_name not in ds.variables:
                raise ValueError(f"Variable '{variable_name}' not found in the NetCDF file.")

            variable = ds.variables[variable_name]
            if attribute_name in variable.ncattrs():
                print(f"Warning: Attribute '{attribute_name}' already exists. Replacing it.")
            else:
                print(f"Adding attribute '{attribute_name}'...")
            # setncattr both creates and replaces, so one call covers both cases.
            variable.setncattr(attribute_name, attribute_value)
    except Exception as e:
        raise RuntimeError(f"An error occurred: {e}")
324
+
325
+
326
def modify(nc_file, var_name, attr_name=None, new_value=None):
    """
    Modify either a variable's values or one of its attributes in a NetCDF file.

    Parameters:
        nc_file (str): Path to the NetCDF file.
        var_name (str): Name of the variable to modify.
        attr_name (str): Attribute name; when None, the variable data itself
            is replaced with new_value.
        new_value (any): New variable data or attribute value.

    Example:
        modify('test.nc', 'temperature', 'long_name', 'Temperature in Celsius')
        modify('test.nc', 'temperature', None, np.random.rand(100, 50))
    """
    if attr_name is not None:
        _modify_attr(nc_file, var_name, attr_name, new_value)
    else:
        _modify_var(nc_file, var_name, new_value)
345
+
346
+
347
def rename(ncfile_path, old_name, new_name):
    """
    Rename a variable and/or a dimension in a NetCDF file.

    If old_name exists both as a variable and as a dimension, both are
    renamed. Failures are reported, not raised.

    Parameters:
        ncfile_path (str): Path to the NetCDF file.
        old_name (str): Current variable/dimension name.
        new_name (str): New name to assign.

    Example:
        rename('test.nc', 'temperature', 'temp')
    """
    try:
        with nc.Dataset(ncfile_path, "r+") as dataset:
            is_var = old_name in dataset.variables
            is_dim = old_name in dataset.dimensions

            if not (is_var or is_dim):
                print(f"Variable or dimension {old_name} not found in the file.")

            if is_var:
                dataset.renameVariable(old_name, new_name)
                print(f"Successfully renamed variable {old_name} to {new_name}.")

            if is_dim:
                # Refuse to clobber an existing dimension of the target name.
                if new_name in dataset.dimensions:
                    raise ValueError(f"Dimension name {new_name} already exists in the file.")
                dataset.renameDimension(old_name, new_name)
                print(f"Successfully renamed dimension {old_name} to {new_name}.")

    except Exception as e:
        print(f"An error occurred: {e}")
381
+
382
+
383
def check(ncfile: str, delete_switch: bool = False) -> bool:
    """
    Check whether a NetCDF file is readable (not corrupted).

    Opens the file, touches its global attributes and the shape of the
    first variable, and treats any exception — including HDF5 library
    errors — as corruption, without terminating the program.

    Parameters:
        ncfile: path to the NetCDF file
        delete_switch: when True, remove the file if it fails validation

    Returns:
        True if the file exists and validates, False otherwise.
    """
    if not os.path.exists(ncfile):
        print(f"[#ffeac5]Local file missing: [#009d88]{ncfile}")
        # A missing file may simply not have been downloaded yet.
        print("[#d6d9fd]Note: File missing may be normal, this is just to check if the file exists locally.")
        return False

    valid = False
    try:
        with nc.Dataset(ncfile, "r") as ds_verify:
            if not ds_verify.variables:
                print(f"Empty variables: {ncfile}")
            else:
                _ = ds_verify.__dict__  # touch global attributes
                # Sample only the first variable to force metadata access.
                for var in ds_verify.variables.values():
                    _ = var.shape
                    break
                valid = True
    except Exception as e:
        print(f"HDF5 validation failed for {ncfile}: {str(e)}")
        error_type = type(e).__name__
        if "HDF5" in error_type or "h5" in error_type.lower():
            print(f"Critical HDF5 structure error detected in {ncfile}")

    if valid:
        return True

    # Invalid file: optionally delete it, guarding against delete errors.
    if delete_switch:
        try:
            os.remove(ncfile)
            print(f"Removed corrupted: {ncfile}")
        except Exception as del_error:
            print(f"Delete failed: {ncfile} - {str(del_error)}")
    return False
434
+
435
+
436
def convert_longitude(ds, lon_name="longitude", convert=180):
    """
    Convert a dataset's longitude coordinate to a chosen convention.

    Parameters:
        ds (xarray.Dataset): Dataset holding the longitude coordinate.
        lon_name (str): Name of the longitude coordinate (default "longitude").
        convert (int): Target convention, 180 (i.e. [-180, 180)) or
            360 (i.e. [0, 360)); default 180.

    Returns:
        xarray.Dataset: Dataset with converted longitudes, sorted by longitude.
    """
    target = int(convert)
    if target == 180:
        new_lon = (ds[lon_name] + 180) % 360 - 180
    elif target == 360:
        new_lon = (ds[lon_name] + 360) % 360
    else:
        raise ValueError("convert value must be '180' or '360'")
    return ds.assign_coords({lon_name: new_lon}).sortby(lon_name)
459
+
460
+
461
def isel(ncfile, dim_name, slice_list):
    """
    Select data from a NetCDF file by integer indices along one dimension.

    Parameters:
        ncfile: str, path to the NetCDF file
        dim_name: str, dimension to index
        slice_list: list (possibly nested) of integer indices; it is
            flattened before use.

    Returns:
        xarray.Dataset: the selected subset, loaded into memory.

    Example:
        slice_list = [[y*12+m for m in range(11,14)] for y in range(84)]
        slice_list = [y * 12 + m for y in range(84) for m in range(11, 14)]
        isel(ncfile, 'time', slice_list)
    """
    indices = [int(i) for i in np.array(slice_list).flatten()]
    with xr.open_dataset(ncfile) as ds:
        # BUG FIX: load before the file closes; the original returned a
        # lazy view onto an already-closed dataset.
        ds_new = ds.isel(**{dim_name: indices}).load()
    return ds_new
482
+
483
+
484
def draw(output_dir=None, dataset=None, ncfile=None, xyzt_dims=("longitude", "latitude", "level", "time"), plot_type="contourf", fixed_colorscale=False):
    """
    Plot the variables of an xarray dataset or a NetCDF file.

    Parameters:
        output_dir: str, directory for the output images (default: cwd)
        dataset: xarray.Dataset, data to plot (takes precedence over ncfile)
        ncfile: str, path to a NetCDF file to plot when no dataset is given
        xyzt_dims: list/tuple of the x, y, z, t dimension names
        plot_type: str, "contourf" or "contour" (default "contourf")
        fixed_colorscale: bool, use a fixed color scale (default False)

    Example:
        draw(ncfile='test.nc', xyzt_dims=("longitude", "latitude", "level", "time"))
    """
    if output_dir is None:
        output_dir = str(os.getcwd())

    if not isinstance(xyzt_dims, (list, tuple)):
        raise ValueError("xyzt_dims must be a list or tuple")
    xyzt_dims = tuple(xyzt_dims)

    if dataset is not None:
        func_plot_dataset(dataset, output_dir, xyzt_dims, plot_type, fixed_colorscale)
    elif ncfile is None:
        print("No dataset or file provided.")
    elif check(ncfile):
        # Only open files that pass the integrity check.
        ds = xr.open_dataset(ncfile)
        func_plot_dataset(ds, output_dir, xyzt_dims, plot_type, fixed_colorscale)
    else:
        print(f"Invalid file: {ncfile}")
519
+
520
+
521
if __name__ == "__main__":
    # Smoke test: append a random 100x50 field named "data" to test.nc,
    # with linearly spaced "time" and "lev" coordinates.
    data = np.random.rand(100, 50)
    save(r"test.nc", data, "data", {"time": np.linspace(0, 120, 100), "lev": np.linspace(0, 120, 50)}, "a")
oafuncs/oa_python.py ADDED
@@ -0,0 +1,108 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-10-11 21:02:07
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-11-21 10:59:53
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_python.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+ import os
17
+ from rich import print
18
+
19
+ __all__ = ['install_lib', 'upgrade_lib']
20
+
21
+
22
def install_lib(libs=None, python_exe='python'):
    """
    Install third-party libraries with pip, skipping ones already installed.

    Parameters:
        libs: list of str, packages to install; None installs a built-in
            list of commonly used scientific/ocean packages.
        python_exe: str, python executable used to run pip (on Windows you
            may copy python.exe to e.g. python312.exe and pass 'python312').
    """
    os.system(python_exe + " -m ensurepip")
    os.system(python_exe + " -m pip install --upgrade pip")
    if libs is None:
        libs = [
            # "oafuncs",  # this package itself — do not touch from here
            "requests",  # web requests
            "xlwt",  # excel files
            "xlrd",  # excel files
            "openpyxl",  # excel files
            "netCDF4",  # nc files
            "numpy",  # arrays
            "pandas",  # dataframes
            "xarray",  # labelled arrays
            "scipy",  # scientific computing
            # "scikit-learn",  # machine learning
            "matplotlib",  # plotting
            # "seaborn",
            "imageio",  # images
            # "pylustrator",  # plotting
            "Cartopy",  # maps (pip-installable since python 3.11 support)
            "seawater",  # ocean calculations
            "cmaps",  # colormaps
            "colorcet",  # colormaps
            "cmasher",  # colormaps
            "tqdm",  # progress bars
            # "taichi",  # acceleration
            "icecream",  # debug printing
            # "pyperclip",  # system clipboard
            "rich",  # rich terminal output
            # "stratify",  # vertical interpolation for atmosphere/ocean data
            "dask",  # parallel computing
            "bs4",  # web scraping
            "pathlib",  # paths
            "opencv-contrib-python",  # image processing
            # "pydap",  # remote data access for xarray
            "gsw",  # ocean calculations
            "global_land_mask",  # land/ocean mask
            # "cfgrib",  # grib files
            # "ecmwflibs",  # grib files (needed together with cfgrib)
            "geopandas",  # vector data, shp files
            # "geopy",  # geolocation
            # "flask",  # web framework
            "cdsapi",  # data download (ERA5)
            # the following are less important
            "lxml",  # web parsing
            "keyboard",  # keyboard control
            "zhdate",  # Chinese lunar calendar
            "python-pptx",  # ppt
            "python-docx",  # word
            "ipywidgets",  # jupyter progress-bar widgets
            "salem",  # map projections, partial wrf-python replacement
            "meteva",  # meteorological toolkit by the China Meteorological Administration
            "wget",  # downloads
            "pyautogui",  # mouse/keyboard automation
        ]
    try:
        # BUG FIX: the original tested `lib in installed_libs` against the
        # raw `pip list` text, a substring match with false positives
        # (e.g. "wget" matches "pywget"). Parse the name column instead,
        # skipping the two header lines; pip names are case-insensitive.
        pip_list_output = os.popen(python_exe + ' -m pip list').read()
        installed = {
            line.split()[0].lower()
            for line in pip_list_output.splitlines()[2:]
            if line.strip()
        }
        lib_num = len(libs)
        for i, lib in enumerate(libs):
            # Skip libraries that are already installed.
            if lib.lower() in installed:
                print(lib, "早已安装")
                continue
            os.system(python_exe + " -m " + "pip install " + lib)
            print('-'*100)
            print("安装成功", lib, "({}/{})".format(i+1, lib_num))
            print('-'*100)
    except Exception as e:
        print("安装失败:", str(e))
97
+
98
+
99
def upgrade_lib(libs=None, python_exe='python'):
    """
    Upgrade the given libraries (or every installed library) with pip.

    Parameters:
        libs: list of str, packages to upgrade; None upgrades everything
            currently reported by `pip list`.
        python_exe: str, python executable used to run pip.
    """
    if libs is None:
        # BUG FIX: the original assigned the raw `pip list` output string
        # to libs, so the loop iterated over single CHARACTERS and invoked
        # pip once per character. Parse the package-name column instead,
        # skipping the two header lines of `pip list`.
        pip_list_output = os.popen(python_exe + ' -m pip list').read()
        libs = [
            line.split()[0]
            for line in pip_list_output.splitlines()[2:]
            if line.strip()
        ]
    try:
        for lib in libs:
            os.system(python_exe + " -m " + "pip install --upgrade " + lib)
        print("升级成功")
    except Exception as e:
        print("升级失败:", str(e))
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-09-17 16:09:20
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-14 18:12:12
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\__init__.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+ # from .love_ocean import sign as love_ocean
17
+ # from .meteorological_home import sign as meteorological_home
18
+
19
+ from .ocean import *
20
+ from .meteorological import *
21
+ from .scientific import *