shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/3D/__init__.py +25 -0
- shancx/Algo/Class.py +11 -0
- shancx/Algo/CudaPrefetcher1.py +112 -0
- shancx/Algo/Fake_image.py +24 -0
- shancx/Algo/Hsml.py +391 -0
- shancx/Algo/L2Loss.py +10 -0
- shancx/Algo/MetricTracker.py +132 -0
- shancx/Algo/Normalize.py +66 -0
- shancx/Algo/OptimizerWScheduler.py +38 -0
- shancx/Algo/Rmageresize.py +79 -0
- shancx/Algo/Savemodel.py +33 -0
- shancx/Algo/SmoothL1_losses.py +27 -0
- shancx/Algo/Tqdm.py +62 -0
- shancx/Algo/__init__.py +121 -0
- shancx/Algo/checknan.py +28 -0
- shancx/Algo/iouJU.py +83 -0
- shancx/Algo/mask.py +25 -0
- shancx/Algo/psnr.py +9 -0
- shancx/Algo/ssim.py +70 -0
- shancx/Algo/structural_similarity.py +308 -0
- shancx/Algo/tool.py +704 -0
- shancx/Calmetrics/__init__.py +97 -0
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
- shancx/Calmetrics/rmseR2score.py +35 -0
- shancx/Clip/__init__.py +50 -0
- shancx/Cmd.py +126 -0
- shancx/Config_.py +26 -0
- shancx/Df/DataFrame.py +11 -2
- shancx/Df/__init__.py +17 -0
- shancx/Df/tool.py +0 -0
- shancx/Diffm/Psamples.py +18 -0
- shancx/Diffm/__init__.py +0 -0
- shancx/Diffm/test.py +207 -0
- shancx/Doc/__init__.py +214 -0
- shancx/E/__init__.py +178 -152
- shancx/Fillmiss/__init__.py +0 -0
- shancx/Fillmiss/imgidwJU.py +46 -0
- shancx/Fillmiss/imgidwLatLonJU.py +82 -0
- shancx/Gpu/__init__.py +55 -0
- shancx/H9/__init__.py +126 -0
- shancx/H9/ahi_read_hsd.py +877 -0
- shancx/H9/ahisearchtable.py +298 -0
- shancx/H9/geometry.py +2439 -0
- shancx/Hug/__init__.py +81 -0
- shancx/Inst.py +22 -0
- shancx/Lib.py +31 -0
- shancx/Mos/__init__.py +37 -0
- shancx/NN/__init__.py +235 -106
- shancx/Path1.py +161 -0
- shancx/Plot/GlobMap.py +276 -116
- shancx/Plot/__init__.py +491 -1
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
- shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/RdPzl/__init__.py +32 -0
- shancx/Read.py +72 -0
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +62 -123
- shancx/Time/GetTime.py +9 -3
- shancx/Time/__init__.py +66 -1
- shancx/Time/timeCycle.py +302 -0
- shancx/Time/tool.py +0 -0
- shancx/Train/__init__.py +74 -0
- shancx/Train/makelist.py +187 -0
- shancx/Train/multiGpu.py +27 -0
- shancx/Train/prepare.py +161 -0
- shancx/Train/renet50.py +157 -0
- shancx/ZR.py +12 -0
- shancx/__init__.py +333 -262
- shancx/args.py +27 -0
- shancx/bak.py +768 -0
- shancx/df2database.py +62 -2
- shancx/geosProj.py +80 -0
- shancx/info.py +38 -0
- shancx/netdfJU.py +231 -0
- shancx/sendM.py +59 -0
- shancx/tensBoard/__init__.py +28 -0
- shancx/wait.py +246 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- my_timer_decorator/__init__.py +0 -10
- shancx/Dsalgor/__init__.py +0 -19
- shancx/E/DFGRRIB.py +0 -30
- shancx/EN/DFGRRIB.py +0 -30
- shancx/EN/__init__.py +0 -148
- shancx/FileRead.py +0 -44
- shancx/Gray2RGB.py +0 -86
- shancx/M/__init__.py +0 -137
- shancx/MN/__init__.py +0 -133
- shancx/N/__init__.py +0 -131
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/glob_nation_map.py +0 -116
- shancx/Plot/radar_nmc.py +0 -61
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/Plot/radar_nmc_china_map_f.py +0 -121
- shancx/Plot/radar_nmc_us_map_f.py +0 -128
- shancx/Plot/subplots_compare_devlop.py +0 -36
- shancx/Plot/subplots_single_china_map.py +0 -45
- shancx/S/__init__.py +0 -138
- shancx/W/__init__.py +0 -132
- shancx/WN/__init__.py +0 -132
- shancx/code.py +0 -331
- shancx/draw_day_CR_PNG.py +0 -200
- shancx/draw_day_CR_PNGUS.py +0 -206
- shancx/draw_day_CR_SVG.py +0 -275
- shancx/draw_day_pre_PNGUS.py +0 -205
- shancx/makenetCDFN.py +0 -42
- shancx/mkIMGSCX.py +0 -92
- shancx/netCDF.py +0 -130
- shancx/radar_nmc_china_map_compare1.py +0 -50
- shancx/radar_nmc_china_map_f.py +0 -125
- shancx/radar_nmc_us_map_f.py +0 -67
- shancx/subplots_compare_devlop.py +0 -36
- shancx/tool.py +0 -18
- shancx/user/H8mess.py +0 -317
- shancx/user/__init__.py +0 -137
- shancx/user/cinradHJN.py +0 -496
- shancx/user/examMeso.py +0 -293
- shancx/user/hjnDAAS.py +0 -26
- shancx/user/hjnFTP.py +0 -81
- shancx/user/hjnGIS.py +0 -320
- shancx/user/hjnGPU.py +0 -21
- shancx/user/hjnIDW.py +0 -68
- shancx/user/hjnKDTree.py +0 -75
- shancx/user/hjnLAPSTransform.py +0 -47
- shancx/user/hjnMiscellaneous.py +0 -182
- shancx/user/hjnProj.py +0 -162
- shancx/user/inotify.py +0 -41
- shancx/user/matplotlibMess.py +0 -87
- shancx/user/mkNCHJN.py +0 -623
- shancx/user/newTypeRadar.py +0 -492
- shancx/user/test.py +0 -6
- shancx/user/tlogP.py +0 -129
- shancx/util_log.py +0 -33
- shancx/wtx/H8mess.py +0 -315
- shancx/wtx/__init__.py +0 -151
- shancx/wtx/cinradHJN.py +0 -496
- shancx/wtx/colormap.py +0 -64
- shancx/wtx/examMeso.py +0 -298
- shancx/wtx/hjnDAAS.py +0 -26
- shancx/wtx/hjnFTP.py +0 -81
- shancx/wtx/hjnGIS.py +0 -330
- shancx/wtx/hjnGPU.py +0 -21
- shancx/wtx/hjnIDW.py +0 -68
- shancx/wtx/hjnKDTree.py +0 -75
- shancx/wtx/hjnLAPSTransform.py +0 -47
- shancx/wtx/hjnLog.py +0 -78
- shancx/wtx/hjnMiscellaneous.py +0 -201
- shancx/wtx/hjnProj.py +0 -161
- shancx/wtx/inotify.py +0 -41
- shancx/wtx/matplotlibMess.py +0 -87
- shancx/wtx/mkNCHJN.py +0 -613
- shancx/wtx/newTypeRadar.py +0 -492
- shancx/wtx/test.py +0 -6
- shancx/wtx/tlogP.py +0 -129
- shancx-1.8.92.dist-info/RECORD +0 -99
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/bak.py
ADDED
@@ -0,0 +1,768 @@
import netCDF4 as nc
import numpy as np

def getPoint(pre, df, lat0, lon0, resolution, decimal=1):
    latIdx = ((lat0 - df["Lat"]) / resolution + 0.5).astype(np.int64)
    lonIdx = ((df["Lon"] - lon0) / resolution + 0.5).astype(np.int64)
    return pre[..., latIdx, lonIdx].round(decimals=decimal)

def Get_Lat_Lon_QPF(path, Lon_data, Lat_data):
    with nc.Dataset(path) as dataNC:
        latArr = dataNC["lat"][:]
        lonArr = dataNC["lon"][:]
        if "AIW_QPF" in path:
            pre = dataNC[list(dataNC.variables.keys())[3]][:]
        elif "AIW_REF" in path:
            pre = dataNC[list(dataNC.variables.keys())[4]][:]
        data = getPoint(pre, {"Lon": Lon_data, "Lat": Lat_data}, latArr[0], lonArr[0], 0.01)
        return data
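# A minimal usage sketch of getPoint (illustrative only: the grid origin, resolution and station
# coordinates below are hypothetical, and assume latitude decreasing from lat0 and longitude
# increasing from lon0, as in Get_Lat_Lon_QPF above):
"""
import numpy as np
import pandas as pd
pre = np.random.rand(2, 500, 800) * 50                      # (time, lat, lon) field on a 0.01-degree grid
stations = pd.DataFrame({"Lat": [39.50, 38.75], "Lon": [111.20, 112.05]})
vals = getPoint(pre, stations, lat0=40.0, lon0=110.0, resolution=0.01)
print(vals.shape)                                           # (2, 2): time steps x stations
"""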
""" pip index setup
mkdir .pip (in your home directory), cd into it, vim pip.conf, paste the following and save:
[global]
index_url=https://pypi.tuna.tsinghua.edu.cn/simple
"""
"""
zoom interpolation
from scipy.ndimage import zoom
d = zoom(d_clip, [4201/169, 6201/249], order=1)[:-1, :-1]
"""
"""
import multiprocessing
multiprocessing.set_start_method('fork', force=True)  # fork or spawn
"""
""" Region clipping
import xarray as xr
ds = xr.open_dataset(a)
# Define the lat/lon bounds
# lon_min, lon_max = 72.0, 136.96
# lat_min, lat_max = 6.04, 54.0
ds = ds.sortby('latitude')
lon_min, lon_max = 73, 134.99
lat_min, lat_max = 12.21, 54.2  # [73, 134.99, 12.21, 54.2]
# Subset the data
subset = ds.sel(longitude=slice(lon_min, lon_max), latitude=slice(lat_min, lat_max))
H9 = subset["data"][::-1, :]

longitude_values = subset['longitude'].values
latitude_values = subset['latitude'].values

print("Clipped longitude range:", longitude_values.min(), longitude_values.max())
print("Clipped latitude range:", latitude_values.min(), latitude_values.max())

# Clipped data info
data_values = subset['data'].values
data_attrs = subset['data'].attrs

print("Clipped data shape:", subset['data'].shape)
print("Clipped data values:", data_values)
print("Data attributes:", data_attrs)
"""
### Backfill driver (recompute historical times)
"""
from main import makeAll, options
from multiprocessing import Pool
import datetime
from config import logger, output
import time
import pandas as pd
import os
from itertools import product
import threading
from shancx import Mul_sub

def excuteCommand(conf):
    cmd = conf[0]
    print(cmd)
    os.system(cmd)

if __name__ == '__main__':
    cfg = options()
    isPhase = cfg.isPhase
    isDebug = cfg.isDebug
    sepSec = cfg.sepSec
    gpu = cfg.gpu
    pool = cfg.pool
    isOverwrite = cfg.isOverwrite
    timeList = pd.date_range(cfg.times[0], cfg.times[-1], freq=f"{sepSec}s")
    logger.info(f"time range check {timeList}")
    gpuNum = 2
    eachGPU = 4
    makeListUTC = []
    for UTC in timeList:
        UTCStr = UTC.strftime("%Y%m%d%H%M")
        outpath = f"{output}/{UTCStr[:4]}/{UTCStr[:8]}/MSP2_WTX_AIW_QPF_L88_CHN_{UTCStr}_00000-00300-00006.nc"
        if not os.path.exists(outpath) or not os.path.exists(outpath.replace("_QPF_", "_REF_")) or isOverwrite:
            makeListUTC.append(UTC)
    [print(element) for element in makeListUTC]
    phaseCMD = "--isPhase" if isPhase else ""
    debugCMD = "--isDebug" if isDebug else ""
    OverwriteCMD = "--isOverwrite"
    gpuCMD = f"--gpu={gpu}"
    # cmdList = list(map(lambda x: f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {OverwriteCMD} {gpuCMD}", makeListUTC))
    cmdList = list(map(lambda x: f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {gpuCMD}", makeListUTC))
    if cmdList:
        Mul_sub(excuteCommand, [cmdList], pool)
    else:
        print("cmdList is empty, skipping the call.")
        raise ValueError("cmdList is empty, cannot execute command.")

CUDA_LAUNCH_BLOCKING=1 python makeHis.py --times 202410010048,202410110048 --gpu=0 --isDebug --sepSec 3600 --pool 5
CUDA_LAUNCH_BLOCKING=1 python makeHis1.py --times 202410010048,202410110048 --gpu=0 --isDebug --sepSec 3600 --pool 5
"""
"""
import shutil
def GetMulData(conf):
    UTC = conf[0]
    UTCStr = UTC.strftime("%Y%m%d%H%M%S")
    outpath = f"{GradarNA}/{UTCStr[:4]}/{UTCStr[:8]}/"
    # if os.path.exists(outpath):
    #     print(f"outpath {outpath} already exists")
    #     return
    path = f"{GLobradar}/{UTCStr[:4]}/{UTCStr[:8]}/CR_NA_{UTCStr[:12]}.nc"
    if not os.path.exists(path):
        print(f"path {path} does not exist")
        return False
    else:
        crDir(outpath)
        try:
            shutil.copy(path, outpath)  # keeps the original file name
            print(f"File copied to: {outpath}")
            return True
        except Exception as e:
            print(f"Copy failed: {e}")
            return False
"""
### Loop to generate daily reports
"""
#!/bin/bash
start_date="20241001"
end_date="20241101"
tag="scx/MQPF_Gan5_default_1112N"
current_date=$(date -d "$start_date" +%Y%m%d)
end_date=$(date -d "$end_date" +%Y%m%d)
while [ "$current_date" != "$end_date" ]; do
    start_time="$current_date"0000
    end_time="$current_date"2359
    python makeDOC_newv2.py --times $start_time,$end_time --tag $tag
    current_date=$(date -d "$current_date + 1 day" +%Y%m%d)
done
python makeDOC_newv2.py --times $end_date"0000",$end_date"2359" --tag $tag
"""
"""
file name: launch.json
args:

{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Current File",
            "type": "debugpy",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "cwd": "${fileDirname}",
            "purpose": ["debug-in-terminal"],
            "justMyCode": false,
            "args": [
                "--times", "202410010042,202410020042",
                "--isDebug",
                "--isOverwrite",
                "--sepSec", "3600",
                "--gpu", "0"
            ]
        }
    ]
}

{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "VAE: Train SEVIR-LR",
            "type": "debugpy",
            "request": "launch",
            "program": "${workspaceFolder}/scripts/vae/sevirlr/train.py",
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}",
            "purpose": ["debug-in-terminal"],
            "justMyCode": false,
            "python": "/home/scx/miniconda3/envs/mqpf/bin/python",
            "args": [
                "--save", "vae_sevirlr_train",
                "--gpus", "1",
                "--cfg", "${workspaceFolder}/scripts/vae/sevirlr/cfg.yaml"
            ]
        }
    ]
}
"""
"""
import importlib
def get_obj_from_str(class_path: str):
    module_name, class_name = class_path.rsplit('.', 1)
    module = importlib.import_module(module_name)
    return getattr(module, class_name)

config = {
    "target": "torch.nn.Linear",   # class path
    "params": {                    # parameter dict
        "in_features": 128,
        "out_features": 64
    }
}

# Instantiate the object dynamically from the config dict
target_class = get_obj_from_str(config["target"])      # resolve the class (torch.nn.Linear)
model = target_class(**config.get("params", dict()))   # instantiate with the unpacked parameters

# Print the result
print(model)

import torch
import torch.nn as nn
linear = nn.Linear(in_features=128, out_features=64, bias=True)  # equivalent to passing the params from the config dict
"""
"""
ImportError: /lib64/libc.so.6: version `GLIBC_2.28' not found (required by /home/scx1/miniconda3/envs/mqpf/lib/python3.10/site-packages/lxml/etree.cpython-310-x86_64-linux-gnu.so)
pip uninstall lxml
pip install lxml
"""
"""
001 key: "ee90f313-17b2-4e3d-84b8-3f9c290fa596"
002 far_po "f490767c-27bc-4424-9c75-2b33644171e2"
003 data monitoring "4c43f4bd-d984-416d-ac82-500df5e3ed86"
sendMESplus("test data", base=user_info)
"""
'''
from multiprocessing import Pool
'''
'''
## Define a StreamHandler
# print_handler = logging.StreamHandler()
# print_handler.setFormatter(formatter)
# loggers.addHandler(print_handler)
'''
'''
# @Time   : 2023/09/27 8:52 PM
# @Author : shanchangxi
# @File   : util_log.py
import time
import logging
from logging import handlers

logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_name = 'project_tim_tor.log'
logfile = log_name
time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
time_rotating_file_handler.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
time_rotating_file_handler.setFormatter(formatter)
logger.addHandler(time_rotating_file_handler)
print_handler = logging.StreamHandler()
print_handler.setFormatter(formatter)
logger.addHandler(print_handler)
'''
'''
### Fix: install torch==2.4.0, torchvision and torchaudio together (with Python 3.12) to resolve CUDA failing to start
ResNet-style network
'''
'''
import concurrent.futures
from itertools import product
def task(args):
    args1, args2 = args
    print(f"Task ({args1}, {args2}), result")
    return (args1, args2, 5)

def Mul_sub(task, pro):
    product_list = product(*pro)
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(task, item) for item in product_list]
        results = [future.result() for future in concurrent.futures.as_completed(futures)]
    return results

res = Mul_sub(task, [[1, 23, 4, 5], ["n"]])
print("res")
print(res)
'''
"""
find /mnt/wtx_weather_forecast/scx/SpiderGLOBPNGSource -type f -name "*.png" -mtime +3 -exec rm {} \;
The number after -mtime is a count of days.
+n means "more than n days ago", i.e. match files whose last modification time is more than n days in the past.
"""
"""
from shancx.SN import UserManager, sendMESplus
from shancx._info import users
M = UserManager(info=users)
user_info = M.get_user("003")
sendMESplus("test data", base=user_info)
"""
"""
https://api.map.baidu.com/lbsapi/getpoint/index.html   coordinate picker
[global]
index-url = https://pypi.tuna.tsinghua.edu.cn/simple   # pip.conf
python setup.py sdist bdist_wheel
twine upload dist/*
"""
""" Use together with a loop
for key, value in dictflag.items():
    try:
        pac = all_df1[all_df1['PAC'].str.startswith(f'{key}')]
        acctoal, acctoalEC, matEC, mat, rate_Lift_ratiotsEC, outpath = metriacfunall(pac)
        if not matEC.shape == (2, 2):
            continue
        docdataset = mkdataset2TS(acctoal, acctoalEC, matEC, mat, rate_Lift_ratiotsEC, outpath)
    except Exception as e:
        print(traceback.format_exc())
        continue
"""
"""
cuda-version              11.8       hcce14f8_3
cudatoolkit               11.8.0     h6a678d5_0
cudnn                     8.9.2.26   cuda11_0
nvidia-cuda-cupti-cu12    12.1.105   pypi_0   pypi
nvidia-cuda-nvrtc-cu12    12.1.105   pypi_0   pypi
nvidia-cuda-runtime-cu12  12.1.105   pypi_0   pypi
nvidia-cudnn-cu12         8.9.2.26   pypi_0   pypi
mqpf     conda install pytorch torchvision torchaudio cudatoolkit=11.8 -c pytorch
conda install cudnn=8.9.2.26 cudatoolkit=11.8
resunet  pip install torch==2.4.0 torchvision torchaudio
conda install cudnn==8.9.2.26 cudatoolkit==11.8.0
conda install pytorch=2.2.2 torchvision torchaudio cudatoolkit=11.8 -c pytorch
resunet  pip install torch==2.4.0 torchvision torchaudio
pip install protobuf==3.20

my-envmf1:
torch          2.3.0   pypi_0  pypi
torchvision    0.18.0  pypi_0  pypi

RES:
torch          2.4.0   pypi_0     pypi
torchaudio     2.2.2   py311_cpu  pytorch
torchsummary   1.5.1   pypi_0     pypi
torchvision    0.19.0  pypi_0     pypi

mqpf:
torch            2.3.1   pypi_0  pypi
torchaudio       2.3.1   pypi_0  pypi
torchvision      0.18.1  pypi_0  pypi
onnxruntime-gpu  1.16.0
onnx             1.15.0
numpy            1.26.4

vllm:
torch        2.1.2         pypi_0  pypi
torchvision  0.15.1+cu118  pypi_0  pypi
vllm         0.2.7         pypi_0  pypi

import torch
print("CUDA available:", torch.cuda.is_available())
print("CUDA version:", torch.version.cuda)
print("GPU device:", torch.cuda.get_device_name(0) if torch.cuda.is_available() else "No GPU")
nvidia-smi
nvcc --version
The system detects the physical GPU (NVIDIA GeForce RTX 4090) and the NVIDIA driver, and CUDA 12.1 is installed, yet PyTorch does not detect the GPU. The likely causes are a PyTorch build that is incompatible with the CUDA driver, or environment variables that are not configured correctly.

pip install torch==2.3.1 torchvision==0.18.1

conda install -c conda-forge cudatoolkit=11.8 --force-reinstall   # fixes the error below
ls $CONDA_PREFIX/lib/libcublasLt.so.11
:ProviderLibrary::Get() [ONNXRuntimeError] : 1 : FAIL : Failed to load library libonnxruntime_providers_cuda.so with error: libcublasLt.so.11: cannot open shared object file: No such file or directory
export LD_LIBRARY_PATH=$CONDA_PREFIX/lib:$LD_LIBRARY_PATH
"""
"""
conda env export > environment.yml
conda env create -f /path/to/destination/environment.yml
conda activate your_env_name

conda install -c conda-forge conda-pack
conda pack -n aiw -o my_env.tar.gz
mkdir -p my_env
tar -xzf my_env.tar.gz -C my_env
source my_env/bin/activate
"""
"""
Scheduled tasks (cron)

MAILTO="shanhe12@163.com"
"""
"""
vgg_loss = VGGLoss(weights_path="/mnt/wtx_weather_forecast/scx/stat/sat/sat2radar/vgg19-dcbb9e9d.pth").to(device)
SAMloss = SAMLoss(model_type='vit_b', checkpoint_path='/mnt/wtx_weather_forecast/scx/stat/sat/sat2radar/sam_vit_b_01ec64.pth.1').to(device)
"""

"""
sdata = xr.open_dataset(sat_paths)
sdata["time"] = sUTC
edata = xr.open_dataset(sat_pathe)
edata["time"] = UTC
sdata = sdata.assign_coords(time=sUTC)
edata = edata.assign_coords(time=UTC)
Adding a dimension and updating an existing coordinate:
sdata = xr.open_dataset(sat_paths).rename({"time": "old_time"})
edata = xr.open_dataset(sat_pathe).rename({"time": "old_time"})
# Now the new time coordinate can be added safely
sdata = sdata.assign_coords(time=sUTC)
edata = edata.assign_coords(time=UTC)
UTC = datetime.datetime.strptime(self.nowDate, "%Y%m%d%H%M")  # mind the time format
"""
"""
# sudo mkdir -p /mnt/wtx_weather_forecast/GeoEnvData/rawData/MeteoL/Himawari/H9
# sudo mount -t nfs nfs.300s.ostor:/mnt/ifactory_public/AWS_data/AWS_data/Himawari /mnt/wtx_weather_forecast/GeoEnvData/rawData/MeteoL/Himawari/H9
"""

"""
groups
sudo gpasswd -d user sudo        # remove user "user" from the sudo group
id
sudo usermod -u 1001 user
sudo usermod -g 1001 user
sudo chown -R newuser:newgroup directory/

sudo find / -user 1015 -exec chown 1001 {} \;

more /etc/passwd
vim /etc/passwd                  # edit /etc/passwd directly
"""
"""
latArr = np.linspace(env.n, env.s, int(round((env.n - env.s) / 0.02)) + 1)
lonArr = np.linspace(env.w, env.e, int(round((env.e - env.w) / 0.02)) + 1)
"""
"""
find /mnt/wtx_weather_forecast/SAT/H9/Radar_ncSEAS/trainNN/2025/ -mindepth 2 -maxdepth 2 -type d
find /mnt/wtx_weather_forecast/SAT/H9/Radar_ncSEAS/trainNN/2025/ -mindepth 2 -maxdepth 2 -type d -exec rm -rf {} +
find /mnt/wtx_weather_forecast/SAT/H9/Radar_ncSEAS/trainNN/2025/ -mindepth 2 -maxdepth 2 -type d -not -name "important" -exec rm -rf {} +
find /mnt/wtx_weather_forecast/SAT/H9/Radar_ncSEAS/trainNN/2025/202[0-9][0-9][0-9][0-9]/ -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} +
"""
"""
sudo chmod -R 777 /mnt/wtx_weather_forecast/scx/MSG/MSG_Data
"""
"""
import os
import numpy as np
import pandas as pd
import glob
import datetime
from hjnwtx.mkNCHJN import mkDir
from shancx import Mul_sub_S, Mul_sub
from shancx.Plot import plotRadar, plotMat, plotA2b
from shancx.Time import gen_dt

# from shancx.Time import timeCycle
from shancx import crDir
from config import staMSGtrain0611, crMSGtrain0611
from shancx.wait import check_nans
# getcheckdata is moved to module top level
satflag = "MSG"
def getcheckdata(conf):
    iph = conf[0]
    radar_dir_path = conf[1]
    sat_imin = conf[2]
    try:
        satdata = np.load(iph)
        radarpth = glob.glob(f"{radar_dir_path}/{sat_imin[:4]}/{sat_imin[:8]}/CR_{iph.split('/')[-1][4:-4]}*.npy")[0]
        radardata = np.load(radarpth)
        if radardata.shape != (1, 256, 256) or satdata.shape != (6, 256, 256):
            return
        if np.nanmean(radardata) > 20 or np.nanmean(satdata) > 280:
            plotMat(satdata[0], name=f"satdata{satflag}_{sat_imin}")
            plotRadar(satdata[0], name=f"radar{satflag}_{sat_imin}")
            return
        flagnan = check_nans(satdata, threshold=0)
        if flagnan:
            # plotA2b(satdata[:3], satdata[3:])
            radio = np.isnan(satdata).sum() / satdata.size
            if radio > 0.0001 and radio < 0.01:
                plotA2b(satdata[:3], satdata[3:], saveDir="plotA2bN")
            return

        df = pd.DataFrame({'sat_path': [iph], 'radar_path': [radarpth]})
        return df
    except Exception as e:
        print(f"{iph} could not be loaded: {e}")
        return None
def generateList(conf):
    sat_dir_path, radar_dir_path, sat_imin = conf
    if True:
        satpath = glob.glob(f"{sat_dir_path}/{sat_imin[:4]}/{sat_imin[:8]}/SAT_{sat_imin}_*.npy")
        satpath.sort()
        if satpath:
            datas = []
            for path in satpath:
                data = getcheckdata((path, radar_dir_path, sat_imin))
                datas.append(data)
            datass = [i for i in datas if i is not None]
            if datass:
                df = pd.concat(datass)
                return df
        else:
            return None
import datetime as dt
import pandas as pd
def ldom(d):  # last day of month
    if d.month == 12:
        return d.replace(year=d.year+1, month=1, day=1) - dt.timedelta(days=1)
    return d.replace(month=d.month+1, day=1) - dt.timedelta(days=1)

def ldomN(d):  # day numbers found under the most recent date folders on disk
    USTstr = d.strftime('%Y%m%d%H%M')
    datag = f"{sat_dir_path}/{USTstr[:4]}/{USTstr[:6]}*"
    datapath = glob.glob(datag)[-2:]
    datar = []
    if datapath:
        for i in datapath:
            daytime1 = i.split("/")[-1]
            daytime = datetime.datetime.strptime(daytime1, "%Y%m%d")
            daytime = daytime.day
            datar.append(daytime)
        return datar

def gen_dt(s, e, t="trn", freq='30min'):  # generate dates
    dr = pd.date_range(start=s, end=e, freq=freq)
    res = []
    for d in dr:
        me = ldomN(d) if t == "val" else ldom(d)
        is_me = d.day in me if t == "val" else d.day == me.day   # ldomN returns a list of days, ldom a date
        if (t == "trn" and not is_me) or (t == "val" and is_me):
            res.append(d.strftime('%Y%m%d%H%M'))
    return res
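# Quick check of the month-end helpers feeding the split above (arbitrary example dates):
print(ldom(dt.datetime(2024, 2, 10)))    # 2024-02-29 00:00:00 (leap-year February)
print(ldom(dt.datetime(2024, 12, 5)))    # 2024-12-31 00:00:00 (December rolls over the year boundary)
# gen_dt(..., t="trn") then drops month-end timestamps, while t="val" keeps only the days
# that ldomN finds under the most recent date folders on disk.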
import argparse
import datetime
import pandas as pd
def options():
    parser = argparse.ArgumentParser(description='examdatabasedata')
    parser.add_argument('--times', type=str, default='202501010000,202506060000')
    parser.add_argument('--flag', type=str, default='val')
    config = parser.parse_args()
    print(config)
    config.times = config.times.split(",")
    if len(config.times) == 1:
        config.times = [config.times[0], config.times[0]]
    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
    return config

if __name__ == '__main__':
    cfg = options()
    sUTC = cfg.times[0]
    eUTC = cfg.times[-1]
    flag = cfg.flag
    sat_dir_path = staMSGtrain0611
    radar_dir_path = f"{crMSGtrain0611}_256"
    timelist = gen_dt(sUTC, eUTC, t=f"{flag}")
    # start_time = datetime.datetime(2024, 6, 5, 1)
    # end_time = datetime.datetime(2024, 6, 5, 5)
    # timelist = gen_dt(start_time, end_time, t=f"{flag}")
    savepath = f'/mnt/wtx_weather_forecast/SAT/MSG/MSGtrain_N/0611'
    crDir(savepath)

    # Call pattern: 1. split_time  2. timelist  3. paths
    dataL = Mul_sub(generateList, [[sat_dir_path],
                                   [radar_dir_path],
                                   timelist])
    dataLs = [i for i in dataL if i is not None]
    if flag == "trn":
        train_df = pd.concat(dataLs)
        mkDir(savepath)
        train_df.to_csv(f"{savepath}/df_train.csv", index=False, sep=',')
        print(f"train_df {len(train_df)}")
        print('complete!!!')
        print(savepath)
    if flag == "val":
        valid_df = pd.concat(dataLs)
        mkDir(savepath)
        valid_df.to_csv(f"{savepath}/df_valid.csv", index=False, sep=',')
        print(f"valid_df {len(valid_df)}")
        print('complete!!!')
        print(savepath)
"""
"""
def map_fun(conf):
    UTC = conf[0]
    logger.info(UTC)
    try:
        dP = drawPng(UTC)
        if not dP.envList is None:
            for i, env in enumerate(dP.envList):
                CR = dP.CR[:, ::4, ::4]
                CRc = clip(CR, env, dP.latArr[0], dP.lonArr[0], 0.04)
                latArrc = clipLat(dP.latArr, env, 0.04)
                lonArrc = clipLon(dP.lonArr, env, 0.04)
                CRc[CRc < 5] = np.nan
                statDt = clip(dP.df_Mat[:, :-1, :-1], env, dP.latArr[0], dP.lonArr[0], 0.04)
                dP.makeDS(CRc, statDt, env, cfg.size)
    except Exception as e:
        logger.error(f"{UTC} error {e}")
        logger.info(traceback.format_exc())
        print(traceback.format_exc())
        return

def getCheckArea(self, eps):
    '''
    split area
    :param UTC:
    :param eps:
    :return:
    '''
    ret, img_thre = cv2.threshold(self.CR[0][::4, ::4], 1, 255, cv2.THRESH_BINARY)
    img_thre = img_thre.astype(np.uint8)
    contours, hierarchy = cv2.findContours(img_thre, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    validcontours = list(filter(lambda x: len(x) > 35, contours))
    logger.info(f"{len(validcontours)} echo connected regions")
    xyList = []
    for v in validcontours:
        # print(validcontours[i])
        xy = np.asarray(v).squeeze()
        xyList.append(xy)
        # plt.plot(xy[:, 0], CR.shape[0] - xy[:, 1])
        #
        # plt.show()
    xyList = np.concatenate(xyList)

    rectangles = testDBscan(xyList, eps)
    envList = []
    for r in rectangles:
        [(wI, sI), (eI, nI)] = r
        # plt.imshow(CR[sI:nI, wI:eI])
        # plt.show()
        n = np.round(self.latArr[0] - sI * 0.04, 2)
        s = np.round(self.latArr[0] - nI * 0.04, 2)
        w = np.round(self.lonArr[0] + wI * 0.04, 2)
        e = np.round(self.lonArr[0] + eI * 0.04, 2)
        env = envelope(n, s, w, e)
        # CRc = clip(CR, en, latArr[0], lonArr[0], 0.01)
        # plt.imshow(CRc)
        # plt.show()
        envList.append(env)
    logger.info(f"{len(envList)} final regions")
    return envList


class drawPng():
    def __init__(self, UTC):
        self.UTC = UTC
        self.UTCStr = UTC.strftime("%Y%m%d%H%M")
        self.eps = 20
        self.CR, self.latArr, self.lonArr = self.getCR()
        self.envCHN = envelope(54.2, 12.21, 73, 134.99)
        self.envList = None
        self.df_Mat = self.makeStat()
        if not self.CR is None:
            self.envList = self.getCheckArea(self.eps)
"""
"""
conda install conda-forge::cudatoolkit==11.8.0
"""
"""
sudo pkill -9 -u scx 2>/dev/null || true
sudo groupdel scx 2>/dev/null; sudo userdel -r scx 2>/dev/null; sudo groupadd -g 1015 scx && sudo useradd -m -u 1015 -g 1015 -s /bin/bash scx && echo "scx:123456" | sudo chpasswd && sudo chown -R scx:scx /home/scx && id scx
sudo pkill -9 -u scx 2>/dev/null || true
sudo ps aux | grep scx | awk '{print $2}' | xargs -r sudo kill -9 2>/dev/null || true
sudo userdel -rf scx 2>/dev/null || true
sudo groupdel -f scx 2>/dev/null || true
sleep 2
sudo groupadd -g 1015 scx
sudo useradd -m -u 1015 -g 1015 -s /bin/bash scx
echo "scx:123456" | sudo chpasswd
id scx
"""
"""
from hjnwtx.mkNCHJN import dataClass, mkNCCommonUni, envelope, timeSeq, mkDir
env = envelope(35, 10, 108, 125)
step = 0.01
latArr = np.linspace(env.n, env.s, int(round((env.n - env.s) / step)) + 1)
lonArr = np.linspace(env.w, env.e, int(round((env.e - env.w) / step)) + 1)
a = np.full([2501, 1701], np.nan)
# a = torch.full([2501, 1701], float('nan'))

a[:2280, :] = CR                                      # CR shape (2280, 1701), a shape (3501, 1701)
a[:2280, :] = np.maximum(a[:2280, :], CR[:2280, :])
"""
"""
find . -type d -empty -delete
find /mnt/wtx_weather_forecast/scx/GOES -type f -name "*.txt" -mmin +300 -delete ;
"""
"""
81. ERROR: Failed to initialize `curses` (setupterm: could not find terminfo database)
82. export TERMINFO=/lib/terminfo
"""
"""
mask = (mask_data == 0).to(device)
"""
"""
import pdb
pdb.set_trace()
l 10                       # list the surrounding 10 lines
(Pdb) !a = 5               # create variable a in the current scope
(Pdb) p a
for i in range(5): print(i)
n                          # execute the next line
c                          # continue execution
q                          # quit
(Pdb) n                    # Next line
(Pdb) s                    # Step into function
(Pdb) c                    # Continue execution
(Pdb) b <line>             # Set breakpoint
(Pdb) q                    # Quit debugger
(Pdb) !import os; os.listdir('.')
(Pdb) p locals()           # Show local variables
(Pdb) p globals()          # Show global variables
(Pdb) where                # Show stack trace
(Pdb) list                 # Show current code context
"""
"""
np.savez_compressed(output_path.replace('.npy', '.npz'), data=data)
data = np.load(output_path.replace('.npy', '.npz'))['data']
with np.load(output_path.replace('.npy', '.npz')) as npz_file:
    data = npz_file['data']
with np.load(output_path.replace('.npy', '.npz')) as npz_file:
    data = npz_file[npz_file.files[0]]
"""
"""
lats = np.linspace(15, 60, h)
lons = np.linspace(70, 140, w)

# import cv2
# [cv2.resize(i, (6200, 4200), interpolation=cv2.INTER_LINEAR) for i in dP.df_Mat]
B08_fixed = B08.astype(np.float32)
print(f"dtype after conversion: {B08_fixed.dtype}")   # byte-order issue: cv2 expects native byte order
print(f"range after conversion: {B08_fixed.min()} ~ {B08_fixed.max()}")
d_test = cv2.resize(B08_fixed, (100, 100), interpolation=cv2.INTER_CUBIC)    # bicubic interpolation
d_cv2 = cv2.resize(B08, (6200, 4200), interpolation=cv2.INTER_LINEAR)        # bilinear interpolation
print(f"test interpolation range: {d_test.min()} ~ {d_test.max()}")
"""
"""
import torch
torch.cuda.empty_cache()
"""