shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/3D/__init__.py +25 -0
- shancx/Algo/Class.py +11 -0
- shancx/Algo/CudaPrefetcher1.py +112 -0
- shancx/Algo/Fake_image.py +24 -0
- shancx/Algo/Hsml.py +391 -0
- shancx/Algo/L2Loss.py +10 -0
- shancx/Algo/MetricTracker.py +132 -0
- shancx/Algo/Normalize.py +66 -0
- shancx/Algo/OptimizerWScheduler.py +38 -0
- shancx/Algo/Rmageresize.py +79 -0
- shancx/Algo/Savemodel.py +33 -0
- shancx/Algo/SmoothL1_losses.py +27 -0
- shancx/Algo/Tqdm.py +62 -0
- shancx/Algo/__init__.py +121 -0
- shancx/Algo/checknan.py +28 -0
- shancx/Algo/iouJU.py +83 -0
- shancx/Algo/mask.py +25 -0
- shancx/Algo/psnr.py +9 -0
- shancx/Algo/ssim.py +70 -0
- shancx/Algo/structural_similarity.py +308 -0
- shancx/Algo/tool.py +704 -0
- shancx/Calmetrics/__init__.py +97 -0
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
- shancx/Calmetrics/rmseR2score.py +35 -0
- shancx/Clip/__init__.py +50 -0
- shancx/Cmd.py +126 -0
- shancx/Config_.py +26 -0
- shancx/Df/DataFrame.py +11 -2
- shancx/Df/__init__.py +17 -0
- shancx/Df/tool.py +0 -0
- shancx/Diffm/Psamples.py +18 -0
- shancx/Diffm/__init__.py +0 -0
- shancx/Diffm/test.py +207 -0
- shancx/Doc/__init__.py +214 -0
- shancx/E/__init__.py +178 -152
- shancx/Fillmiss/__init__.py +0 -0
- shancx/Fillmiss/imgidwJU.py +46 -0
- shancx/Fillmiss/imgidwLatLonJU.py +82 -0
- shancx/Gpu/__init__.py +55 -0
- shancx/H9/__init__.py +126 -0
- shancx/H9/ahi_read_hsd.py +877 -0
- shancx/H9/ahisearchtable.py +298 -0
- shancx/H9/geometry.py +2439 -0
- shancx/Hug/__init__.py +81 -0
- shancx/Inst.py +22 -0
- shancx/Lib.py +31 -0
- shancx/Mos/__init__.py +37 -0
- shancx/NN/__init__.py +235 -106
- shancx/Path1.py +161 -0
- shancx/Plot/GlobMap.py +276 -116
- shancx/Plot/__init__.py +491 -1
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
- shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/RdPzl/__init__.py +32 -0
- shancx/Read.py +72 -0
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +62 -123
- shancx/Time/GetTime.py +9 -3
- shancx/Time/__init__.py +66 -1
- shancx/Time/timeCycle.py +302 -0
- shancx/Time/tool.py +0 -0
- shancx/Train/__init__.py +74 -0
- shancx/Train/makelist.py +187 -0
- shancx/Train/multiGpu.py +27 -0
- shancx/Train/prepare.py +161 -0
- shancx/Train/renet50.py +157 -0
- shancx/ZR.py +12 -0
- shancx/__init__.py +333 -262
- shancx/args.py +27 -0
- shancx/bak.py +768 -0
- shancx/df2database.py +62 -2
- shancx/geosProj.py +80 -0
- shancx/info.py +38 -0
- shancx/netdfJU.py +231 -0
- shancx/sendM.py +59 -0
- shancx/tensBoard/__init__.py +28 -0
- shancx/wait.py +246 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- my_timer_decorator/__init__.py +0 -10
- shancx/Dsalgor/__init__.py +0 -19
- shancx/E/DFGRRIB.py +0 -30
- shancx/EN/DFGRRIB.py +0 -30
- shancx/EN/__init__.py +0 -148
- shancx/FileRead.py +0 -44
- shancx/Gray2RGB.py +0 -86
- shancx/M/__init__.py +0 -137
- shancx/MN/__init__.py +0 -133
- shancx/N/__init__.py +0 -131
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/glob_nation_map.py +0 -116
- shancx/Plot/radar_nmc.py +0 -61
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/Plot/radar_nmc_china_map_f.py +0 -121
- shancx/Plot/radar_nmc_us_map_f.py +0 -128
- shancx/Plot/subplots_compare_devlop.py +0 -36
- shancx/Plot/subplots_single_china_map.py +0 -45
- shancx/S/__init__.py +0 -138
- shancx/W/__init__.py +0 -132
- shancx/WN/__init__.py +0 -132
- shancx/code.py +0 -331
- shancx/draw_day_CR_PNG.py +0 -200
- shancx/draw_day_CR_PNGUS.py +0 -206
- shancx/draw_day_CR_SVG.py +0 -275
- shancx/draw_day_pre_PNGUS.py +0 -205
- shancx/makenetCDFN.py +0 -42
- shancx/mkIMGSCX.py +0 -92
- shancx/netCDF.py +0 -130
- shancx/radar_nmc_china_map_compare1.py +0 -50
- shancx/radar_nmc_china_map_f.py +0 -125
- shancx/radar_nmc_us_map_f.py +0 -67
- shancx/subplots_compare_devlop.py +0 -36
- shancx/tool.py +0 -18
- shancx/user/H8mess.py +0 -317
- shancx/user/__init__.py +0 -137
- shancx/user/cinradHJN.py +0 -496
- shancx/user/examMeso.py +0 -293
- shancx/user/hjnDAAS.py +0 -26
- shancx/user/hjnFTP.py +0 -81
- shancx/user/hjnGIS.py +0 -320
- shancx/user/hjnGPU.py +0 -21
- shancx/user/hjnIDW.py +0 -68
- shancx/user/hjnKDTree.py +0 -75
- shancx/user/hjnLAPSTransform.py +0 -47
- shancx/user/hjnMiscellaneous.py +0 -182
- shancx/user/hjnProj.py +0 -162
- shancx/user/inotify.py +0 -41
- shancx/user/matplotlibMess.py +0 -87
- shancx/user/mkNCHJN.py +0 -623
- shancx/user/newTypeRadar.py +0 -492
- shancx/user/test.py +0 -6
- shancx/user/tlogP.py +0 -129
- shancx/util_log.py +0 -33
- shancx/wtx/H8mess.py +0 -315
- shancx/wtx/__init__.py +0 -151
- shancx/wtx/cinradHJN.py +0 -496
- shancx/wtx/colormap.py +0 -64
- shancx/wtx/examMeso.py +0 -298
- shancx/wtx/hjnDAAS.py +0 -26
- shancx/wtx/hjnFTP.py +0 -81
- shancx/wtx/hjnGIS.py +0 -330
- shancx/wtx/hjnGPU.py +0 -21
- shancx/wtx/hjnIDW.py +0 -68
- shancx/wtx/hjnKDTree.py +0 -75
- shancx/wtx/hjnLAPSTransform.py +0 -47
- shancx/wtx/hjnLog.py +0 -78
- shancx/wtx/hjnMiscellaneous.py +0 -201
- shancx/wtx/hjnProj.py +0 -161
- shancx/wtx/inotify.py +0 -41
- shancx/wtx/matplotlibMess.py +0 -87
- shancx/wtx/mkNCHJN.py +0 -613
- shancx/wtx/newTypeRadar.py +0 -492
- shancx/wtx/test.py +0 -6
- shancx/wtx/tlogP.py +0 -129
- shancx-1.8.92.dist-info/RECORD +0 -99
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/Time/timeCycle.py
ADDED
@@ -0,0 +1,302 @@
+
+import glob
+import os
+import numpy as np
+import traceback
+from dateutil.relativedelta import relativedelta
+# Get all files under the given path
+paths = glob.glob("/root/data/ec_filter_npy_data/*")
+
+# Iterate over each file path
+for p in paths:
+    try:
+        # Try to load the .npy file
+        data = np.load(p)
+    except Exception as e:
+        # If loading fails, print the error and delete the file
+        print(f"Error loading {p}: {traceback.format_exc()}")
+        os.remove(p)
+        print(f"Deleted file: {p}")
+def GetMulData(conf):
+    sCST = conf[0]
+    eCST = conf[0]
+    sUTC = sCST+relativedelta(hours=-8)
+    sCSTstr = sCST.strftime("%Y%m%d%H%M%S")
+    sUTCstr = sUTC.strftime("%Y%m%d%H%M%S")
+    path = f"/root/data/{sUTCstr[:4]}/{sUTCstr:8}/CR_{sUTCstr[:12]}00.npy"
+    if os.path.exists(path):
+        print(f"outpath {path} existsing ")
+
+    else:
+        print(f"outpath {path} not existsing ")
+        return
+
+from shancx import Mul_sub
+import argparse
+import datetime
+import pandas as pd
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202411100000,202411101000')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+    cfg = options()
+    sCST = cfg.times[0]
+    eCST = cfg.times[-1]
+    timeList = pd.date_range(sCST, eCST, freq='6T')  # 6T minutes
+    print(timeList)
+    Mul_sub(GetMulData,[timeList],31)
+
+"""
+for i in range(7):
+    UTC = sUTC +relativedelta(hours=-8,minutes=-diffT*6)
+"""
+
+"""
+import glob
+import os
+import numpy as np
+from shancx import crDir
+from shancx.NN import _loggers
+logger = _loggers()
+import netCDF4 as nc
+import numpy as np
+from shancx.Plot import plotRadar
+from shancx import crDir
+import traceback
+from dateutil.relativedelta import relativedelta
+paths = glob.glob("/root/data/ec_filter_npy_data/*")
+basePath = f"/mnt/wtx_weather_forecast/scx/sever7/test/RADA/MQPF1109_1"
+output_dirH9Npy = f"/mnt/wtx_weather_forecast/SAT/H9/sat_npy_CHN"
+def GetMulData(conf):
+    sUTC = conf[0]
+    sUTCstr = sUTC.strftime("%Y%m%d%H%M")
+    output_path =f"{output_dirH9Npy}/{sUTCstr[:4]}/{sUTCstr[:8]}/MSP3_PMSC_H9_GEO_FD_{sUTCstr[:12]}_00000-00000.npy"
+    inputPathstr = f"{basePath}/{sUTCstr[:4]}/{sUTCstr[:8]}/*{sUTCstr[:12]}*.nc"
+    inputPathL = glob.glob(inputPathstr)
+    if len(inputPathL) ==0 :
+        print(f"outpath {inputPathstr} is missing ")
+        return None
+    inputPath = inputPathL[0]
+    with nc.Dataset(inputPath) as dataNC:
+        CR = dataNC["CR"][:]
+        lat = dataNC["lat"][:]
+        lon = dataNC["lon"][:]
+    crDir(output_path)
+    np.save(output_path,CR.data)
+    logger.info(f"{output_path} done ")
+
+from shancx import Mul_sub
+import argparse
+import datetime
+import pandas as pd
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202507010000,202510010000')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+    cfg = options()
+    sUTC = cfg.times[0]
+    eUTC = cfg.times[-1]
+    timeList = pd.date_range(sUTC, eUTC, freq='3h')  # 6T minutes
+    print(timeList)
+    Mul_sub(GetMulData,[timeList],6)
+"""
+
+"""
+import glob
+import os
+import numpy as np
+import traceback
+from dateutil.relativedelta import relativedelta
+import os
+from shancx.NN import Mul_TH
+import argparse
+import datetime
+import pandas as pd
+def GetMulData(conf):
+    sUTC = conf[0]
+    sUTCstr = sUTC.strftime("%Y%m%d%H%M")
+    commandstr = f""
+    os.system(commandstr)
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202508010000,202508010500')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+    cfg = options()
+    sUTC = cfg.times[0]
+    eUTC = cfg.times[-1]
+    timeList = pd.date_range(sUTC, eUTC, freq='10T')  # 6T minutes
+    print(timeList)
+    Mul_TH(GetMulData,[timeList],3)
+cd /mnt/wtx_weather_forecast/scx/sever7/SATdata/mkH9 ;timeout 1200s /home/scx/miniconda3/envs/H9/bin/python mkH9.py --time {sUTCstr[:12]}
+
+"""
+
+"""
+from shancx import crDir
+import os
+from shancx import loggers as logger
+# Define the original and new filenames
+original_file = "CR_20241117050600.npy"
+new_file = "20241117050600.npy"
+rootpath ="/root/autodl-tmp"
+filepath = "data/radar"
+def GetMulData(conf):
+    sCST = conf[0]
+    # eCST = conf[0]
+    sCSTstr = sCST.strftime("%Y%m%d%H%M%S")
+    outpath = os.path.join(rootpath,filepath,f"CR_{sCSTstr}00.npy")
+    if os.path.exists(outpath):
+        logger.info(f"outpath {outpath} is existsing ")
+        print(f"outpath {outpath} existsing ")
+    crDir(outpath)
+    array = np.load(f"./{original_file}")
+    np.save(outpath,array)
+    logger.info(f"outpath {outpath} done ")
+    print(f"outpath {outpath} done ")
+from shancx import Mul_sub
+import argparse
+import datetime
+import pandas as pd
+import numpy as np
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202411101000,202411150000')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+    cfg = options()
+    sCST = cfg.times[0]
+    eCST = cfg.times[-1]
+    timeList = pd.date_range(sCST, eCST, freq='6T')  # 6T minutes
+    print(timeList)
+    Mul_sub(GetMulData,[timeList],48)
+
+------------------------------------
+
+import glob
+import os
+import numpy as np
+import traceback
+from shancx import Mul_sub
+from shancx import loggers as logger
+
+# Get all files under the given path
+paths = glob.glob("/root//autodl-tmp/data/radar/*") #E:\
+
+# Iterate over each file path
+def getMul_sub(conf):
+    p = conf[0]
+    print(p)
+    try:
+        # Try to load the .npy file
+        data = np.load(p)
+        print(f"Loaded {p} with shape {data.shape}")
+    except Exception as e:
+        # If loading fails, print the error and delete the file
+        print(f"Error loading {p}: {traceback.format_exc()}")
+        logger.error(f"Error loading {p}: {traceback.format_exc()}")
+        os.remove(p)
+        print(f"Deleted file: {p}")
+
+if __name__ == '__main__':
+    paths1 = [i for i in paths if '.npy' in i]
+    Mul_sub(getMul_sub,[paths1],20)
+
+np.tile(np.load(basedata), (8, 1, 1)).reshape((8, 4200, 6200))
+"""
+
+
+"""
+Build specific data
+
+import glob
+import os
+import numpy as np
+import traceback
+from dateutil.relativedelta import relativedelta
+# Get all files under the given path
+import glob
+import datetime
+from shancx import crDir
+import netCDF4 as nc
+# Iterate over each file path
+def GetMulData(conf):
+    sUTC = conf[0]
+    UTCStr = sUTC.strftime("%Y%m%d%H%M%S")
+    path = f"/data2/mym/ifs_precipitation/{UTCStr[:4]}/{UTCStr[:8]}/ifs_precipitation_{UTCStr[:12]}.nc"
+    if not os.path.exists(path):
+        print(f"outpath {path} existsing ")
+    else:
+        with nc.Dataset(path) as dataNC:
+            # Get the 'time' variable
+            latitude = dataNC.variables[list(dataNC.variables)[0]][:]
+            longitude = dataNC.variables[list(dataNC.variables)[2]][:]
+            ifs_precipitation = dataNC.variables[list(dataNC.variables)[1]][:]
+            # print(list(dataNC.variables))
+            # crDir(path)
+            # np.save(path,d)
+        return {"min":np.min(d),"max":np.max(d)}
+from shancx import Mul_sub
+import argparse
+import datetime
+import pandas as pd
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202411100000,202411101000')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+
+    cfg = options()
+    sUTC = cfg.times[0]
+    eUTC = cfg.times[-1]
+    sUTC = datetime.datetime(2002,6,1,0,0)
+    eUTC = datetime.datetime(2004,9,1,0,0)
+    timeList = pd.date_range(sUTC, eUTC, freq='3h')  # 6T minutes
+
+    summer_timeList = timeList[timeList.month.isin([6, 7, 8])]
+    print(timeList)
+    minmax = Mul_sub(GetMulData,[summer_timeList],10)
+    global_min = min(d['min'] for d in minmax)
+    global_max = max(d['max'] for d in minmax) #117
+    print()
+
+"""
+
+
shancx/Time/tool.py
ADDED
File without changes
shancx/Train/__init__.py
ADDED
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import torch
+import torchvision.transforms as transforms
+import cv2
+import numpy as np
+from typing import List, Tuple
+from PIL import Image
+import os
+class ImageProcessor:
+    """Handles image preprocessing and transformations"""
+
+    def __init__(self):
+        self.transform = transforms.Compose([
+            transforms.Resize(256),
+            transforms.CenterCrop(224),
+            transforms.ToTensor(),
+            transforms.Normalize(
+                mean=[0.485, 0.456, 0.406],
+                std=[0.229, 0.224, 0.225]
+            )
+        ])
+
+    def preprocess_image(self, image_path: str) -> torch.Tensor:
+        """Load and preprocess image for model input"""
+        try:
+            if not os.path.exists(image_path):
+                raise FileNotFoundError(f"Image file not found at {image_path}")
+
+            image = cv2.imread(image_path)
+            if image is None:
+                raise ValueError(f"Unable to read image at {image_path}")
+
+            # Convert BGR to RGB
+            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+            # Convert to PIL Image for torchvision transforms
+            image = Image.fromarray(image)
+            return self.transform(image).unsqueeze(0)
+        except Exception as e:
+            print(f"Error processing image: {e}")
+            raise
+
+# if __name__ == "__main__":
+#     image_path = "./space_shuttle.jpg"
+#     image_processor = ImageProcessor()
+#     input_tensor = image_processor.preprocess_image(image_path)
+
+import torch
+import torch.nn as nn
+
+def multiGpu(model, gpu_ids):
+    # Check whether a GPU is available
+    if not torch.cuda.is_available():
+        print("CUDA is not available. Using CPU.")
+        device = torch.device("cpu")
+        return model.to(device), device
+    device = torch.device(f"cuda:{gpu_ids[0]}")
+    if len(gpu_ids) > 1:
+        print(f"Using {len(gpu_ids)} GPUs: {gpu_ids}")
+        model = nn.DataParallel(model, device_ids=gpu_ids)
+    else:
+        print(f"Using GPU: {gpu_ids[0]}")
+    model = model.to(device)
+    return model, device
+
+"""
+model = MyModel()
+gpu_ids = [5, 6, 7]
+model, device = setup_multi_gpu(model, gpu_ids)
+print(f"Model is on device: {device}")
+data = torch.randn(10, 3, 224, 224).to(device)
+output = model(data)
+"""
+
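The commented-out __main__ block above hints at how ImageProcessor is meant to be used. A minimal sketch of pushing the preprocessed tensor through a stock torchvision classifier follows; the resnet50 choice and the image filename are assumptions for illustration, not something this diff prescribes.

import torch
from torchvision.models import resnet50, ResNet50_Weights

from shancx.Train import ImageProcessor

# Preprocess a local image as the commented-out example suggests.
processor = ImageProcessor()
input_tensor = processor.preprocess_image("./space_shuttle.jpg")  # shape (1, 3, 224, 224)

# Assumed classifier: any 224x224 ImageNet model would do here.
model = resnet50(weights=ResNet50_Weights.DEFAULT)
model.eval()
with torch.no_grad():
    logits = model(input_tensor)
print("predicted class index:", int(logits.argmax(dim=1)))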
shancx/Train/makelist.py
ADDED
@@ -0,0 +1,187 @@
+
+def save_results(results, output_file, mode='w',title="input_path,tar_path"):
+    with open(output_file, mode) as f:
+        if mode == 'w':
+            f.write(f"{title}\n")
+        for result in results:
+            f.write(f"{result['input_path']},{result['tar_path']}\n")
+"""
+output_file = "rmse_results.txt"
+save_results([], output_file, mode='w')
+result = {
+    'input_path': input_path ,
+    'tar_path': tar_path
+}
+save_results([result], output_file, mode='a')
+
+df = pd.read_csv(csv_file, sep=" ",header=None)
+sample_list = df.values.tolist()
+self.paths = {
+    "input_path": [l[0] for l in self.sample_list],
+    "gt_path": [l[1] for l in self.sample_list]
+}
+"""
+import os
+import numpy as np
+import pandas as pd
+import glob
+import datetime
+from shancx import Mul_sub_S,Mul_sub
+from shancx import crDir
+def getcheckdata(conf):
+    satPath = conf[0]
+    radar_dir_path = conf[1]
+    sat_imin = conf[2]
+    try:
+        satdata = np.load(satPath)
+        radarpth = glob.glob(f"{radar_dir_path}/{sat_imin[:4]}/{sat_imin[:8]}/CR_{satPath.split('/')[-1][4:-4]}*.npy")[0]
+        radardata = np.load(radarpth)
+        if radardata.shape != satdata.shape :
+            return
+        df = pd.DataFrame({'sat_path': [satPath], 'radar_path': [radarpth] })
+        return df
+    except Exception as e:
+        print(f"{satPath} can not load succeed: {e}")
+        return None
+def generateList(conf):
+    sat_dir_path, radar_dir_path, sat_imin= conf
+    satpath = glob.glob(f"{sat_dir_path}/{sat_imin[:4]}/{sat_imin[:8]}/SAT_{sat_imin}_*.npy")
+    satpath.sort()
+    if satpath:
+        datas = []
+        for path in satpath:
+            data = getcheckdata( (path,radar_dir_path,sat_imin))
+            datas.append(data)
+        datass = [i for i in datas if i is not None ]
+        if datass :
+            df = pd.concat(datass)
+            return df
+    else:
+        return None
+import argparse
+import datetime
+import pandas as pd
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202502010000,202506220000')
+    parser.add_argument('--flag', type=str, default='val')
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+"""
+if __name__ == '__main__':
+    cfg = options()
+    sUTC = cfg.times[0]
+    eUTC = cfg.times[-1]
+    flag = cfg.flag
+    sat_dir_path ="./"
+    radar_dir_path = f"./"
+    timelist = pd.date_range(sUTC, eUTC, t=f"{flag}")
+    timeListfliter = timelist[timelist.month.isin([1,4,7,10])&(timelist.day<=15)]
+    savepath = f'/mnt/wtx_weather_forecast/SAT/GOES18train_N/0624_1'
+    crDir(savepath)
+    # How to call: 1. split_time 2. timelist 3. paths
+    dataL = Mul_sub(generateList,[ [sat_dir_path]
+                                 , [radar_dir_path]
+                                 , timelist
+                                 ]
+                    )
+    dataLs = [i for i in dataL if i is not None]
+    if flag =="trn":
+        train_df = pd.concat(dataLs)
+        crDir(savepath)
+        train_df.to_csv(f"{savepath}/df_train.csv", index=False, sep=',')
+        print(f"train_df {len(train_df)}")
+        print('complete!!!')
+        print(savepath)
+    if flag == "val":
+        valid_df = pd.concat(dataLs)
+        crDir(savepath)
+        valid_df.to_csv(f"{savepath}/df_valid.csv", index=False, sep=',')
+        print(f"valid_df {len(valid_df)}")
+        print('complete!!!')
+        print(savepath)
+
+
+"""
+"""
+import os
+import numpy as np
+import pandas as pd
+import glob
+import datetime
+from shancx import Mul_sub_S,Mul_sub
+from shancx import crDir
+def getcheckdata(conf):
+    inputPath = conf[0]
+    target_dir_path = conf[1]
+    imin = conf[2]
+    try:
+        inputdata = np.load(inputPath)
+        targetpth = glob.glob(f"{target_dir_path}/{imin[:4]}/{imin[:8]}/analy_{inputPath.split('/')[-1][3:-4]}*.npy")[0]
+        targetdata = np.load(targetpth)
+        if inputdata.shape != targetdata.shape :
+            return
+        df = pd.DataFrame({'inputPath': [inputPath], 'targetpth': [targetpth] })
+        return df
+    except Exception as e:
+        print(f"{inputPath} can not load succeed: {e}")
+        return None
+def generateList(conf):
+    imin,input_dir_path, target_dir_path= conf
+    imin = imin.strftime("%Y%m%d%H%M")
+    targetpath = glob.glob(f"{input_dir_path}/{imin[:4]}/{imin[:8]}/EC_{imin}_*.npy")
+    targetpath.sort()
+    if targetpath:
+        datas = []
+        for path in targetpath:
+            data = getcheckdata( (path,target_dir_path,imin))
+            datas.append(data)
+        datass = [i for i in datas if i is not None ]
+        if datass :
+            df = pd.concat(datass)
+            return df
+    else:
+        return None
+import argparse
+import datetime
+import pandas as pd
+def options():
+    parser = argparse.ArgumentParser(description='examdatabasedata')
+    parser.add_argument('--times', type=str, default='202505210030,202505210130')
+    parser.add_argument('--flag', type=str, default='val')
+    parser.add_argument('--freq', type=str, default="1h")
+    config= parser.parse_args()
+    print(config)
+    config.times = config.times.split(",")
+    if len(config.times) == 1:
+        config.times = [config.times[0], config.times[0]]
+    config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                    datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+    return config
+if __name__ == '__main__':
+    cfg = options()
+    sUTC = cfg.times[0]
+    eUTC = cfg.times[-1]
+    freq = cfg.freq
+    input_dir_path ="/mnt/wtx_weather_forecast/scx/smart_grid/train/clip_EC"
+    target_dir_path = f"/mnt/wtx_weather_forecast/scx/smart_grid/train/analy_1_256"
+    timelist = pd.date_range(sUTC, eUTC, freq=f"{freq}")
+    timeListfliter = timelist[timelist.month.isin([1,4,5,7,10])]
+    timeListfliter = list(timeListfliter)
+    savepath = f'/mnt/wtx_weather_forecast/SAT/downscale/0901'
+    crDir(savepath)
+    # How to call: 1. split_time 2. timelist 3. paths
+
+    dataL = Mul_sub(generateList,[ timeListfliter,[input_dir_path] , [target_dir_path]] )
+    dataLs = [i for i in dataL if i is not None]
+    pd.concat(dataLs).to_csv('train.txt', sep=' ', index=False)
+    a = pd.read_csv("./train.txt",sep=" ").values.tolist()
+"""
+
+
shancx/Train/multiGpu.py
ADDED
@@ -0,0 +1,27 @@
+
+import torch
+import torch.nn as nn
+
+def multiGpu(model, gpu_ids):
+    # Check whether a GPU is available
+    if not torch.cuda.is_available():
+        print("CUDA is not available. Using CPU.")
+        device = torch.device("cpu")
+        return model.to(device), device
+    device = torch.device(f"cuda:{gpu_ids[0]}")
+    if len(gpu_ids) > 1:
+        print(f"Using {len(gpu_ids)} GPUs: {gpu_ids}")
+        model = nn.DataParallel(model, device_ids=gpu_ids)
+    else:
+        print(f"Using GPU: {gpu_ids[0]}")
+    model = model.to(device)
+    return model, device
+
+"""
+model = MyModel()
+gpu_ids = [5, 6, 7]
+model, device = setup_multi_gpu(model, gpu_ids)
+print(f"Model is on device: {device}")
+data = torch.randn(10, 3, 224, 224).to(device)
+output = model(data)
+"""
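Note that the usage string at the end of this module refers to the helper as setup_multi_gpu, while the function it defines is multiGpu. A minimal sketch exercising the function as defined follows; the toy model and gpu_ids are assumptions, and the helper itself falls back to CPU when CUDA is unavailable.

import torch
import torch.nn as nn

from shancx.Train.multiGpu import multiGpu

# Toy model purely for illustration.
model = nn.Sequential(
    nn.Conv2d(3, 8, kernel_size=3, padding=1),
    nn.ReLU(),
    nn.AdaptiveAvgPool2d(1),
    nn.Flatten(),
    nn.Linear(8, 10),
)
model, device = multiGpu(model, gpu_ids=[0, 1])

data = torch.randn(4, 3, 224, 224).to(device)
with torch.no_grad():
    output = model(data)
print(output.shape, device)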