shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/3D/__init__.py +25 -0
- shancx/Algo/Class.py +11 -0
- shancx/Algo/CudaPrefetcher1.py +112 -0
- shancx/Algo/Fake_image.py +24 -0
- shancx/Algo/Hsml.py +391 -0
- shancx/Algo/L2Loss.py +10 -0
- shancx/Algo/MetricTracker.py +132 -0
- shancx/Algo/Normalize.py +66 -0
- shancx/Algo/OptimizerWScheduler.py +38 -0
- shancx/Algo/Rmageresize.py +79 -0
- shancx/Algo/Savemodel.py +33 -0
- shancx/Algo/SmoothL1_losses.py +27 -0
- shancx/Algo/Tqdm.py +62 -0
- shancx/Algo/__init__.py +121 -0
- shancx/Algo/checknan.py +28 -0
- shancx/Algo/iouJU.py +83 -0
- shancx/Algo/mask.py +25 -0
- shancx/Algo/psnr.py +9 -0
- shancx/Algo/ssim.py +70 -0
- shancx/Algo/structural_similarity.py +308 -0
- shancx/Algo/tool.py +704 -0
- shancx/Calmetrics/__init__.py +97 -0
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
- shancx/Calmetrics/rmseR2score.py +35 -0
- shancx/Clip/__init__.py +50 -0
- shancx/Cmd.py +126 -0
- shancx/Config_.py +26 -0
- shancx/Df/DataFrame.py +11 -2
- shancx/Df/__init__.py +17 -0
- shancx/Df/tool.py +0 -0
- shancx/Diffm/Psamples.py +18 -0
- shancx/Diffm/__init__.py +0 -0
- shancx/Diffm/test.py +207 -0
- shancx/Doc/__init__.py +214 -0
- shancx/E/__init__.py +178 -152
- shancx/Fillmiss/__init__.py +0 -0
- shancx/Fillmiss/imgidwJU.py +46 -0
- shancx/Fillmiss/imgidwLatLonJU.py +82 -0
- shancx/Gpu/__init__.py +55 -0
- shancx/H9/__init__.py +126 -0
- shancx/H9/ahi_read_hsd.py +877 -0
- shancx/H9/ahisearchtable.py +298 -0
- shancx/H9/geometry.py +2439 -0
- shancx/Hug/__init__.py +81 -0
- shancx/Inst.py +22 -0
- shancx/Lib.py +31 -0
- shancx/Mos/__init__.py +37 -0
- shancx/NN/__init__.py +235 -106
- shancx/Path1.py +161 -0
- shancx/Plot/GlobMap.py +276 -116
- shancx/Plot/__init__.py +491 -1
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
- shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/RdPzl/__init__.py +32 -0
- shancx/Read.py +72 -0
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +62 -123
- shancx/Time/GetTime.py +9 -3
- shancx/Time/__init__.py +66 -1
- shancx/Time/timeCycle.py +302 -0
- shancx/Time/tool.py +0 -0
- shancx/Train/__init__.py +74 -0
- shancx/Train/makelist.py +187 -0
- shancx/Train/multiGpu.py +27 -0
- shancx/Train/prepare.py +161 -0
- shancx/Train/renet50.py +157 -0
- shancx/ZR.py +12 -0
- shancx/__init__.py +333 -262
- shancx/args.py +27 -0
- shancx/bak.py +768 -0
- shancx/df2database.py +62 -2
- shancx/geosProj.py +80 -0
- shancx/info.py +38 -0
- shancx/netdfJU.py +231 -0
- shancx/sendM.py +59 -0
- shancx/tensBoard/__init__.py +28 -0
- shancx/wait.py +246 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- my_timer_decorator/__init__.py +0 -10
- shancx/Dsalgor/__init__.py +0 -19
- shancx/E/DFGRRIB.py +0 -30
- shancx/EN/DFGRRIB.py +0 -30
- shancx/EN/__init__.py +0 -148
- shancx/FileRead.py +0 -44
- shancx/Gray2RGB.py +0 -86
- shancx/M/__init__.py +0 -137
- shancx/MN/__init__.py +0 -133
- shancx/N/__init__.py +0 -131
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/glob_nation_map.py +0 -116
- shancx/Plot/radar_nmc.py +0 -61
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/Plot/radar_nmc_china_map_f.py +0 -121
- shancx/Plot/radar_nmc_us_map_f.py +0 -128
- shancx/Plot/subplots_compare_devlop.py +0 -36
- shancx/Plot/subplots_single_china_map.py +0 -45
- shancx/S/__init__.py +0 -138
- shancx/W/__init__.py +0 -132
- shancx/WN/__init__.py +0 -132
- shancx/code.py +0 -331
- shancx/draw_day_CR_PNG.py +0 -200
- shancx/draw_day_CR_PNGUS.py +0 -206
- shancx/draw_day_CR_SVG.py +0 -275
- shancx/draw_day_pre_PNGUS.py +0 -205
- shancx/makenetCDFN.py +0 -42
- shancx/mkIMGSCX.py +0 -92
- shancx/netCDF.py +0 -130
- shancx/radar_nmc_china_map_compare1.py +0 -50
- shancx/radar_nmc_china_map_f.py +0 -125
- shancx/radar_nmc_us_map_f.py +0 -67
- shancx/subplots_compare_devlop.py +0 -36
- shancx/tool.py +0 -18
- shancx/user/H8mess.py +0 -317
- shancx/user/__init__.py +0 -137
- shancx/user/cinradHJN.py +0 -496
- shancx/user/examMeso.py +0 -293
- shancx/user/hjnDAAS.py +0 -26
- shancx/user/hjnFTP.py +0 -81
- shancx/user/hjnGIS.py +0 -320
- shancx/user/hjnGPU.py +0 -21
- shancx/user/hjnIDW.py +0 -68
- shancx/user/hjnKDTree.py +0 -75
- shancx/user/hjnLAPSTransform.py +0 -47
- shancx/user/hjnMiscellaneous.py +0 -182
- shancx/user/hjnProj.py +0 -162
- shancx/user/inotify.py +0 -41
- shancx/user/matplotlibMess.py +0 -87
- shancx/user/mkNCHJN.py +0 -623
- shancx/user/newTypeRadar.py +0 -492
- shancx/user/test.py +0 -6
- shancx/user/tlogP.py +0 -129
- shancx/util_log.py +0 -33
- shancx/wtx/H8mess.py +0 -315
- shancx/wtx/__init__.py +0 -151
- shancx/wtx/cinradHJN.py +0 -496
- shancx/wtx/colormap.py +0 -64
- shancx/wtx/examMeso.py +0 -298
- shancx/wtx/hjnDAAS.py +0 -26
- shancx/wtx/hjnFTP.py +0 -81
- shancx/wtx/hjnGIS.py +0 -330
- shancx/wtx/hjnGPU.py +0 -21
- shancx/wtx/hjnIDW.py +0 -68
- shancx/wtx/hjnKDTree.py +0 -75
- shancx/wtx/hjnLAPSTransform.py +0 -47
- shancx/wtx/hjnLog.py +0 -78
- shancx/wtx/hjnMiscellaneous.py +0 -201
- shancx/wtx/hjnProj.py +0 -161
- shancx/wtx/inotify.py +0 -41
- shancx/wtx/matplotlibMess.py +0 -87
- shancx/wtx/mkNCHJN.py +0 -613
- shancx/wtx/newTypeRadar.py +0 -492
- shancx/wtx/test.py +0 -6
- shancx/wtx/tlogP.py +0 -129
- shancx-1.8.92.dist-info/RECORD +0 -99
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/E/__init__.py
CHANGED
|
@@ -1,152 +1,178 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# -*- coding: utf-8 -*-
|
|
3
|
-
import os
|
|
4
|
-
def start():
|
|
5
|
-
print("import successful")
|
|
6
|
-
# constants
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
''
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
""
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
import
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
'''
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
import os
|
|
4
|
+
def start():
|
|
5
|
+
print("import successful")
|
|
6
|
+
# constants
|
|
7
|
+
__author__ = 'shancx'
|
|
8
|
+
__author_email__ = 'shancx@126.com'
|
|
9
|
+
|
|
10
|
+
# @Time : 2023/09/27 下午8:52
|
|
11
|
+
# @Author : shanchangxi
|
|
12
|
+
# @File : util_log.py
|
|
13
|
+
import time
|
|
14
|
+
import logging
|
|
15
|
+
from logging import handlers
|
|
16
|
+
|
|
17
|
+
def mkDir(path):
    """Create the directory needed for *path*.

    A path containing a dot is treated as a file path and its parent
    directory is created; otherwise *path* itself is created as a
    directory.  Existing directories are left untouched.
    NOTE(review): the "." heuristic misclassifies dotted directory names
    such as "v1.0" -- confirm callers never pass those.
    """
    target = os.path.dirname(path) if "." in path else path
    os.makedirs(target, exist_ok=True)
|
|
22
|
+
|
|
23
|
+
# Module-level logging setup: configure the root logger to write
# INFO-level records to ./project_E.log, rotated daily at import time.
loggers = logging.getLogger()
loggers.setLevel(logging.INFO)
log_name = './project_E.log'
mkDir(log_name)  # ensure the log file's parent directory exists
logfile = log_name
time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
time_rotating_file_handler.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
time_rotating_file_handler.setFormatter(formatter)
loggers.addHandler(time_rotating_file_handler)
|
|
33
|
+
|
|
34
|
+
import logging
|
|
35
|
+
def setup_logger(logger_name=None, root="./", phase="project", level=logging.INFO, screen=False):
    """Configure and return a logger writing to ``<root>/<phase>.log``.

    Parameters
    ----------
    logger_name : str or None
        Name passed to ``logging.getLogger``; ``None`` selects the root
        logger.  BUG FIX: the previous default was a Logger *object*
        (module-level ``loggers``), which ``logging.getLogger`` rejects
        with TypeError on Python >= 3.8, so the old default always failed.
    root : str
        Directory in which the log file is created (must already exist).
    phase : str
        Base name of the log file.
    level : int
        Logging level applied to the logger.
    screen : bool
        When true, also echo records to the console via a StreamHandler.

    Returns
    -------
    logging.Logger
        The configured logger.  Note each call adds a new handler; calling
        repeatedly with the same name accumulates handlers.
    """
    logger = logging.getLogger(logger_name)
    formatter = logging.Formatter(
        '%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%y-%m-%d %H:%M:%S')
    log_file = os.path.join(root, '{}.log'.format(phase))
    fh = logging.FileHandler(log_file, mode='w')  # mode='w': truncate on each setup
    fh.setFormatter(formatter)
    logger.setLevel(level)
    logger.addHandler(fh)
    if screen:
        sh = logging.StreamHandler()
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    return logger
|
|
50
|
+
|
|
51
|
+
def greetJU(**kwargs):
    """Print a greeting.

    When a ``name`` keyword is supplied (even if ``None``), greet that
    value; otherwise greet the world.
    """
    if 'name' not in kwargs:
        print("Hello, World!")
    else:
        print(f"Hello, {kwargs['name']}!")
|
|
56
|
+
# greet(name='Alice') # 输出:Hello, Alice!
|
|
57
|
+
# greet() #
|
|
58
|
+
|
|
59
|
+
"""
|
|
60
|
+
import traceback
|
|
61
|
+
try:
|
|
62
|
+
|
|
63
|
+
except Exception as e:
|
|
64
|
+
|
|
65
|
+
logger.info(traceback.format_exc())
|
|
66
|
+
"""
|
|
67
|
+
'''
|
|
68
|
+
##定義一個streamHandler
|
|
69
|
+
# print_handler = logging.StreamHandler()
|
|
70
|
+
# print_handler.setFormatter(formatter)
|
|
71
|
+
# loggers.addHandler(print_handler)
|
|
72
|
+
'''
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
"""
|
|
76
|
+
from main import makeAll,options
|
|
77
|
+
from multiprocessing import Pool
|
|
78
|
+
import datetime
|
|
79
|
+
from config import logger,output
|
|
80
|
+
import time
|
|
81
|
+
import pandas as pd
|
|
82
|
+
import os
|
|
83
|
+
from itertools import product
|
|
84
|
+
import threading
|
|
85
|
+
|
|
86
|
+
def excuteCommand(cmd):
|
|
87
|
+
print(cmd)
|
|
88
|
+
os.system(cmd)
|
|
89
|
+
|
|
90
|
+
def gpuPro(makeListUTC, isPhase, isDebug, gpu, isOverwrite):
|
|
91
|
+
productList = product(makeListUTC, [isPhase], [isDebug], [gpu], [isOverwrite])
|
|
92
|
+
|
|
93
|
+
with Pool(4) as p:
|
|
94
|
+
p.map(makeAll, productList)
|
|
95
|
+
|
|
96
|
+
if __name__ == '__main__':
|
|
97
|
+
cfg = options()
|
|
98
|
+
isPhase = cfg.isPhase
|
|
99
|
+
isDebug = cfg.isDebug
|
|
100
|
+
sepSec = cfg.sepSec
|
|
101
|
+
gpu = cfg.gpu
|
|
102
|
+
pool = cfg.pool
|
|
103
|
+
isOverwrite = cfg.isOverwrite
|
|
104
|
+
timeList = pd.date_range(cfg.times[0], cfg.times[-1], freq=f"{sepSec}s")
|
|
105
|
+
logger.info(f"时间段check {timeList}")
|
|
106
|
+
gpuNum = 2
|
|
107
|
+
eachGPU = 4
|
|
108
|
+
|
|
109
|
+
makeListUTC = []
|
|
110
|
+
for UTC in timeList:
|
|
111
|
+
UTCStr = UTC.strftime("%Y%m%d%H%M")
|
|
112
|
+
outpath = f"{output}/{UTCStr[:4]}/{UTCStr[:8]}/MSP2_WTX_AIW_QPF_L88_CHN_{UTCStr}_00000-00300-00006.nc"
|
|
113
|
+
if not os.path.exists(outpath) or isOverwrite:
|
|
114
|
+
makeListUTC.append(UTC)
|
|
115
|
+
[print(element) for element in makeListUTC]
|
|
116
|
+
|
|
117
|
+
phaseCMD = "--isPhase" if isPhase else ""
|
|
118
|
+
debugCMD = "--isDebug" if isDebug else ""
|
|
119
|
+
OverwriteCMD = "--isOverwrite"
|
|
120
|
+
gpuCMD = f"--gpu={gpu}"
|
|
121
|
+
# cmdList = list(map(lambda x:f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {OverwriteCMD} {gpuCMD}",makeListUTC))
|
|
122
|
+
cmdList = list(map(lambda x:f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {gpuCMD}",makeListUTC))
|
|
123
|
+
|
|
124
|
+
with Pool(pool) as p:
|
|
125
|
+
p.map(excuteCommand, cmdList)
|
|
126
|
+
"""
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
'''
|
|
130
|
+
# @Time : 2023/09/27 下午8:52
|
|
131
|
+
# @Author : shanchangxi
|
|
132
|
+
# @File : util_log.py
|
|
133
|
+
import time
|
|
134
|
+
import logging
|
|
135
|
+
from logging import handlers
|
|
136
|
+
|
|
137
|
+
logger = logging.getLogger()
|
|
138
|
+
logger.setLevel(logging.INFO)
|
|
139
|
+
log_name = 'project_tim_tor.log'
|
|
140
|
+
logfile = log_name
|
|
141
|
+
time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
|
|
142
|
+
time_rotating_file_handler.setLevel(logging.INFO)
|
|
143
|
+
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
|
|
144
|
+
time_rotating_file_handler.setFormatter(formatter)
|
|
145
|
+
logger.addHandler(time_rotating_file_handler)
|
|
146
|
+
print_handler = logging.StreamHandler()
|
|
147
|
+
print_handler.setFormatter(formatter)
|
|
148
|
+
logger.addHandler(print_handler)
|
|
149
|
+
|
|
150
|
+
'''
|
|
151
|
+
|
|
152
|
+
'''
|
|
153
|
+
###解决方法 pip install torch==2.4.0 torchvision torchaudio三个同时安装 python 3.12 解决cuda启动不了的问题
|
|
154
|
+
|
|
155
|
+
Res网络
|
|
156
|
+
'''
|
|
157
|
+
|
|
158
|
+
'''
|
|
159
|
+
#!/bin/bash
|
|
160
|
+
subject="Daily System Report"
|
|
161
|
+
to_email="shanhe12@163.com"
|
|
162
|
+
temp_file="/home/scx/logs/111.log" # 替换为日志文件的实际路径
|
|
163
|
+
search_terms=("error" "ERROR") # 可以添加多个搜索词
|
|
164
|
+
for term in "${search_terms[@]}"; do
|
|
165
|
+
if grep -q "$term" "$temp_file"; then
|
|
166
|
+
grep "$term" "$temp_file" | mail -s "$subject - $term" "$to_email"
|
|
167
|
+
else
|
|
168
|
+
echo "No matches found for '$term'." >&2
|
|
169
|
+
fi
|
|
170
|
+
done
|
|
171
|
+
|
|
172
|
+
'''
|
|
173
|
+
|
|
174
|
+
"""
|
|
175
|
+
manager = UserManager("./key/user_data.json")
|
|
176
|
+
manager.add_user("001", '[f"{key}","https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key="]')
|
|
177
|
+
user_info = manager.get_user("user002")
|
|
178
|
+
"""
|
|
File without changes
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from scipy.spatial import cKDTree
|
|
3
|
+
import matplotlib.pyplot as plt
|
|
4
|
+
def fill_missing_idw(image, power=2, max_neighbors=8):
    """Fill NaN pixels of a 2-D array by inverse-distance weighting (IDW).

    Parameters
    ----------
    image : np.ndarray
        2-D array; missing pixels are marked with NaN.  Not modified.
    power : float
        Exponent of the inverse-distance weight (weight = 1 / d**power).
    max_neighbors : int
        Number of nearest known pixels used per missing pixel.

    Returns
    -------
    np.ndarray
        Copy of *image* with missing pixels replaced by interpolated values.
    """
    filled_image = image.copy()
    known_mask = ~np.isnan(image)
    missing_mask = np.isnan(image)
    known_points = np.column_stack(np.where(known_mask))
    missing_points = np.column_stack(np.where(missing_mask))
    known_values = image[known_mask]
    if len(missing_points) == 0:
        print("There are no missing points to fill.")
        return filled_image
    # BUG FIX (consistency with the lat/lon variant): guard against an
    # all-NaN image, for which cKDTree cannot be built.
    if len(known_points) == 0:
        print("There are no known points available for interpolation.")
        return filled_image
    tree = cKDTree(known_points)
    distances, indexes = tree.query(missing_points, k=max_neighbors)
    # BUG FIX: cKDTree.query returns 1-D arrays when k=1, which broke the
    # axis=1 reductions below; promote to 2-D (as the lat/lon variant does).
    if max_neighbors == 1:
        distances = distances[:, np.newaxis]
        indexes = indexes[:, np.newaxis]
    with np.errstate(divide='ignore'):
        weights = 1 / distances**power
    weights[~np.isfinite(weights)] = 0  # inf weights (d == 0) contribute nothing
    weight_sums = np.sum(weights, axis=1)
    weight_sums[weight_sums == 0] = 1  # avoid division by zero
    interpolated_values = np.sum(weights * known_values[indexes], axis=1) / weight_sums
    # missing_points is in np.where order, which matches boolean-mask
    # assignment order, so this is equivalent to the per-point loop.
    filled_image[missing_mask] = interpolated_values
    return filled_image
|
|
25
|
+
def test_fill_missing_idw():
    """Demo: knock a square hole into a random image, fill it with IDW,
    and save before/after plots to PNG files."""
    np.random.seed(42)
    image = np.random.rand(20, 20)
    image[5:10, 5:10] = np.nan

    plt.figure(figsize=(12, 6))

    plt.subplot(1, 2, 1)
    plt.imshow(image, cmap='viridis', interpolation='none')
    plt.title("before")
    plt.colorbar()
    plt.savefig("before.png")

    filled = fill_missing_idw(image, power=2, max_neighbors=8)

    plt.subplot(1, 2, 2)
    plt.imshow(filled, cmap='viridis', interpolation='none')
    plt.title("uper")
    plt.colorbar()

    plt.tight_layout()
    plt.savefig("uper.png")
    plt.show()

if __name__ == "__main__":
    test_fill_missing_idw()
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from netCDF4 import Dataset
|
|
3
|
+
from scipy.spatial import cKDTree
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
def fill_missing_idw(variable, lat, lon, power=2, max_neighbors=8):
    """Fill masked/NaN cells of a 2-D field by inverse-distance weighting.

    Parameters
    ----------
    variable : np.ndarray or np.ma.MaskedArray
        2-D field; missing cells are the mask (MaskedArray) or NaN.
    lat, lon : np.ndarray
        Coordinate arrays with the same shape as *variable* (used as the
        interpolation metric; assumed locally planar -- TODO confirm).
    power : float
        Exponent of the inverse-distance weight.
    max_neighbors : int
        Number of nearest known cells used per missing cell.

    Returns
    -------
    Same type as *variable*, with missing cells replaced where possible.
    """
    filled_variable = variable.copy()
    if isinstance(variable, np.ma.MaskedArray):
        known_mask = ~variable.mask
        missing_mask = variable.mask
        known_values = variable.data[known_mask]
    else:
        known_mask = ~np.isnan(variable)
        missing_mask = np.isnan(variable)
        known_values = variable[known_mask]
    known_points = np.column_stack((lon[known_mask], lat[known_mask]))
    missing_points = np.column_stack((lon[missing_mask], lat[missing_mask]))
    if len(known_points) == 0:
        print("There are no known points available for interpolation.")
        return filled_variable
    if len(missing_points) == 0:
        print("There are no missing points to fill.")
        return filled_variable
    tree = cKDTree(known_points)
    distances, indexes = tree.query(missing_points, k=max_neighbors)
    if distances.size == 0 or indexes.size == 0:
        print("There are no valid neighbor points for interpolation.")
        return filled_variable
    # cKDTree.query returns 1-D arrays when k=1; promote to 2-D so the
    # axis=1 reductions below work for any neighbor count.
    if max_neighbors == 1:
        distances = distances[:, np.newaxis]
        indexes = indexes[:, np.newaxis]
    with np.errstate(divide='ignore'):
        weights = 1 / distances**power
    weights[~np.isfinite(weights)] = 0
    weight_sums = np.sum(weights, axis=1)
    weight_sums[weight_sums == 0] = 1  # avoid division by zero
    interpolated_values = np.sum(weights * known_values[indexes], axis=1) / weight_sums
    # Cells coinciding exactly with a known point take that point's value.
    exact_matches = distances[:, 0] == 0
    if np.any(exact_matches):
        interpolated_values[exact_matches] = known_values[indexes[exact_matches, 0]]
    filled_variable[missing_mask] = interpolated_values
    # BUG FIX: the filled count is the number of interpolated cells;
    # np.sum(~missing_mask) counted the *known* cells instead.
    n_filled = interpolated_values.size
    print(f"尝试填补了 {len(missing_points)} 个缺失点,成功填补了 {n_filled} 个。")
    return filled_variable
|
|
46
|
+
def process_nc_file(input_path, output_path):
    """Copy a netCDF file, IDW-filling missing values in the 'CR' variable.

    Global attributes, dimensions, and all other variables are copied
    unchanged; each time slice of 'CR' (expected 3-D) is filled with
    fill_missing_idw using the file's 'lat'/'lon' coordinate variables.
    """
    with Dataset(input_path, 'r') as src, \
         Dataset(output_path, 'w', format=src.file_format) as dst:
        # Copy global attributes.
        dst.setncatts({attr: src.getncattr(attr) for attr in src.ncattrs()})
        # Recreate dimensions, preserving unlimited ones.
        for dim_name, dim in src.dimensions.items():
            dst.createDimension(dim_name, None if dim.isunlimited() else len(dim))
        for var_name, var in src.variables.items():
            fill_value = getattr(var, '_FillValue', None)
            dst_var = dst.createVariable(var_name, var.datatype, var.dimensions, fill_value=fill_value)
            dst_var.setncatts({attr: var.getncattr(attr) for attr in var.ncattrs()})
            data = var[:]
            if var_name not in ['CR']:
                dst_var[:] = data
                continue
            if data.ndim != 3:
                raise ValueError(f"Variable {var_name} 维度不符合预期,预期为 3D 但实际为 {data.ndim}D。")
            filled_data = np.ma.array(data)
            lat = src.variables['lat'][:]
            lon = src.variables['lon'][:]
            for t in range(data.shape[0]):
                print(f"Processing time index {t} for variable {var_name}")
                var_data = data[t, :, :]
                if not isinstance(var_data, np.ma.MaskedArray):
                    var_data = np.ma.masked_where(np.isnan(var_data), var_data)
                filled_data[t, :, :] = fill_missing_idw(var_data, lat, lon)
            dst_var[:] = filled_data.filled(getattr(var, '_FillValue', np.nan))
    print(f"The data after filling has been saved to {output_path}")
|
|
76
|
+
if __name__ == "__main__":
    input_nc = "/mnt/wtx_weather_forecast/scx/WTX_DATA/RADA/MQPF_1204_diffu12/2024/20240908/MSP2_WTX_AIW_REF_L88_CHN_202409080448_00000-00300-00006.nc"
    output_nc = "1aaa_filled.nc"
    # Only run when the (hard-coded) input file actually exists.
    if os.path.exists(input_nc):
        process_nc_file(input_nc, output_nc)
    else:
        print(f"输入文件 {input_nc} 不存在。")
|
shancx/Gpu/__init__.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
# @Time : 2024/10/17 上午10:40
|
|
4
|
+
# @Author : shancx
|
|
5
|
+
# @File : __init__.py
|
|
6
|
+
# @email : shanhe12@163.com
|
|
7
|
+
import torch
|
|
8
|
+
def CheckGpuPlus(num=1):
    """List the CUDA devices and return torch.device('cuda:<num>').

    Returns None when CUDA is unavailable (after printing a notice).
    NOTE(review): *num* is not validated against device_count() --
    confirm callers pass a valid index.
    """
    if not torch.cuda.is_available():
        print("CUDA is not available. Using CPU.")
        return None
    print(f"CUDA is available. Number of GPUs available: {torch.cuda.device_count()}")
    for idx in range(torch.cuda.device_count()):
        print(f"GPU {idx}: {torch.cuda.get_device_name(idx)}")
    # CUDA availability was already established above, so the original
    # 'cpu' fallback in this expression was dead code.
    return torch.device(f'cuda:{num}')
|
|
19
|
+
|
|
20
|
+
#pd.concat(filter(None, results))
|
|
21
|
+
#valid_results = [df for df in results if isinstance(df, pd.DataFrame) and not df.empty]
|
|
22
|
+
|
|
23
|
+
import os
|
|
24
|
+
def visDevices(device_ids):
    """Restrict visible CUDA devices via the CUDA_VISIBLE_DEVICES env var.

    Parameters
    ----------
    device_ids : int, str, or list
        A device index, a comma-separated string, or a list of indices.

    NOTE(review): setting CUDA_VISIBLE_DEVICES after torch has already
    initialised CUDA has no effect on the current process -- call this
    before any CUDA work.
    """
    if isinstance(device_ids, int):
        device_ids = str(device_ids)
    elif isinstance(device_ids, list):
        device_ids = ",".join(map(str, device_ids))
    os.environ["CUDA_VISIBLE_DEVICES"] = device_ids
    if torch.cuda.is_available():
        print(f"Visible GPUs: {device_ids}")
        print(f"Current visible GPUs: {os.environ.get('CUDA_VISIBLE_DEVICES')}")
        # BUG FIX: torch.cuda.device(i) returns an (always truthy) context
        # manager, so the original "not torch.cuda.device(...)" check could
        # never fire.  Compare against the visible device count instead.
        for device_id in device_ids.split(","):
            if int(device_id) >= torch.cuda.device_count():
                print(f"Warning: GPU {device_id} is not available.")
    else:
        print("No GPU available. Using CPU.")
|
|
38
|
+
|
|
39
|
+
import torch
|
|
40
|
+
import torch.nn as nn
|
|
41
|
+
|
|
42
|
+
def multiGpu(model, gpu_ids):
    """Place *model* on the requested GPU(s).

    With more than one id the model is wrapped in nn.DataParallel;
    with exactly one it is moved to that device.  Falls back to CPU
    (with a notice) when CUDA is unavailable.

    Returns
    -------
    (model, device) tuple.
    """
    if not torch.cuda.is_available():
        print("CUDA is not available. Using CPU.")
        cpu = torch.device("cpu")
        return model.to(cpu), cpu
    device = torch.device(f"cuda:{gpu_ids[0]}")
    if len(gpu_ids) == 1:
        print(f"Using GPU: {gpu_ids[0]}")
        model = model.to(device)
    else:
        print(f"Using {len(gpu_ids)} GPUs: {gpu_ids}")
        model = nn.DataParallel(model, device_ids=gpu_ids)
    return model, device
|
shancx/H9/__init__.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
from shancx.H9.ahisearchtable import ahisearchtable
|
|
2
|
+
from shancx.H9.ahi_read_hsd import ahi_read_hsd
|
|
3
|
+
from tqdm import tqdm
|
|
4
|
+
import os
|
|
5
|
+
import numpy as np
|
|
6
|
+
import datetime
|
|
7
|
+
import time
|
|
8
|
+
import warnings
|
|
9
|
+
warnings.filterwarnings("ignore", category=UserWarning)
|
|
10
|
+
class AHIScene(ahi_read_hsd, ahisearchtable):
    """Parse Himawari-8/9 HSD segment files and mosaic them into one grid."""

    def __init__(self, subpoint=140.7, resolution=0.02):
        super().__init__(subpoint=subpoint, resolution=resolution)
        # Temporary files (unzipped HSD segments) removed in __del__.
        self.Tempfile = []

    def hsdBlock(self, srcHSDfiles, tmppath, fillvalue=65535):
        '''Decode H8/H9 HSD segment files and stitch them into a NOM array.

        Unreadable or missing segments are skipped; the returned uint16
        array stores values scaled by 100, with *fillvalue* for gaps.
        '''
        # e.g. HS_H09_20230115_0400_B01_FLDK_R10_S0110.DAT.bz2
        BandID, BlockIDMin, BlockIDMax, SegmentTotal = self.setHSDInfo(srcHSDfiles)
        outdata = None
        BlockIDs = []
        # BUG FIX: the original passed iterable='iterable' (a literal
        # string) to tqdm; the bar is driven manually via pbar.update,
        # so no iterable argument should be supplied at all.
        with tqdm(total=len(srcHSDfiles),
                  desc='正在进行第%i波段块合成' % (BandID), mininterval=1) as pbar:
            for hsdname in srcHSDfiles:
                if not os.path.isfile(hsdname):
                    print('文件不存在【%s】' % (hsdname))
                    pbar.update(1)
                    continue

                # Parse the file-name metadata (segment id, band, ...).
                nameinfo = self.getHSDNameInfo(hsdname)
                if nameinfo is None:
                    pbar.update(1)
                    continue
                SegmentNum = nameinfo['SegmemtID']

                # Decompress the bz2 segment into tmppath when needed.
                self._unzipped = self.unzip_file(hsdname, tmppath)
                if self._unzipped:
                    self.is_zipped = True
                    filename = self._unzipped
                    self.Tempfile.append(filename)
                else:
                    filename = hsdname

                if filename.endswith('.bz2'):
                    print('解压bz2文件失败【%s】' % (filename))
                    pbar.update(1)
                    continue

                # Read the segment and place it by block number.
                data = self.readhsd(filename, SegmentNum)
                if data is None:
                    pbar.update(1)
                    continue

                if outdata is None:
                    line, pixel = data.shape
                    outdata = np.full(shape=(line*SegmentTotal, pixel),
                                      fill_value=fillvalue, dtype=np.uint16)

                # NaNs become fillvalue after the *100 uint16 scaling below.
                data[np.isnan(data)] = fillvalue/100.0
                outdata[(SegmentNum-BlockIDMin)*line:(SegmentNum-BlockIDMin+1)*line, :] \
                    = np.array(data*100.0, dtype=np.uint16)
                BlockIDs.append(SegmentNum)
                pbar.update(1)
            pbar.close()
        # Eagerly remove temp files (also runs at GC; removal is idempotent).
        self.__del__()
        return outdata

    def setHSDInfo(self, filelist):
        """Scan *filelist* and return (BandID, BlockIDMin, BlockIDMax,
        SegmentTotal); raises if the list mixes bands."""
        BandID = None
        BlockIDs = []
        for filename in filelist:
            nameinfo = self.getHSDNameInfo(filename)
            if nameinfo is None:
                continue

            if BandID is None:
                BandID = nameinfo['BandID']
            elif BandID != nameinfo['BandID']:
                raise Exception('输入的文件列表中有多个波段的块数据文件【%s】' % (filename))
            BlockIDs.append(nameinfo['SegmemtID'])

        BlockIDMin = np.nanmin(BlockIDs)
        BlockIDMax = np.nanmax(BlockIDs)

        SegmentTotal = int(BlockIDMax-BlockIDMin+1)

        return BandID, BlockIDMin, BlockIDMax, SegmentTotal

    def getHSDNameInfo(self, filename):
        """Parse an HSD file name such as
        HS_H09_YYYYMMDD_HHMM_BXX_FLDK_R20_S0810 into a dict, or return
        None for non-standard names."""
        basename = os.path.basename(filename)
        basename = basename.split('.')[0]
        if len(basename) != 39:
            print('非标准文件名,需要输入文件名【HS_H09_YYYYMMDD_HHMM_BXX_FLDK_R20_S0810】')
            return None

        nameinfo = {}
        namelist = basename.split('_')

        nameinfo['SatID'] = namelist[1]
        nameinfo['StartTime'] = datetime.datetime.strptime('%s %s' % (namelist[2], namelist[3]), '%Y%m%d %H%M')
        nameinfo['BandID'] = int(namelist[4][1:])  # 2-digit band number (varies from "01" to "16")
        nameinfo['ObsType'] = namelist[5]
        nameinfo['Resolution'] = float(namelist[6][1:])/10.0/100  # spatial resolution ("05": 0.5km, "10": 1.0km, "20": 2.0km)
        nameinfo['SegmemtID'] = int(namelist[7][1:3])
        nameinfo['SegmemtTotal'] = int(namelist[7][3:5])  # total number of segments (fixed to "10")

        return nameinfo

    def __del__(self):
        # Remove temporary (unzipped) files; retry once after a short
        # wait in case a handle is still open, then give up silently.
        for filename in self.Tempfile:
            if os.path.isfile(filename):
                try:
                    os.remove(filename)
                except BaseException:
                    time.sleep(1)
                    try:
                        fp = open(filename, 'r')
                        fp.close()
                        os.remove(filename)
                    except BaseException:
                        pass
|