shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/3D/__init__.py +25 -0
- shancx/Algo/Class.py +11 -0
- shancx/Algo/CudaPrefetcher1.py +112 -0
- shancx/Algo/Fake_image.py +24 -0
- shancx/Algo/Hsml.py +391 -0
- shancx/Algo/L2Loss.py +10 -0
- shancx/Algo/MetricTracker.py +132 -0
- shancx/Algo/Normalize.py +66 -0
- shancx/Algo/OptimizerWScheduler.py +38 -0
- shancx/Algo/Rmageresize.py +79 -0
- shancx/Algo/Savemodel.py +33 -0
- shancx/Algo/SmoothL1_losses.py +27 -0
- shancx/Algo/Tqdm.py +62 -0
- shancx/Algo/__init__.py +121 -0
- shancx/Algo/checknan.py +28 -0
- shancx/Algo/iouJU.py +83 -0
- shancx/Algo/mask.py +25 -0
- shancx/Algo/psnr.py +9 -0
- shancx/Algo/ssim.py +70 -0
- shancx/Algo/structural_similarity.py +308 -0
- shancx/Algo/tool.py +704 -0
- shancx/Calmetrics/__init__.py +97 -0
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
- shancx/Calmetrics/rmseR2score.py +35 -0
- shancx/Clip/__init__.py +50 -0
- shancx/Cmd.py +126 -0
- shancx/Config_.py +26 -0
- shancx/Df/DataFrame.py +11 -2
- shancx/Df/__init__.py +17 -0
- shancx/Df/tool.py +0 -0
- shancx/Diffm/Psamples.py +18 -0
- shancx/Diffm/__init__.py +0 -0
- shancx/Diffm/test.py +207 -0
- shancx/Doc/__init__.py +214 -0
- shancx/E/__init__.py +178 -152
- shancx/Fillmiss/__init__.py +0 -0
- shancx/Fillmiss/imgidwJU.py +46 -0
- shancx/Fillmiss/imgidwLatLonJU.py +82 -0
- shancx/Gpu/__init__.py +55 -0
- shancx/H9/__init__.py +126 -0
- shancx/H9/ahi_read_hsd.py +877 -0
- shancx/H9/ahisearchtable.py +298 -0
- shancx/H9/geometry.py +2439 -0
- shancx/Hug/__init__.py +81 -0
- shancx/Inst.py +22 -0
- shancx/Lib.py +31 -0
- shancx/Mos/__init__.py +37 -0
- shancx/NN/__init__.py +235 -106
- shancx/Path1.py +161 -0
- shancx/Plot/GlobMap.py +276 -116
- shancx/Plot/__init__.py +491 -1
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
- shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/RdPzl/__init__.py +32 -0
- shancx/Read.py +72 -0
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +62 -123
- shancx/Time/GetTime.py +9 -3
- shancx/Time/__init__.py +66 -1
- shancx/Time/timeCycle.py +302 -0
- shancx/Time/tool.py +0 -0
- shancx/Train/__init__.py +74 -0
- shancx/Train/makelist.py +187 -0
- shancx/Train/multiGpu.py +27 -0
- shancx/Train/prepare.py +161 -0
- shancx/Train/renet50.py +157 -0
- shancx/ZR.py +12 -0
- shancx/__init__.py +333 -262
- shancx/args.py +27 -0
- shancx/bak.py +768 -0
- shancx/df2database.py +62 -2
- shancx/geosProj.py +80 -0
- shancx/info.py +38 -0
- shancx/netdfJU.py +231 -0
- shancx/sendM.py +59 -0
- shancx/tensBoard/__init__.py +28 -0
- shancx/wait.py +246 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- my_timer_decorator/__init__.py +0 -10
- shancx/Dsalgor/__init__.py +0 -19
- shancx/E/DFGRRIB.py +0 -30
- shancx/EN/DFGRRIB.py +0 -30
- shancx/EN/__init__.py +0 -148
- shancx/FileRead.py +0 -44
- shancx/Gray2RGB.py +0 -86
- shancx/M/__init__.py +0 -137
- shancx/MN/__init__.py +0 -133
- shancx/N/__init__.py +0 -131
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/glob_nation_map.py +0 -116
- shancx/Plot/radar_nmc.py +0 -61
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/Plot/radar_nmc_china_map_f.py +0 -121
- shancx/Plot/radar_nmc_us_map_f.py +0 -128
- shancx/Plot/subplots_compare_devlop.py +0 -36
- shancx/Plot/subplots_single_china_map.py +0 -45
- shancx/S/__init__.py +0 -138
- shancx/W/__init__.py +0 -132
- shancx/WN/__init__.py +0 -132
- shancx/code.py +0 -331
- shancx/draw_day_CR_PNG.py +0 -200
- shancx/draw_day_CR_PNGUS.py +0 -206
- shancx/draw_day_CR_SVG.py +0 -275
- shancx/draw_day_pre_PNGUS.py +0 -205
- shancx/makenetCDFN.py +0 -42
- shancx/mkIMGSCX.py +0 -92
- shancx/netCDF.py +0 -130
- shancx/radar_nmc_china_map_compare1.py +0 -50
- shancx/radar_nmc_china_map_f.py +0 -125
- shancx/radar_nmc_us_map_f.py +0 -67
- shancx/subplots_compare_devlop.py +0 -36
- shancx/tool.py +0 -18
- shancx/user/H8mess.py +0 -317
- shancx/user/__init__.py +0 -137
- shancx/user/cinradHJN.py +0 -496
- shancx/user/examMeso.py +0 -293
- shancx/user/hjnDAAS.py +0 -26
- shancx/user/hjnFTP.py +0 -81
- shancx/user/hjnGIS.py +0 -320
- shancx/user/hjnGPU.py +0 -21
- shancx/user/hjnIDW.py +0 -68
- shancx/user/hjnKDTree.py +0 -75
- shancx/user/hjnLAPSTransform.py +0 -47
- shancx/user/hjnMiscellaneous.py +0 -182
- shancx/user/hjnProj.py +0 -162
- shancx/user/inotify.py +0 -41
- shancx/user/matplotlibMess.py +0 -87
- shancx/user/mkNCHJN.py +0 -623
- shancx/user/newTypeRadar.py +0 -492
- shancx/user/test.py +0 -6
- shancx/user/tlogP.py +0 -129
- shancx/util_log.py +0 -33
- shancx/wtx/H8mess.py +0 -315
- shancx/wtx/__init__.py +0 -151
- shancx/wtx/cinradHJN.py +0 -496
- shancx/wtx/colormap.py +0 -64
- shancx/wtx/examMeso.py +0 -298
- shancx/wtx/hjnDAAS.py +0 -26
- shancx/wtx/hjnFTP.py +0 -81
- shancx/wtx/hjnGIS.py +0 -330
- shancx/wtx/hjnGPU.py +0 -21
- shancx/wtx/hjnIDW.py +0 -68
- shancx/wtx/hjnKDTree.py +0 -75
- shancx/wtx/hjnLAPSTransform.py +0 -47
- shancx/wtx/hjnLog.py +0 -78
- shancx/wtx/hjnMiscellaneous.py +0 -201
- shancx/wtx/hjnProj.py +0 -161
- shancx/wtx/inotify.py +0 -41
- shancx/wtx/matplotlibMess.py +0 -87
- shancx/wtx/mkNCHJN.py +0 -613
- shancx/wtx/newTypeRadar.py +0 -492
- shancx/wtx/test.py +0 -6
- shancx/wtx/tlogP.py +0 -129
- shancx-1.8.92.dist-info/RECORD +0 -99
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/Calmetrics/__init__.py
ADDED
@@ -0,0 +1,97 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import os
+# constants
+__author__ = 'shancx'
+
+__author_email__ = 'shanhe12@163.com'
+# @Time    : 2025/08/19 11:31 PM
+# @Author  : shanchangxi
+# @File    : Calmetrics.py
+from sklearn.metrics import mean_squared_error, r2_score
+from scipy.stats import pearsonr
+import numpy as np
+def calculate_metrics(y_true, y_pred):
+    # Calculate metrics
+    correlation, _ = pearsonr(y_true, y_pred)
+    rmse = np.sqrt(mean_squared_error(y_true, y_pred))
+    mask = y_true != 0
+    y_true = y_true[mask]
+    y_pred = y_pred[mask]
+    mape = np.mean(np.abs((y_true - y_pred) / y_true)) * 100
+    r2 = r2_score(y_true, y_pred)
+    rmse = f"{rmse:0.4f}"
+    correlation = f"{correlation:0.4f}"
+    mape = f"{mape:0.4f}"
+    r2 = f"{r2:0.4f}"
+    return rmse,correlation,mape,r2
+import numpy as np
+class Metrics:
+    def __init__(self, true_values, forecast_values):
+        """
+        Initialize the Metrics class with truth and forecast values
+        :param true_values: observed values, binarized data
+        :param forecast_values: forecast values, binarized data
+        """
+        self.true_values = true_values
+        self.forecast_values = forecast_values
+
+    def cal_confusion_matrix(self):
+        """
+        Compute the four confusion-matrix elements: TP, TN, FP, FN
+        :return: TP, TN, FP, FN
+        """
+        TP = np.sum((self.true_values == 1) & (self.forecast_values == 1))  # True Positive
+        TN = np.sum((self.true_values == 0) & (self.forecast_values == 0))  # True Negative
+        FP = np.sum((self.true_values == 0) & (self.forecast_values == 1))  # False Positive
+        FN = np.sum((self.true_values == 1) & (self.forecast_values == 0))  # False Negative
+
+        return TP, TN, FP, FN
+
+    def cal_ts(self):
+        """
+        Compute the TS (threat) score
+        :return: TS score
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        ts_score = TP / (TP + FP + FN) if (TP + FP + FN) > 0 else np.nan
+        return ts_score
+
+    def cal_acc(self):
+        """
+        Compute the accuracy
+        :return: accuracy
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        accuracy = (TP + TN) / (TP + TN + FP + FN) if (TP + TN + FP + FN) > 0 else np.nan
+        return accuracy
+
+    def cal_pod(self):
+        """
+        Compute the Probability of Detection (POD)
+        :return: POD
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        pod = TP / (TP + FN) if (TP + FN) > 0 else np.nan
+        return pod
+
+
+    def cal_fnr(self):
+        """
+        Compute the False Negative Rate (miss rate, FNR)
+        :return: FNR
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        fnr = FN / (TP + FN) if (TP + FN) > 0 else np.nan
+        return fnr
+
+    def cal_far(self):
+        """
+        Compute the False Alarm Ratio (FAR)
+        :return: FAR
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        far = FP / (TP + FP) if (TP + FP) > 0 else np.nan
+        return far
+
+
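A minimal usage sketch for the two entry points above (the synthetic arrays and the shancx.Calmetrics import path are assumptions based on the file listing, not part of the diff): calculate_metrics expects paired numeric arrays and returns four formatted strings, while Metrics expects already binarized 0/1 fields.

import numpy as np
from shancx.Calmetrics import calculate_metrics, Metrics  # assumed export path

y_true = np.random.rand(1000) * 10             # stand-in observations
y_pred = y_true + np.random.randn(1000) * 0.5  # stand-in predictions
rmse, correlation, mape, r2 = calculate_metrics(y_true, y_pred)  # four formatted strings

m = Metrics((y_true > 2).astype(int), (y_pred > 2).astype(int))  # binarized inputs
print(m.cal_ts(), m.cal_pod(), m.cal_far())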
shancx/Calmetrics/calmetrics.py
ADDED
@@ -0,0 +1,14 @@
+import numpy as np
+from . import Metrics
+def cal_metrics(obs_v,pre_v):
+    metrics = Metrics(obs_v.ravel(), pre_v.ravel())
+    ts_score = np.around(metrics.cal_ts(),4)
+    accuracy = np.around(metrics.cal_acc(),4)
+    pod = np.around(metrics.cal_pod(),4)
+    fnr = np.around(metrics.cal_fnr(),4)
+    far = np.around(metrics.cal_far(),4)
+    return ts_score,accuracy,pod,fnr,far
+
+"""
+ts_score, accuracy, pod, fnr, far = cal_metrics(obs_v,pre_v)  # obs_v and pre_v are two 2-D arrays with matching shapes
+"""
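Since Metrics assumes binarized input, a caller would normally threshold continuous fields first. A minimal sketch (the 0.1 mm/h threshold, synthetic grids, and the import path are assumptions):

import numpy as np
from shancx.Calmetrics.calmetrics import cal_metrics  # assumed module path

obs = np.random.rand(400, 600) * 5      # stand-in observed precipitation grid
fcst = np.random.rand(400, 600) * 5     # stand-in forecast grid
obs_bin = np.where(obs >= 0.1, 1, 0)    # rain / no-rain threshold (assumption)
fcst_bin = np.where(fcst >= 0.1, 1, 0)
ts, acc, pod, fnr, far = cal_metrics(obs_bin, fcst_bin)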
shancx/Calmetrics/calmetricsmatrixLib.py
ADDED
@@ -0,0 +1,147 @@
+# coding=utf-8
+
+from sklearn.metrics import confusion_matrix
+import numpy as np
+import copy
+def prep_clf(obs, pre, thresholdR=None, thresholdF=None):
+    if thresholdR is not None and thresholdF is not None:
+        obs = np.where(obs >= thresholdR, 1, 0)
+        pre = np.where(pre >= thresholdF, 1, 0)
+    # True positive (TP)
+    hits = np.sum((obs == 1) & (pre == 1))
+    # False negative (FN)
+    misses = np.sum((obs == 1) & (pre == 0))
+    # False positive (FP)
+    falsealarms = np.sum((obs == 0) & (pre == 1))
+    # True negative (TN)
+    correctnegatives = np.sum((obs == 0) & (pre == 0))
+    return hits, misses, falsealarms, correctnegatives
+def precision(obs, pre, thresholdR=None, thresholdF=None):
+    TP, FN, FP, TN = prep_clf(obs=obs, pre = pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    return TP / (TP + FP+10e-5)
+def recall(obs, pre, thresholdR=None, thresholdF=None):
+    TP, FN, FP, TN = prep_clf(obs=obs, pre = pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    return TP / (TP + FN+10e-5)
+def ACC(obs, pre, thresholdR=None, thresholdF=None):
+    TP, FN, FP, TN = prep_clf(obs=obs, pre = pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    return (TP + TN) / (TP + TN + FP + FN)
+def F1(obs, pre, thresholdR=None, thresholdF=None):
+    precision_socre = precision(obs, pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    recall_score = recall(obs, pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    return 2 * ((precision_socre * recall_score) / (precision_socre + recall_score+10e-5))
+def TS(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre, thresholdR=thresholdR, thresholdF=thresholdF)
+    return hits/(hits + falsealarms + misses+10e-5)
+def ETS(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    num = (hits + falsealarms) * (hits + misses)
+    den = hits + misses + falsealarms + correctnegatives
+    Dr = num / den
+    ETS = (hits - Dr) / (hits + misses + falsealarms - Dr)
+    return ETS
+def FAR(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    return falsealarms / (hits + falsealarms+10e-5)
+def PO(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    return misses / (hits + misses+10e-5)
+def POD(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    return hits / (hits + misses)
+def BIAS(obs, pre, thresholdR = 0.1, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    return (hits + falsealarms) / (hits + misses)
+def HSS(obs, pre, thresholdR=None, thresholdF=None):
+    hits, misses, falsealarms, correctnegatives = prep_clf(obs=obs, pre = pre,
+                                                           thresholdR=thresholdR, thresholdF=thresholdF)
+    HSS_num = 2 * (hits * correctnegatives - misses * falsealarms)
+    HSS_den = (misses**2 + falsealarms**2 + 2*hits*correctnegatives +
+               (misses + falsealarms)*(hits + correctnegatives))
+    return HSS_num / HSS_den
+def BSS(obs, pre, threshold=None):
+    if threshold is not None:
+        obs = np.where(obs >= threshold, 1, 0)
+        pre = np.where(pre >= threshold, 1, 0)
+    obs = obs.flatten()
+    pre = pre.flatten()
+    return np.sqrt(np.mean((obs - pre) ** 2))
+def MAE(obs, pre):
+    obs = obs.flatten()
+    pre = pre.flatten()
+    return np.mean(np.abs(pre - obs))
+def RMSE(obs, pre):
+    obs = obs.flatten()
+    pre = pre.flatten()
+    return np.sqrt(np.mean((obs - pre) ** 2))
+def sun_rain_Matrix(o, f, threshold=None):
+    if threshold is not None:
+        o = np.where(o >= 0.1, 1, 0)
+        f = np.where(f >= threshold, 1, 0)
+    c_matrix = confusion_matrix(o, f, labels=[0, 1])
+    return c_matrix
+def CY_classify(pre0):
+    # < 0.031          < 0.0606         no rain/snow
+    # 0.031~0.25       0.0606~0.8989    light rain/snow
+    # 0.25~0.35        0.8989~2.8700    moderate rain/snow
+    # 0.35~0.48        2.8700~12.8638   heavy rain/snow
+    # >= 0.48          >= 12.8638       torrential rain/snow
+    pre = copy.deepcopy(pre0)
+    pre[pre0 < 0.031] = 0
+    pre[(pre0 >= 0.031)&(pre0 < 0.25)] = 1
+    pre[(pre0 >= 0.25)&(pre0 < 0.35)] = 2
+    pre[(pre0 >= 0.35)&(pre0 < 0.48)] = 3
+    pre[(pre0 >= 0.48)&(pre0 < 9990)] = 4
+    pre[pre0 > 9990] = -1
+    pre[np.isnan(pre0)] = -1
+    return pre
+def pre1h_Matrix(obs, fore,mode):
+    try:
+        if len(obs) == 0:
+            return np.zeros([5,5])
+        else:
+            o = classify1h(obs)
+            if mode=="WTX":
+                f = classify1h(fore)
+                c_matrix = confusion_matrix(o, f, labels=[0, 1,2,3,4])
+                return c_matrix
+            else:
+                f = CY_classify(fore)
+                c_matrix = confusion_matrix(o, f, labels=[0, 1, 2, 3, 4])
+                return c_matrix
+    except Exception as e:
+        print(e)
+def classify1h(pre0):
+    pre = copy.deepcopy(pre0)
+    pre[pre0 < 0.1] = 0
+    pre[np.logical_and(pre0 >= 0.1, pre0 <= 2.5)] = 1
+    pre[np.logical_and(pre0 > 2.5, pre0 <= 8)] = 2
+    pre[np.logical_and(pre0 > 8, pre0 <= 16)] = 3
+    pre[np.logical_and(pre0 > 16, pre0 <= 9990)] = 4
+    pre[pre0 > 9990] = -1
+    pre[np.isnan(pre0)] = -1
+    return pre
+
+def calsmhsTS(mat):
+    tsList = []
+    for i in range(1, 5):
+        ts = mat[i, i] / (np.sum(mat[:i, i]) + np.sum(mat[i, :i + 1]))
+        tsList.append(np.round(ts, 3))
+    print(f"小雨:{tsList[0]},中雨:{tsList[1]},大雨:{tsList[2]},暴雨:{tsList[3]}")
+    return {"小雨":tsList[0],"中雨":tsList[1],"大雨":tsList[2],"暴雨":tsList[3]}
+
+"""
+cm1_C = sun_rain_Matrix(df["PRE1_r"].values, df["PRE1_c"].values)
+F1h =TS( df[f"PRE{i}_r"],df[f"PRE{i}_c"],thresholdF=thresholdF)
+F1hm =TS(df[f"PRE{i}_r"],df[f"PRE{i}_w"])
+TSV[i]=[np.round(F1h,3),np.round(F1hm,3)]
+"""
+
+"""
+cm1_C_pre1h = pre1h_Matrix(df["PRE1_r"].values, df["PRE1_c"].values,"CY")
+calsmhsTS(cm1_C_pre1h)
+"""
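A short verification sketch against the threshold-based scores and the hourly-class confusion matrix above (the synthetic fields, thresholds, and import path are assumptions; note the 10e-5 guards in the denominators equal 1e-4, not 1e-5):

import numpy as np
from shancx.Calmetrics.calmetricsmatrixLib import TS, POD, FAR, pre1h_Matrix, calsmhsTS  # assumed path

obs = np.random.gamma(0.5, 2.0, size=10000)    # stand-in observed 1-h precipitation (mm)
fcst = np.random.gamma(0.5, 2.0, size=10000)   # stand-in forecast
print(TS(obs, fcst, thresholdR=0.1, thresholdF=0.1))
print(POD(obs, fcst, thresholdR=0.1, thresholdF=0.1))
print(FAR(obs, fcst, thresholdR=0.1, thresholdF=0.1))

cm = pre1h_Matrix(obs, fcst, mode="WTX")       # 5x5 matrix over the hourly rain classes
scores = calsmhsTS(cm)                         # per-class TS, keyed by the Chinese class names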
shancx/Calmetrics/rmseR2score.py
ADDED
@@ -0,0 +1,35 @@
+from sklearn.metrics import mean_squared_error, r2_score
+from scipy.stats import pearsonr
+import numpy as np
+def calculate_metrics(y_true, y_pred):
+    # Calculate metrics
+    correlation, _ = pearsonr(y_true, y_pred)
+    rmse = np.sqrt(mean_squared_error(y_true, y_pred))
+    mask = y_true != 0
+    y_true = y_true[mask]
+    y_pred = y_pred[mask]
+    mape = np.mean(np.abs((y_true - y_pred) / y_true)) * 100
+    r2 = r2_score(y_true, y_pred)
+    rmse = f"{rmse:0.4f}"
+    correlation = f"{correlation:0.4f}"
+    mape = f"{mape:0.4f}"
+    r2 = f"{r2:0.4f}"
+    return rmse,correlation,mape,r2
+"""
+calculate_metrics(data.flatten(),datanpy.flatten())
+"""
+"""
+dt_res[f'{flag}'] = dt_res[f'{flag}'].astype(float)
+dt_res[f'{flag}_p'] = dt_res[f'{flag}_p'].astype(float)
+dt_ = dt_res[(dt_res[f'{flag}'] != 0) & (dt_res[f'{flag}_p'] != 0)]
+dt_ = dt_res[(dt_res[f'{flag}'] < 900000.0) & (dt_res[f'{flag}_p'] < 900000.0)]
+dt_.replace([np.inf, -np.inf], np.nan, inplace=True)
+dt_.dropna(inplace=True)
+correlation, rmse, mape, r2 = calculate_metrics(dt_[f"{flag}"], dt_[f"{flag}_p"])
+
+
+y_true = arry1.flatten()
+y_pred = arry2.flatten()
+correlation, rmse, mape, r2 = calculate_metrics(y_true, y_pred)
+"""
+
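calculate_metrics here returns its values in the order (rmse, correlation, mape, r2), all as formatted strings, which differs from the unpacking order shown in the trailing usage notes. A caller sketch matching the actual return order (the arrays and import path are assumptions):

import numpy as np
from shancx.Calmetrics.rmseR2score import calculate_metrics  # assumed module path

y_true = np.random.rand(500) * 10
y_pred = y_true + np.random.randn(500)
rmse, correlation, mape, r2 = calculate_metrics(y_true, y_pred)  # four formatted strings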
shancx/Clip/__init__.py
ADDED
@@ -0,0 +1,50 @@
+
+
+import numpy as np
+class Bounds:
+    def __init__(self, north, south, west, east):
+        self.n = north  # north boundary
+        self.s = south  # south boundary
+        self.w = west   # west boundary
+        self.e = east   # east boundary
+
+    def __str__(self):
+        return f"n:{self.n}, s:{self.s}, w:{self.w}, e:{self.e}"
+
+def calc_idx(start_val, bound_val, step, data_len):
+    idx = int(np.round(abs(start_val - bound_val) / step + 0.5, 3))
+    return max(0, min(idx, data_len - 1))
+
+def clip(data, bound_start, bound_end, step, pad=False):
+    idx_start = calc_idx(data[0], bound_start, step, len(data))
+    idx_end = calc_idx(data[0], bound_end, step, len(data))
+    if idx_start > idx_end:
+        idx_start, idx_end = idx_end, idx_start
+    if pad:
+        if data[idx_start] < bound_start and idx_start > 0:
+            idx_start -= 1
+        if data[idx_end] > bound_end and idx_end < (len(data) - 1):
+            idx_end += 1
+    return data[idx_start:idx_end + 1]
+
+def clip_lat(bounds, step, pad=False):
+    # Build the latitude array with np.linspace
+    lat = np.linspace(90, -90, int(180 / step) + 1)  # from 90 to -90 with spacing `step`
+    return clip(lat, bounds.n, bounds.s, step, pad)
+
+def clip_lon(bounds, step, pad=False):
+    # Build the longitude array with np.linspace
+    lon = np.linspace(-180, 180, int(360 / step) + 1)  # from -180 to 180 with spacing `step`
+    return clip(lon, bounds.w, bounds.e, step, pad)
+
+"""
+if __name__ == "__main__":
+    from shancx.Clip import Bounds,clip_lat,clip_lon
+    bounds = Bounds(65, -65, 40, 170)
+    lat_clipped = clip_lat(bounds, 0.04)
+    print(f"Clipped Latitude Data Length: {len(lat_clipped)}")
+    lon_clipped = clip_lon(bounds, 0.04)
+    print(f"Clipped Longitude Data Length: {len(lon_clipped)}")
+"""
+
+
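The clip helpers return clipped coordinate arrays rather than indices; a sketch (an assumption, not from the package) of mapping the clipped block back to row/column slices of a global field stored as data[lat, lon]:

import numpy as np
from shancx.Clip import Bounds, clip_lat, clip_lon

step = 0.04
bounds = Bounds(65, -65, 40, 170)
lat_full = np.linspace(90, -90, int(180 / step) + 1)
lon_full = np.linspace(-180, 180, int(360 / step) + 1)
lat_clip = clip_lat(bounds, step)
lon_clip = clip_lon(bounds, step)
r0 = int(np.argmin(np.abs(lat_full - lat_clip[0])))   # first clipped row in the full grid
c0 = int(np.argmin(np.abs(lon_full - lon_clip[0])))   # first clipped column
data = np.random.rand(lat_full.size, lon_full.size)   # stand-in global field
sub = data[r0:r0 + lat_clip.size, c0:c0 + lon_clip.size]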
shancx/Cmd.py
ADDED
@@ -0,0 +1,126 @@
+
+
+import subprocess
+import logging
+from shancx.NN import setlogger
+logger = setlogger(level=logging.INFO)
+def runcommand(cmd, timeout=300):
+    try:
+        result = subprocess.run(
+            cmd
+            ,shell=True
+            ,timeout=timeout
+            ,check=True
+            ,capture_output=True
+            ,text=True
+        )
+        for output, label in [(result.stdout, "output"), (result.stderr, "error output")]:
+            if output:
+                logger.info(f"Command {label}:\n{output}")
+        logger.info("Command succeeded!")
+        return True
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout after {timeout} seconds!")
+        return False
+    except subprocess.CalledProcessError as e:
+        error_msg = e.stderr.strip() if e.stderr else "Unknown error"
+        logger.error(f"Command failed! Code: {e.returncode}, Error: {error_msg}")
+        return False
+    except Exception as e:
+        logger.error(f"Unexpected error: {str(e)}")
+        return False
+
+"""
+allDf.dtypes
+
+vim ~/.bash_history
+git checkout main -- /home/scx/mqpf_pmsc/backup/of_ref_pre.py
+du -sh * | sort -h
+rsync -avid scx@10.1.98.7:/home/scx/test/project ./
+rsync -avid scx@10.1.98.7:/home/scx/ESRGAN-PyTorch-main /home/scx/test/    copies the whole directory and everything under it
+find . -type f -name "project1.log*" -exec rm -f {} \;
+find . -type f -name "project*.log*" -exec rm -f {} \;
+sudo nvidia-smi -i 2 -pm 0
+ssh scx@10.1.98.7 cmd    connect
+
+grep users /etc/group
+sudo groupdel scx
+sudo useradd -u 1015 -g 1015 scx
+id scx
+
+
+more /etc/passwd
+
+sudo usermod -g users scx
+sudo groupdel users1
+sudo groupadd -g 1015 scx
+sudo usermod -g 1015 scx
+id scx
+grep scx /etc/group
+sudo chown -R scx:scx /mnt/wtx_weather_forecast/scx/mqpf_0722_wtyN/
+ssh-keygen -t rsa -b 4096 -C "shanhe12@163.com"
+cat id_rsa111.pub >> /home/scx/.ssh/authorized_keys    CentOS SSH-key login issue; resolved by upgrading to Ubuntu 22.04
+df -h
+data_r_com = np.max([data_r_700, data_r_850], axis=0)
+traceback.format_exc()
+np.unique(pre, return_counts=True)
+data = instantiate_from_config(config.data)    instantiate a class with dynamic arguments
+id scx
+sudo usermod -u 1015 scx
+sudo groupmod -g 1015 scx
+systemctl | grep scx
+sudo loginctl terminate-user scx
+sudo pkill -9 -u scx
+.gitignore
+pip.conf
+w
+python main.py -h
+export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+print("CUDA_VISIBLE_DEVICES:", os.environ.get("CUDA_VISIBLE_DEVICES"))
+cuda:0  export CUDA_VISIBLE_DEVICES=7,1,2,3,4,5,6,0
+tmux kill-session -t 1
+passwd
+pgrep -u scx | wc -l
+sudo chmod -R u+w /mnt/wtx_weather_forecast/scx/sever7/exam/.git/objects    ls -l
+rsync -avid scx@10.1.98.5:
+echo 'alias pgrep="pgrep -u $(whoami) | wc -l"' >> ~/.bashrc    source ~/.bashrc
+netstat -antp
+netstat -antp | grep 140.90.101.79:443 | wc -l
+ps aux|grep scx
+ssh-keygen -t rsa -b 4096
+export PATH="/home/scx1/miniconda3/bin:$PATH"
+for i in reversed(range(n_steps)):
+Every letter and function needs to be understood, and the best way is easy to learn
+nano /home/scx1/miniconda3/envs/mqpf/bin/pip
+/home/scx1/miniconda3/lib/python3.12/site-packages/matplotlib/mpl-data/fonts/ttf/    copy simhei.ttf    rm -rf .cache .cache/matplot/*
+chmod -R u+w ./sever7/mqpf_pmsc/.git/objects/    cannot create regular file './sever7/mqpf_pmsc/.git/objects/: Permission denied
+vim 10w 2b ?scx
+pip install git+
+git config --global user.email "shanhe12@163.com"
+git config --global user.name "shancx"
+df.loc['Average'] = averages
+df.at['Average', 'time1'] = 'Average'
+conda init bash && source ~/.bashrc    init bash
+export CUDA_HOME=$CONDA_PREFIX
+export PATH=$CUDA_HOME/bin:$PATH    .bash or conda init bash && source ~/.bashrc
+unset LD_LIBRARY_PATH
+export LD_LIBRARY_PATH=""
+hostname -I
+locate libcublasLt.so.11    Could not load library libcublasLt.so.11. Error: libcublasLt.so.11: cannot open shared object file: No such file or directory    libcublasLt is an extension of the cuBLAS library; installed with cudatoolkit
+nano /home/scx1/miniconda3/envs/mqpf/bin/nvitop
+DS_BUILD_FUSED_LAMB=1 pip install deepspeed
+DS_BUILD_OPS=1 pip install deepspeed
+xception has occurred: SystemExit (note: full exception trace is shown but execution is paused at: _run_module_as_main)
+CST.strftime("%Y%m%d%H%M")
+TypeError: 'module' object is not callable
+MAILTO="shanhe12@163.com"
+os.environ["CUDA_VISIBLE_DEVICES"] = "0,1,2,3,4,5,6,7"
+python setup.py sdist bdist_wheel
+twine upload dist/*
+pip install setuptools twine
+wmic bios get serialnumber
+wmic diskdrive get serialnumber
+"""
+"""
+sudo chmod a+w /mnt/wtx_weather_forecast/scx/MSG/MSG_Data/2025/20250612/    shared-directory write permission for multiple users
+"""
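A brief usage sketch for runcommand (the command string here is an arbitrary example, not taken from the package):

from shancx.Cmd import runcommand

ok = runcommand("du -sh /tmp | sort -h", timeout=60)  # True on success, False on timeout/failure
if not ok:
    print("command failed or timed out")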
shancx/Config_.py
ADDED
@@ -0,0 +1,26 @@
+import pygrib
+import numpy as np
+import pandas as pd
+import yaml
+def _load_config(config_path: str) :
+    """Load a YAML configuration file"""
+    print(f"load config file Get configuration parameters: {config_path}")
+    with open(config_path, 'r') as f:
+        config = yaml.safe_load(f)
+    return config
+
+import configparser
+import traceback
+def parse_config(path,section,option):
+    cp = configparser.ConfigParser()
+    try:
+        cp.read(path)
+        res = cp.get(section,option)
+    except Exception as e:
+        print(traceback.format_exc())
+        exit()
+    return res
+"""
+Path = "./application.conf"
+radar_path = parse_config(Path, "JY", "radar_path")   # "JY" is the section name, "radar_path" the configured path option
+"""
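A sketch of the two loaders side by side (the file contents below are hypothetical examples; the module itself imports pygrib and pandas at import time, so those need to be installed):

from shancx.Config_ import parse_config, _load_config

with open("application.conf", "w") as f:         # hypothetical INI file
    f.write("[JY]\nradar_path = /data/radar/JY\n")
radar_path = parse_config("application.conf", "JY", "radar_path")

with open("config.yaml", "w") as f:               # hypothetical YAML file
    f.write("data:\n  root: /data/mqpf\n")
cfg = _load_config("config.yaml")                 # -> {'data': {'root': '/data/mqpf'}}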
shancx/Df/DataFrame.py
CHANGED
@@ -3,7 +3,16 @@ def getmask(df,col = 'PRE1_r'):
     df[col] = df[col].mask(df[col] >= 9999, np.nan)
     df = df.dropna()
     return df
-
 def Type(df_,col = 'stationID'):
     df_['stationID'] = df_['stationID'].astype("str")
-    return df_
+    return df_
+def Startswith(df,column_name,value):
+    df = df[df[f'{column_name}'].astype(str).str.startswith(f"{value}")].reset_index()
+    return df
+def Startswith1(df,column_name,value):
+    df = df[~df[f'{column_name}'].astype(str).str.startswith(f"{value}")].reset_index()
+    return df
+def Drop_duplicates(realDF,col):
+    realDF = realDF.drop_duplicates(subset=f"{col}").reset_index()
+    return realDF
+
shancx/Df/__init__.py
CHANGED
@@ -14,4 +14,21 @@ def getmask(df,col = 'PRE1_r'):
 def Type(df_,col = 'stationID'):
     df_['stationID'] = df_['stationID'].astype("str")
     return df_
+def Dtype(df_):
+    dftypes = df_.dtypes
+    print(dftypes)
+    return dftypes
 
+# If env_rang is defined as (north, south, west, east)
+def getrange(df,env_rang=None):
+    north, south, west, east = env_rang
+    filtered_data = df[
+        (df["Lat"] < north) &
+        (df["Lat"] > south) &
+        (df["Lon"] > west) &
+        (df["Lon"] < east)
+    ]
+    return filtered_data
+
+#pd.concat(filter(None, results))
+#valid_results = [df for df in results if isinstance(df, pd.DataFrame) and not df.empty]
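A usage sketch for getrange following the (north, south, west, east) convention noted in the comment above (the point table is synthetic):

import pandas as pd
from shancx.Df import getrange

df = pd.DataFrame({"Lat": [52.1, 30.5, 10.0],
                   "Lon": [110.0, 121.5, 75.0],
                   "PRE1_r": [0.2, 5.1, 0.0]})
inside = getrange(df, env_rang=(55, 15, 70, 140))  # keeps rows strictly inside the box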
shancx/Df/tool.py
ADDED
File without changes
shancx/Diffm/Psamples.py
ADDED
@@ -0,0 +1,18 @@
+import torch
+def p_sample_loop(model,shape,n_steps,betas,one_minus_alphas_bar_sqrt):
+    cur_x = torch.randn(shape)
+    x_seq = [cur_x]
+    for i in reversed(range(n_steps)):
+        cur_x = p_sample(model,cur_x,i,betas,one_minus_alphas_bar_sqrt)
+        x_seq.append(cur_x)
+    return x_seq
+
+def p_sample(model,x,t,betas,one_minus_alphas_bar_sqrt):
+    t = torch.tensor([t])
+    coeff = betas[t] / one_minus_alphas_bar_sqrt[t]
+    eps_theta = model(x,t)
+    mean = (1/(1-betas[t]).sqrt())*(x-(coeff*eps_theta))
+    z = torch.randn_like(x)
+    sigma_t = betas[t].sqrt()
+    sample = mean + sigma_t * z
+    return (sample)
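A minimal sketch of driving the reverse-diffusion loop above (the toy noise predictor and the linear beta schedule are assumptions, not part of the package):

import torch
from shancx.Diffm.Psamples import p_sample_loop

n_steps = 100
betas = torch.linspace(1e-4, 0.02, n_steps)                 # linear noise schedule
alphas_bar = torch.cumprod(1.0 - betas, dim=0)
one_minus_alphas_bar_sqrt = torch.sqrt(1.0 - alphas_bar)

class ToyEps(torch.nn.Module):
    def forward(self, x, t):            # stands in for a trained epsilon-predictor
        return torch.zeros_like(x)

x_seq = p_sample_loop(ToyEps(), shape=(16, 2), n_steps=n_steps,
                      betas=betas, one_minus_alphas_bar_sqrt=one_minus_alphas_bar_sqrt)
print(len(x_seq))                       # n_steps + 1 states, from pure noise to the final sample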
shancx/Diffm/__init__.py
ADDED
File without changes