shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166) hide show
  1. shancx/3D/__init__.py +25 -0
  2. shancx/Algo/Class.py +11 -0
  3. shancx/Algo/CudaPrefetcher1.py +112 -0
  4. shancx/Algo/Fake_image.py +24 -0
  5. shancx/Algo/Hsml.py +391 -0
  6. shancx/Algo/L2Loss.py +10 -0
  7. shancx/Algo/MetricTracker.py +132 -0
  8. shancx/Algo/Normalize.py +66 -0
  9. shancx/Algo/OptimizerWScheduler.py +38 -0
  10. shancx/Algo/Rmageresize.py +79 -0
  11. shancx/Algo/Savemodel.py +33 -0
  12. shancx/Algo/SmoothL1_losses.py +27 -0
  13. shancx/Algo/Tqdm.py +62 -0
  14. shancx/Algo/__init__.py +121 -0
  15. shancx/Algo/checknan.py +28 -0
  16. shancx/Algo/iouJU.py +83 -0
  17. shancx/Algo/mask.py +25 -0
  18. shancx/Algo/psnr.py +9 -0
  19. shancx/Algo/ssim.py +70 -0
  20. shancx/Algo/structural_similarity.py +308 -0
  21. shancx/Algo/tool.py +704 -0
  22. shancx/Calmetrics/__init__.py +97 -0
  23. shancx/Calmetrics/calmetrics.py +14 -0
  24. shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
  25. shancx/Calmetrics/rmseR2score.py +35 -0
  26. shancx/Clip/__init__.py +50 -0
  27. shancx/Cmd.py +126 -0
  28. shancx/Config_.py +26 -0
  29. shancx/Df/DataFrame.py +11 -2
  30. shancx/Df/__init__.py +17 -0
  31. shancx/Df/tool.py +0 -0
  32. shancx/Diffm/Psamples.py +18 -0
  33. shancx/Diffm/__init__.py +0 -0
  34. shancx/Diffm/test.py +207 -0
  35. shancx/Doc/__init__.py +214 -0
  36. shancx/E/__init__.py +178 -152
  37. shancx/Fillmiss/__init__.py +0 -0
  38. shancx/Fillmiss/imgidwJU.py +46 -0
  39. shancx/Fillmiss/imgidwLatLonJU.py +82 -0
  40. shancx/Gpu/__init__.py +55 -0
  41. shancx/H9/__init__.py +126 -0
  42. shancx/H9/ahi_read_hsd.py +877 -0
  43. shancx/H9/ahisearchtable.py +298 -0
  44. shancx/H9/geometry.py +2439 -0
  45. shancx/Hug/__init__.py +81 -0
  46. shancx/Inst.py +22 -0
  47. shancx/Lib.py +31 -0
  48. shancx/Mos/__init__.py +37 -0
  49. shancx/NN/__init__.py +235 -106
  50. shancx/Path1.py +161 -0
  51. shancx/Plot/GlobMap.py +276 -116
  52. shancx/Plot/__init__.py +491 -1
  53. shancx/Plot/draw_day_CR_PNG.py +4 -21
  54. shancx/Plot/exam.py +116 -0
  55. shancx/Plot/plotGlobal.py +325 -0
  56. shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
  57. shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
  58. shancx/Point.py +46 -0
  59. shancx/QC.py +223 -0
  60. shancx/RdPzl/__init__.py +32 -0
  61. shancx/Read.py +72 -0
  62. shancx/Resize.py +79 -0
  63. shancx/SN/__init__.py +62 -123
  64. shancx/Time/GetTime.py +9 -3
  65. shancx/Time/__init__.py +66 -1
  66. shancx/Time/timeCycle.py +302 -0
  67. shancx/Time/tool.py +0 -0
  68. shancx/Train/__init__.py +74 -0
  69. shancx/Train/makelist.py +187 -0
  70. shancx/Train/multiGpu.py +27 -0
  71. shancx/Train/prepare.py +161 -0
  72. shancx/Train/renet50.py +157 -0
  73. shancx/ZR.py +12 -0
  74. shancx/__init__.py +333 -262
  75. shancx/args.py +27 -0
  76. shancx/bak.py +768 -0
  77. shancx/df2database.py +62 -2
  78. shancx/geosProj.py +80 -0
  79. shancx/info.py +38 -0
  80. shancx/netdfJU.py +231 -0
  81. shancx/sendM.py +59 -0
  82. shancx/tensBoard/__init__.py +28 -0
  83. shancx/wait.py +246 -0
  84. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
  85. shancx-1.9.33.218.dist-info/RECORD +91 -0
  86. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
  87. my_timer_decorator/__init__.py +0 -10
  88. shancx/Dsalgor/__init__.py +0 -19
  89. shancx/E/DFGRRIB.py +0 -30
  90. shancx/EN/DFGRRIB.py +0 -30
  91. shancx/EN/__init__.py +0 -148
  92. shancx/FileRead.py +0 -44
  93. shancx/Gray2RGB.py +0 -86
  94. shancx/M/__init__.py +0 -137
  95. shancx/MN/__init__.py +0 -133
  96. shancx/N/__init__.py +0 -131
  97. shancx/Plot/draw_day_CR_PNGUS.py +0 -206
  98. shancx/Plot/draw_day_CR_SVG.py +0 -275
  99. shancx/Plot/draw_day_pre_PNGUS.py +0 -205
  100. shancx/Plot/glob_nation_map.py +0 -116
  101. shancx/Plot/radar_nmc.py +0 -61
  102. shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
  103. shancx/Plot/radar_nmc_china_map_f.py +0 -121
  104. shancx/Plot/radar_nmc_us_map_f.py +0 -128
  105. shancx/Plot/subplots_compare_devlop.py +0 -36
  106. shancx/Plot/subplots_single_china_map.py +0 -45
  107. shancx/S/__init__.py +0 -138
  108. shancx/W/__init__.py +0 -132
  109. shancx/WN/__init__.py +0 -132
  110. shancx/code.py +0 -331
  111. shancx/draw_day_CR_PNG.py +0 -200
  112. shancx/draw_day_CR_PNGUS.py +0 -206
  113. shancx/draw_day_CR_SVG.py +0 -275
  114. shancx/draw_day_pre_PNGUS.py +0 -205
  115. shancx/makenetCDFN.py +0 -42
  116. shancx/mkIMGSCX.py +0 -92
  117. shancx/netCDF.py +0 -130
  118. shancx/radar_nmc_china_map_compare1.py +0 -50
  119. shancx/radar_nmc_china_map_f.py +0 -125
  120. shancx/radar_nmc_us_map_f.py +0 -67
  121. shancx/subplots_compare_devlop.py +0 -36
  122. shancx/tool.py +0 -18
  123. shancx/user/H8mess.py +0 -317
  124. shancx/user/__init__.py +0 -137
  125. shancx/user/cinradHJN.py +0 -496
  126. shancx/user/examMeso.py +0 -293
  127. shancx/user/hjnDAAS.py +0 -26
  128. shancx/user/hjnFTP.py +0 -81
  129. shancx/user/hjnGIS.py +0 -320
  130. shancx/user/hjnGPU.py +0 -21
  131. shancx/user/hjnIDW.py +0 -68
  132. shancx/user/hjnKDTree.py +0 -75
  133. shancx/user/hjnLAPSTransform.py +0 -47
  134. shancx/user/hjnMiscellaneous.py +0 -182
  135. shancx/user/hjnProj.py +0 -162
  136. shancx/user/inotify.py +0 -41
  137. shancx/user/matplotlibMess.py +0 -87
  138. shancx/user/mkNCHJN.py +0 -623
  139. shancx/user/newTypeRadar.py +0 -492
  140. shancx/user/test.py +0 -6
  141. shancx/user/tlogP.py +0 -129
  142. shancx/util_log.py +0 -33
  143. shancx/wtx/H8mess.py +0 -315
  144. shancx/wtx/__init__.py +0 -151
  145. shancx/wtx/cinradHJN.py +0 -496
  146. shancx/wtx/colormap.py +0 -64
  147. shancx/wtx/examMeso.py +0 -298
  148. shancx/wtx/hjnDAAS.py +0 -26
  149. shancx/wtx/hjnFTP.py +0 -81
  150. shancx/wtx/hjnGIS.py +0 -330
  151. shancx/wtx/hjnGPU.py +0 -21
  152. shancx/wtx/hjnIDW.py +0 -68
  153. shancx/wtx/hjnKDTree.py +0 -75
  154. shancx/wtx/hjnLAPSTransform.py +0 -47
  155. shancx/wtx/hjnLog.py +0 -78
  156. shancx/wtx/hjnMiscellaneous.py +0 -201
  157. shancx/wtx/hjnProj.py +0 -161
  158. shancx/wtx/inotify.py +0 -41
  159. shancx/wtx/matplotlibMess.py +0 -87
  160. shancx/wtx/mkNCHJN.py +0 -613
  161. shancx/wtx/newTypeRadar.py +0 -492
  162. shancx/wtx/test.py +0 -6
  163. shancx/wtx/tlogP.py +0 -129
  164. shancx-1.8.92.dist-info/RECORD +0 -99
  165. /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
  166. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/Diffm/test.py ADDED
@@ -0,0 +1,207 @@
1
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import make_s_curve
import matplotlib.pyplot as plt  # NOTE: duplicate import of pyplot (harmless no-op)
from shancx import crDir

# Toy 2-D dataset: an S-curve projected onto the (x, z) plane and shrunk
# into roughly [-0.15, 0.15] so the diffusion model works on small values.
s_curve, _ = make_s_curve(10**4, noise=0.1)
s_curve = s_curve[:, [0, 2]] / 10.0
dataset = torch.Tensor(s_curve).float()

# Number of diffusion timesteps T.
num_steps = 100

# Define the beta schedule: a sigmoid ramp mapped into [1e-5, 5e-3],
# i.e. noise grows smoothly from almost zero to 0.5% per step.
betas = torch.linspace(-6, 6, num_steps)
betas = torch.sigmoid(betas) * (0.5e-2 - 1e-5) + 1e-5

# Standard DDPM quantities derived from beta:
#   alphas          = 1 - beta_t
#   alphas_prod     = cumulative product alpha-bar_t
#   alphas_prod_p   = alpha-bar_{t-1} (shifted, with alpha-bar_0 := 1)
alphas = 1 - betas
alphas_prod = torch.cumprod(alphas, 0)
alphas_prod_p = torch.cat([torch.tensor([1]).float(), alphas_prod[:-1]], 0)
alphas_bar_sqrt = torch.sqrt(alphas_prod)
one_minus_alphas_bar_log = torch.log(1 - alphas_prod)
one_minus_alphas_bar_sqrt = torch.sqrt(1 - alphas_prod)

# Sanity check: every derived schedule tensor has shape (num_steps,).
assert alphas.shape == alphas_prod.shape == alphas_prod_p.shape == \
    alphas_bar_sqrt.shape == one_minus_alphas_bar_log.shape == \
    one_minus_alphas_bar_sqrt.shape
25
class MLPDiffusion(nn.Module):
    """Small MLP noise predictor for 2-D diffusion.

    Three Linear+ReLU stages (2 -> num_units -> num_units -> num_units)
    followed by a final projection back to 2-D. A learned timestep
    embedding of width ``num_units`` is added after each Linear, before
    its ReLU.
    """

    def __init__(self, n_steps, num_units=128):
        super(MLPDiffusion, self).__init__()
        # Build [Linear, ReLU] x 3 then the output head; layout matches
        # the indexing scheme used in forward (2*i / 2*i+1 / -1).
        stage_dims = [(2, num_units), (num_units, num_units), (num_units, num_units)]
        layers = []
        for d_in, d_out in stage_dims:
            layers.append(nn.Linear(d_in, d_out))
            layers.append(nn.ReLU())
        layers.append(nn.Linear(num_units, 2))
        self.linears = nn.ModuleList(layers)
        # One timestep-embedding table per hidden stage.
        self.step_embeddings = nn.ModuleList(
            nn.Embedding(n_steps, num_units) for _ in range(3)
        )

    def forward(self, x, t):
        # For each stage: Linear -> add timestep embedding -> ReLU.
        for stage, embed in enumerate(self.step_embeddings):
            x = self.linears[2 * stage](x)
            x = x + embed(t)
            x = self.linears[2 * stage + 1](x)
        # Final projection back to the 2-D data space.
        return self.linears[-1](x)
51
+ import torch
52
+ import torch.nn as nn
53
+ import torch
54
+ import torch.nn as nn
55
class UNetDiffusion(nn.Module):
    """Encoder/decoder MLP noise predictor with per-stage timestep embeddings.

    Widths: 2 -> u -> 2u -> 4u -> 2u -> u -> 2 (u = ``num_units``).

    Bug fixes versus the original:
      * forward() applied each encoder TWICE per stage (the second call fed a
        ``num_units``-wide tensor back into ``Linear(2, num_units)`` and
        crashed with a shape error);
      * all three ``step_embeddings`` were ``num_units`` wide, so adding them
        to the 2u-/4u-wide encoder2/encoder3 outputs could not broadcast.
        Each embedding is now sized to its stage's output width.
    """

    def __init__(self, n_steps, num_units=128):
        super(UNetDiffusion, self).__init__()
        self.encoder1 = nn.Sequential(
            nn.Linear(2, num_units),
            nn.ReLU(),
            nn.Linear(num_units, num_units),
            nn.ReLU()
        )
        self.encoder2 = nn.Sequential(
            nn.Linear(num_units, num_units * 2),
            nn.ReLU(),
            nn.Linear(num_units * 2, num_units * 2),
            nn.ReLU()
        )
        self.encoder3 = nn.Sequential(
            nn.Linear(num_units * 2, num_units * 4),
            nn.ReLU(),
            nn.Linear(num_units * 4, num_units * 4),
            nn.ReLU()
        )
        self.decoder3 = nn.Sequential(
            nn.Linear(num_units * 4, num_units * 2),
            nn.ReLU(),
            nn.Linear(num_units * 2, num_units * 2),
            nn.ReLU()
        )
        self.decoder2 = nn.Sequential(
            nn.Linear(num_units * 2, num_units),
            nn.ReLU(),
            nn.Linear(num_units, num_units),
            nn.ReLU()
        )
        self.decoder1 = nn.Sequential(
            nn.Linear(num_units, 2),
        )
        # One embedding table per encoder stage, matching that stage's width.
        self.step_embeddings = nn.ModuleList([
            nn.Embedding(n_steps, num_units),
            nn.Embedding(n_steps, num_units * 2),
            nn.Embedding(n_steps, num_units * 4),
        ])

    def forward(self, x, t):
        """Predict noise for inputs ``x`` (B, 2) at integer timesteps ``t`` (B,)."""
        encoders = (self.encoder1, self.encoder2, self.encoder3)
        # Encode stage by stage, injecting the timestep embedding after each.
        for encoder, embedding_layer in zip(encoders, self.step_embeddings):
            x = encoder(x)
            x = x + embedding_layer(t)
        # Decoder: restore the original 2-D width layer by layer.
        x = self.decoder3(x)
        x = self.decoder2(x)
        x = self.decoder1(x)
        return x
120
def downsample_image(x, scale_factor=0.5, device=None):
    """Scale the values of ``x`` by ``scale_factor``.

    NOTE(review): despite the name, this scales amplitudes rather than
    spatial resolution — confirm intent. ``device`` is accepted but unused;
    kept for interface compatibility.
    """
    return x * scale_factor
123
def p_sample(model, x, t, betas, one_minus_alphas_bar_sqrt, device):
    """One reverse-diffusion step: sample x_{t-1} given x_t.

    ``t`` is a plain Python int; it is wrapped into a length-1 tensor for
    both schedule indexing and the model's embedding lookup.
    Note 1/sqrt(1 - beta_t) == 1/sqrt(alpha_t), the standard DDPM mean scale.
    """
    step = torch.tensor([t]).to(device)  # Move t to device
    beta = betas[step].to(device)
    coeff = beta / one_minus_alphas_bar_sqrt[step].to(device)
    eps_theta = model(x, step)
    # Posterior mean of x_{t-1} under the epsilon-prediction parameterization.
    mean = (1 / (1 - beta).sqrt()) * (x - (coeff * eps_theta))
    noise = torch.randn_like(x).to(device)
    sigma_t = beta.sqrt()
    # NOTE(review): noise is added even at t == 0; DDPM normally skips it on
    # the final step — confirm whether that is intended here.
    return mean + sigma_t * noise
137
def p_sample_loop(model, shape, n_steps, betas, one_minus_alphas_bar_sqrt, device):
    """Run the full reverse chain from pure Gaussian noise.

    Returns the whole trajectory ``[x_T, x_{T-1}, ..., x_0]`` — that is,
    ``n_steps + 1`` tensors, noisiest first.
    """
    betas = betas.to(device)
    one_minus_alphas_bar_sqrt = one_minus_alphas_bar_sqrt.to(device)
    # Start from standard-normal noise on the target device.
    trajectory = [torch.randn(shape).to(device)]
    for step in reversed(range(n_steps)):
        nxt = p_sample(model, trajectory[-1], step, betas, one_minus_alphas_bar_sqrt, device)
        trajectory.append(nxt)
    return trajectory
146
def diffusion_loss_fn(model, x_0, alphas_bar_sqrt, one_minus_alphas_bar_sqrt, n_steps, device):
    """Epsilon-prediction diffusion training loss (MSE between true and predicted noise).

    A timestep is drawn per sample using antithetic pairs (t, n_steps-1-t) to
    reduce variance; x_t is formed as
    ``sqrt(alpha-bar_t) * x_0 + sqrt(1 - alpha-bar_t) * eps``.

    Fix: the original drew ``batch_size // 2`` timesteps and doubled them,
    producing ``batch_size - 1`` entries (and a broadcast crash) for odd
    batch sizes. We now draw ceil(batch/2) and trim — identical behavior for
    even batches.
    """
    x_0 = x_0.to(device)
    alphas_bar_sqrt = alphas_bar_sqrt.to(device)
    one_minus_alphas_bar_sqrt = one_minus_alphas_bar_sqrt.to(device)

    batch_size = x_0.shape[0]
    half = (batch_size + 1) // 2
    t = torch.randint(0, n_steps, size=(half,)).to(device)
    # Antithetic pairing, trimmed so odd batch sizes also get exactly
    # batch_size timesteps.
    t = torch.cat([t, n_steps - 1 - t], dim=0)[:batch_size]
    t = t.unsqueeze(-1)

    # Per-sample schedule coefficients, shape (batch, 1) for broadcasting.
    a = alphas_bar_sqrt[t].to(device)
    aml = one_minus_alphas_bar_sqrt[t].to(device)

    e = torch.randn_like(x_0).to(device)
    x = x_0 * a + e * aml  # forward-process sample x_t

    output = model(x, t.squeeze(-1))
    return (e - output).square().mean()
164
def super_resolution_loss(model, x_0, alphas_bar_sqrt, one_minus_alphas_bar_sqrt, n_steps, lr_scale=0.5, device=None):
    """Combined objective: diffusion loss on a low-amplitude copy of x_0 plus
    an MSE term asking the model to reconstruct the original x_0 from it.

    NOTE(review): ``downsample_image`` only rescales values (no spatial
    downsampling) — confirm that matches the "super resolution" intent.
    """
    x_0 = x_0.to(device)
    low_res = downsample_image(x_0, scale_factor=lr_scale, device=device)

    diffusion_term = diffusion_loss_fn(
        model, low_res, alphas_bar_sqrt, one_minus_alphas_bar_sqrt, n_steps, device=device
    ).mean()

    # Reconstruction at the last timestep index.
    last_step = torch.tensor([n_steps - 1]).to(device)
    reconstruction = model(low_res, last_step)
    reconstruction_term = F.mse_loss(reconstruction, x_0).mean()

    return (diffusion_term + reconstruction_term).mean()
174
# ---- Training / sampling driver (runs at module import time) ----
# NOTE(review): hard-coded to GPU index 1 when CUDA is available — confirm.
device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
# model = MLPDiffusion(n_steps=100).to(device)
# optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
model = UNetDiffusion(n_steps=100).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
# Training Loop
num_epoch = 4000
for t in range(num_epoch):
    # NOTE(review): batch_size=2 is tiny for 10k points — confirm intended.
    for idx, batch_x in enumerate(torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True)):
        batch_x = batch_x.to(device)
        loss = super_resolution_loss(model, batch_x, alphas_bar_sqrt, one_minus_alphas_bar_sqrt, num_steps, device=device)
        optimizer.zero_grad()
        loss.backward()
        # Clip gradients to unit norm for stability.
        torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
        optimizer.step()
    if t % 100 == 0:
        # Logs only the loss of the final batch of the epoch.
        print(f'Epoch {t}, Loss: {loss.item()}')

# Sampling and visualization
# x_seq holds num_steps + 1 tensors: [x_T, ..., x_0].
x_seq = p_sample_loop(model, dataset.shape, num_steps, betas, one_minus_alphas_bar_sqrt, device)

fig, axs = plt.subplots(1, 10, figsize=(28, 3))
for i in range(1, 11):
    # NOTE(review): p_sample_loop never yields None, so this guard is
    # effectively dead code — kept as-is.
    if x_seq[i * 10] is not None:
        cur_x = x_seq[i * 10].detach().cpu()
        axs[i - 1].scatter(cur_x[:, 0], cur_x[:, 1], color='red', edgecolor='white')
        axs[i - 1].set_axis_off()
        # Title shows q(x_t) at every 10th timestep.
        axs[i - 1].set_title(f'$q(\\mathbf{{x}}_{{{i * 10}}})$')
    else:
        print(f"Warning: x_seq[{i * 10}] is None.")
plt.tight_layout()
# After the loop, t == num_epoch - 1; it names the output file.
outpath = f'./pngresult/{t}_Epoch_scatter_plot.png'
crDir(outpath)
plt.savefig(outpath, dpi=300)
shancx/Doc/__init__.py ADDED
@@ -0,0 +1,214 @@
1
+ from docx import Document
2
+ from docx.shared import Inches
3
+ from docx.shared import Pt
4
+ from functools import partial
5
def tableDOC(doc, data, row, col, title):
    """Append a titled grid table (with row/column headers) to a Word document.

    Parameters
    ----------
    doc : docx.Document
        Target document; mutated in place.
    data : sequence of sequences
        Cell values, one inner sequence per data row.
    row : list of str
        Row header labels (observed categories).
    col : list of str
        Column header labels (forecast categories).
    title : str
        Paragraph text inserted above the table.
    """
    row_labels = row
    col_labels = col
    doc.add_paragraph(title)
    table = doc.add_table(rows=1 + len(data), cols=1 + len(col_labels))
    table.style = 'Table Grid'
    header_cell = table.cell(0, 0)
    # Fix: the original literal "实况\预报" used the invalid escape "\预"
    # (SyntaxWarning on Python 3.12+). "\\" yields the same runtime text.
    header_cell.text = "实况\\预报"
    header_cell.paragraphs[0].runs[0].font.bold = False
    for idx, label in enumerate(col_labels):
        cell = table.cell(0, idx+1)
        cell.text = label
        cell.paragraphs[0].runs[0].font.bold = False
    # Loop variable renamed from "row" so it no longer shadows the parameter.
    for row_idx, (label, row_values) in enumerate(zip(row_labels, data)):
        table.cell(row_idx + 1, 0).text = label
        table.cell(row_idx + 1, 0).paragraphs[0].runs[0].font.bold = False
        for col_idx, value in enumerate(row_values):
            table.cell(row_idx + 1, col_idx + 1).text = str(value)
23
def add_heading(doc, text):
    """Add a level-1 heading and set its font size to 18 pt.

    NOTE(review): assigning via ``heading.style.font.size`` mutates the
    shared 'Heading 1' style object, so the size applies to every level-1
    heading in the document — confirm that is intended.
    """
    doc.add_heading(text, level=1).style.font.size = Pt(18)
26
def add_text_to_doc(doc, text):
    """Append *text* to *doc* as a paragraph using the 'Body Text' style."""
    paragraph = doc.add_paragraph(text)
    body_style = doc.styles['Body Text']
    paragraph.style = body_style
29
def add_image_to_doc(doc, image_path):
    """Insert the image at *image_path* into *doc* at a 5-inch display width."""
    display_width = Inches(5.0)
    doc.add_picture(image_path, width=display_width)
31
def partialFN(tabledata):
    """Bind tableDOC's row/col/title from a spec dict.

    *tabledata* must supply "labels", "columns", and "title"; the returned
    callable then only needs ``(doc, data=...)``.
    """
    bound_kwargs = {
        "row": tabledata["labels"],
        "col": tabledata["columns"],
        "title": tabledata["title"],
    }
    return partial(tableDOC, **bound_kwargs)
33
+
34
# Confusion-matrix table specs consumed by partialFN / tableDOC.
# "labels" are the row headers (observed), "columns" the column headers
# (forecast); "title" is rendered above the table.
# Fix: dropped the f-prefix from titles — they contain no placeholders
# (ruff F541); the string values are unchanged.
dataCY = {
    "labels": ["晴", "雨"],
    "columns": ["晴", "雨"],
    "title": "彩云10min晴雨混淆矩阵",
}
dataWTX = {
    "labels": ["晴", "雨"],
    "columns": ["晴", "雨"],
    "title": "维天信10min晴雨混淆矩阵",
}
data2CY = {
    "labels": ["晴", "雨"],
    "columns": ["晴", "雨"],
    "title": "彩云20min晴雨混淆矩阵",
}
data2WTX = {
    "labels": ["晴", "雨"],
    "columns": ["晴", "雨"],
    "title": "维天信20min晴雨混淆矩阵",
}
54
+
55
+ # {0: ['0.016', '0.976', '6157.69%'], 1: ['0.177', '0.892', '405.15%']} dict_values([['0.386', '0.921', '138.33%'], ['0.399', '0.819', '105.52%']]) 生成word表格输入数据
56
+
57
+ # from docx import Document
58
+ # doc = Document()
59
+ # doc.save("./test6.docx")
60
+
61
+ """
62
+
63
+ import pandas as pd
64
+ from shancx.Dsalgor.matrixLibJU import TS ,ACC ,F1 ,FAR ,PO ,sun_rain_Matrix ,pre1h_Matrix
65
+ from shancx.DOCJU import partialFN,add_text_to_doc,add_image_to_doc,add_heading
66
+ import numpy as np
67
+
68
+ from docx import Document
69
+
70
+
71
+ dataCY ={
72
+ "labels": ["晴", "雨"],
73
+ "columns": ["晴", "雨"],
74
+ "title": f"彩云1H晴雨混淆矩阵"
75
+ }
76
+ dataWTX ={
77
+ "labels": ["晴", "雨"],
78
+ "columns": ["晴", "雨"],
79
+ "title": f"维天信1H晴雨混淆矩阵"
80
+ }
81
+ data2CY ={
82
+ "labels": ["晴", "雨"],
83
+ "columns": ["晴", "雨"],
84
+ "title": f"彩云2H晴雨混淆矩阵"
85
+ }
86
+ data2WTX ={
87
+ "labels": ["晴", "雨"],
88
+ "columns": ["晴", "雨"],
89
+ "title": f"维天信2H晴雨混淆矩阵"
90
+ }
91
+
92
+ TSVD= {
93
+ "labels": ["1h", "2h"],
94
+ "columns":["彩云", "维天信", "提升百分比"],
95
+ "title": "TS评分"
96
+ }
97
+ F1VD = {
98
+ "labels": ["1h", "2h"],
99
+ "columns":["彩云", "维天信", "提升百分比"],
100
+ "title": "F1评分"
101
+ }
102
+ ACCVD= {
103
+ "labels": ["1h", "2h"],
104
+ "columns":["彩云", "维天信", "提升百分比"],
105
+ "title": "准确率评分"
106
+ }
107
+ POVD = {
108
+ "labels": ["1h", "2h"],
109
+ "columns":["彩云", "维天信", "提升百分比"],
110
+ "title": "漏报率评分 "
111
+ }
112
+ FARVD = {
113
+ "labels": ["1h", "2h"],
114
+ "columns":["彩云", "维天信", "提升百分比"],
115
+ "title": "空报率评分 "
116
+ }
117
+
118
+ w_1h_bigD = {
119
+ "labels": ["晴", "小雨", "中雨", "大雨", "暴雨"],
120
+ "columns": ["晴", "小雨", "中雨", "大雨", "暴雨"],
121
+ "title": f"维天信1小时预报"
122
+ }
123
+ w_2h_bigD = {
124
+ "labels": ["晴", "小雨", "中雨", "大雨", "暴雨"],
125
+ "columns": ["晴", "小雨", "中雨", "大雨", "暴雨"],
126
+ "title": f"维天信2小时预报"
127
+ }
128
+
129
+ dataCY = partialFN(dataCY)
130
+ dataWTX = partialFN(dataWTX)
131
+ data2CY = partialFN(data2CY)
132
+ data2WTX = partialFN(data2WTX)
133
+ TSVD = partialFN(TSVD)
134
+ F1VD = partialFN(F1VD)
135
+ ACCVD = partialFN(ACCVD)
136
+ POVD = partialFN(POVD)
137
+ FARVD = partialFN(FARVD)
138
+ w_1h_bigD = partialFN(w_1h_bigD)
139
+ w_2h_bigD = partialFN(w_2h_bigD)
140
+
141
+
142
+ basepathcsv = "/mnt/wtx_weather_forecast/scx/EXAMDATACSV10min/10min_202412240000_202412250000.csv"
143
+ df = pd.read_csv(basepathcsv)
144
+ TSV = TS(df["PRE1_r"], df["PRE1_w"], thresholdR=0.1, thresholdF=0.1)
145
+
146
+ TSV = {}
147
+ for i in range(1,3):
148
+ F1h =TS( df[f"PRE{i}_r"],df[f"PRE{i}_c"],thresholdR=0.1, thresholdF=0.031)
149
+ F1hm =TS(df[f"PRE{i}_r"],df[f"PRE{i}_w"],thresholdR=0.1, thresholdF=0.1)
150
+ TSV[i]=[np.round(F1h,3),np.round(F1hm,3),f"{np.round((F1hm-F1h)/F1h*100,2)}%"]
151
+ print(f"{i}h {np.round(F1h,3)} {np.round(F1hm,3)} {np.round((F1hm-F1h)/F1h*100,2)}%")
152
+ print("F1")
153
+ print("time", "CY", "WTX")
154
+ F1V = {}
155
+ for i in range(1,3):
156
+ F1h =F1(df[f"PRE{i}_r"], df[f"PRE{i}_c"],thresholdR=0.1, thresholdF=0.031)
157
+ F1hm =F1(df[f"PRE{i}_r"], df[f"PRE{i}_w"],thresholdR=0.1, thresholdF=0.1)
158
+ F1V[i] = [np.round(F1h, 3), np.round(F1hm, 3),f"{np.round((F1hm-F1h)/F1h*100,2)}%"]
159
+ print(f"{i}h {np.round(F1h,3)} {np.round(F1hm,3)} {np.round((F1hm-F1h)/F1h*100,2)}%")
160
+ print("ACC")
161
+ print("time", "CY", "WTX")
162
+ ACCV = {}
163
+ for i in range(1, 3):
164
+ F1h = ACC(df[f"PRE{i}_r"], df[f"PRE{i}_c"],thresholdR=0.1, thresholdF=0.031)
165
+ F1hm = ACC(df[f"PRE{i}_r"], df[f"PRE{i}_w"],thresholdR=0.1, thresholdF=0.1)
166
+ ACCV[i] = [np.round(F1h, 3), np.round(F1hm, 3),f"{np.round((F1hm-F1h)/F1h*100,2)}%"]
167
+ print(f"{i}h {np.round(F1h, 3)} {np.round(F1hm, 3)} {np.round((F1hm - F1h) / F1h * 100, 2)}%")
168
+ print("PO")
169
+ print("time", "CY", "WTX")
170
+ POV = {}
171
+ for i in range(1,3):
172
+ F1h =PO(df[f"PRE{i}_r"], df[f"PRE{i}_c"],thresholdR=0.1, thresholdF=0.031)
173
+ F1hm =PO(df[f"PRE{i}_r"], df[f"PRE{i}_w"],thresholdR=0.1, thresholdF=0.1)
174
+ POV[i] = [np.round(F1h, 3), np.round(F1hm, 3),f"{np.round((F1hm-F1h)/F1h*100,2)*-1}%"]
175
+ print(f"{i}h {np.round(F1h,3)} {np.round(F1hm,3)} {np.round((F1hm-F1h)/F1h*100,2)*-1}%")
176
+ print("FAR")
177
+ print("time", "CY", "WTX")
178
+ FARV = {}
179
+ for i in range(1, 3):
180
+ F1h = FAR(df[f"PRE{i}_r"], df[f"PRE{i}_c"],thresholdR=0.1, thresholdF=0.031)
181
+ F1hm = FAR(df[f"PRE{i}_r"], df[f"PRE{i}_w"],thresholdR=0.1, thresholdF=0.1)
182
+ FARV[i] = [np.round(F1h, 3), np.round(F1hm, 3),f"{np.round((F1hm-F1h)/F1h*100,2)*-1}%"]
183
+ print(f"{i}h {np.round(F1h, 3)} {np.round(F1hm, 3)} {np.round((F1hm-F1h)/F1h*100,2)*-1}%")
184
+
185
+ cm1_C = sun_rain_Matrix(df["PRE1_r"].values, df["PRE1_c"].values,0.031)
186
+ cm1_W = sun_rain_Matrix(df["PRE1_r"].values, df["PRE1_w"].values,0.1)
187
+ cm2_C = sun_rain_Matrix(df["PRE2_r"].values, df["PRE2_c"].values,0.031)
188
+ cm2_W = sun_rain_Matrix(df["PRE2_r"].values, df["PRE2_w"].values,0.1)
189
+
190
+ cm1_W_pre1h = pre1h_Matrix(df["PRE1_r"].values, df["PRE1_w"].values,"WTX")
191
+ cm2_W_pre1h = pre1h_Matrix(df["PRE2_r"].values, df["PRE2_w"].values,"WTX")
192
+
193
+ doc = Document() #CY_fn(doc,data =
194
+ add_heading(doc,"测试报告样例")
195
+ dataCY (doc,data = cm1_C)
196
+ dataWTX (doc,data =cm1_W )
197
+ data2CY (doc,data = cm2_C)
198
+ data2WTX (doc,data = cm2_W)
199
+ TSVD (doc,data =TSV.values() )
200
+ F1VD (doc,data =F1V.values() )
201
+ ACCVD (doc,data = ACCV.values())
202
+ add_image_to_doc(doc,"./TSF1.png")
203
+ add_text_to_doc(doc,"TS评分、F1和准确率ACC对比")
204
+ add_image_to_doc(doc,"./POFAR.png")
205
+ add_text_to_doc(doc,"漏报PO、空报FAR对比")
206
+ POVD (doc,data = POV.values())
207
+ FARVD (doc,data = FARV.values())
208
+ w_1h_bigD(doc,data = cm1_W_pre1h)
209
+ w_2h_bigD(doc,data = cm2_W_pre1h)
210
+ add_image_to_doc(doc,"bigsmall.png")
211
+ doc.save("./makedoc_test.docx")
212
+
213
+
214
+ """