shancx 1.9.33.109__py3-none-any.whl → 1.9.33.218__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. shancx/{Dsalgor → Algo}/__init__.py +37 -1
  2. shancx/Calmetrics/__init__.py +78 -9
  3. shancx/Calmetrics/calmetrics.py +14 -0
  4. shancx/Calmetrics/rmseR2score.py +14 -3
  5. shancx/{Command.py → Cmd.py} +20 -15
  6. shancx/Config_.py +26 -0
  7. shancx/Df/__init__.py +11 -0
  8. shancx/Df/tool.py +0 -1
  9. shancx/NN/__init__.py +200 -11
  10. shancx/{path.py → Path1.py} +2 -3
  11. shancx/Plot/__init__.py +129 -403
  12. shancx/Plot/draw_day_CR_PNG.py +4 -21
  13. shancx/Plot/exam.py +116 -0
  14. shancx/Plot/plotGlobal.py +325 -0
  15. shancx/Plot/radarNmc.py +1 -48
  16. shancx/Plot/single_china_map.py +1 -1
  17. shancx/Point.py +46 -0
  18. shancx/QC.py +223 -0
  19. shancx/Read.py +17 -10
  20. shancx/Resize.py +79 -0
  21. shancx/SN/__init__.py +8 -1
  22. shancx/Time/timeCycle.py +97 -23
  23. shancx/Train/makelist.py +161 -155
  24. shancx/__init__.py +79 -232
  25. shancx/bak.py +78 -53
  26. shancx/geosProj.py +2 -2
  27. shancx/wait.py +35 -1
  28. {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/METADATA +12 -4
  29. shancx-1.9.33.218.dist-info/RECORD +91 -0
  30. {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
  31. shancx/Plot/Mip.py +0 -42
  32. shancx/Plot/border.py +0 -44
  33. shancx/Plot/draw_day_CR_PNGUS.py +0 -206
  34. shancx/Plot/draw_day_CR_SVG.py +0 -275
  35. shancx/Plot/draw_day_pre_PNGUS.py +0 -205
  36. shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
  37. shancx/makenetCDFN.py +0 -42
  38. shancx-1.9.33.109.dist-info/RECORD +0 -91
  39. /shancx/{3DJU → 3D}/__init__.py +0 -0
  40. /shancx/{Dsalgor → Algo}/Class.py +0 -0
  41. /shancx/{Dsalgor → Algo}/CudaPrefetcher1.py +0 -0
  42. /shancx/{Dsalgor → Algo}/Fake_image.py +0 -0
  43. /shancx/{Dsalgor → Algo}/Hsml.py +0 -0
  44. /shancx/{Dsalgor → Algo}/L2Loss.py +0 -0
  45. /shancx/{Dsalgor → Algo}/MetricTracker.py +0 -0
  46. /shancx/{Dsalgor → Algo}/Normalize.py +0 -0
  47. /shancx/{Dsalgor → Algo}/OptimizerWScheduler.py +0 -0
  48. /shancx/{Dsalgor → Algo}/Rmageresize.py +0 -0
  49. /shancx/{Dsalgor → Algo}/Savemodel.py +0 -0
  50. /shancx/{Dsalgor → Algo}/SmoothL1_losses.py +0 -0
  51. /shancx/{Dsalgor → Algo}/Tqdm.py +0 -0
  52. /shancx/{Dsalgor → Algo}/checknan.py +0 -0
  53. /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
  54. /shancx/{Dsalgor → Algo}/iouJU.py +0 -0
  55. /shancx/{Dsalgor → Algo}/mask.py +0 -0
  56. /shancx/{Dsalgor → Algo}/psnr.py +0 -0
  57. /shancx/{Dsalgor → Algo}/ssim.py +0 -0
  58. /shancx/{Dsalgor → Algo}/structural_similarity.py +0 -0
  59. /shancx/{Dsalgor → Algo}/tool.py +0 -0
  60. /shancx/Calmetrics/{matrixLib.py → calmetricsmatrixLib.py} +0 -0
  61. /shancx/{Diffmodel → Diffm}/Psamples.py +0 -0
  62. /shancx/{Diffmodel → Diffm}/__init__.py +0 -0
  63. /shancx/{Diffmodel → Diffm}/test.py +0 -0
  64. /shancx/{Board → tensBoard}/__init__.py +0 -0
  65. {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/QC.py ADDED
@@ -0,0 +1,223 @@
+ import cv2
+ import numpy as np
+ from numba import jit
+ def removeSmallPatches(binary_mask, min_pixels=50, min_area=40):
+     binary_mask = (binary_mask > 0).astype(np.uint8)
+     num_labels, labels, stats, centroids = cv2.connectedComponentsWithStats(
+         binary_mask, connectivity=8
+     )
+     output_mask = np.zeros_like(binary_mask)
+     for i in range(1, num_labels):
+         pixel_count = stats[i, cv2.CC_STAT_AREA]
+         if pixel_count < min_pixels:
+             continue
+         component_mask = (labels == i).astype(np.uint8)
+         contours, _ = cv2.findContours(component_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+         if contours:
+             contour = contours[0]
+             area = cv2.contourArea(contour)
+             if area < min_area:
+                 continue
+         output_mask[labels == i] = 255
+     return output_mask
+
+ """
+ mask = removeSmallPatches(b, min_pixels=50, min_area=40)
+ data = np.where(mask, data, 0)
+ filtered_data = np.full([256,256],0)
+ filtered_data[mask] = e[mask]
+ """
+
+ import cv2
+ import numpy as np
+ from concurrent.futures import ThreadPoolExecutor
+ def process_block_optimized(args):
+     block, coords, min_pixels, min_area = args
+     y, x, y_end, x_end = coords
+     num_labels, labels, stats, _ = cv2.connectedComponentsWithStats(block, 8)
+     result = np.zeros_like(block)
+     valid_labels = []
+     for i in range(1, num_labels):
+         if stats[i, cv2.CC_STAT_AREA] >= min_pixels:
+             valid_labels.append(i)
+     for i in valid_labels:
+         component_mask = (labels == i).astype(np.uint8)
+         contours, _ = cv2.findContours(component_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+         if contours and cv2.contourArea(contours[0]) >= min_area:
+             result[labels == i] = 255
+     return result, coords
+ def removeSmallPatches_fast(binary_mask, min_pixels=100, min_area=40, num_workers=3):
+     binary_mask = (binary_mask > 0).astype(np.uint8)
+     h, w = binary_mask.shape
+     output = np.zeros_like(binary_mask)
+     block_size = 2000
+     blocks = []
+     for y in range(0, h, block_size):
+         for x in range(0, w, block_size):
+             y_end, x_end = min(y+block_size, h), min(x+block_size, w)
+             block = binary_mask[y:y_end, x:x_end]
+             blocks.append((block, (y, x, y_end, x_end), min_pixels, min_area))
+     with ThreadPoolExecutor(num_workers) as executor:
+         for result, (y, x, y_end, x_end) in executor.map(process_block_optimized, blocks):
+             output[y:y_end, x:x_end] = result
+     return output
+
+ """
+ mask = removeSmallPatches(b, min_pixels=50, min_area=40)
+ data = np.where(mask, data, 0)
+ filtered_data = np.full([256,256],0)
+ filtered_data[mask] = e[mask]
+ """
+
+ import cv2
+ import numpy as np
+ from numba import jit, prange
+ from concurrent.futures import ThreadPoolExecutor
+ def removeSmallPatches_optimized(binary_mask, min_pixels=50, min_area=40):
+     binary_mask = (binary_mask > 0).astype(np.uint8)
+     num_labels, labels, stats, _ = cv2.connectedComponentsWithStats(binary_mask, connectivity=8)
+     output_mask = np.zeros_like(binary_mask)
+     valid_labels = [i for i in range(1, num_labels) if stats[i, cv2.CC_STAT_AREA] >= min_pixels]
+     for i in valid_labels:
+         contour_mask = (labels == i).astype(np.uint8)
+         contours, _ = cv2.findContours(contour_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+
+         if contours and cv2.contourArea(contours[0]) >= min_area:
+             output_mask[labels == i] = 255
+
+     return output_mask
+
+ @jit(nopython=True, parallel=True, nogil=True)
+ def numba_filter_components(labels, stats, min_pixels, min_area):
+     height, width = labels.shape
+     output = np.zeros((height, width), dtype=np.uint8)
+     for i in prange(1, stats.shape[0]):
+         if stats[i, 4] >= min_pixels:  # stats[i, 4] corresponds to cv2.CC_STAT_AREA
+             for y in range(height):
+                 for x in range(width):
+                     if labels[y, x] == i:
+                         output[y, x] = 255
+     return output
+ def removeSmallPatches_numba(binary_mask, min_pixels=50, min_area=40):
+     binary_mask = (binary_mask > 0).astype(np.uint8)
+     num_labels, labels, stats, _ = cv2.connectedComponentsWithStats(binary_mask, connectivity=8)
+     output_mask = numba_filter_components(labels, stats, min_pixels, min_area)
+     if min_area > 0:
+         num_labels2, labels2, stats2, _ = cv2.connectedComponentsWithStats(output_mask, connectivity=8)
+         final_output = np.zeros_like(output_mask)
+         for i in range(1, num_labels2):
+             contour_mask = (labels2 == i).astype(np.uint8)
+             contours, _ = cv2.findContours(contour_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+             if contours and cv2.contourArea(contours[0]) >= min_area:
+                 final_output[labels2 == i] = 255
+         return final_output
+     return output_mask
+ def process_block_optimized_v2(args):
+     block, coords, min_pixels, min_area = args
+     num_labels, labels, stats, _ = cv2.connectedComponentsWithStats(block, connectivity=8)
+     result = np.zeros_like(block)
+     valid_labels = []
+     for i in range(1, num_labels):
+         if stats[i, cv2.CC_STAT_AREA] >= min_pixels:
+             valid_labels.append(i)
+     for i in valid_labels:
+         component_indices = (labels == i)
+         if component_indices.any():
+             component_mask = component_indices.astype(np.uint8)
+             contours, _ = cv2.findContours(component_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+             if contours and cv2.contourArea(contours[0]) >= min_area:
+                 result[component_indices] = 255
+     return result, coords
+ def removeSmallPatches_fast_v2(binary_mask, min_pixels=100, min_area=40, num_workers=4):
+     binary_mask = (binary_mask > 0).astype(np.uint8)
+     h, w = binary_mask.shape
+     optimal_block_size = max(500, min(2000, (h * w) // (num_workers * 10000)))
+     output = np.zeros_like(binary_mask)
+     blocks = []
+     for y in range(0, h, optimal_block_size):
+         for x in range(0, w, optimal_block_size):
+             y_end, x_end = min(y + optimal_block_size, h), min(x + optimal_block_size, w)
+             block = binary_mask[y:y_end, x:x_end].copy()
+             blocks.append((block, (y, x, y_end, x_end), min_pixels, min_area))
+     actual_workers = min(num_workers, len(blocks))
+     with ThreadPoolExecutor(max_workers=actual_workers) as executor:
+         for result, (y, x, y_end, x_end) in executor.map(process_block_optimized_v2, blocks):
+             output[y:y_end, x:x_end] = result
+     return output
+ def get_optimal_function(binary_mask, min_pixels=50, min_area=40):
+     h, w = binary_mask.shape
+     total_pixels = h * w
+     if total_pixels < 1000000:  # fewer than one million pixels
+         return removeSmallPatches_optimized
+     if min_area <= 0:
+         return removeSmallPatches_numba
+     return removeSmallPatches_fast_v2
+ def auto_remove_small_patches(binary_mask, min_pixels=50, min_area=40):
+     optimal_func = get_optimal_function(binary_mask, min_pixels, min_area)
+     return optimal_func(binary_mask, min_pixels, min_area)
+ """
+ try:
+     result = auto_remove_small_patches(binary_mask, min_pixels=50, min_area=40)
+ except Exception as e:
+     from original_module import removeSmallPatches
+     result = removeSmallPatches(binary_mask, min_pixels=50, min_area=40)
+ removeSmallPatches_numba: best performance when min_area <= 0
+ removeSmallPatches_fast_v2: used when the area check is needed on large inputs
+ """
+ @jit(nopython=True, parallel=True, nogil=True)
+ def QC_simple_numba(mat, dbzTH = 10.0, areaTH=20):
+     # Create a copy of the matrix
+     mat1 = np.copy(mat)
+     rows, cols = mat1.shape
+
+     # Create binary mask based on threshold
+     mask = np.zeros((rows, cols), dtype=np.uint8)
+     for i in range(rows):
+         for j in range(cols):
+             if mat1[i, j] > dbzTH:
+                 mask[i, j] = 1
+     # Simple 8-connectivity region labeling (flood fill algorithm)
+     labels = np.zeros((rows, cols), dtype=np.int32)
+     current_label = 1
+     region_areas = []
+     for i in range(rows):
+         for j in range(cols):
+             # If current pixel is foreground and not yet labeled
+             if mask[i, j] == 1 and labels[i, j] == 0:
+                 # Start flood fill
+                 stack = [(i, j)]
+                 labels[i, j] = current_label
+                 area = 0
+
+                 while stack:
+                     x, y = stack.pop()
+                     area += 1
+
+                     # Check 8 surrounding pixels
+                     for dx in [-1, 0, 1]:
+                         for dy in [-1, 0, 1]:
+                             nx, ny = x + dx, y + dy
+                             # Check bounds and conditions
+                             if (0 <= nx < rows and 0 <= ny < cols and
+                                     mask[nx, ny] == 1 and labels[nx, ny] == 0):
+                                 labels[nx, ny] = current_label
+                                 stack.append((nx, ny))
+
+                 region_areas.append(area)
+                 current_label += 1
+
+     # Apply area threshold filtering
+     for i in range(rows):
+         for j in range(cols):
+             if labels[i, j] > 0 and region_areas[labels[i, j] - 1] < areaTH:
+                 mat1[i, j] = 0
+     return mat1
+ def QC_ref_numba(mat, dbzTH = 10, areaTH=20):
+     for i in range(len(mat)):
+         mat[i] = QC_simple_numba(mat[i], dbzTH, areaTH)
+     return mat
+ """
+ CR = subset["CR"].data[0].copy()
+ CR[CR < 6] = 0
+ CR = QC_ref_numba(CR[None], areaTH=15)[0]
+ """
shancx/Read.py CHANGED
@@ -12,7 +12,6 @@ def readGrib(file_path, target_param=None):
          'shortName': getattr(grb, 'shortName', 'N/A'),
          'level': getattr(grb, 'level', -999),
          'typeOfLevel': getattr(grb, 'typeOfLevel', 'N/A'),
-         'forecastTime': getattr(grb, 'forecastTime', -999),
          'validDate': getattr(grb, 'validDate', 'N/A'),
          'units': getattr(grb, 'units', 'N/A'),
          'shape': grb.values.shape
@@ -54,12 +53,20 @@ if __name__ == "__main__":
  """
  latArr = latMat[:,0]
  lonArr = lonMat[0]
- """
-
- import yaml
- def _load_config(config_path: str) :
-     """Load the YAML configuration file"""
-     print(f"load config file Get configuration parameters: {config_path}")
-     with open(config_path, 'r') as f:
-         config = yaml.safe_load(f)
-     return config
+ """
+
+ import numpy as np
+ from pathlib import Path
+ def npsavez(output, data):
+     output = output.replace('.npy', '.npz')
+     output_path = Path(output).with_suffix('.npz')
+     output_path.parent.mkdir(parents=True, exist_ok=True)
+     np.savez_compressed(output_path, data=data)
+     print(f"{output_path} done")
+ def nploadz(output_path):
+     return np.load(Path(output_path).with_suffix('.npz'))['data']
+
+ """
+ npsavez(output, data)
+ nploadz(output_path)
+ """
shancx/Resize.py ADDED
@@ -0,0 +1,79 @@
+ import numpy as np
+ import random
+ # import albumentations as A
+ # import cv2
+
+ # def resize_array(array, size):
+ #     # Define the transform pipeline
+ #     transform = A.Compose([
+ #         A.SmallestMaxSize(max_size=size, interpolation=cv2.INTER_AREA)
+ #     ])
+ #     transformed_array = transform(image=array)["image"]
+ #     return transformed_array
+
+ def crop_array(array, crop_side_len):
+     cropper = A.RandomCrop(height=crop_side_len, width=crop_side_len)
+     cropped_array = cropper(image=array)["image"]
+     return cropped_array
+
+ def crop_cna_pair(min_side_len, low_res_data, high_res_data):
+     crop_side_len = min_side_len
+     top = random.randint(0, low_res_data.shape[-2] - crop_side_len)
+     left = random.randint(0, low_res_data.shape[-1] - crop_side_len)
+     super_factor = high_res_data.shape[-2] / low_res_data.shape[-2]  # Assuming the ratio in height dimension
+     cropped_low_res = low_res_data[top:top + crop_side_len, left:left + crop_side_len]
+     cropped_high_res = high_res_data[int(top * super_factor):int((top + crop_side_len) * super_factor),
+                                      int(left * super_factor):int((left + crop_side_len) * super_factor)]
+     return cropped_low_res, cropped_high_res
+ def random_crop_pair(min_side_len, low_res_data, high_res_data):
+     crop_side_len = min_side_len
+     top = random.randint(0, low_res_data.shape[-2] - crop_side_len)
+     left = random.randint(0, low_res_data.shape[-1] - crop_side_len)
+     super_factor = high_res_data.shape[-2] / low_res_data.shape[-2]  # Assuming the ratio in height dimension
+     cropped_low_res = low_res_data[top:top + crop_side_len, left:left + crop_side_len]
+     cropped_high_res = high_res_data[int(top * super_factor):int((top + crop_side_len) * super_factor),
+                                      int(left * super_factor):int((left + crop_side_len) * super_factor)]
+     return cropped_low_res, cropped_high_res
+
+ import random
+
+ def random_crop_triplet(min_side_len, low_res_data, high_res_data1, high_res_data2):
+     top = random.randint(0, low_res_data.shape[-2] - min_side_len)
+     left = random.randint(0, low_res_data.shape[-1] - min_side_len)
+     cropped_low_res = low_res_data[..., top:top + min_side_len, left:left + min_side_len]
+     factor1_h = high_res_data1.shape[-2] / low_res_data.shape[-2]
+     factor1_w = high_res_data1.shape[-1] / low_res_data.shape[-1]
+     cropped_high_res1 = high_res_data1[...,
+                                        int(top * factor1_h):int((top + min_side_len) * factor1_h),
+                                        int(left * factor1_w):int((left + min_side_len) * factor1_w)]
+     factor2_h = high_res_data2.shape[-2] / low_res_data.shape[-2]
+     factor2_w = high_res_data2.shape[-1] / low_res_data.shape[-1]
+     cropped_high_res2 = high_res_data2[...,
+                                        int(top * factor2_h):int((top + min_side_len) * factor2_h),
+                                        int(left * factor2_w):int((left + min_side_len) * factor2_w)]
+     return cropped_low_res, cropped_high_res1, cropped_high_res2
+
+ def random_crop_single(cropsize, input_data):
+     # Determine the crop side length
+     crop_side_len = cropsize
+
+     # Randomly pick the top-left corner of the crop
+     top = random.randint(0, input_data.shape[0] - crop_side_len)
+     left = random.randint(0, input_data.shape[1] - crop_side_len)
+
+     # Crop the input data
+     cropped_data = input_data[top:top + crop_side_len, left:left + crop_side_len]
+
+     return cropped_data
+
+
+ if __name__ == "__main__":
+     low_res_data = np.load("./SAT_202507010900_49.42_117.82_100.npy")
+     high_res_data = np.load("./CR_202507010900_49.42_117.82_100.npy")
+     high_res_data1 = np.load("./mask_202507010900_49.42_117.82_100.npy")
+     d1,d2,d3 = random_crop_triplet(128, low_res_data, high_res_data[0], high_res_data1[0])
+     transformed_low_res_data = resize_array(low_res_data, 240)
+     transformed_high_res_data = resize_array(high_res_data, 960)
+     np.save("transformed_low_res_data.npy", transformed_low_res_data)
+     np.save("transformed_high_res_data.npy", transformed_high_res_data)
+
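A self-contained sketch of the paired-crop helpers above, using synthetic low- and high-resolution arrays; the 240/960 shapes (a 4x resolution ratio) and the crop sizes are assumptions for illustration, and the import assumes Resize.py is importable as shancx.Resize.

import numpy as np
from shancx.Resize import random_crop_pair, random_crop_single  # assumed import path

low = np.random.rand(240, 240).astype(np.float32)    # coarse grid
high = np.random.rand(960, 960).astype(np.float32)   # 4x finer grid over the same extent

lr_patch, hr_patch = random_crop_pair(64, low, high)
print(lr_patch.shape, hr_patch.shape)  # (64, 64) (256, 256): the crop window scales with the resolution ratio

patch = random_crop_single(128, low)   # independent crop from a single array
print(patch.shape)                     # (128, 128)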
shancx/SN/__init__.py CHANGED
@@ -67,4 +67,11 @@ def sendMESplus(message,base = None):
          res = requests.post(url,json=data,headers=headers)
      except Exception as e:
          print(e)
-
+ """
+ import torch.nn as nn
+ # gpu_ids = [3, 4, 5]
+ device = torch.device(f"cuda:{gpu_ids[0]}" if torch.cuda.is_available() else "cpu")
+ model = get_model(model_name, in_channels=in_channels).to(device)
+ if len(gpu_ids) > 1:
+     model = torch.nn.DataParallel(model, device_ids=gpu_ids)
+ """
shancx/Time/timeCycle.py CHANGED
@@ -60,8 +60,102 @@ for i in range(7):
  """

  """
- Check the folder
+ import glob
+ import os
+ import numpy as np
+ from shancx import crDir
+ from shancx.NN import _loggers
+ logger = _loggers()
+ import netCDF4 as nc
+ import numpy as np
+ from shancx.Plot import plotRadar
+ from shancx import crDir
+ import traceback
+ from dateutil.relativedelta import relativedelta
+ paths = glob.glob("/root/data/ec_filter_npy_data/*")
+ basePath = f"/mnt/wtx_weather_forecast/scx/sever7/test/RADA/MQPF1109_1"
+ output_dirH9Npy = f"/mnt/wtx_weather_forecast/SAT/H9/sat_npy_CHN"
+ def GetMulData(conf):
+     sUTC = conf[0]
+     sUTCstr = sUTC.strftime("%Y%m%d%H%M")
+     output_path =f"{output_dirH9Npy}/{sUTCstr[:4]}/{sUTCstr[:8]}/MSP3_PMSC_H9_GEO_FD_{sUTCstr[:12]}_00000-00000.npy"
+     inputPathstr = f"{basePath}/{sUTCstr[:4]}/{sUTCstr[:8]}/*{sUTCstr[:12]}*.nc"
+     inputPathL = glob.glob(inputPathstr)
+     if len(inputPathL) ==0 :
+         print(f"outpath {inputPathstr} is missing ")
+         return None
+     inputPath = inputPathL[0]
+     with nc.Dataset(inputPath) as dataNC:
+         CR = dataNC["CR"][:]
+         lat = dataNC["lat"][:]
+         lon = dataNC["lon"][:]
+     crDir(output_path)
+     np.save(output_path,CR.data)
+     logger.info(f"{output_path} done ")
+
+ from shancx import Mul_sub
+ import argparse
+ import datetime
+ import pandas as pd
+ def options():
+     parser = argparse.ArgumentParser(description='examdatabasedata')
+     parser.add_argument('--times', type=str, default='202507010000,202510010000')
+     config= parser.parse_args()
+     print(config)
+     config.times = config.times.split(",")
+     if len(config.times) == 1:
+         config.times = [config.times[0], config.times[0]]
+     config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                     datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+     return config
+ if __name__ == '__main__':
+     cfg = options()
+     sUTC = cfg.times[0]
+     eUTC = cfg.times[-1]
+     timeList = pd.date_range(sUTC, eUTC, freq='3h')  # use '6T' for 6-minute steps
+     print(timeList)
+     Mul_sub(GetMulData,[timeList],6)
+ """
+
+ """
+ import glob
+ import os
+ import numpy as np
+ import traceback
+ from dateutil.relativedelta import relativedelta
+ import os
+ from shancx.NN import Mul_TH
+ import argparse
+ import datetime
+ import pandas as pd
+ def GetMulData(conf):
+     sUTC = conf[0]
+     sUTCstr = sUTC.strftime("%Y%m%d%H%M")
+     commandstr = f""
+     os.system(commandstr)
+ def options():
+     parser = argparse.ArgumentParser(description='examdatabasedata')
+     parser.add_argument('--times', type=str, default='202508010000,202508010500')
+     config= parser.parse_args()
+     print(config)
+     config.times = config.times.split(",")
+     if len(config.times) == 1:
+         config.times = [config.times[0], config.times[0]]
+     config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+                     datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+     return config
+ if __name__ == '__main__':
+     cfg = options()
+     sUTC = cfg.times[0]
+     eUTC = cfg.times[-1]
+     timeList = pd.date_range(sUTC, eUTC, freq='10T')  # 10-minute steps; '6T' would give 6 minutes
+     print(timeList)
+     Mul_TH(GetMulData,[timeList],3)
+ cd /mnt/wtx_weather_forecast/scx/sever7/SATdata/mkH9 ;timeout 1200s /home/scx/miniconda3/envs/H9/bin/python mkH9.py --time {sUTCstr[:12]}
+
+ """

+ """
  from shancx import crDir
  import os
  from shancx import loggers as logger
@@ -201,28 +295,8 @@ if __name__ == '__main__':
      minmax = Mul_sub(GetMulData,[summer_timeList],10)
      global_min = min(d['min'] for d in minmax)
      global_max = max(d['max'] for d in minmax)  #117
-     print()
-
+     print()

  """

- """
- BASE_TARGET_PATH = "/mnt/wtx_weather_forecast/SAT/H9/sat_nc"
- def get_mqpf_paths(UCTstr,sat_cd = "H9"):
-     year = UCTstr[:4]
-     date = UCTstr[:8]
-     mqpfPath_pattern = os.path.join(BASE_TARGET_PATH,year, date,f"MSP3_PMSC_{sat_cd}_GEO_FD_{UCTstr}_00000-00000.nc")
-     return mqpfPath_pattern
- def map_data(conf):
-     UCT = conf[0]
-     # UCT = CST + relativedelta(hours=-8)
-     UCTstr = UCT.strftime("%Y%m%d%H%M")
-     mqpfPath_pattern = get_mqpf_paths(UCTstr)
-     mqpfPath_list = glob.glob(mqpfPath_pattern)
-     if len(mqpfPath_list) > 0:
-         with nc.Dataset(mqpfPath_list[0]) as dataNC:
-             mqpf = dataNC.variables["B10"][:]
-         return mqpf
-     else:
-         return np.full((1, 6201, 5201), np.nan)
- """
+
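Both docstring examples added to timeCycle.py follow the same driver pattern: parse a --times window, expand it with pandas date_range, and fan one timestamp out per worker. A condensed stand-alone sketch of that pattern, using concurrent.futures as a stand-in for the package's Mul_sub / Mul_TH helpers (whose signatures are only known from the calls shown above):

import argparse
import datetime
from concurrent.futures import ThreadPoolExecutor

import pandas as pd

def process_timestamp(ts):
    # Placeholder worker: real code would read or convert data for this timestamp.
    print(ts.strftime("%Y%m%d%H%M"), "processed")

def parse_times(arg):
    parts = arg.split(",")
    if len(parts) == 1:
        parts = [parts[0], parts[0]]
    return [datetime.datetime.strptime(p, "%Y%m%d%H%M") for p in parts[:2]]

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="time-range driver sketch")
    parser.add_argument("--times", type=str, default="202507010000,202507010300")
    start, end = parse_times(parser.parse_args().times)
    time_list = pd.date_range(start, end, freq="10min")  # e.g. '3h' for 3-hourly
    with ThreadPoolExecutor(max_workers=3) as pool:
        list(pool.map(process_timestamp, time_list))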