shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/3D/__init__.py +25 -0
- shancx/Algo/Class.py +11 -0
- shancx/Algo/CudaPrefetcher1.py +112 -0
- shancx/Algo/Fake_image.py +24 -0
- shancx/Algo/Hsml.py +391 -0
- shancx/Algo/L2Loss.py +10 -0
- shancx/Algo/MetricTracker.py +132 -0
- shancx/Algo/Normalize.py +66 -0
- shancx/Algo/OptimizerWScheduler.py +38 -0
- shancx/Algo/Rmageresize.py +79 -0
- shancx/Algo/Savemodel.py +33 -0
- shancx/Algo/SmoothL1_losses.py +27 -0
- shancx/Algo/Tqdm.py +62 -0
- shancx/Algo/__init__.py +121 -0
- shancx/Algo/checknan.py +28 -0
- shancx/Algo/iouJU.py +83 -0
- shancx/Algo/mask.py +25 -0
- shancx/Algo/psnr.py +9 -0
- shancx/Algo/ssim.py +70 -0
- shancx/Algo/structural_similarity.py +308 -0
- shancx/Algo/tool.py +704 -0
- shancx/Calmetrics/__init__.py +97 -0
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
- shancx/Calmetrics/rmseR2score.py +35 -0
- shancx/Clip/__init__.py +50 -0
- shancx/Cmd.py +126 -0
- shancx/Config_.py +26 -0
- shancx/Df/DataFrame.py +11 -2
- shancx/Df/__init__.py +17 -0
- shancx/Df/tool.py +0 -0
- shancx/Diffm/Psamples.py +18 -0
- shancx/Diffm/__init__.py +0 -0
- shancx/Diffm/test.py +207 -0
- shancx/Doc/__init__.py +214 -0
- shancx/E/__init__.py +178 -152
- shancx/Fillmiss/__init__.py +0 -0
- shancx/Fillmiss/imgidwJU.py +46 -0
- shancx/Fillmiss/imgidwLatLonJU.py +82 -0
- shancx/Gpu/__init__.py +55 -0
- shancx/H9/__init__.py +126 -0
- shancx/H9/ahi_read_hsd.py +877 -0
- shancx/H9/ahisearchtable.py +298 -0
- shancx/H9/geometry.py +2439 -0
- shancx/Hug/__init__.py +81 -0
- shancx/Inst.py +22 -0
- shancx/Lib.py +31 -0
- shancx/Mos/__init__.py +37 -0
- shancx/NN/__init__.py +235 -106
- shancx/Path1.py +161 -0
- shancx/Plot/GlobMap.py +276 -116
- shancx/Plot/__init__.py +491 -1
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
- shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/RdPzl/__init__.py +32 -0
- shancx/Read.py +72 -0
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +62 -123
- shancx/Time/GetTime.py +9 -3
- shancx/Time/__init__.py +66 -1
- shancx/Time/timeCycle.py +302 -0
- shancx/Time/tool.py +0 -0
- shancx/Train/__init__.py +74 -0
- shancx/Train/makelist.py +187 -0
- shancx/Train/multiGpu.py +27 -0
- shancx/Train/prepare.py +161 -0
- shancx/Train/renet50.py +157 -0
- shancx/ZR.py +12 -0
- shancx/__init__.py +333 -262
- shancx/args.py +27 -0
- shancx/bak.py +768 -0
- shancx/df2database.py +62 -2
- shancx/geosProj.py +80 -0
- shancx/info.py +38 -0
- shancx/netdfJU.py +231 -0
- shancx/sendM.py +59 -0
- shancx/tensBoard/__init__.py +28 -0
- shancx/wait.py +246 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- my_timer_decorator/__init__.py +0 -10
- shancx/Dsalgor/__init__.py +0 -19
- shancx/E/DFGRRIB.py +0 -30
- shancx/EN/DFGRRIB.py +0 -30
- shancx/EN/__init__.py +0 -148
- shancx/FileRead.py +0 -44
- shancx/Gray2RGB.py +0 -86
- shancx/M/__init__.py +0 -137
- shancx/MN/__init__.py +0 -133
- shancx/N/__init__.py +0 -131
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/glob_nation_map.py +0 -116
- shancx/Plot/radar_nmc.py +0 -61
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/Plot/radar_nmc_china_map_f.py +0 -121
- shancx/Plot/radar_nmc_us_map_f.py +0 -128
- shancx/Plot/subplots_compare_devlop.py +0 -36
- shancx/Plot/subplots_single_china_map.py +0 -45
- shancx/S/__init__.py +0 -138
- shancx/W/__init__.py +0 -132
- shancx/WN/__init__.py +0 -132
- shancx/code.py +0 -331
- shancx/draw_day_CR_PNG.py +0 -200
- shancx/draw_day_CR_PNGUS.py +0 -206
- shancx/draw_day_CR_SVG.py +0 -275
- shancx/draw_day_pre_PNGUS.py +0 -205
- shancx/makenetCDFN.py +0 -42
- shancx/mkIMGSCX.py +0 -92
- shancx/netCDF.py +0 -130
- shancx/radar_nmc_china_map_compare1.py +0 -50
- shancx/radar_nmc_china_map_f.py +0 -125
- shancx/radar_nmc_us_map_f.py +0 -67
- shancx/subplots_compare_devlop.py +0 -36
- shancx/tool.py +0 -18
- shancx/user/H8mess.py +0 -317
- shancx/user/__init__.py +0 -137
- shancx/user/cinradHJN.py +0 -496
- shancx/user/examMeso.py +0 -293
- shancx/user/hjnDAAS.py +0 -26
- shancx/user/hjnFTP.py +0 -81
- shancx/user/hjnGIS.py +0 -320
- shancx/user/hjnGPU.py +0 -21
- shancx/user/hjnIDW.py +0 -68
- shancx/user/hjnKDTree.py +0 -75
- shancx/user/hjnLAPSTransform.py +0 -47
- shancx/user/hjnMiscellaneous.py +0 -182
- shancx/user/hjnProj.py +0 -162
- shancx/user/inotify.py +0 -41
- shancx/user/matplotlibMess.py +0 -87
- shancx/user/mkNCHJN.py +0 -623
- shancx/user/newTypeRadar.py +0 -492
- shancx/user/test.py +0 -6
- shancx/user/tlogP.py +0 -129
- shancx/util_log.py +0 -33
- shancx/wtx/H8mess.py +0 -315
- shancx/wtx/__init__.py +0 -151
- shancx/wtx/cinradHJN.py +0 -496
- shancx/wtx/colormap.py +0 -64
- shancx/wtx/examMeso.py +0 -298
- shancx/wtx/hjnDAAS.py +0 -26
- shancx/wtx/hjnFTP.py +0 -81
- shancx/wtx/hjnGIS.py +0 -330
- shancx/wtx/hjnGPU.py +0 -21
- shancx/wtx/hjnIDW.py +0 -68
- shancx/wtx/hjnKDTree.py +0 -75
- shancx/wtx/hjnLAPSTransform.py +0 -47
- shancx/wtx/hjnLog.py +0 -78
- shancx/wtx/hjnMiscellaneous.py +0 -201
- shancx/wtx/hjnProj.py +0 -161
- shancx/wtx/inotify.py +0 -41
- shancx/wtx/matplotlibMess.py +0 -87
- shancx/wtx/mkNCHJN.py +0 -613
- shancx/wtx/newTypeRadar.py +0 -492
- shancx/wtx/test.py +0 -6
- shancx/wtx/tlogP.py +0 -129
- shancx-1.8.92.dist-info/RECORD +0 -99
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/RdPzl/__init__.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
def start_points(size, split_size, overlap=0.0):
    """Return the start offsets of overlapping tiles that cover an axis.

    size: total length of the axis being tiled.
    split_size: tile length.
    overlap: fractional overlap in [0, 1); the stride between tile starts is
        split_size * (1 - overlap), clamped to at least 1 so an overlap close
        to 1 cannot produce a zero stride (and a range/modulo crash).

    When the regular grid does not reach the end of the axis, a final offset
    of size - split_size is appended so the last tile is flush with the edge.
    If the axis is shorter than one tile, [0] is returned — the original
    indexed points[-1] on an empty list and raised IndexError in that case.
    """
    stride = max(1, int(split_size * (1 - overlap)))  # step between tile starts
    points = [i * stride for i in range((size - split_size) // stride + 1)]
    if not points:
        # size < split_size: a single (short) tile anchored at 0
        return [0]
    if size > points[-1] + split_size:
        points.append(size - split_size)
    return points
|
|
9
|
+
|
|
10
|
+
"""
|
|
11
|
+
b = np.zeros(sat_data[0].shape)
|
|
12
|
+
x_point = start_points(sat_data[0].shape[0], 256, 0.14)
|
|
13
|
+
y_point = start_points(sat_data[0].shape[1], 256, 0.14)
|
|
14
|
+
overlap1 = 17
|
|
15
|
+
for x in x_point:
|
|
16
|
+
for y in y_point:
|
|
17
|
+
cliped = sat_data[:, x:x + 256, y:y + 256]
|
|
18
|
+
img1 = cliped[np.newaxis].float()
|
|
19
|
+
img1 = img1.cpu().numpy()
|
|
20
|
+
img1 = np.where(np.isnan(img1), 0, img1)
|
|
21
|
+
img2 = img1.reshape(1, 6, 256, 256).astype(np.float32) # Ensure correct shape and type
|
|
22
|
+
radarpre = run_onnx_inference(ort_session, img2)
|
|
23
|
+
radarpre = (radarpre * 72).squeeze()
|
|
24
|
+
radarpre[radarpre < 13] = 0
|
|
25
|
+
radarpre = QC_ref(radarpre[None], areaTH=30)[0]
|
|
26
|
+
|
|
27
|
+
b[x + overlap1:x + 256, y + overlap1:y + 256] = radarpre[overlap1:, overlap1:]
|
|
28
|
+
|
|
29
|
+
return b
|
|
30
|
+
|
|
31
|
+
"""
|
|
32
|
+
|
shancx/Read.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import pygrib
|
|
2
|
+
import numpy as np
|
|
3
|
+
import pandas as pd
|
|
4
|
+
def readGrib(file_path, target_param=None):
    """Read one field from a GRIB file with pygrib.

    file_path: path to a GRIB/GRIB2 file.
    target_param: optional shortName (falling back to parameterName) of the
        field to select; when omitted, the first message is used (pygrib
        message indexing starts at 1).

    Returns a dict with 'data' (2-D values), 'lats'/'lons' coordinate grids
    and a 'metadata' sub-dict, or None on any failure. Errors are printed,
    not raised — even the inner ValueError is swallowed by the outer handler.

    Note: the original also built a `field_info` list by iterating every
    message and decoding its values; it was never used, so that expensive
    dead loop has been removed.
    """
    try:
        with pygrib.open(file_path) as grbs:
            if target_param:
                # Prefer shortName; fall back to parameterName.
                try:
                    grb = grbs.select(shortName=target_param)[0]
                except Exception:
                    try:
                        grb = grbs.select(parameterName=target_param)[0]
                    except Exception:
                        raise ValueError(f"未找到参数: {target_param}")
            else:
                grb = grbs[1]  # first message; pygrib is 1-indexed
            data = grb.values
            lats, lons = grb.latlons()
            return {
                'data': data,
                'lats': lats,
                'lons': lons,
                'metadata': {
                    'parameterName': grb.parameterName,
                    'level': grb.level,
                    'validDate': grb.validDate,
                    'units': grb.units,
                },
            }
    except Exception as e:
        print(f"GRIB读取错误: {str(e)}")
        return None
|
|
45
|
+
if __name__ == "__main__":
    # Ad-hoc smoke test against a machine-local ECMWF GRIB2 file; the path is
    # environment-specific and the block is I/O only (prints, no return value).
    path = "/mnt/wtx_weather_forecast/CMA_DATA/NAFP/EC/C1D/2024/2024112720/ECMFC1D_PRTY_1_2024112712_GLB_1_2.grib2"
    result = readGrib(path)
    if result:  # readGrib returns None on failure, so guard before indexing
        print("\n数据矩阵形状:", result['data'].shape)
        print("经度范围:", np.min(result['lons']), "~", np.max(result['lons']))
        print("纬度范围:", np.min(result['lats']), "~", np.max(result['lats']))
        print("参数单位:", result['metadata']['units'])
# Usage reminder kept from the original: extract 1-D coordinate vectors from
# the 2-D lat/lon grids returned by readGrib.
"""
latArr = latMat[:,0]
lonArr = lonMat[0]
"""
|
|
57
|
+
|
|
58
|
+
import numpy as np
|
|
59
|
+
from pathlib import Path
|
|
60
|
+
def npsavez(output, data):
    """Save `data` compressed to `output`, normalized to a .npz suffix.

    The original first did `output.replace('.npy', '.npz')` and then
    `Path(...).with_suffix('.npz')`; the str.replace was redundant because
    with_suffix already normalizes the extension, so it has been dropped.
    Parent directories are created as needed.
    """
    output_path = Path(output).with_suffix('.npz')
    output_path.parent.mkdir(parents=True, exist_ok=True)
    np.savez_compressed(output_path, data=data)
    print(f"{output_path} done")

def nploadz(output_path):
    """Load the 'data' array previously written by npsavez.

    The suffix is normalized to .npz, so callers may pass the original .npy
    name or a bare path.
    """
    return np.load(Path(output_path).with_suffix('.npz'))['data']
|
|
68
|
+
|
|
69
|
+
"""
|
|
70
|
+
savez(output, data)
|
|
71
|
+
nploadz(output_path)
|
|
72
|
+
"""
|
shancx/Resize.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
import random
|
|
3
|
+
# import albumentations as A
|
|
4
|
+
# import cv2
|
|
5
|
+
|
|
6
|
+
# def resize_array(array, size):
|
|
7
|
+
# # 定义变换管道
|
|
8
|
+
# transform = A.Compose([
|
|
9
|
+
# A.SmallestMaxSize(max_size=size, interpolation=cv2.INTER_AREA)
|
|
10
|
+
# ])
|
|
11
|
+
# transformed_array = transform(image=array)["image"]
|
|
12
|
+
# return transformed_array
|
|
13
|
+
|
|
14
|
+
def crop_array(array, crop_side_len):
    """Randomly crop a crop_side_len x crop_side_len square from `array`.

    The original delegated to albumentations' A.RandomCrop, but the
    `import albumentations as A` line is commented out at the top of this
    module, so every call raised NameError. This keeps the same contract —
    a uniformly random square crop over the first two axes (extra trailing
    axes, e.g. channels, are preserved) — using only `random` and slicing.
    """
    top = random.randint(0, array.shape[0] - crop_side_len)
    left = random.randint(0, array.shape[1] - crop_side_len)
    return array[top:top + crop_side_len, left:left + crop_side_len]
|
|
18
|
+
|
|
19
|
+
def crop_cna_pair(min_side_len, low_res_data, high_res_data):
    """Randomly crop a matching (low-res, high-res) patch pair.

    A min_side_len square is cropped from the low-res array, then the
    corresponding region of the high-res array is cropped using the scale
    factor between the two arrays' height dimensions.
    """
    side = min_side_len
    row0 = random.randint(0, low_res_data.shape[-2] - side)
    col0 = random.randint(0, low_res_data.shape[-1] - side)
    # scale factor inferred from the height axis only — assumes isotropic scaling
    scale = high_res_data.shape[-2] / low_res_data.shape[-2]
    low_patch = low_res_data[row0:row0 + side, col0:col0 + side]
    hr_rows = slice(int(row0 * scale), int((row0 + side) * scale))
    hr_cols = slice(int(col0 * scale), int((col0 + side) * scale))
    return low_patch, high_res_data[hr_rows, hr_cols]
|
|
28
|
+
def random_crop_pair(min_side_len, low_res_data, high_res_data):
    """Crop one random aligned square pair from a low-res/high-res array duo.

    The low-res crop is min_side_len per side; the high-res crop covers the
    same spatial region, scaled by the ratio of the arrays' heights.
    """
    lo_h, lo_w = low_res_data.shape[-2], low_res_data.shape[-1]
    top = random.randint(0, lo_h - min_side_len)
    left = random.randint(0, lo_w - min_side_len)
    ratio = high_res_data.shape[-2] / lo_h  # height-axis scale between the pair
    lo_crop = low_res_data[top:top + min_side_len, left:left + min_side_len]
    r0, r1 = int(top * ratio), int((top + min_side_len) * ratio)
    c0, c1 = int(left * ratio), int((left + min_side_len) * ratio)
    return lo_crop, high_res_data[r0:r1, c0:c1]
|
|
37
|
+
|
|
38
|
+
import random
|
|
39
|
+
|
|
40
|
+
def random_crop_triplet(min_side_len, low_res_data, high_res_data1, high_res_data2):
    """Crop one random square from the low-res array plus the corresponding
    (scaled) regions from both high-res companion arrays.

    Scale factors are computed per companion and per axis, so the three
    inputs may differ in resolution and aspect ratio. Leading axes (e.g.
    channels) are preserved via ellipsis indexing.
    """
    top = random.randint(0, low_res_data.shape[-2] - min_side_len)
    left = random.randint(0, low_res_data.shape[-1] - min_side_len)

    def scaled_crop(arr):
        # per-axis factor relative to the low-res grid
        fh = arr.shape[-2] / low_res_data.shape[-2]
        fw = arr.shape[-1] / low_res_data.shape[-1]
        return arr[...,
                   int(top * fh):int((top + min_side_len) * fh),
                   int(left * fw):int((left + min_side_len) * fw)]

    low_crop = low_res_data[..., top:top + min_side_len, left:left + min_side_len]
    return low_crop, scaled_crop(high_res_data1), scaled_crop(high_res_data2)
|
|
55
|
+
|
|
56
|
+
def random_crop_single(cropsize, input_data):
    """Return a random cropsize x cropsize window of `input_data`.

    The window's top-left corner is drawn uniformly over all positions that
    keep the crop fully inside the first two axes of the array.
    """
    max_top = input_data.shape[0] - cropsize
    max_left = input_data.shape[1] - cropsize
    top = random.randint(0, max_top)
    left = random.randint(0, max_left)
    return input_data[top:top + cropsize, left:left + cropsize]
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
if __name__ == "__main__":
    # Ad-hoc smoke test using local sample .npy fixtures; the file names are
    # machine-specific and are not shipped with the package.
    low_res_data = np.load("./SAT_202507010900_49.42_117.82_100.npy")
    high_res_data = np.load("./CR_202507010900_49.42_117.82_100.npy")
    high_res_data1 = np.load("./mask_202507010900_49.42_117.82_100.npy")
    # Crop a 128-px aligned triplet (low-res input + two high-res companions).
    d1,d2,d3 = random_crop_triplet(128, low_res_data, high_res_data[0], high_res_data1[0])
    # NOTE(review): resize_array is commented out above (albumentations
    # dependency), so the two calls below raise NameError if this block is
    # executed — confirm whether the demo is still meant to run.
    transformed_low_res_data = resize_array(low_res_data, 240)
    transformed_high_res_data = resize_array(high_res_data, 960)
    np.save("transformed_low_res_data.npy", transformed_low_res_data)
    np.save("transformed_high_res_data.npy", transformed_high_res_data)
|
|
79
|
+
|
shancx/SN/__init__.py
CHANGED
|
@@ -11,128 +11,67 @@ __author__ = 'shancx'
|
|
|
11
11
|
__author_email__ = 'shancx@126.com'
|
|
12
12
|
|
|
13
13
|
|
|
14
|
-
|
|
15
|
-
# @Time : 2023/09/27 下午8:52
|
|
16
|
-
# @Author : shanchangxi
|
|
17
|
-
# @File : util_log.py
|
|
18
|
-
import time
|
|
19
|
-
import logging
|
|
20
|
-
from logging import handlers
|
|
21
|
-
|
|
22
|
-
def mkDir(path):
|
|
23
|
-
if "." in path:
|
|
24
|
-
os.makedirs(os.path.dirname(path),exist_ok=True)
|
|
25
|
-
else:
|
|
26
|
-
os.makedirs(path, exist_ok=True)
|
|
27
|
-
|
|
28
|
-
loggers = logging.getLogger()
|
|
29
|
-
loggers.setLevel(logging.INFO)
|
|
30
|
-
log_name = './project_S.log'
|
|
31
|
-
mkDir(log_name)
|
|
32
|
-
logfile = log_name
|
|
33
|
-
time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
|
|
34
|
-
time_rotating_file_handler.setLevel(logging.INFO)
|
|
35
|
-
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
|
|
36
|
-
time_rotating_file_handler.setFormatter(formatter)
|
|
37
|
-
loggers.addHandler(time_rotating_file_handler)
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
"""
|
|
41
|
-
import tracback
|
|
42
|
-
try:
|
|
43
|
-
|
|
44
|
-
except Exception as e:
|
|
45
|
-
|
|
46
|
-
logger.info(traceback.format_exc())
|
|
47
|
-
"""
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
'''
|
|
51
|
-
##定義一個streamHandler
|
|
52
|
-
# print_handler = logging.StreamHandler()
|
|
53
|
-
# print_handler.setFormatter(formatter)
|
|
54
|
-
# loggers.addHandler(print_handler)
|
|
55
|
-
'''
|
|
56
|
-
|
|
57
|
-
"""
|
|
58
|
-
from main import makeAll,options
|
|
59
|
-
from multiprocessing import Pool
|
|
60
|
-
import datetime
|
|
61
|
-
from config import logger,output
|
|
62
|
-
import time
|
|
63
|
-
import pandas as pd
|
|
14
|
+
import json
|
|
64
15
|
import os
|
|
65
|
-
from
|
|
66
|
-
import
|
|
67
|
-
|
|
68
|
-
def
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
16
|
+
from cryptography.fernet import Fernet, InvalidToken
|
|
17
|
+
from shancx import crDir
|
|
18
|
+
class UserManager:
    """Small encrypted per-user secret store backed by a JSON file.

    Secrets are encrypted with a Fernet key that is generated on first use
    and persisted in plain text alongside the user records, so the JSON file
    is only as confidential as the file itself.
    """

    def __init__(self, storage_file=None, info=None):
        # storage_file: optional JSON path used for persistence; when None,
        #     data lives only in memory.
        # info: optional pre-built {"key": ..., "users": ...} dict adopted
        #     when no usable storage file exists.
        self.storage_file = storage_file
        self.usersdata = info
        self.data = self._load_or_initialize_data()
        self.cipher = Fernet(self.data["key"])

    def _load_or_initialize_data(self):
        """Load {"key", "users"} from storage_file, else initialize fresh data."""
        if self.storage_file and os.path.exists(self.storage_file) and os.path.getsize(self.storage_file) > 0:
            with open(self.storage_file, "r") as file:
                try:
                    data = json.load(file)
                    # Only accept files that carry both the key and the users map.
                    if "key" in data and "users" in data:
                        return data
                except json.JSONDecodeError:
                    print("错误: 数据文件损坏,请删除文件并重新运行。")
        # No usable file: generate a fresh Fernet key, or adopt caller data.
        key = Fernet.generate_key().decode("utf-8")
        return {"key": key, "users": {}} if self.usersdata is None else self.usersdata
    def _save_data(self):
        """Persist self.data as indented JSON when a storage file is configured."""
        if self.storage_file:
            crDir(self.storage_file)  # ensure the parent directory exists
            with open(self.storage_file, "w") as file:
                json.dump(self.data, file, indent=4)

    def add_user(self, user_id, secret_value):
        """Encrypt secret_value (a str) and store it under user_id.

        NOTE(review): get_user() runs json.loads on the decrypted text, so
        secret_value is apparently expected to be a JSON document — confirm
        callers pass json.dumps(...) output, otherwise get_user will fail
        with an (uncaught) JSONDecodeError.
        """
        encrypted_secret = self.cipher.encrypt(secret_value.encode("utf-8")).decode("utf-8")
        self.data["users"][user_id] = {"s": encrypted_secret}
        self._save_data()
        print(f"用户 {user_id} 的秘钥已成功保存。")

    def get_user(self, user_id):
        """Return the decrypted, JSON-parsed secret for user_id, or None.

        Decryption failures (key mismatch) and unknown users are reported by
        printing; both paths fall through to return None.
        """
        user_data = self.data["users"].get(user_id)
        if user_data:
            try:
                decrypted_secret = self.cipher.decrypt(user_data["s"].encode("utf-8")).decode("utf-8")
                return json.loads(decrypted_secret)
            except InvalidToken:
                print("错误: 秘钥解密失败。")
        else:
            print(f"未找到用户 {user_id} 的数据。")
        return None
|
|
59
|
+
import requests
|
|
60
|
+
def sendMESplus(message, base=None):
    """POST a plain-text message to a webhook built from `base`.

    message: the text content to send.
    base: sequence whose [1] element is the webhook URL prefix and [0] the
        token/suffix appended to it (order per the original f-string).

    Always returns None. The original dereferenced base[1] before any guard,
    so calling it with the default base=None raised TypeError outside the
    try block; that case now prints a message and returns instead. The dead
    locals (`response`, `url`, unused `res`) have been removed.
    """
    if base is None:
        print("sendMESplus: no webhook base provided")
        return None
    webHookUrl = f'{base[1]}{base[0]}'
    try:
        headers = {"Content-Type": "application/json"}
        data = {'msgtype': 'text', 'text': {"content": message}}
        requests.post(webHookUrl, json=data, headers=headers)
    except Exception as e:
        # Best-effort delivery: report and swallow network errors, as before.
        print(e)
    return None
|
|
108
70
|
"""
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
import logging
|
|
117
|
-
from logging import handlers
|
|
118
|
-
|
|
119
|
-
logger = logging.getLogger()
|
|
120
|
-
logger.setLevel(logging.INFO)
|
|
121
|
-
log_name = 'project_tim_tor.log'
|
|
122
|
-
logfile = log_name
|
|
123
|
-
time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
|
|
124
|
-
time_rotating_file_handler.setLevel(logging.INFO)
|
|
125
|
-
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
|
|
126
|
-
time_rotating_file_handler.setFormatter(formatter)
|
|
127
|
-
logger.addHandler(time_rotating_file_handler)
|
|
128
|
-
print_handler = logging.StreamHandler()
|
|
129
|
-
print_handler.setFormatter(formatter)
|
|
130
|
-
logger.addHandler(print_handler)
|
|
131
|
-
|
|
132
|
-
'''
|
|
133
|
-
|
|
134
|
-
'''
|
|
135
|
-
###解决方法 pip install torch==2.4.0 torchvision torchaudio三个同时安装 python 3.12 解决cuda启动不了的问题
|
|
136
|
-
|
|
137
|
-
Res网络
|
|
138
|
-
'''
|
|
71
|
+
import torch.nn as nn
|
|
72
|
+
# gpu_ids = [3, 4, 5]
|
|
73
|
+
device = torch.device(f"cuda:{gpu_ids[0]}" if torch.cuda.is_available() else "cpu")
|
|
74
|
+
model = get_model(model_name, in_channels=in_channels).to(device)
|
|
75
|
+
if len(gpu_ids) > 1:
|
|
76
|
+
model = torch.nn.DataParallel(model, device_ids=gpu_ids)
|
|
77
|
+
"""
|
shancx/Time/GetTime.py
CHANGED
|
@@ -27,6 +27,12 @@ def Datetime2str(datetime_):
|
|
|
27
27
|
return formatted_time
|
|
28
28
|
|
|
29
29
|
from dateutil.relativedelta import relativedelta
|
|
30
|
-
def
|
|
31
|
-
|
|
32
|
-
|
|
30
|
+
def relativeDelta(UTC, H=8, M=0):
    """Shift a datetime by H hours and M minutes and return the result.

    The defaults (H=8) suggest a UTC -> CST(+8) conversion — confirm with
    callers. For fixed hour/minute offsets dateutil.relativedelta is
    equivalent to the stdlib timedelta, so the third-party call is replaced.
    """
    from datetime import timedelta
    return UTC + timedelta(hours=H, minutes=M)
|
|
33
|
+
|
|
34
|
+
def get30m(UTC):
    """Floor a datetime to the most recent 30-minute boundary, formatted
    as 'YYYYmmddHHMM' (seconds are dropped by the format string).

    The original wrapped this in `for m in range(6)` but returned on the
    first iteration, so only the m=0 value was ever produced; the dead loop
    has been removed without changing the result.
    """
    from datetime import timedelta
    T = 30  # cycle length in minutes
    return (UTC - timedelta(minutes=UTC.minute % T)).strftime("%Y%m%d%H%M")
|
shancx/Time/__init__.py
CHANGED
|
@@ -22,7 +22,7 @@ def UTCStr():
|
|
|
22
22
|
now_utcstr = now_utc.strftime('%Y%m%d%H%M%S')
|
|
23
23
|
return now_utcstr
|
|
24
24
|
|
|
25
|
-
def
|
|
25
|
+
def CSTStr():
    """Return the current local (CST) time as a 'YYYYmmddHHMMSS' string."""
    return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
|
|
@@ -45,5 +45,70 @@ def Relativedelta(T_,Th = 0,Tm=0):
|
|
|
45
45
|
mktime = T_+relativedelta(hours=Th,minutes=Tm)
|
|
46
46
|
return mktime
|
|
47
47
|
|
|
48
|
+
def nearest_hour():
    """Snap the current local time to the hour when within a few minutes of
    it, returned as 'YYYYmmddHHMM'.

    minute >= 57 -> start of the next hour; minute <= 3 -> start of the
    current hour; otherwise the current time is kept as-is (seconds are
    dropped by the format string either way).
    """
    now = datetime.datetime.now()
    if now.minute >= 57:
        snapped = (now + datetime.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
    elif now.minute <= 3:
        snapped = now.replace(minute=0, second=0, microsecond=0)
    else:
        snapped = now
    return snapped.strftime("%Y%m%d%H%M")
|
|
62
|
+
|
|
63
|
+
import pandas as pd
|
|
64
|
+
def gtr(sUTC, eUTC, freq='6min'):
    """Build a pandas DatetimeIndex from an offset start up to eUTC.

    sUTC must fall on minute 0, 15, 30 or 45; the first emitted time is
    3 minutes after a :15/:45 start and 6 minutes after a :00/:30 start
    (same offsets as the original if/elif chain, now table-driven).

    Raises ValueError for any other start minute. The fixed-minute offsets
    use stdlib timedelta instead of dateutil.relativedelta — equivalent
    arithmetic without the third-party dependency.
    """
    from datetime import timedelta
    offsets = {15: 3, 45: 3, 0: 6, 30: 6}  # start-minute -> lead minutes
    if sUTC.minute not in offsets:
        raise ValueError("sUTC 的分钟数必须是 0、15、30 或 45 分钟")
    start_time = sUTC + timedelta(minutes=offsets[sUTC.minute])
    return pd.date_range(start_time, eUTC, freq=freq)
|
|
74
|
+
|
|
75
|
+
def gtr10min(sUTC, eUTC, freq='6min'):
    """Build a pandas DatetimeIndex for 10-minute-cycle inputs.

    sUTC must fall on minute 0, 10, 20, 30, 40 or 50; the first emitted time
    is sUTC plus a per-minute lead (2 min after :10/:40, 4 min after
    :20/:50, 6 min after :00/:30 — same offsets as the original if/elif
    chain, now table-driven).

    Raises ValueError for any other start minute. Fixed-minute offsets use
    stdlib timedelta instead of dateutil.relativedelta.
    """
    from datetime import timedelta
    offsets = {10: 2, 40: 2, 20: 4, 50: 4, 0: 6, 30: 6}  # start-minute -> lead
    if sUTC.minute not in offsets:
        raise ValueError("sUTC 的分钟数必须是 0、10、20、30 或40、50 分钟")
    start_time = sUTC + timedelta(minutes=offsets[sUTC.minute])
    return pd.date_range(start_time, eUTC, freq=freq)
|
|
87
|
+
|
|
88
|
+
import datetime as dt
|
|
89
|
+
import pandas as pd
|
|
90
|
+
def ldom(d):
    """Return the last calendar day of d's month (time-of-day preserved)."""
    if d.month == 12:
        first_of_next = d.replace(year=d.year + 1, month=1, day=1)
    else:
        first_of_next = d.replace(month=d.month + 1, day=1)
    return first_of_next - dt.timedelta(days=1)

def gen_dt(s, e, t="trn"):
    """Generate hourly 'YYYYmmddHHMM' strings in [s, e], split train/val.

    The last two days of every month form the validation window: t="val"
    keeps only those hours, t="trn" keeps everything else, and any other
    t value yields an empty list.
    """
    out = []
    for ts in pd.date_range(start=s, end=e, freq='1h'):
        in_val_window = ts.day >= ldom(ts).day - 1  # last two days of the month
        if t == "trn" and in_val_window:
            continue
        if t == "val" and not in_val_window:
            continue
        if t not in ("trn", "val"):
            continue
        out.append(ts.strftime('%Y%m%d%H%M'))
    return out
|
|
103
|
+
"""
|
|
104
|
+
if __name__ == "__main__":
|
|
105
|
+
s = dt.datetime(2023, 1, 28)
|
|
106
|
+
e = dt.datetime(2023, 2, 3)
|
|
107
|
+
print("Train dt (excl month end):")
|
|
108
|
+
print(gen_dt(s, e, "trn"))
|
|
109
|
+
print("\nValid dt (only month end):")
|
|
110
|
+
print(gen_dt(s, e, "val"))
|
|
111
|
+
"""
|
|
112
|
+
|
|
48
113
|
|
|
49
114
|
|