shancx 1.9.33.109__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/{Dsalgor → Algo}/__init__.py +37 -1
- shancx/Calmetrics/__init__.py +78 -9
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/rmseR2score.py +14 -3
- shancx/{Command.py → Cmd.py} +20 -15
- shancx/Config_.py +26 -0
- shancx/Df/__init__.py +11 -0
- shancx/Df/tool.py +0 -1
- shancx/NN/__init__.py +200 -11
- shancx/{path.py → Path1.py} +2 -3
- shancx/Plot/__init__.py +129 -403
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/Plot/radarNmc.py +1 -48
- shancx/Plot/single_china_map.py +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/Read.py +17 -10
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +8 -1
- shancx/Time/timeCycle.py +97 -23
- shancx/Train/makelist.py +161 -155
- shancx/__init__.py +79 -232
- shancx/bak.py +78 -53
- shancx/geosProj.py +2 -2
- shancx/wait.py +35 -1
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/METADATA +12 -4
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- shancx/Plot/Mip.py +0 -42
- shancx/Plot/border.py +0 -44
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/makenetCDFN.py +0 -42
- shancx-1.9.33.109.dist-info/RECORD +0 -91
- /shancx/{3DJU → 3D}/__init__.py +0 -0
- /shancx/{Dsalgor → Algo}/Class.py +0 -0
- /shancx/{Dsalgor → Algo}/CudaPrefetcher1.py +0 -0
- /shancx/{Dsalgor → Algo}/Fake_image.py +0 -0
- /shancx/{Dsalgor → Algo}/Hsml.py +0 -0
- /shancx/{Dsalgor → Algo}/L2Loss.py +0 -0
- /shancx/{Dsalgor → Algo}/MetricTracker.py +0 -0
- /shancx/{Dsalgor → Algo}/Normalize.py +0 -0
- /shancx/{Dsalgor → Algo}/OptimizerWScheduler.py +0 -0
- /shancx/{Dsalgor → Algo}/Rmageresize.py +0 -0
- /shancx/{Dsalgor → Algo}/Savemodel.py +0 -0
- /shancx/{Dsalgor → Algo}/SmoothL1_losses.py +0 -0
- /shancx/{Dsalgor → Algo}/Tqdm.py +0 -0
- /shancx/{Dsalgor → Algo}/checknan.py +0 -0
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- /shancx/{Dsalgor → Algo}/iouJU.py +0 -0
- /shancx/{Dsalgor → Algo}/mask.py +0 -0
- /shancx/{Dsalgor → Algo}/psnr.py +0 -0
- /shancx/{Dsalgor → Algo}/ssim.py +0 -0
- /shancx/{Dsalgor → Algo}/structural_similarity.py +0 -0
- /shancx/{Dsalgor → Algo}/tool.py +0 -0
- /shancx/Calmetrics/{matrixLib.py → calmetricsmatrixLib.py} +0 -0
- /shancx/{Diffmodel → Diffm}/Psamples.py +0 -0
- /shancx/{Diffmodel → Diffm}/__init__.py +0 -0
- /shancx/{Diffmodel → Diffm}/test.py +0 -0
- /shancx/{Board → tensBoard}/__init__.py +0 -0
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/{Dsalgor → Algo}/__init__.py
RENAMED

@@ -82,4 +82,40 @@ if __name__ == "__main__":
         Far=far_value
     )
     manager.log_progress(epoch, idx)
-    manager.close()
+    manager.close()
+
+import psutil
+import os
+def get_memory():
+    process = psutil.Process(os.getpid())
+    return process.memory_info().rss / 1024 / 1024
+
+'''
+initial_memory = get_memory()
+logger.info(f"Memory before: {initial_memory:.2f} MB {sUTC}")
+load_memory = get_memory()
+logger.info(f"After creating objects: {load_memory:.2f} MB {sUTC}")
+final_memory = get_memory()
+logger.info(f"After deleting objects: {final_memory:.2f} MB {sUTC}")
+logger.info(f"Memory change: initial {initial_memory:.2f} -> peak {load_memory:.2f} -> final {final_memory:.2f}")
+'''
+
+import psutil
+import sys
+def check_memory_threshold(threshold=90):
+    mem = psutil.virtual_memory()
+    return mem.percent >= threshold
+try:
+    if check_memory_threshold(90):
+        raise RuntimeError("The system memory usage is too high. Terminating the task. error")
+except RuntimeError as e:
+    print(f"Memory warning error: {e}")
+except MemoryError:
+    print("Insufficient memory, task failed. error")
+# except Exception as e:
+#     print(f"Unknown error: {e}")
+'''
+if check_memory_threshold(90):
+    raise RuntimeError("The system memory usage is too high. Terminating the task. error")
+
+'''
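The memory helpers added above are standard psutil calls; below is a minimal, self-contained sketch of how they could be wired into a job script. The workload between the two measurements is hypothetical, and the 90% threshold simply mirrors the value used in the diff.

import os
import sys
import psutil

def get_memory():
    # resident set size of the current process, in MB
    return psutil.Process(os.getpid()).memory_info().rss / 1024 / 1024

def check_memory_threshold(threshold=90):
    # True once system-wide memory usage reaches `threshold` percent
    return psutil.virtual_memory().percent >= threshold

before = get_memory()
data = [0] * 1_000_000          # hypothetical workload
after = get_memory()
print(f"process memory: {before:.2f} MB -> {after:.2f} MB")
if check_memory_threshold(90):
    sys.exit("system memory usage too high, aborting")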
shancx/Calmetrics/__init__.py
CHANGED

@@ -1,18 +1,13 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
-import os
-def start():
-    print("import successful")
+import os
 # constants
 __author__ = 'shancx'
 
-__author_email__ = '
-
+__author_email__ = 'shanhe12@163.com'
 # @Time : 2025/08/19 11:31 PM
 # @Author : shanchangxi
-# @File : Calmetrics.py
-
-
+# @File : Calmetrics.py
 from sklearn.metrics import mean_squared_error, r2_score
 from scipy.stats import pearsonr
 import numpy as np

@@ -25,4 +20,78 @@ def calculate_metrics(y_true, y_pred):
     y_pred = y_pred[mask]
     mape = np.mean(np.abs((y_true - y_pred) / y_true)) * 100
     r2 = r2_score(y_true, y_pred)
-
+    rmse = f"{rmse:0.4f}"
+    correlation = f"{correlation:0.4f}"
+    mape = f"{mape:0.4f}"
+    r2 = f"{r2:0.4f}"
+    return rmse,correlation,mape,r2
+import numpy as np
+class Metrics:
+    def __init__(self, true_values, forecast_values):
+        """
+        Initialize the Metrics class with observed and forecast values
+        :param true_values: observed values, binarized data
+        :param forecast_values: forecast results, binarized data
+        """
+        self.true_values = true_values
+        self.forecast_values = forecast_values
+
+    def cal_confusion_matrix(self):
+        """
+        Compute the four elements of the confusion matrix: TP, TN, FP, FN
+        :return: TP, TN, FP, FN
+        """
+        TP = np.sum((self.true_values == 1) & (self.forecast_values == 1))  # True Positive
+        TN = np.sum((self.true_values == 0) & (self.forecast_values == 0))  # True Negative
+        FP = np.sum((self.true_values == 0) & (self.forecast_values == 1))  # False Positive
+        FN = np.sum((self.true_values == 1) & (self.forecast_values == 0))  # False Negative
+
+        return TP, TN, FP, FN
+
+    def cal_ts(self):
+        """
+        Compute the TS (threat) score
+        :return: TS score
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        ts_score = TP / (TP + FP + FN) if (TP + FP + FN) > 0 else np.nan
+        return ts_score
+
+    def cal_acc(self):
+        """
+        Compute the accuracy
+        :return: accuracy
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        accuracy = (TP + TN) / (TP + TN + FP + FN) if (TP + TN + FP + FN) > 0 else np.nan
+        return accuracy
+
+    def cal_pod(self):
+        """
+        Compute the hit rate (Probability of Detection, POD)
+        :return: POD
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        pod = TP / (TP + FN) if (TP + FN) > 0 else np.nan
+        return pod
+
+
+    def cal_fnr(self):
+        """
+        Compute the miss rate (False Negative Rate, FNR)
+        :return: FNR
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        fnr = FN / (TP + FN) if (TP + FN) > 0 else np.nan
+        return fnr
+
+    def cal_far(self):
+        """
+        Compute the false alarm rate (False Alarm Rate, FAR)
+        :return: FAR
+        """
+        TP, TN, FP, FN = self.cal_confusion_matrix()
+        far = FP / (TP + FP) if (TP + FP) > 0 else np.nan
+        return far
+
+
shancx/Calmetrics/calmetrics.py
ADDED

@@ -0,0 +1,14 @@
+import numpy as np
+from . import Metrics
+def cal_metrics(obs_v,pre_v):
+    metrics = Metrics(obs_v.ravel(), pre_v.ravel())
+    ts_score = np.around(metrics.cal_ts(),4)
+    accuracy = np.around(metrics.cal_acc(),4)
+    pod = np.around(metrics.cal_pod(),4)
+    fnr = np.around(metrics.cal_fnr(),4)
+    far = np.around(metrics.cal_far(),4)
+    return ts_score,accuracy,pod,fnr,far
+
+"""
+ts_score, accuracy, pod, fnr, far = cal_metrics(obs_v,pre_v)  # obs_v and pre_v are 2-D arrays with the same shape
+"""
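A minimal usage sketch for the metrics added above. The signatures (Metrics/cal_metrics and the string-formatting calculate_metrics) come from the diff; the import paths and the synthetic input grids are assumptions.

import numpy as np
from shancx.Calmetrics.calmetrics import cal_metrics    # import path assumed from the layout above
from shancx.Calmetrics import calculate_metrics         # import path assumed from the layout above

# two binarized 2-D grids of identical shape (synthetic data)
obs_v = (np.random.rand(128, 128) > 0.7).astype(int)
pre_v = (np.random.rand(128, 128) > 0.7).astype(int)
ts_score, accuracy, pod, fnr, far = cal_metrics(obs_v, pre_v)
print(ts_score, accuracy, pod, fnr, far)

# continuous fields: calculate_metrics returns formatted strings in the order (rmse, correlation, mape, r2)
y_true = np.random.rand(128, 128) * 10 + 1.0            # offset avoids zeros in the MAPE denominator
y_pred = y_true + np.random.randn(128, 128)
rmse, correlation, mape, r2 = calculate_metrics(y_true.flatten(), y_pred.flatten())
print(rmse, correlation, mape, r2)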
shancx/Calmetrics/rmseR2score.py
CHANGED

@@ -1,4 +1,3 @@
-
 from sklearn.metrics import mean_squared_error, r2_score
 from scipy.stats import pearsonr
 import numpy as np

@@ -10,8 +9,15 @@ def calculate_metrics(y_true, y_pred):
     y_true = y_true[mask]
     y_pred = y_pred[mask]
     mape = np.mean(np.abs((y_true - y_pred) / y_true)) * 100
-    r2 = r2_score(y_true, y_pred)
-
+    r2 = r2_score(y_true, y_pred)
+    rmse = f"{rmse:0.4f}"
+    correlation = f"{correlation:0.4f}"
+    mape = f"{mape:0.4f}"
+    r2 = f"{r2:0.4f}"
+    return rmse,correlation,mape,r2
+"""
+calculate_metrics(data.flatten(),datanpy.flatten())
+"""
 """
 dt_res[f'{flag}'] = dt_res[f'{flag}'].astype(float)
 dt_res[f'{flag}_p'] = dt_res[f'{flag}_p'].astype(float)

@@ -20,5 +26,10 @@ dt_ = dt_res[(dt_res[f'{flag}'] < 900000.0) & (dt_res[f'{flag}_p'] < 900000.0)]
 dt_.replace([np.inf, -np.inf], np.nan, inplace=True)
 dt_.dropna(inplace=True)
 correlation, rmse, mape, r2 = calculate_metrics(dt_[f"{flag}"], dt_[f"{flag}_p"])
+
+
+y_true = arry1.flatten()
+y_pred = arry2.flatten()
+correlation, rmse, mape, r2 = calculate_metrics(y_true, y_pred)
 """
 
shancx/{Command.py → Cmd.py}
RENAMED

@@ -2,29 +2,34 @@
 
 import subprocess
 import logging
-
-
+from shancx.NN import setlogger
+logger = setlogger(level=logging.INFO)
+def runcommand(cmd, timeout=300):
     try:
         result = subprocess.run(
-
-
-
-
-
-
-
-
-
+            cmd
+            ,shell=True
+            ,timeout=timeout
+            ,check=True
+            ,capture_output=True
+            ,text=True
+        )
+        for output, label in [(result.stdout, "output"), (result.stderr, "error output")]:
+            if output:
+                logger.info(f"Command {label}:\n{output}")
+        logger.info("Command succeeded!")
+        return True
     except subprocess.TimeoutExpired:
-
+        logger.error(f"Timeout after {timeout} seconds!")
        return False
     except subprocess.CalledProcessError as e:
-        error_msg = e.stderr.
-
+        error_msg = e.stderr.strip() if e.stderr else "Unknown error"
+        logger.error(f"Command failed! Code: {e.returncode}, Error: {error_msg}")
        return False
     except Exception as e:
-
+        logger.error(f"Unexpected error: {str(e)}")
        return False
+
 """
 allDf.dtypes
 
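A hedged usage sketch for runcommand as rewritten above; the module path shancx.Cmd follows the Command.py → Cmd.py rename shown in this section, and the shell command itself is arbitrary.

from shancx.Cmd import runcommand   # module path assumed from the rename above

# runcommand returns True on success and False on timeout, non-zero exit, or any other error;
# stdout/stderr are written to the logger configured inside the module.
ok = runcommand("ls -l /tmp", timeout=60)
if not ok:
    print("command failed; see the logged output for details")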
shancx/Config_.py
ADDED

@@ -0,0 +1,26 @@
+import pygrib
+import numpy as np
+import pandas as pd
+import yaml
+def _load_config(config_path: str) :
+    """Load a YAML configuration file"""
+    print(f"load config file Get configuration parameters: {config_path}")
+    with open(config_path, 'r') as f:
+        config = yaml.safe_load(f)
+    return config
+
+import configparser
+import traceback
+def parse_config(path,section,option):
+    cp = configparser.ConfigParser()
+    try:
+        cp.read(path)
+        res = cp.get(section,option)
+    except Exception as e:
+        print(traceback.format_exc())
+        exit()
+    return res
+"""
+Path = "./application.conf"
+radar_path = parse_config(Path, "JY", "radar_path")  # "JY" is the section to read, "radar_path" is the configured path option
+"""
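A minimal sketch of both config helpers added above; the file names and YAML key are placeholders, and the INI lookup mirrors the example in the diff's own usage note.

from shancx.Config_ import _load_config, parse_config   # module path assumed from the file name above

# YAML: returns the parsed document as a dict
cfg = _load_config("./settings.yaml")                    # hypothetical file
print(cfg.get("radar_path"))                             # hypothetical key

# INI (configparser): section "JY", option "radar_path", as in the diff's usage note
radar_path = parse_config("./application.conf", "JY", "radar_path")
print(radar_path)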
shancx/Df/__init__.py
CHANGED

@@ -19,5 +19,16 @@ def Dtype(df_):
     print(dftypes)
     return dftypes
 
+# Assuming env_rang is defined as (north, south, west, east)
+def getrange(df,env_rang=None):
+    north, south, west, east = env_rang
+    filtered_data = df[
+        (df["Lat"] < north) &
+        (df["Lat"] > south) &
+        (df["Lon"] > west) &
+        (df["Lon"] < east)
+    ]
+    return filtered_data
+
 #pd.concat(filter(None, results))
 #valid_results = [df for df in results if isinstance(df, pd.DataFrame) and not df.empty]
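A short usage sketch for the getrange helper added above; the import path is assumed from the package layout and the sample frame is synthetic.

import pandas as pd
from shancx.Df import getrange   # import path assumed

df = pd.DataFrame({"Lat": [12.0, 25.0, 41.0], "Lon": [101.0, 112.0, 131.0], "val": [1, 2, 3]})
# env_rang follows the (north, south, west, east) convention noted in the diff
subset = getrange(df, env_rang=(30.0, 5.0, 95.0, 120.0))
print(subset)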
shancx/Df/tool.py
CHANGED

@@ -1 +0,0 @@
-
shancx/NN/__init__.py
CHANGED

@@ -4,22 +4,19 @@ import os
 def start():
     print("import successful")
 # constants
-__author__ = 'shancx'
-
-
-
-# @Time : 2023/09/27 8:52 PM
-# @Author : shanchangxi
-# @File : util_log.py
-
-
+__author__ = 'shancx'
+__author_email__ = 'shanhe12@163.com'
+__time__ = '20251028 21:16'
 import logging
 from logging.handlers import RotatingFileHandler
 import os
 from shancx import crDir
-def _loggers(logger_name="loggers", root="./logs", phase="project", level=logging.INFO, screen=True, max_bytes=10*1024*1024, backup_count=5, overwrite=False):
+def _loggers(logger_name="loggers", root="./logs", phase="project", level=logging.INFO, screen=True, max_bytes=10*1024*1024, backup_count=5, overwrite=False,handlersflag=False):
     '''set up logger with rotating file handler'''
     l = logging.getLogger(logger_name)
+    if handlersflag:
+        if l.handlers:
+            return l
     formatter = logging.Formatter(
         "%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s", datefmt='%Y-%m-%d %H:%M:%S')
     log_file = os.path.join(root, '{}.log'.format(phase))

@@ -36,7 +33,6 @@ def _loggers(logger_name="loggers", root="./logs", phase="project", level=loggin
         l.addHandler(sh)
     return l
 
-
 """
 logger = _loggers(logger_name="test_logger", root=curpathplus, phase="test_log", overwrite=True, screen=True)
 # Test log output

@@ -46,6 +42,48 @@ for i in range(5):
 """
 
 
+import logging
+from logging.handlers import RotatingFileHandler
+import os
+from shancx import crDir
+import multiprocessing
+def _loggersPlus(logger_name="loggers", root="./logs", phase="project", level=logging.INFO,
+                 screen=True, max_bytes=10 * 1024 * 1024, backup_count=5, overwrite=False,
+                 handlersflag=False, enable_rotating=None):
+    l = logging.getLogger(logger_name)
+    if handlersflag:
+        if l.handlers:
+            return l
+    formatter = logging.Formatter(
+        "%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s",
+        datefmt='%Y-%m-%d %H:%M:%S'
+    )
+    log_file = os.path.join(root, '{}.log'.format(phase))
+    crDir(log_file)
+    mode = 'w' if overwrite else 'a'
+    if enable_rotating is None:
+        enable_rotating = (multiprocessing.current_process().name == 'MainProcess')
+
+    if enable_rotating:
+        fh = RotatingFileHandler(
+            log_file,
+            mode=mode,
+            maxBytes=max_bytes,
+            backupCount=backup_count
+        )
+    else:
+        fh = logging.FileHandler(log_file, mode=mode)
+
+    fh.setFormatter(formatter)
+    l.setLevel(level)
+    l.addHandler(fh)
+
+    if screen:
+        sh = logging.StreamHandler()
+        sh.setFormatter(formatter)
+        l.addHandler(sh)
+    return l
+
 
 import logging
 def setlogger(level=logging.INFO):

@@ -64,4 +102,155 @@ if __name__ == "__main__":
     logger.debug("This line will be shown")  # DEBUG is hidden by default, but it shows here because we set the level
     logger.info("This is an INFO message")
     '''
+import os
+import glob
+import logging
+from datetime import datetime
+from shancx import loggers as logger
+def cleanupLogs(log_dir='/mnt/wtx_weather_forecast/scx/SATH9SEAStest/logs', keep_count=10,
+                pattern='*.log', recursive=False, dry_run=False):
+    stats = {'total_dirs': 0, 'deleted_files': 0, 'errors': []}
+    def _cleanup_dir(directory):
+        stats['total_dirs'] += 1
+        if not os.path.exists(directory):
+            logging.warning(f"Directory does not exist: {directory}")
+            return
+        file_paths = glob.glob(os.path.join(directory, pattern))
+        log_files = [(path, os.path.getmtime(path)) for path in file_paths]
+        log_files.sort(key=lambda x: x[1], reverse=True)
+        if len(log_files) <= keep_count:
+            logging.info(f"Directory {directory} holds {len(log_files)} files, not more than keep_count ({keep_count}); nothing to clean")
+            return
+        files_to_delete = log_files[keep_count:]
+        for file_path, mtime in files_to_delete:
+            try:
+                if dry_run:
+                    logging.info(f"[dry run] would delete: {file_path} (modified: {datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H:%M:%S')})")
+                else:
+                    os.remove(file_path)
+                    logging.info(f"Deleted: {file_path} (modified: {datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H:%M:%S')})")
+                    stats['deleted_files'] += 1
+            except Exception as e:
+                error_msg = f"Failed to delete {file_path}: {str(e)}"
+                logging.error(error_msg)
+                stats['errors'].append(error_msg)
+    if recursive:
+        for root, _, _ in os.walk(log_dir):
+            _cleanup_dir(root)
+    else:
+        _cleanup_dir(log_dir)
+    return stats
+"""
+if __name__ == "__main__":
+    dir = "/mnt/wtx_weather_forecast/scx/SATH9SEAStest/logs/H9SEAS/"
+    stats = cleanupLogs(dir,3, '*.log', False, False)
+    logging.info(f"Cleanup finished: dirs={stats['total_dirs']}, deleted={stats['deleted_files']}, errors={len(stats['errors'])}")
+    if stats['errors']:
+        logging.error(f"Error details: {stats['errors']}")
+"""
+
+from itertools import product
+from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
+import sys
+from tqdm import tqdm
+def validate_param_list(param_list):
+    if len(param_list) == 0:
+        raise ValueError("param_list cannot be empty.")
+    for sublist in param_list:
+        if len(sublist) == 0:
+            raise ValueError("Sub-lists in param_list cannot be empty.")
+def Mul_sub(task, param_list, num=6):
+    print(f"Pro num {num}")
+    validate_param_list(param_list)
+    if len(param_list) == 1:
+        product_list = [(x,) for x in param_list[0]]
+    else:
+        product_list = list(product(*param_list))
+    with PoolExecutor(max_workers=num) as executor:
+        try:
+            futures = [executor.submit(task, item) for item in product_list]
+            for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
+                future.result()
+        except KeyboardInterrupt:
+            sys.exit(1)
+    print("All tasks completed")
+
+from concurrent.futures import ThreadPoolExecutor
+from itertools import product
+def Mul_TH(task, param_list, max_workers=6):
+    print(f"Thread num: {max_workers}")
+    validate_param_list(param_list)
+    task_args = [
+        (arg,) if len(param_list) == 1 else arg
+        for arg in (
+            param_list[0] if len(param_list) == 1
+            else product(*param_list)
+        )
+    ]
+    with ThreadPoolExecutor(max_workers) as ex:
+        try:
+            list(ex.map(task, task_args))
+        except KeyboardInterrupt:
+            print("\nInterrupted by user")
+            ex.shutdown(wait=False)
+            sys.exit(1)
+
+import traceback
+import shutil, os
+def safe_del(path):
+    try:
+        shutil.rmtree(path) if os.path.isdir(path) else None
+        print(f"{path} deleted")
+    except Exception:
+        print(traceback.format_exc())
+
+"""
+safe_del("./data")
+"""
+
+import os, glob
+def clean_files(folder, keep=0):
+    if os.path.isdir(folder):
+        try:
+            files = [os.path.join(folder, f) for f in os.listdir(folder)
+                     if os.path.isfile(os.path.join(folder, f))]
+            if keep > 0 and len(files) > keep:
+                files.sort(key=os.path.getmtime)
+                [os.remove(f) for f in files[:-keep]]
+            elif keep == 0:
+                [os.remove(f) for f in files]
+        except Exception as e:
+            print(traceback.format_exc())
+
+"""
+clean_files("./logs", keep=10)
+clean_files("./temp")
+"""
 
+# import os
+# from datetime import datetime
+# from shancx.NN import _loggers
+# from shancx import lock_file
+# from shancx.wait import check_lock
+# from shancx import crDir
+# logger =_loggers()
+# def check_process_data(UTC, sat_cd,basepath ="/mnt/wtx_weather_forecast/scx/test/lock_files" ):
+#     try:
+#         UTCStr = UTC.strftime("%Y%m%d%H%M")
+#         file = f"/mnt/wtx_weather_forecast/scx/test/lock_files/{sat_cd}/{UTCStr[:4]}/{UTCStr[:8]}/File_{UTCStr}.lock"
+#         crDir(file)
+#         if not lock_file(file):
+#             if check_lock(file):
+#                 logger.info("data is making or maked")
+#                 return True ,file
+#         return False,file
+#     except Exception as e:
+#         logger.error(f"Error in check_and_process_data: {str(e)}")
+#         return False,file
+# """
+# flag1,file = check_process_data(UTC, "H9SEAS" )
+# if flag1:
+#     sys.exit()
+# if os.path.exists(output_path):  # used together with the lock check
+#     sys.exit()
+# """
shancx/{path.py → Path1.py}
RENAMED

@@ -2,14 +2,14 @@
 
 import os
 from pathlib import Path
-def
+def curPath_():
     current_file_path = os.path.abspath(__file__)
     current_folder_path = os.path.dirname(current_file_path)
     parent = Path(__file__).parent
     return current_folder_path,parent
 
 import sys
-def
+def curPath():
     # Get the absolute path of the currently running script
     current_file_path = os.path.abspath(sys.argv[0])
     current_folder_path = os.path.dirname(current_file_path)

@@ -157,6 +157,5 @@ if __name__ == '__main__':
         Mul_sub(map_data,[timeList1],31)
     except Exception as e:
         print(traceback.format_exc())
-
 """
 
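Finally, a small sketch for the renamed path helpers; only curPath_ is exercised because its full body is visible in the hunk above, and the module path shancx.Path1 is assumed from the path.py → Path1.py rename.

from shancx.Path1 import curPath_   # module path assumed from the rename above

# curPath_() returns (directory containing the module file, its parent as a pathlib.Path)
folder, parent = curPath_()
print(folder, parent)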