shancx 1.8.92__py3-none-any.whl → 1.9.33.218__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. shancx/3D/__init__.py +25 -0
  2. shancx/Algo/Class.py +11 -0
  3. shancx/Algo/CudaPrefetcher1.py +112 -0
  4. shancx/Algo/Fake_image.py +24 -0
  5. shancx/Algo/Hsml.py +391 -0
  6. shancx/Algo/L2Loss.py +10 -0
  7. shancx/Algo/MetricTracker.py +132 -0
  8. shancx/Algo/Normalize.py +66 -0
  9. shancx/Algo/OptimizerWScheduler.py +38 -0
  10. shancx/Algo/Rmageresize.py +79 -0
  11. shancx/Algo/Savemodel.py +33 -0
  12. shancx/Algo/SmoothL1_losses.py +27 -0
  13. shancx/Algo/Tqdm.py +62 -0
  14. shancx/Algo/__init__.py +121 -0
  15. shancx/Algo/checknan.py +28 -0
  16. shancx/Algo/iouJU.py +83 -0
  17. shancx/Algo/mask.py +25 -0
  18. shancx/Algo/psnr.py +9 -0
  19. shancx/Algo/ssim.py +70 -0
  20. shancx/Algo/structural_similarity.py +308 -0
  21. shancx/Algo/tool.py +704 -0
  22. shancx/Calmetrics/__init__.py +97 -0
  23. shancx/Calmetrics/calmetrics.py +14 -0
  24. shancx/Calmetrics/calmetricsmatrixLib.py +147 -0
  25. shancx/Calmetrics/rmseR2score.py +35 -0
  26. shancx/Clip/__init__.py +50 -0
  27. shancx/Cmd.py +126 -0
  28. shancx/Config_.py +26 -0
  29. shancx/Df/DataFrame.py +11 -2
  30. shancx/Df/__init__.py +17 -0
  31. shancx/Df/tool.py +0 -0
  32. shancx/Diffm/Psamples.py +18 -0
  33. shancx/Diffm/__init__.py +0 -0
  34. shancx/Diffm/test.py +207 -0
  35. shancx/Doc/__init__.py +214 -0
  36. shancx/E/__init__.py +178 -152
  37. shancx/Fillmiss/__init__.py +0 -0
  38. shancx/Fillmiss/imgidwJU.py +46 -0
  39. shancx/Fillmiss/imgidwLatLonJU.py +82 -0
  40. shancx/Gpu/__init__.py +55 -0
  41. shancx/H9/__init__.py +126 -0
  42. shancx/H9/ahi_read_hsd.py +877 -0
  43. shancx/H9/ahisearchtable.py +298 -0
  44. shancx/H9/geometry.py +2439 -0
  45. shancx/Hug/__init__.py +81 -0
  46. shancx/Inst.py +22 -0
  47. shancx/Lib.py +31 -0
  48. shancx/Mos/__init__.py +37 -0
  49. shancx/NN/__init__.py +235 -106
  50. shancx/Path1.py +161 -0
  51. shancx/Plot/GlobMap.py +276 -116
  52. shancx/Plot/__init__.py +491 -1
  53. shancx/Plot/draw_day_CR_PNG.py +4 -21
  54. shancx/Plot/exam.py +116 -0
  55. shancx/Plot/plotGlobal.py +325 -0
  56. shancx/{radar_nmc.py → Plot/radarNmc.py} +4 -34
  57. shancx/{subplots_single_china_map.py → Plot/single_china_map.py} +1 -1
  58. shancx/Point.py +46 -0
  59. shancx/QC.py +223 -0
  60. shancx/RdPzl/__init__.py +32 -0
  61. shancx/Read.py +72 -0
  62. shancx/Resize.py +79 -0
  63. shancx/SN/__init__.py +62 -123
  64. shancx/Time/GetTime.py +9 -3
  65. shancx/Time/__init__.py +66 -1
  66. shancx/Time/timeCycle.py +302 -0
  67. shancx/Time/tool.py +0 -0
  68. shancx/Train/__init__.py +74 -0
  69. shancx/Train/makelist.py +187 -0
  70. shancx/Train/multiGpu.py +27 -0
  71. shancx/Train/prepare.py +161 -0
  72. shancx/Train/renet50.py +157 -0
  73. shancx/ZR.py +12 -0
  74. shancx/__init__.py +333 -262
  75. shancx/args.py +27 -0
  76. shancx/bak.py +768 -0
  77. shancx/df2database.py +62 -2
  78. shancx/geosProj.py +80 -0
  79. shancx/info.py +38 -0
  80. shancx/netdfJU.py +231 -0
  81. shancx/sendM.py +59 -0
  82. shancx/tensBoard/__init__.py +28 -0
  83. shancx/wait.py +246 -0
  84. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/METADATA +15 -5
  85. shancx-1.9.33.218.dist-info/RECORD +91 -0
  86. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
  87. my_timer_decorator/__init__.py +0 -10
  88. shancx/Dsalgor/__init__.py +0 -19
  89. shancx/E/DFGRRIB.py +0 -30
  90. shancx/EN/DFGRRIB.py +0 -30
  91. shancx/EN/__init__.py +0 -148
  92. shancx/FileRead.py +0 -44
  93. shancx/Gray2RGB.py +0 -86
  94. shancx/M/__init__.py +0 -137
  95. shancx/MN/__init__.py +0 -133
  96. shancx/N/__init__.py +0 -131
  97. shancx/Plot/draw_day_CR_PNGUS.py +0 -206
  98. shancx/Plot/draw_day_CR_SVG.py +0 -275
  99. shancx/Plot/draw_day_pre_PNGUS.py +0 -205
  100. shancx/Plot/glob_nation_map.py +0 -116
  101. shancx/Plot/radar_nmc.py +0 -61
  102. shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
  103. shancx/Plot/radar_nmc_china_map_f.py +0 -121
  104. shancx/Plot/radar_nmc_us_map_f.py +0 -128
  105. shancx/Plot/subplots_compare_devlop.py +0 -36
  106. shancx/Plot/subplots_single_china_map.py +0 -45
  107. shancx/S/__init__.py +0 -138
  108. shancx/W/__init__.py +0 -132
  109. shancx/WN/__init__.py +0 -132
  110. shancx/code.py +0 -331
  111. shancx/draw_day_CR_PNG.py +0 -200
  112. shancx/draw_day_CR_PNGUS.py +0 -206
  113. shancx/draw_day_CR_SVG.py +0 -275
  114. shancx/draw_day_pre_PNGUS.py +0 -205
  115. shancx/makenetCDFN.py +0 -42
  116. shancx/mkIMGSCX.py +0 -92
  117. shancx/netCDF.py +0 -130
  118. shancx/radar_nmc_china_map_compare1.py +0 -50
  119. shancx/radar_nmc_china_map_f.py +0 -125
  120. shancx/radar_nmc_us_map_f.py +0 -67
  121. shancx/subplots_compare_devlop.py +0 -36
  122. shancx/tool.py +0 -18
  123. shancx/user/H8mess.py +0 -317
  124. shancx/user/__init__.py +0 -137
  125. shancx/user/cinradHJN.py +0 -496
  126. shancx/user/examMeso.py +0 -293
  127. shancx/user/hjnDAAS.py +0 -26
  128. shancx/user/hjnFTP.py +0 -81
  129. shancx/user/hjnGIS.py +0 -320
  130. shancx/user/hjnGPU.py +0 -21
  131. shancx/user/hjnIDW.py +0 -68
  132. shancx/user/hjnKDTree.py +0 -75
  133. shancx/user/hjnLAPSTransform.py +0 -47
  134. shancx/user/hjnMiscellaneous.py +0 -182
  135. shancx/user/hjnProj.py +0 -162
  136. shancx/user/inotify.py +0 -41
  137. shancx/user/matplotlibMess.py +0 -87
  138. shancx/user/mkNCHJN.py +0 -623
  139. shancx/user/newTypeRadar.py +0 -492
  140. shancx/user/test.py +0 -6
  141. shancx/user/tlogP.py +0 -129
  142. shancx/util_log.py +0 -33
  143. shancx/wtx/H8mess.py +0 -315
  144. shancx/wtx/__init__.py +0 -151
  145. shancx/wtx/cinradHJN.py +0 -496
  146. shancx/wtx/colormap.py +0 -64
  147. shancx/wtx/examMeso.py +0 -298
  148. shancx/wtx/hjnDAAS.py +0 -26
  149. shancx/wtx/hjnFTP.py +0 -81
  150. shancx/wtx/hjnGIS.py +0 -330
  151. shancx/wtx/hjnGPU.py +0 -21
  152. shancx/wtx/hjnIDW.py +0 -68
  153. shancx/wtx/hjnKDTree.py +0 -75
  154. shancx/wtx/hjnLAPSTransform.py +0 -47
  155. shancx/wtx/hjnLog.py +0 -78
  156. shancx/wtx/hjnMiscellaneous.py +0 -201
  157. shancx/wtx/hjnProj.py +0 -161
  158. shancx/wtx/inotify.py +0 -41
  159. shancx/wtx/matplotlibMess.py +0 -87
  160. shancx/wtx/mkNCHJN.py +0 -613
  161. shancx/wtx/newTypeRadar.py +0 -492
  162. shancx/wtx/test.py +0 -6
  163. shancx/wtx/tlogP.py +0 -129
  164. shancx-1.8.92.dist-info/RECORD +0 -99
  165. /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
  166. {shancx-1.8.92.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/__init__.py CHANGED
@@ -1,263 +1,334 @@
- #!/usr/bin/python
- # -*- coding: utf-8 -*-
- # @Time : 2024/10/17 上午午10:40
- # @Author : shancx
- # @File : __init__.py
- # @email : shanhe12@163.com
-
- import os
- def start():
- print("import successful")
- # constants
- import subprocess
-
- __author__ = 'shancx'
-
- __author_email__ = 'shanhe12@163.com'
-
-
-
- # @Time : 2023/09/27 下午8:52
- # @Author : shanchangxi
- # @File : util_log.py
- import time
- import logging
- from logging import handlers
-
-
-
- loggers = logging.getLogger()
- loggers.setLevel(logging.INFO)
- log_name = 'project.log'
- # mkDir(log_name)
- logfile = log_name
- time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
- time_rotating_file_handler.setLevel(logging.INFO)
- formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
- time_rotating_file_handler.setFormatter(formatter)
- loggers.addHandler(time_rotating_file_handler)
-
- from pathlib import Path
- def crDir(path):
- path_obj = Path(path)
- directory = path_obj.parent if path_obj.suffix else path_obj
- directory.mkdir(parents=True, exist_ok=True)
-
-
- def Tim_(func):
- def wrapper(*args, **kwargs):
- start_time = time.time()
- result = func(*args, **kwargs)
- end_time = time.time()
- print(f"{func.__name__} took {end_time - start_time:.4f} seconds")
- loggers.info(f"{func.__name__} took {end_time - start_time:.4f} seconds")
- return result
- return wrapper
-
- def validate_param_list(param_list):
- if len(param_list) == 0:
- raise ValueError("param_list cannot be empty.")
- for sublist in param_list:
- if len(sublist) == 0:
- raise ValueError("Sub-lists in param_list cannot be empty.")
-
- from itertools import product
- from concurrent.futures import ProcessPoolExecutor as PoolExecutor
- def Mul_(map_fun,param_list,num=6):
- print(f"Pro num {num}")
- validate_param_list(param_list)
- if len(param_list) == 1:
- product_List = [(x,) for x in param_list[0]]
- else:
- product_List = list(product(*param_list))
- with PoolExecutor(num) as p:
- try:
- P_data = p.map(map_fun, product_List)
- except KeyboardInterrupt:
- sys.exit(1)
- return list(P_data)
-
- from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
- import sys
-
- def Mul_sub(task, param_list, num=6):
- print(f"Pro num {num}")
- validate_param_list(param_list)
- if len(param_list) == 1:
- product_list = [(x,) for x in param_list[0]]
- else:
- product_list = list(product(*param_list))
- with PoolExecutor(max_workers=num) as executor:
- try:
- futures = [executor.submit(task, item) for item in product_list]
- results = [future.result() for future in as_completed(futures)]
- except KeyboardInterrupt:
- sys.exit(1)
- return results
-
- def Mul_sub_S(task, param_list, num=6):
- print(f"Pro num {num}")
- validate_param_list(param_list)
- if len(param_list) == 1:
- product_list = [(x,) for x in param_list[0]]
- else:
- product_list = list(product(*param_list))
- results = [None] * len(product_list)
-
- with PoolExecutor(max_workers=num) as executor:
- futures = {executor.submit(task, item): idx for idx, item in enumerate(product_list)}
- try:
- for future in as_completed(futures):
- idx = futures[future]
- results[idx] = future.result()
- except KeyboardInterrupt:
- sys.exit(1)
- return results
-
-
-
- def add_alias():
- command_to_add = "alias lt='ls -ltr'\n"
- bashrc_path = os.path.expanduser('~/.bashrc')
- with open(bashrc_path, 'a') as file:
- file.write(command_to_add)
- # 执行 source ~/.bashrc
- subprocess.run(['source', '~/.bashrc'], shell=True)
-
- '''
- from multiprocessing import Pool
- '''
- '''
- ##定義一個streamHandler
- # print_handler = logging.StreamHandler()
- # print_handler.setFormatter(formatter)
- # loggers.addHandler(print_handler)
- '''
-
- """
- from main import makeAll,options
- from multiprocessing import Pool
- import datetime
- from config import logger,output
- import time
- import pandas as pd
- import os
- from itertools import product
- import threading
-
- def excuteCommand(cmd):
- print(cmd)
- os.system(cmd)
-
- def gpuPro(makeListUTC, isPhase, isDebug, gpu, isOverwrite):
- productList = product(makeListUTC, [isPhase], [isDebug], [gpu], [isOverwrite])
-
- with Pool(4) as p:
- p.map(makeAll, productList)
-
- if __name__ == '__main__':
- cfg = options()
- isPhase = cfg.isPhase
- isDebug = cfg.isDebug
- sepSec = cfg.sepSec
- gpu = cfg.gpu
- pool = cfg.pool
- isOverwrite = cfg.isOverwrite
- timeList = pd.date_range(cfg.times[0], cfg.times[-1], freq=f"{sepSec}s")
- logger.info(f"时间段check {timeList}")
- gpuNum = 2
- eachGPU = 4
-
- makeListUTC = []
- for UTC in timeList:
- UTCStr = UTC.strftime("%Y%m%d%H%M")
- outpath = f"{output}/{UTCStr[:4]}/{UTCStr[:8]}/MSP2_WTX_AIW_QPF_L88_CHN_{UTCStr}_00000-00300-00006.nc"
- if not os.path.exists(outpath) or isOverwrite:
- makeListUTC.append(UTC)
- [print(element) for element in makeListUTC]
-
- phaseCMD = "--isPhase" if isPhase else ""
- debugCMD = "--isDebug" if isDebug else ""
- OverwriteCMD = "--isOverwrite"
- gpuCMD = f"--gpu={gpu}"
- # cmdList = list(map(lambda x:f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {OverwriteCMD} {gpuCMD}",makeListUTC))
- cmdList = list(map(lambda x:f"python main.py --times={x.strftime('%Y%m%d%H%M')} {phaseCMD} {debugCMD} {gpuCMD}",makeListUTC))
-
- with Pool(pool) as p:
- p.map(excuteCommand, cmdList)
- """
-
-
- '''
- # @Time : 2023/09/27 下午8:52
- # @Author : shanchangxi
- # @File : util_log.py
- import time
- import logging
- from logging import handlers
-
- logger = logging.getLogger()
- logger.setLevel(logging.INFO)
- log_name = 'project_tim_tor.log'
- logfile = log_name
- time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
- time_rotating_file_handler.setLevel(logging.INFO)
- formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
- time_rotating_file_handler.setFormatter(formatter)
- logger.addHandler(time_rotating_file_handler)
- print_handler = logging.StreamHandler()
- print_handler.setFormatter(formatter)
- logger.addHandler(print_handler)
-
- '''
-
- '''
- ###解决方法 pip install torch==2.4.0 torchvision torchaudio三个同时安装 python 3.12 解决cuda启动不了的问题
-
- Res网络
- '''
-
- '''
- import concurrent.futures
- from itertools import product
- def task(args):
- args1,args2 = args
- print( f"Task ({args1}, {args2}) , result")
- return (args1,args2,5)
-
- def Mul_sub(task, pro):
- product_list = product(*pro)
- with concurrent.futures.ThreadPoolExecutor() as executor:
- futures = [executor.submit(task, item) for item in product_list]
- results = [future.result() for future in concurrent.futures.as_completed(futures)]
- return results
- res = Mul_sub(task, [[1, 23, 4, 5], ["n"]])
- print("res")
- print(res)
-
- '''
-
- '''
- parser = argparse.ArgumentParser(description='shancx argparse ')
- parser.add_argument('--times', type=str, default='202408280000,202408281700')
- parser.add_argument('--pac', type=str, default='100000')
- parser.add_argument('--combine',action='store_true',default=False)
- config= parser.parse_args()
- print(config)
- config.times = config.times.split(",")
- config.pac = config.pac.split(",")
- if len(config.times) == 1:
- config.times = [config.times[0], config.times[0]]
- config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
- datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
- cfg = config
-
- '''
-
- """
- find /mnt/wtx_weather_forecast/scx/SpiderGLOBPNGSource -type f -name "*.png" -mtime +3 -exec rm {} \;
-
- -mtime 选项后面的数值代表天数。
- +n 表示“超过 n 天”,即查找最后修改时间在 n 天之前的文件。
-
+ #!/usr/bin/python
+ # -*- coding: utf-8 -*-
+ # @Time : 2024/10/17 上午午10:40
+ # @Author : shancx
+ # @File : __init__.py
+ # @email : shanhe12@163.com
+
+ import os
+ def start():
+ print("import successful")
+ # constants
+ import subprocess
+
+ __author__ = 'shancx'
+
+ __author_email__ = 'shanhe12@163.com'
+
+ # @Time : 2023/09/27 下午8:52
+ # @Author : shanchangxi
+ # @File : util_log.py
+ import time
+ import logging
+ from logging import handlers
+ import inspect
+
+ import time
+ import logging
+ from logging import handlers
+ loggers = logging.getLogger()
+ loggers.setLevel(logging.INFO)
+ formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
+ print_handler = logging.StreamHandler()
+ print_handler.setFormatter(formatter)
+ loggers.addHandler(print_handler)
+
+ from pathlib import Path as PathlibPath
+ def crDir(path):
+ path_obj = PathlibPath(path)
+ directory = path_obj.parent if path_obj.suffix else path_obj
+ directory.mkdir(parents=True, exist_ok=True)
+
+ def Tim_(func):
+ def wrapper(*args, **kwargs):
+ start_time = time.time()
+ result = func(*args, **kwargs)
+ end_time = time.time()
+ loggers.info(f"{func.__name__} took {end_time - start_time:.4f} seconds")
+ return result
+ return wrapper
+
+ def TimPlus(func):
+ def wrapper(*args, **kwargs):
+ func_file = inspect.getfile(func)
+ func_line = inspect.getsourcelines(func)[1]
+ start_time = time.time()
+ result = func(*args, **kwargs)
+ end_time = time.time()
+ elapsed_time = end_time - start_time
+ log_message = (
+ f"{func.__name__} line {func_line} (Defined at {func_file} ) "
+ f"took {elapsed_time:.4f} seconds"
+ )
+ loggers.info(log_message)
+ return result
+ return wrapper
+
+ def validate_param_list(param_list):
+ if len(param_list) == 0:
+ raise ValueError("param_list cannot be empty.")
+ for sublist in param_list:
+ if len(sublist) == 0:
+ raise ValueError("Sub-lists in param_list cannot be empty.")
+
+ from itertools import product
+ from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
+ import sys
+ from tqdm import tqdm
+ def Mul_sub(task, param_list, num=6):
+ print(f"Pro num {num}")
+ validate_param_list(param_list)
+ if len(param_list) == 1:
+ product_list = [(x,) for x in param_list[0]]
+ else:
+ product_list = list(product(*param_list))
+ results = []
+ with PoolExecutor(max_workers=num) as executor:
+ try:
+ futures = [executor.submit(task, item) for item in product_list]
+ for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
+ results.append(future.result())
+ except KeyboardInterrupt:
+ sys.exit(1)
+ return results
+
+ def Mul_sub_S(task, param_list, num=6):
+ print(f"Pro num {num}")
+ validate_param_list(param_list)
+ if len(param_list) == 1:
+ product_list = [(x,) for x in param_list[0]]
+ else:
+ product_list = list(product(*param_list))
+ results = [None] * len(product_list)
+ with PoolExecutor(max_workers=num) as executor:
+ futures = {executor.submit(task, item): idx for idx, item in enumerate(product_list)}
+ try:
+ for future in tqdm(as_completed(futures), total=len(futures), desc="Processing", unit="task"):
+ idx = futures[future]
+ results[idx] = future.result()
+ except KeyboardInterrupt:
+ sys.exit(1)
+ return results
+
+ from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
+ import sys
+ from tqdm import tqdm
+ def Mul_subplus(task, param_lists, num=6):
+ results = []
+ with PoolExecutor(max_workers=num) as executor:
+ try:
+ futures = [
+ executor.submit(task, args)
+ for args in zip(*param_lists)
+ ]
+ for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
+ results.append(future.result())
+ except KeyboardInterrupt:
+ sys.exit(1)
+ return results
+
+ from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
+ import sys
+ from tqdm import tqdm
+ def Mul_sub_Splus(task, param_lists, num=6):
+ if not all(len(lst) == len(param_lists[0]) for lst in param_lists):
+ raise ValueError("All parameter lists must have the same length.")
+ results = [None] * len(param_lists[0])
+ with PoolExecutor(max_workers=num) as executor:
+ try:
+ futures = {
+ executor.submit(task, *args): idx
+ for idx, args in enumerate(zip(*param_lists))
+ }
+ for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
+ idx = futures[future]
+ results[idx] = future.result()
+ except KeyboardInterrupt:
+ sys.exit(1)
+ return results
+
+ from concurrent.futures import ThreadPoolExecutor
+ from itertools import product
+ def Mul_TH(task, param_list, max_workers=6):
+ print(f"Thread num: {max_workers}")
+ validate_param_list(param_list)
+ task_args = [
+ (arg,) if len(param_list) == 1 else arg
+ for arg in (
+ param_list[0] if len(param_list) == 1
+ else product(*param_list)
+ )
+ ]
+ with ThreadPoolExecutor(max_workers) as ex:
+ try:
+ return list(ex.map(task, task_args))
+ except KeyboardInterrupt:
+ print("\n用户中断操作")
+ ex.shutdown(wait=False)
+ sys.exit(1)
+
+ from tqdm import tqdm
+ def _tqdm(train_loader,desc="Training Progress",epoch=None):
+ epoch = epoch +1 if epoch is not None else None
+ descdict = {"val":"Val Progress", "train":"Training Progress"}
+ return tqdm(train_loader, desc=f"Epoch {epoch} {descdict.get(desc,'DESC')}", ncols=100)
+
+ import os
+ import datetime
+ from pathlib import Path
+ def lock_file(lock_file_path):
+ if os.path.exists(lock_file_path):
+ print(f" {lock_file_path} is existded ")
+ return False
+ try:
+ os.makedirs(os.path.dirname(lock_file_path), exist_ok=True)
+ with open(lock_file_path, 'w') as f:
+ f.write(f"process_id:{os.getpid()},create_time:{datetime.datetime.now()}")
+ return True
+ except Exception as e:
+ print(f"创建锁文件失败 create lock failed : {e}")
+ return False
+ """
+ lock_file = f"{output_base_path}{sat_code}/lock_files/MSP2_WTX_AIW_QPF_L88_GLOB_{utc_time_str}.lock"
+ if os.path.exists(outpath):
+ print(f" 目标文件已存在,跳过处理: {outpath}")
+ return True
+ if os.path.exists(lock_file):
+ print(f"锁文件存在,已有程序在处理时次 {utc_time_str},跳过")
+ return False
+ """
+ import os
+ import time
+ def is_process_alive(pid):
+ try:
+ os.kill(pid, 0)
+ return True
+ except OSError:
+ return False
+ def check_lock(lock_file):
+ if not os.path.exists(lock_file):
+ return False
+ try:
+ with open(lock_file, 'r') as f:
+ content = f.read().strip()
+ if 'process_id:' in content and 'create_time:' in content:
+ pid_str = content.split('process_id:')[1].split(',')[0]
+ pid = int(pid_str)
+ if not is_process_alive(pid):
+ print(f"进程 {pid} 已消亡,清理锁文件")
+ os.remove(lock_file)
+ return False
+ else:
+ print(f"进程 {pid} 仍在运行,跳过执行")
+ return True
+ except Exception as e:
+ print(f"锁文件解析错误,清理: {e}")
+ os.remove(lock_file)
+ return False
+ return False
+ """
+ if check_lock(lock_file):
+ return False
+ """
+ import multiprocessing
+ def set_multiprocessing(method="spawn", verbose=True): # 补充缺失的verbose参数定义
+ try:
+ multiprocessing.set_start_method(method, force=True)
+ if verbose:
+ print(f"The startup method is: '{method}'")
+ return method
+ except RuntimeError as e:
+ if verbose:
+ print(f"Setup failed: {e}")
+ return multiprocessing.get_start_method()
+
+ """
+ current_method = set_multiprocessing_method()
+ # 输出:已设置多进程启动方式: 'spawn'
+ print("当前启动方式:", current_method) # 输出:当前启动方式:spawn
+ """
+ """
+ if __name__ == '__main__':
+ set_multiprocessing_method('auto')
+ print("当前启动方法:", multiprocessing.get_start_method())
+
+ """
+
+ """
+ zoom插值
+ from scipy.ndimage import zoom
+ d = zoom(d_clip, [4201/169,6201/249], order=1)[:-1, :-1]
+ """
+ """
+ torch.cuda.empty_cache()
+ del
+ gc.collect()
+ """
+
+ '''
+ from multiprocessing import Pool
+ '''
+ '''
+ ##定義一個streamHandler
+ # print_handler = logging.StreamHandler()
+ # print_handler.setFormatter(formatter)
+ # loggers.addHandler(print_handler)
+ '''
+ '''
+ # @Time : 2023/09/27 下午8:52
+ # @Author : shanchangxi
+ # @File : util_log.py
+ import time
+ import logging
+ from logging import handlers
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ log_name = 'project_tim_tor.log'
+ logfile = log_name
+ time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logfile, when='D', encoding='utf-8')
+ time_rotating_file_handler.setLevel(logging.INFO)
+ formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
+ time_rotating_file_handler.setFormatter(formatter)
+ logger.addHandler(time_rotating_file_handler)
+ print_handler = logging.StreamHandler()
+ print_handler.setFormatter(formatter)
+ logger.addHandler(print_handler)
+ '''
+ '''
+ ###解决方法 pip install torch==2.4.0 torchvision torchaudio三个同时安装 python 3.12 解决cuda启动不了的问题
+ Res网络
+ '''
+
+ """
+ find /mnt/wtx_weather_forecast/scx/SpiderGLOBPNGSource -type f -name "*.png" -mtime +3 -exec rm {} \;
+ -mtime 选项后面的数值代表天数。
+ +n 表示“超过 n 天”,即查找最后修改时间在 n 天之前的文件。
+ """
+ """
+ from shancx.SN import UserManager,sendMESplus
+ from shancx._info import users
+ M = UserManager(info=users)
+ user_info = M.get_user("003")
+ sendMESplus("测试数据",base=user_info)
+ """
+ """
+ https://api.map.baidu.com/lbsapi/getpoint/index.html 坐标
+ [global]
+ index-url = https://pypi.tuna.tsinghua.edu.cn/simple pip.conf
+ python setup.py sdist bdist_wheel
+ twine upload dist/*
+ """
+ """ 与循环搭配使用
+ for key,value in dictflag.items():
+ try:
+ pac = all_df1[all_df1['PAC'].str.startswith(f'{key}')]
+ acctoal,acctoalEC,matEC,mat,rate_Lift_ratiotsEC,outpath= metriacfunall(pac)
+ if not len(matEC.shape) == (2,2):
+ continue
+ docdataset = mkdataset2TS(acctoal,acctoalEC,matEC,mat, rate_Lift_ratiotsEC,outpath)
+
+ except Exception as e:
+ print(traceback.format_exc())
+ continue
  """
shancx/args.py ADDED
@@ -0,0 +1,27 @@
+ import argparse
+ import datetime
+ import pandas as pd
+ from dateutil.relativedelta import relativedelta
+
+ def options():
+ parser = argparse.ArgumentParser(description='scx')
+ parser.add_argument('--times', type=str, default='202411100000,202411101000')
+ parser.add_argument('--pac', type=str, default='100000')
+ parser.add_argument('--tag', type=str, default='100000')
+ parser.add_argument('--isDebug',action='store_true',default=False)
+ config= parser.parse_args()
+ print(config)
+ config.times = config.times.split(",")
+ if len(config.times) == 1:
+ config.times = [config.times[0], config.times[0]]
+ config.times = [datetime.datetime.strptime(config.times[0], "%Y%m%d%H%M"),
+ datetime.datetime.strptime(config.times[1], "%Y%m%d%H%M")]
+ return config
+ if __name__ == '__main__':
+ cfg = options()
+ sCST = cfg.times[0]
+ eCST = cfg.times[-1]
+ timeList = pd.date_range(sCST, eCST + relativedelta(hours=24), freq="1h", inclusive="left")
+ print()
+
+
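
The new shancx/args.py is a small CLI front end around argparse; a hypothetical invocation sketch follows (the script name and time range are examples only, not taken from the package).

# python run_example.py --times 202411100000,202411101000 --isDebug
from shancx.args import options

cfg = options()                  # parses --times / --pac / --tag / --isDebug
start_utc, end_utc = cfg.times   # both entries are datetime.datetime objects
print(start_utc, end_utc, cfg.isDebug)
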