shancx 1.9.33.109__py3-none-any.whl → 1.9.33.218__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/{Dsalgor → Algo}/__init__.py +37 -1
- shancx/Calmetrics/__init__.py +78 -9
- shancx/Calmetrics/calmetrics.py +14 -0
- shancx/Calmetrics/rmseR2score.py +14 -3
- shancx/{Command.py → Cmd.py} +20 -15
- shancx/Config_.py +26 -0
- shancx/Df/__init__.py +11 -0
- shancx/Df/tool.py +0 -1
- shancx/NN/__init__.py +200 -11
- shancx/{path.py → Path1.py} +2 -3
- shancx/Plot/__init__.py +129 -403
- shancx/Plot/draw_day_CR_PNG.py +4 -21
- shancx/Plot/exam.py +116 -0
- shancx/Plot/plotGlobal.py +325 -0
- shancx/Plot/radarNmc.py +1 -48
- shancx/Plot/single_china_map.py +1 -1
- shancx/Point.py +46 -0
- shancx/QC.py +223 -0
- shancx/Read.py +17 -10
- shancx/Resize.py +79 -0
- shancx/SN/__init__.py +8 -1
- shancx/Time/timeCycle.py +97 -23
- shancx/Train/makelist.py +161 -155
- shancx/__init__.py +79 -232
- shancx/bak.py +78 -53
- shancx/geosProj.py +2 -2
- shancx/wait.py +35 -1
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/METADATA +12 -4
- shancx-1.9.33.218.dist-info/RECORD +91 -0
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/WHEEL +1 -1
- shancx/Plot/Mip.py +0 -42
- shancx/Plot/border.py +0 -44
- shancx/Plot/draw_day_CR_PNGUS.py +0 -206
- shancx/Plot/draw_day_CR_SVG.py +0 -275
- shancx/Plot/draw_day_pre_PNGUS.py +0 -205
- shancx/Plot/radar_nmc_china_map_compare1.py +0 -50
- shancx/makenetCDFN.py +0 -42
- shancx-1.9.33.109.dist-info/RECORD +0 -91
- /shancx/{3DJU → 3D}/__init__.py +0 -0
- /shancx/{Dsalgor → Algo}/Class.py +0 -0
- /shancx/{Dsalgor → Algo}/CudaPrefetcher1.py +0 -0
- /shancx/{Dsalgor → Algo}/Fake_image.py +0 -0
- /shancx/{Dsalgor → Algo}/Hsml.py +0 -0
- /shancx/{Dsalgor → Algo}/L2Loss.py +0 -0
- /shancx/{Dsalgor → Algo}/MetricTracker.py +0 -0
- /shancx/{Dsalgor → Algo}/Normalize.py +0 -0
- /shancx/{Dsalgor → Algo}/OptimizerWScheduler.py +0 -0
- /shancx/{Dsalgor → Algo}/Rmageresize.py +0 -0
- /shancx/{Dsalgor → Algo}/Savemodel.py +0 -0
- /shancx/{Dsalgor → Algo}/SmoothL1_losses.py +0 -0
- /shancx/{Dsalgor → Algo}/Tqdm.py +0 -0
- /shancx/{Dsalgor → Algo}/checknan.py +0 -0
- /shancx/{Dsalgor → Algo}/dsalgor.py +0 -0
- /shancx/{Dsalgor → Algo}/iouJU.py +0 -0
- /shancx/{Dsalgor → Algo}/mask.py +0 -0
- /shancx/{Dsalgor → Algo}/psnr.py +0 -0
- /shancx/{Dsalgor → Algo}/ssim.py +0 -0
- /shancx/{Dsalgor → Algo}/structural_similarity.py +0 -0
- /shancx/{Dsalgor → Algo}/tool.py +0 -0
- /shancx/Calmetrics/{matrixLib.py → calmetricsmatrixLib.py} +0 -0
- /shancx/{Diffmodel → Diffm}/Psamples.py +0 -0
- /shancx/{Diffmodel → Diffm}/__init__.py +0 -0
- /shancx/{Diffmodel → Diffm}/test.py +0 -0
- /shancx/{Board → tensBoard}/__init__.py +0 -0
- {shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/top_level.txt +0 -0
shancx/__init__.py
CHANGED
@@ -33,10 +33,9 @@ print_handler = logging.StreamHandler()
 print_handler.setFormatter(formatter)
 loggers.addHandler(print_handler)
 
-
-from pathlib import Path
+from pathlib import Path as PathlibPath
 def crDir(path):
-    path_obj = Path(path)
+    path_obj = PathlibPath(path)
     directory = path_obj.parent if path_obj.suffix else path_obj
     directory.mkdir(parents=True, exist_ok=True)

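The crDir helper now imports pathlib.Path under the alias PathlibPath, presumably to avoid clashing with the package's own Path1 module (renamed in this release). A minimal standalone sketch of the same directory-creation logic, re-stated here for illustration rather than imported from the wheel:

```python
from pathlib import Path

def crDir(path):
    """Create the parent directory of a file path, or the path itself if it has no suffix."""
    path_obj = Path(path)
    # A suffix such as ".png" is taken to mean "this is a file", so only its parent is created.
    directory = path_obj.parent if path_obj.suffix else path_obj
    directory.mkdir(parents=True, exist_ok=True)

crDir("/tmp/shancx_demo/out/plot.png")  # creates /tmp/shancx_demo/out
crDir("/tmp/shancx_demo/cache")         # creates /tmp/shancx_demo/cache
```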
@@ -73,21 +72,6 @@ def validate_param_list(param_list):
             raise ValueError("Sub-lists in param_list cannot be empty.")
 
 from itertools import product
-from concurrent.futures import ProcessPoolExecutor as PoolExecutor
-def Mul_(map_fun,param_list,num=6):
-    print(f"Pro num {num}")
-    validate_param_list(param_list)
-    if len(param_list) == 1:
-        product_List = [(x,) for x in param_list[0]]
-    else:
-        product_List = list(product(*param_list))
-    with PoolExecutor(num) as p:
-        try:
-            P_data = [result for result in tqdm(p.map(map_fun, product_List), total=len(product_List), desc="Processing", unit="task")]
-        except KeyboardInterrupt:
-            sys.exit(1)
-    return list(P_data)
-
 from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
 import sys
 from tqdm import tqdm

@@ -102,83 +86,12 @@ def Mul_sub(task, param_list, num=6):
     with PoolExecutor(max_workers=num) as executor:
         try:
             futures = [executor.submit(task, item) for item in product_list]
-            for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
+            for future in tqdm(as_completed(futures), total=len(futures), desc="Processing tasks", unit="task"):
                 results.append(future.result())
         except KeyboardInterrupt:
             sys.exit(1)
     return results
-
-
-from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed, TimeoutError
-import sys
-from tqdm import tqdm
-from itertools import product
 
-def Mul_subT(task, param_list, num=3, timeout=3600):
-    print(f"Process num: {num}")
-    validate_param_list(param_list)
-    if len(param_list) == 1:
-        product_list = [(x,) for x in param_list[0]]
-    else:
-        product_list = list(product(*param_list))
-    results = []
-    with PoolExecutor(max_workers=num) as executor:
-        try:
-            futures = {executor.submit(task, item): item for item in product_list}
-            for future in tqdm(as_completed(futures),
-                               total=len(futures),
-                               desc="Processing tasks",
-                               unit="task"):
-                try:
-                    res = future.result(timeout=timeout)
-                    results.append(res)
-                except TimeoutError:
-                    print(f"\nTimeout on task {futures[future]}")
-                    future.cancel()
-                except Exception as e:
-                    print(f"\nTask failed: {futures[future]}, error: {str(e)}")
-        except KeyboardInterrupt:
-            print("\nReceived keyboard interrupt, terminating...")
-            executor.shutdown(wait=False)
-            sys.exit(1)
-    return results
-
-
-from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed, TimeoutError
-import sys
-from tqdm import tqdm
-from itertools import product
-
-def Mul_subT_S(task, param_list, num=3, timeout=3600):
-    print(f"Process num: {num}")
-    validate_param_list(param_list)
-    if len(param_list) == 1:
-        product_list = [(x,) for x in param_list[0]]
-    else:
-        product_list = list(product(*param_list))
-    results = [None] * len(product_list)
-    with PoolExecutor(max_workers=num) as executor:
-        futures = {executor.submit(task, item): idx for idx, item in enumerate(product_list)}
-
-        try:
-            for future in tqdm(as_completed(futures),
-                               total=len(futures),
-                               desc="Processing tasks",
-                               unit="task"):
-                idx = futures[future]
-                try:
-                    results[idx] = future.result(timeout=timeout)
-                except TimeoutError:
-                    print(f"\nTimeout on task {product_list[idx]}")
-                    future.cancel()
-                except Exception as e:
-                    print(f"\nTask failed: {product_list[idx]}, error: {str(e)}")
-        except KeyboardInterrupt:
-            print("\nReceived keyboard interrupt, terminating...")
-            executor.shutdown(wait=False)
-            sys.exit(1)
-    return results
-
 def Mul_sub_S(task, param_list, num=6):
     print(f"Pro num {num}")
     validate_param_list(param_list)

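Mul_sub, which this hunk keeps while dropping the timeout-enabled variants, takes a worker callable plus a list of parameter lists; the sub-lists are expanded with itertools.product and each resulting tuple is submitted to a process pool. A hedged usage sketch (the worker and its arguments are made up for illustration, and the import assumes Mul_sub is exposed from the package root as in this file):

```python
from shancx import Mul_sub  # assumes package-level export from shancx/__init__.py

def render_tile(args):
    """Worker receives one tuple produced by itertools.product over the parameter axes."""
    hour, channel = args
    return f"rendered hour={hour} channel={channel}"

if __name__ == "__main__":
    # Two parameter axes -> 4 x 2 = 8 tasks, fanned out over 4 worker processes.
    results = Mul_sub(render_tile, [[0, 6, 12, 18], ["ir", "vis"]], num=4)
    print(results)
```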
@@ -197,7 +110,6 @@ def Mul_sub_S(task, param_list, num=6):
             sys.exit(1)
     return results
 
-
 from concurrent.futures import ProcessPoolExecutor as PoolExecutor, as_completed
 import sys
 from tqdm import tqdm

@@ -235,36 +147,6 @@ def Mul_sub_Splus(task, param_lists, num=6):
             sys.exit(1)
     return results
 
-import multiprocessing
-from multiprocessing import Pool
-from tqdm import tqdm
-def parallel(add_numbers,tasks, num_processes=6):
-    if num_processes is None:
-        num_processes = 6
-    with Pool(processes=num_processes) as pool:
-        results = list(tqdm(
-            pool.imap(add_numbers, tasks),
-            total=len(tasks),
-            desc="Processing",
-            unit="task"
-        ))
-    return results
-
-from concurrent.futures import ThreadPoolExecutor
-def Mul_th(task, task_args, workers=2):
-    with ThreadPoolExecutor(workers) as ex:
-        try:
-            return list(ex.map(task, task_args))
-        except KeyboardInterrupt:
-            print("\n用户中断操作")
-            ex.shutdown(wait=False)
-            sys.exit(1)
-
-# task_args = [(cr_f1[i],) for i in range(cr_f1.shape[0])]
-#task_args = [[chid,utcyear, utcmonth,latArr,lonArr]
-# for chid in allcchannels]
-# results = Mul_th(func, task_args)
-
 from concurrent.futures import ThreadPoolExecutor
 from itertools import product
 def Mul_TH(task, param_list, max_workers=6):

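Mul_TH(task, param_list, max_workers=6) is the thread-pool helper that survives this cleanup; its body continues in the next hunk. Assuming it expands param_list with itertools.product and passes each tuple to task, as the process-pool helpers above do, usage would look roughly like this (the worker is hypothetical):

```python
from shancx import Mul_TH  # assumes package-level export, as with Mul_sub

def download_frame(args):
    """Hypothetical I/O-bound worker; threads suit it better than processes."""
    station, minute = args
    return f"fetched {station} at +{minute} min"

frames = Mul_TH(download_frame, [["BJ", "SH"], [0, 6, 12]], max_workers=4)
print(frames)  # 2 x 3 = 6 results
```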
@@ -285,121 +167,104 @@ def Mul_TH(task, param_list, max_workers=6):
             ex.shutdown(wait=False)
             sys.exit(1)
 
-from concurrent.futures import ThreadPoolExecutor, as_completed
-from itertools import product
-from tqdm import tqdm
-import sys
-
-def Mul_sub_th(func, param_list=None,workers=2, show_progress=True):
-    print(f"Pro num {workers}")
-    validate_param_list(param_list)
-    iterable = [
-        (arg,) if len(param_list) == 1 else arg
-        for arg in (
-            param_list[0] if len(param_list) == 1
-            else product(*param_list)
-        )
-    ]
-    try:
-        if show_progress:
-            with ThreadPoolExecutor(max_workers=workers) as executor:
-                futures = [
-                    executor.submit(func, *args) if isinstance(args, tuple)
-                    else executor.submit(func, args)
-                    for args in iterable
-                ]
-                results = []
-                progress = tqdm(as_completed(futures),
-                                total=len(futures),
-                                desc=f"Threading(workers={workers})",
-                                unit="task")
-                try:
-                    for future in progress:
-                        results.append(future.result())
-                    return results
-                except KeyboardInterrupt:
-                    progress.close()
-                    print("\n用户中断操作,正在取消剩余任务...")
-                    for f in futures:
-                        f.cancel()
-                    executor.shutdown(wait=False)
-                    sys.exit(1)
-        with ThreadPoolExecutor(workers) as ex:
-            try:
-                return list(ex.map(func, iterable))
-            except KeyboardInterrupt:
-                print("\n用户中断操作")
-                ex.shutdown(wait=False)
-                sys.exit(1)
-    except Exception as e:
-        print(f"执行出错: {str(e)}")
-        sys.exit(1)
-
 from tqdm import tqdm
 def _tqdm(train_loader,desc="Training Progress",epoch=None):
     epoch = epoch +1 if epoch is not None else None
     descdict = {"val":"Val Progress", "train":"Training Progress"}
     return tqdm(train_loader, desc=f"Epoch {epoch} {descdict.get(desc,'DESC')}", ncols=100)
 
+import os
+import datetime
+from pathlib import Path
+def lock_file(lock_file_path):
+    if os.path.exists(lock_file_path):
+        print(f" {lock_file_path} is existded ")
+        return False
+    try:
+        os.makedirs(os.path.dirname(lock_file_path), exist_ok=True)
+        with open(lock_file_path, 'w') as f:
+            f.write(f"process_id:{os.getpid()},create_time:{datetime.datetime.now()}")
+        return True
+    except Exception as e:
+        print(f"创建锁文件失败 create lock failed : {e}")
+        return False
+"""
+lock_file = f"{output_base_path}{sat_code}/lock_files/MSP2_WTX_AIW_QPF_L88_GLOB_{utc_time_str}.lock"
+if os.path.exists(outpath):
+    print(f" 目标文件已存在,跳过处理: {outpath}")
+    return True
+if os.path.exists(lock_file):
+    print(f"锁文件存在,已有程序在处理时次 {utc_time_str},跳过")
+    return False
+"""
+import os
+import time
+def is_process_alive(pid):
+    try:
+        os.kill(pid, 0)
+        return True
+    except OSError:
+        return False
+def check_lock(lock_file):
+    if not os.path.exists(lock_file):
+        return False
+    try:
+        with open(lock_file, 'r') as f:
+            content = f.read().strip()
+        if 'process_id:' in content and 'create_time:' in content:
+            pid_str = content.split('process_id:')[1].split(',')[0]
+            pid = int(pid_str)
+            if not is_process_alive(pid):
+                print(f"进程 {pid} 已消亡,清理锁文件")
+                os.remove(lock_file)
+                return False
+            else:
+                print(f"进程 {pid} 仍在运行,跳过执行")
+                return True
+    except Exception as e:
+        print(f"锁文件解析错误,清理: {e}")
+        os.remove(lock_file)
+        return False
+    return False
+"""
+if check_lock(lock_file):
+    return False
+"""
 import multiprocessing
-
-from typing import Optional, Literal
-def set_multiprocessing_method(
-    method: Optional[Literal['auto', 'fork', 'spawn', 'forkserver']] = 'auto',
-    verbose: bool = True
-) -> str:
-    """
-    method:
-    - 'auto' : 自动选择当前平台最优方式 (默认)
-    - 'fork' : 强制使用 fork (仅Unix)
-    - 'spawn' : 强制使用 spawn (所有平台)
-    - 'forkserver' : 强制使用 forkserver (仅Unix)
-    >>> set_multiprocessing_method('auto') # 自动选择
-    >>> set_multiprocessing_method('spawn') # 强制spawn
-    """
-    available_methods = multiprocessing.get_all_start_methods()
-    current_method = multiprocessing.get_start_method()
-    if method == 'auto':
-        if sys.platform == 'linux' and 'fork' in available_methods:
-            method = 'fork'
-        elif 'forkserver' in available_methods:
-            method = 'forkserver'
-        else:
-            method = 'spawn'
-    if method not in available_methods:
-        raise ValueError(
-            f"启动方法 '{method}' 在当前平台不可用。可用方法: {available_methods}"
-        )
-    if current_method == method:
-        if verbose:
-            print(f"⚠️ 启动方法已是 '{method}',无需更改")
-        return method
+def set_multiprocessing(method="spawn", verbose=True): # 补充缺失的verbose参数定义
     try:
         multiprocessing.set_start_method(method, force=True)
         if verbose:
-            print(f"
-        if method == 'fork':
-            print(" ⚠️ 注意: fork 可能引发线程安全问题 (详见文档说明)")
+            print(f"The startup method is: '{method}'")
         return method
-    except RuntimeError as e:
-
-        f"
-
-
-
-
+    except RuntimeError as e:
+        if verbose:
+            print(f"Setup failed: {e}")
+        return multiprocessing.get_start_method()
+
+"""
+current_method = set_multiprocessing_method()
+# 输出:已设置多进程启动方式: 'spawn'
+print("当前启动方式:", current_method) # 输出:当前启动方式:spawn
+"""
 """
 if __name__ == '__main__':
     set_multiprocessing_method('auto')
     print("当前启动方法:", multiprocessing.get_start_method())
 
 """
-
+
 """
 zoom插值
 from scipy.ndimage import zoom
 d = zoom(d_clip, [4201/169,6201/249], order=1)[:-1, :-1]
 """
+"""
+torch.cuda.empty_cache()
+del
+gc.collect()
+"""
+
 '''
 from multiprocessing import Pool
 '''

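The lock_file/check_lock pair added here implements a PID-stamped file lock: lock_file refuses to create a lock that already exists, and check_lock removes a lock whose recorded process has died. A hedged sketch of the acquire/work/release cycle these functions suggest (the lock path is illustrative, and both names are assumed to be importable from the package root as defined in this file):

```python
import os
from shancx import lock_file, check_lock  # assumes both are exported by shancx/__init__.py

lock_path = "/tmp/shancx_demo/lock_files/job_202410010000.lock"

if not check_lock(lock_path) and lock_file(lock_path):
    try:
        # ... do the actual work for this time slot ...
        print("processing 202410010000")
    finally:
        # Release the lock so the next run is not skipped.
        if os.path.exists(lock_path):
            os.remove(lock_path)
else:
    print("another live process holds the lock, skipping")
```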
@@ -434,24 +299,6 @@ logger.addHandler(print_handler)
 ###解决方法 pip install torch==2.4.0 torchvision torchaudio三个同时安装 python 3.12 解决cuda启动不了的问题
 Res网络
 '''
-'''
-import concurrent.futures
-from itertools import product
-def task(args):
-    args1,args2 = args
-    print( f"Task ({args1}, {args2}) , result")
-    return (args1,args2,5)
-
-def Mul_sub(task, pro):
-    product_list = product(*pro)
-    with concurrent.futures.ThreadPoolExecutor() as executor:
-        futures = [executor.submit(task, item) for item in product_list]
-        results = [future.result() for future in concurrent.futures.as_completed(futures)]
-    return results
-res = Mul_sub(task, [[1, 23, 4, 5], ["n"]])
-print("res")
-print(res)
-'''
 
 """
 find /mnt/wtx_weather_forecast/scx/SpiderGLOBPNGSource -type f -name "*.png" -mtime +3 -exec rm {} \;

shancx/bak.py
CHANGED
@@ -109,6 +109,8 @@ if __name__ == '__main__':
     else:
         print("cmdList is empty, skipping the call.")
         raise ValueError("cmdList is empty, cannot execute command.")
+
+
 CUDA_LAUNCH_BLOCKING=1 python makeHis.py --times 202410010048,202410110048 --gpu=0 --isDebug --sepSec 3600 --pool 5
 CUDA_LAUNCH_BLOCKING=1 python makeHis1.py --times 202410010048,202410110048 --gpu=0 --isDebug --sepSec 3600 --pool 5
 """

@@ -154,9 +156,11 @@ python makeDOC_newv2.py --times $end_date"0000",$end_date"2359" --tag $tag
 """
 frile name :launch.json
 args:
+
 {
     "version": "0.2.0",
     "configurations": [
+
         {
             "name": "Python: Current File",
             "type": "debugpy",

@@ -166,32 +170,35 @@ args:
             "cwd": "${fileDirname}",
             "purpose": ["debug-in-terminal"],
             "justMyCode": false,
-            "args": [
-
+            "args": [
+                "--times", "202410010042,202410020042",
+                "--isDebug" ,
+                "--isOverwrite",
+                "--sepSec", "3600",
+                "--gpu", "0"
             ]
         }
     ]
 }
 
+
 {
     "version": "0.2.0",
-    "configurations": [
-
+    "configurations": [
         {
-            "name": "
+            "name": "VAE: Train SEVIR-LR",
             "type": "debugpy",
             "request": "launch",
-            "program": "${
+            "program": "${workspaceFolder}/scripts/vae/sevirlr/train.py",
             "console": "integratedTerminal",
-            "cwd": "${
+            "cwd": "${workspaceFolder}",
             "purpose": ["debug-in-terminal"],
             "justMyCode": false,
+            "python": "/home/scx/miniconda3/envs/mqpf/bin/python",
             "args": [
-
-
-
-                "--sepSec", "3600",
-                "--gpu", "0"
+                "--save", "vae_sevirlr_train",
+                "--gpus", "1",
+                "--cfg", "${workspaceFolder}/scripts/vae/sevirlr/cfg.yaml"
             ]
         }
     ]

@@ -450,8 +457,6 @@ sudo chmod -R 777 /mnt/wtx_weather_forecast/scx/MSG/MSG_Data
 """
 
 """
-
-
 import os
 import numpy as np
 import pandas as pd

@@ -617,10 +622,8 @@ def map_fun(conf):
         logger.error(f"{UTC} error {e}")
         logger.info(traceback.format_exc())
         print(traceback.format_exc())
-        return
-
+        return
 
-
     def getCheckArea(self, eps):
         '''
         split area

@@ -678,46 +681,21 @@ class drawPng():
 """
 """
 conda install conda-forge::cudatoolkit==11.8.0
+"""
 """
-
-
-sudo
-
-sudo
-
-
-# 重新创建用户(UID=1015,GID=1015)
-sudo useradd -u 1015 -g scx -m -s /bin/bash scx
-# 设置密码
-sudo passwd scx
-# 赋予 sudo 权限
-sudo usermod -aG sudo scx
-2. 如果不想删除用户,可以修改用户的主组
-bash
-# 查看 scx 用户当前的主组
-id scx
-# 修改 scx 用户的主组为其他组(如 users)
-sudo usermod -g users scx
-# 现在可以删除 scx 组
-sudo groupdel scx
-# 重新创建 scx 组(GID=1015)
+sudo pkill -9 -u scx 2>/dev/null || true
+sudo groupdel scx 2>/dev/null; sudo userdel -r scx 2>/dev/null; sudo groupadd -g 1015 scx && sudo useradd -m -u 1015 -g 1015 -s /bin/bash scx && echo "scx:123456" | sudo chpasswd && sudo chown -R scx:scx /home/scx && id scx
+sudo pkill -9 -u scx 2>/dev/null || true
+sudo ps aux | grep scx | awk '{print $2}' | xargs -r sudo kill -9 2>/dev/null || true
+sudo userdel -rf scx 2>/dev/null || true
+sudo groupdel -f scx 2>/dev/null || true
+sleep 2
 sudo groupadd -g 1015 scx
-
-
-验证
-bash
+sudo useradd -m -u 1015 -g 1015 -s /bin/bash scx
+echo "scx:123456" | sudo chpasswd
 id scx
-预期输出:
-text
-uid=1015(scx) gid=1015(scx) groups=1015(scx),27(sudo)
-
-groupmod -g 1016 scx
-sudo usermod -u 1015 scx
-# 修改用户的主组(GID)
-sudo usermod -g scx scx
-# 确保家目录权限正确
-sudo chown -R scx:scx /home/scx
 """
+
 """
 from hjnwtx.mkNCHJN import dataClass, mkNCCommonUni,envelope,timeSeq,mkDir
 env = envelope(35,10,108,125)

@@ -740,4 +718,51 @@ find /mnt/wtx_weather_forecast/scx/GOES -type f -name "*.txt" -mmin +300 -delet
 """
 """
 mask = (mask_data == 0).to(device)
+"""
+
+"""
+import pdb
+pdb.set_trace()
+l 10 查看最近10行
+(Pdb) !a = 5 # 在当前作用域创建变量 a
+(Pdb) p a
+for i in range(5): print(i)
+n 执行下一行
+c 继续执行
+q 退出
+(Pdb) n # Next line
+(Pdb) s # Step into function
+(Pdb) c # Continue execution
+(Pdb) b <line> # Set breakpoint
+(Pdb) q # Quit debugger
+(Pdb) !import os; os.listdir('.')
+(Pdb) p locals() # Show local variables
+(Pdb) p globals() # Show global variables
+(Pdb) where # Show stack trace
+(Pdb) list # Show current code context
+"""
+"""
+np.savez_compressed(output_path.replace('.npy', '.npz'), data=data)
+data = np.load(output_path.replace('.npy', '.npz'))['data']
+with np.load(output_path.replace('.npy', '.npz')) as npz_file:
+    data = npz_file['data']
+with np.load(output_path.replace('.npy', '.npz')) as npz_file:
+    data = npz_file[npz_file.files[0]]
+"""
+"""
+lats = np.linspace(15, 60, h)
+lons = np.linspace(70, 140, w)
+
+# import cv2
+# [ cv2.resize(i, (6200, 4200),interpolation=cv2.INTER_LINEAR) for i in dP.df_Mat ]
+B08_fixed = B08.astype(np.float32)
+print(f"转换后dtype: {B08_fixed.dtype}") 高位字节前后问题, cv2希望高阶字字节在后
+print(f"转换后范围: {B08_fixed.min()} ~ {B08_fixed.max()}")
+d_test = cv2.resize(B08_fixed, (100, 100), interpolation=cv2.INTER_CUBIC) #双三次插值
+d_cv2 = cv2.resize(B08, (6200, 4200), interpolation=cv2.INTER_LINEAR) #双线性插值
+print(f"测试插值范围: {d_test.min()} ~ {d_test.max()}")
+"""
+"""
+import torch
+torch.cuda.empty_cache()
 """

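The notes appended to bak.py record the numpy compressed-save idiom used in the project. A small self-contained round-trip along the same lines (the path is illustrative):

```python
import numpy as np

data = np.random.rand(4, 4).astype(np.float32)
output_path = "/tmp/demo_field.npy"

# Save compressed; .npz is substituted for .npy exactly as in the snippet above.
np.savez_compressed(output_path.replace('.npy', '.npz'), data=data)

# Read it back; the context manager closes the file handle promptly.
with np.load(output_path.replace('.npy', '.npz')) as npz_file:
    restored = npz_file['data']

assert np.array_equal(data, restored)
```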
shancx/geosProj.py
CHANGED
@@ -17,7 +17,7 @@ class goesProj():
         self.CFAC=FAC[resolution]
         self.LFAC=FAC[resolution]
 
-    def transform(self,latD,lonDe,RO=
+    def transform(self,latD,lonDe,RO=22):
         lat=np.radians(latD)
         lon=np.radians(lonDe)
         ba2=np.square(self.eb/self.ea)

@@ -58,7 +58,7 @@ class goesProjMSG10():
         self.CFAC=FAC[resolution]
         self.LFAC=FAC[resolution]
 
-    def transform(self,latD,lonDe,ROC
+    def transform(self,latD,lonDe,ROC=-10,ROL=-30):
         lat=np.radians(latD)
         lon=np.radians(lonDe)
         ba2=np.square(self.eb/self.ea)

shancx/wait.py
CHANGED
@@ -157,6 +157,39 @@ def checkSize(pattern: str,size_mb: float = 50.0,timeout: int = 180,interval: in
 checkSize(pattern: str,size_mb: float = 50.0,timeout: int = 180,interval: int = 5)
 """
 
+import os
+import time
+def is_process_alive(pid):
+    try:
+        os.kill(pid, 0)
+        return True
+    except OSError:
+        return False
+def check_lock(lock_file):
+    if not os.path.exists(lock_file):
+        return False
+    try:
+        with open(lock_file, 'r') as f:
+            content = f.read().strip()
+        if 'process_id:' in content and 'create_time:' in content:
+            pid_str = content.split('process_id:')[1].split(',')[0]
+            pid = int(pid_str)
+            if not is_process_alive(pid):
+                print(f"进程 {pid} 已消亡,清理锁文件")
+                os.remove(lock_file)
+                return False
+            else:
+                print(f"进程 {pid} 仍在运行,跳过执行")
+                return True
+    except Exception as e:
+        print(f"锁文件解析错误,清理: {e}")
+        os.remove(lock_file)
+        return False
+    return False
+"""
+if check_lock(lock_file):
+    return False
+"""
 
 import numpy as np
 from typing import Union

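The same is_process_alive/check_lock helpers are now also added to shancx/wait.py. A hedged sketch of the stale-lock case they handle, using a lock file that names a PID which is almost certainly not running (the path and PID are illustrative, and the import assumes the functions sit at module level in wait.py):

```python
import os
from shancx.wait import check_lock, is_process_alive  # assumes module-level definitions in wait.py

lock = "/tmp/demo.lock"
dead_pid = 999_999  # very unlikely to be a live PID on most systems

with open(lock, "w") as f:
    f.write(f"process_id:{dead_pid},create_time:2024-10-01 00:00:00")

print(is_process_alive(dead_pid))  # expected: False
print(check_lock(lock))            # expected: False, and the stale lock file is removed
print(os.path.exists(lock))        # expected: False
```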
@@ -209,4 +242,5 @@ def safe_delete(path_pattern):
 """
 if os.path.exists(zip_file):
     safe_delete(zip_file)
-"""
+"""
+

{shancx-1.9.33.109.dist-info → shancx-1.9.33.218.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: shancx
-Version: 1.9.33.109
+Version: 1.9.33.218
 Summary: A simple timer decorator
 Home-page: https://gitee.com/shancx
 Author: shancx

@@ -13,13 +13,21 @@ Description-Content-Type: text/markdown
|
|
|
13
13
|
Requires-Dist: tqdm
|
|
14
14
|
Requires-Dist: pandas
|
|
15
15
|
Requires-Dist: matplotlib
|
|
16
|
+
Dynamic: author
|
|
17
|
+
Dynamic: author-email
|
|
18
|
+
Dynamic: classifier
|
|
19
|
+
Dynamic: description
|
|
20
|
+
Dynamic: description-content-type
|
|
21
|
+
Dynamic: home-page
|
|
22
|
+
Dynamic: requires-dist
|
|
23
|
+
Dynamic: requires-python
|
|
24
|
+
Dynamic: summary
|
|
16
25
|
|
|
17
|
-
#
|
|
26
|
+
# Welecome to shancx
|
|
18
27
|
|
|
19
28
|
A simple Python package that provides a timer decorator to measure the execution time of functions.
|
|
20
29
|
|
|
21
30
|
## Installation
|
|
22
|
-
|
|
23
31
|
|
|
24
32
|
pip install shancx
|
|
25
33
|
|