oafuncs 0.0.66__py2.py3-none-any.whl → 0.0.68__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
- oafuncs/oa_down/hycom_3hourly.py +246 -336
- oafuncs/oa_down/refs_pdf.py +6 -1
- {oafuncs-0.0.66.dist-info → oafuncs-0.0.68.dist-info}/METADATA +1 -1
- {oafuncs-0.0.66.dist-info → oafuncs-0.0.68.dist-info}/RECORD +7 -8
- oafuncs/oa_down/hycom_3hourly_wholeday.py +0 -1173
- {oafuncs-0.0.66.dist-info → oafuncs-0.0.68.dist-info}/LICENSE.txt +0 -0
- {oafuncs-0.0.66.dist-info → oafuncs-0.0.68.dist-info}/WHEEL +0 -0
- {oafuncs-0.0.66.dist-info → oafuncs-0.0.68.dist-info}/top_level.txt +0 -0
oafuncs/oa_down/hycom_3hourly.py
CHANGED
@@ -4,13 +4,13 @@
 Author: Liu Kun && 16031215@qq.com
 Date: 2024-11-01 10:31:09
 LastEditors: Liu Kun && 16031215@qq.com
-LastEditTime: 2024-11-30
+LastEditTime: 2024-11-30 20:24:34
 FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_down\\hycom_3hourly.py
 Description:
 EditPlatform: vscode
 ComputerInfo: XPS 15 9510
 SystemInfo: Windows 11
-Python Version: 3.
+Python Version: 3.12
 '''
 import datetime
 import os
@@ -29,7 +29,7 @@ from rich.progress import Progress
 
 warnings.filterwarnings("ignore", category=RuntimeWarning, message="Engine '.*' loading failed:.*")
 
-__all__ = ['draw_time_range', 'download', 'how_to_use', 'get_time_list'
+__all__ = ['draw_time_range', 'download', 'how_to_use', 'get_time_list']
 
 # time resolution
 data_info = {'yearly': {}, 'monthly': {}, 'daily': {}, 'hourly': {}}
@@ -229,106 +229,46 @@ def draw_time_range(pic_save_folder=None):
     plt.close()
 
 
-def 
-    # old_time = '2023080203'
-    # time_new = '2023-08-02T03%3A00%3A00Z'
-    time_new = f'{time_str[:4]}-{time_str[4:6]}-{time_str[6:8]}T{time_str[8:10]}%3A00%3A00Z'
-    return time_new
-
-
-def get_time_list(time_s, time_e, delta_hour):
-    '''
-    Description: get a list of time strings from time_s to time_e with delta_hour
-    Args:
-        time_s: start time string, e.g. '2023080203'
-        time_e: end time string, e.g. '2023080303'
-        delta_hour: interval of hours
-    Returns:
-        dt_list: a list of time strings
-    '''
-    dt = datetime.datetime.strptime(time_s, '%Y%m%d%H')
-    dt_list = []
-    while dt.strftime('%Y%m%d%H') <= time_e:
-        dt_list.append(dt.strftime('%Y%m%d%H'))
-        dt = dt + datetime.timedelta(hours=delta_hour)
-    return dt_list
-
-
-def get_hour_list(time_s, time_e, delta_hour):
+def get_time_list(time_s, time_e, delta, interval_type='hour'):
     '''
-    Description: get a list of time strings from time_s to time_e with 
+    Description: get a list of time strings from time_s to time_e with a specified interval
     Args:
-        time_s: start time string, e.g. '2023080203'
-        time_e: end time string, e.g. '2023080303'
-
+        time_s: start time string, e.g. '2023080203' for hours or '20230802' for days
+        time_e: end time string, e.g. '2023080303' for hours or '20230803' for days
+        delta: interval of hours or days
+        interval_type: 'hour' for hour interval, 'day' for day interval
     Returns:
        dt_list: a list of time strings
     '''
-
-
-
-
+    time_s, time_e = str(time_s), str(time_e)
+    if interval_type == 'hour':
+        time_format = '%Y%m%d%H'
+        delta_type = 'hours'
+    elif interval_type == 'day':
+        time_format = '%Y%m%d'
+        delta_type = 'days'
+        # Ensure time strings are in the correct format for days
+        time_s = time_s[:8]
+        time_e = time_e[:8]
+    else:
+        raise ValueError("interval_type must be 'hour' or 'day'")
 
-
-    '''
-    Description: get a list of time strings from time_s to time_e with delta_hour
-    Args:
-        time_s: start time string, e.g. '20230802'
-        time_e: end time string, e.g. '20230803'
-        delta_hour: interval of hours
-    Returns:
-        dt_list: a list of time strings
-    '''
-    time_s = time_s[:8]
-    time_e = time_e[:8]
-    dt = datetime.datetime.strptime(time_s, '%Y%m%d')
+    dt = datetime.datetime.strptime(time_s, time_format)
     dt_list = []
-    while dt.strftime(
-        dt_list.append(dt.strftime(
-        dt
+    while dt.strftime(time_format) <= time_e:
+        dt_list.append(dt.strftime(time_format))
+        dt += datetime.timedelta(**{delta_type: delta})
     return dt_list
 
 
-def 
-
-
-def set_query_dict_no_vertical(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh):
-    query_dict = {
-        'var': variable_info[var]['var_name'],
-        'north': lat_max,
-        'west': lon_min,
-        'east': lon_max,
-        'south': lat_min,
-        'horizStride': 1,
-        'time': transform_time(time_str_ymdh),
-        'addLatLon': 'true',
-        'accept': 'netcdf4',
-    }
-    return query_dict
-
-
-def set_query_dict_depth_or_level(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh):
-    query_dict = {
-        'var': variable_info[var]['var_name'],
-        'north': lat_max,
-        'west': lon_min,
-        'east': lon_max,
-        'south': lat_min,
-        'horizStride': 1,
-        'time': transform_time(time_str_ymdh),
-        'vertCoord': 0,
-        'addLatLon': 'true',
-        'accept': 'netcdf4',
-    }
-    return query_dict
+def transform_time(time_str):
+    # old_time = '2023080203'
+    # time_new = '2023-08-02T03%3A00%3A00Z'
+    time_new = f'{time_str[:4]}-{time_str[4:6]}-{time_str[6:8]}T{time_str[8:10]}%3A00%3A00Z'
+    return time_new
 
 
-def 
+def get_query_dict(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh, mode='single_depth', depth=None, level_num=None):
     query_dict = {
         'var': variable_info[var]['var_name'],
         'north': lat_max,
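The rewritten get_time_list replaces the old separate hourly and daily helpers with one function whose interval_type argument selects both the time format and the step unit. A minimal usage sketch (the import path simply follows the package layout shown in this diff; the dates are illustrative):

    from oafuncs.oa_down.hycom_3hourly import get_time_list

    hours = get_time_list('2023080200', '2023080221', 3, 'hour')
    # ['2023080200', '2023080203', ..., '2023080221']  (eight 3-hourly steps)
    days = get_time_list('20230802', '20230805', 1, 'day')
    # ['20230802', '20230803', '20230804', '20230805']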
@@ -337,64 +277,62 @@ def set_query_dict_full(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh):
         'south': lat_min,
         'horizStride': 1,
         'time': transform_time(time_str_ymdh),
-        '
+        'vertCoord': None,
+        'vertStride': None,
         'addLatLon': 'true',
         'accept': 'netcdf4',
     }
-    return query_dict
-
 
-def 
-
-
-
+    def get_nearest_level_index(depth):
+        level_depth = [0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0, 45.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 125.0, 150.0, 200.0, 250.0, 300.0, 350.0, 400.0, 500.0, 600.0, 700.0, 800.0, 900.0, 1000.0, 1250.0, 1500.0, 2000.0, 2500.0, 3000.0, 4000.0, 5000]
+        return min(range(len(level_depth)), key=lambda i: abs(level_depth[i]-depth))
+
+    if var not in ['ssh', 'u_b', 'v_b', 'temp_b', 'salt_b'] and var in ['u', 'v', 'temp', 'salt']:
+        if mode == 'depth':
+            if depth < 0 or depth > 5000:
+                print('Please ensure the depth is in the range of 0-5000 m')
+            query_dict['vertCoord'] = get_nearest_level_index(depth) + 1
+        elif mode == 'level':
+            if level_num < 1 or level_num > 40:
+                print('Please ensure the level_num is in the range of 1-40')
+            query_dict['vertCoord'] = max(1, min(level_num, 40))
+        elif mode == 'full':
+            query_dict['vertStride'] = 1
+        else:
+            raise ValueError("Invalid mode. Choose from 'depth', 'level', or 'full'")
 
+    query_dict = {k: v for k, v in query_dict.items() if v is not None}
 
-def get_query_dict_single_level(var, lon_min, lon_max, lat_min, lat_max, level_num, time_str_ymdh):
-    if var in ['ssh']:
-        query_dict = set_query_dict_no_vertical(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh)
-    else:
-        # level_num: 1-40
-        query_dict = set_query_dict_depth_or_level(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh)
-        if var in ['u', 'v', 'temp', 'salt']:
-            print('Please ensure the level_num is in the range of 1-40')
-            if level_num == 0:
-                level_num = 1
-                print('The level_num is set to 1')
-            if level_num > 40:
-                level_num = 40
-                print('The level_num is set to 40')
-            query_dict['vertCoord'] = level_num
     return query_dict
 
 
-def 
-
-
-    else:
-        query_dict = set_query_dict_full(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh)
-    return query_dict
+def check_time_in_dataset_and_version(time_input, time_end=None):
+    # Decide whether a single time point or a time range is being handled
+    is_single_time = time_end is None
 
-
-
-
+    # For a single time point, initialize the time range
+    if is_single_time:
+        time_start = int(time_input)
+        time_end = time_start
+        time_input_str = str(time_input)
     else:
-
-
+        time_start = int(time_input)
+        time_end = int(time_end)
+        time_input_str = f'{time_input}-{time_end}'
 
+    # Pad the time format according to the string length
+    if len(str(time_start)) == 8:
+        time_start = str(time_start) + '00'
+    if len(str(time_end)) == 8:
+        time_end = str(time_end) + '21'
+    time_start, time_end = int(time_start), int(time_end)
 
-def ymdh_in_which_dataset_and_version(time_ymdh):
-    time_ymdh = int(time_ymdh)
     d_list = []
     v_list = []
     trange_list = []
     have_data = False
+
+    # Iterate over the datasets and versions
     for dataset_name in data_info['hourly']['dataset'].keys():
         for version_name in data_info['hourly']['dataset'][dataset_name]['version'].keys():
             time_s, time_e = list(data_info['hourly']['dataset'][dataset_name]['version'][version_name]['time_range'].values())
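The new get_query_dict consolidates the old per-case query builders: vertCoord and vertStride start as None, are filled in only for the 'depth', 'level', or 'full' modes, and None entries are then dropped. The nested helper simply picks the index of the closest entry in the 40-level HYCOM depth table. A standalone sketch of that lookup (re-implemented here for illustration, since the helper is nested and not importable; the table is abridged):

    level_depth = [0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 15.0, 20.0, 25.0]  # first 10 of the 40 standard depths

    def nearest_level_index(depth):
        # index of the tabulated level closest to the requested depth
        return min(range(len(level_depth)), key=lambda i: abs(level_depth[i] - depth))

    print(nearest_level_index(22))  # -> 8 (20.0 m is closest), so vertCoord would be 8 + 1 = 9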
@@ -403,70 +341,84 @@ def ymdh_in_which_dataset_and_version(time_ymdh):
                 time_s = time_s + '00'
             if len(time_e) == 8:
                 time_e = time_e + '21'
-
-
-
-
-
-
+            # Check whether the time is within this dataset's time range
+            if is_single_time:
+                if time_start >= int(time_s) and time_start <= int(time_e):
+                    d_list.append(dataset_name)
+                    v_list.append(version_name)
+                    trange_list.append(f'{time_s}-{time_e}')
+                    have_data = True
+            else:
+                if time_start >= int(time_s) and time_end <= int(time_e):
+                    d_list.append(dataset_name)
+                    v_list.append(version_name)
+                    trange_list.append(f'{time_s}-{time_e}')
+                    have_data = True
+
+    # Print the result
+    print(f'[bold red]{time_input_str} is in the following dataset and version:')
     if have_data:
         for d, v, trange in zip(d_list, v_list, trange_list):
             print(f'[bold blue]{d} {v} {trange}')
         return True
     else:
-
-        print(f'[bold red]{time_ymdh} is not in any dataset and version')
+        print(f'[bold red]{time_input_str} is not in any dataset and version')
         return False
 
 
-def 
-
-
-
-
-
+def ensure_time_in_specific_dataset_and_version(dataset_name, version_name, time_input, time_end=None):
+    # Pad the time format according to the string length
+    if len(str(time_input)) == 8:
+        time_input = str(time_input) + '00'
+    time_start = int(time_input)
+    if time_end is not None:
+        if len(str(time_end)) == 8:
+            time_end = str(time_end) + '21'
+        time_end = int(time_end)
+    else:
+        time_end = time_start
+
+    # Check that the specified dataset and version exist
+    if dataset_name not in data_info['hourly']['dataset']:
+        print(f'[bold red]Dataset {dataset_name} not found.')
+        return False
+    if version_name not in data_info['hourly']['dataset'][dataset_name]['version']:
+        print(f'[bold red]Version {version_name} not found in dataset {dataset_name}.')
+        return False
+
+    # Get the time range of the specified dataset and version
+    time_range = data_info['hourly']['dataset'][dataset_name]['version'][version_name]['time_range']
+    time_s, time_e = list(time_range.values())
+    time_s, time_e = str(time_s), str(time_e)
+    if len(time_s) == 8:
+        time_s = time_s + '00'
+    if len(time_e) == 8:
+        time_e = time_e + '21'
+    time_s, time_e = int(time_s), int(time_e)
+
+    # Check whether the time is within the range of the specified dataset and version
+    if time_start >= time_s and time_end <= time_e:
+        print(f'[bold blue]Time {time_input} to {time_end} is within dataset {dataset_name} and version {version_name}.')
         return True
     else:
-
-        print(f'[bold red]{time_ymd} is not in any dataset and version')
+        print(f'[bold red]Time {time_input} to {time_end} is not within dataset {dataset_name} and version {version_name}.')
         return False
 
 
-def direct_choose_dataset_and_version(
-
-
-
-
-    if len(time_s) == 8:
-        time_s = time_s + '00'
-    if len(time_e) == 8:
-        time_e = time_e + '21'
-    if time_ymdh >= int(time_s) and time_ymdh <= int(time_e):
-        print(f'[bold purple]dataset: {dataset_name}, version: {version_name} is chosen')
-        return dataset_name, version_name
-    return None, None
+def direct_choose_dataset_and_version(time_input, time_end=None):
+    # data_info is assumed to be a dict holding the dataset and version information
+    # Example structure: data_info['hourly']['dataset'][dataset_name]['version'][version_name]['time_range']
+
+    if len(str(time_input)) == 8:
+        time_input = str(time_input) + '00'
 
+    # If time_end is None, fall back to the value of time_input
+    if time_end is None:
+        time_end = time_input
+
+    # Normalize the start and end times to the full ymdh format
+    time_start, time_end = int(str(time_input)[:10]), int(str(time_end)[:10])
 
-def direct_choose_dataset_and_version_whole_day(time_ymd):
-    time_ymd = int(str(time_ymd)[:8])
     for dataset_name in data_info['hourly']['dataset'].keys():
         for version_name in data_info['hourly']['dataset'][dataset_name]['version'].keys():
             [time_s, time_e] = list(data_info['hourly']['dataset'][dataset_name]['version'][version_name]['time_range'].values())
@@ -475,10 +427,16 @@ def direct_choose_dataset_and_version_whole_day(time_ymd):
                 time_s = time_s + '00'
             if len(time_e) == 8:
                 time_e = time_e + '21'
-
+            time_s, time_e = int(time_s), int(time_e)
+
+            # Check whether the time is within this dataset version's time range
+            if time_start >= time_s and time_end <= time_e:
                 print(f'[bold purple]dataset: {dataset_name}, version: {version_name} is chosen')
                 return dataset_name, version_name
 
+    # If no matching dataset and version were found, return None
+    return None, None
+
 
 def get_base_url(dataset_name, version_name, var, year_str):
     url_dict = data_info['hourly']['dataset'][dataset_name]['version'][version_name]['url']
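With these changes, check_time_in_dataset_and_version and direct_choose_dataset_and_version accept either a single time or a start/end pair, so the same helpers serve both 3-hourly and whole-day downloads. An illustrative call sequence (it assumes the module-level data_info table is populated at import time; the times are examples only):

    from oafuncs.oa_down.hycom_3hourly import check_time_in_dataset_and_version, direct_choose_dataset_and_version

    if check_time_in_dataset_and_version(2024083100):           # one 3-hourly time point
        dataset_name, version_name = direct_choose_dataset_and_version(2024083100)
    if check_time_in_dataset_and_version(20240831, 20240901):   # a whole-day range
        dataset_name, version_name = direct_choose_dataset_and_version(20240831, 20240901)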
@@ -525,7 +483,7 @@ def check_existing_file(file_full_path):
         print(f'[bold #FFA54F]{file_full_path} exists')
         return True
     else:
-        print(f'{file_full_path} does not exist')
+        # print(f'{file_full_path} does not exist')
         return False
 
 
@@ -543,8 +501,8 @@ def get_ua():
         "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv,2.0.1) Gecko/20100101 Firefox/4.0.1",
         "Mozilla/5.0 (Windows NT 6.1; rv,2.0.1) Gecko/20100101 Firefox/4.0.1",
         "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2",
-        "
-        "
+        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36",
+        "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
         "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
         "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
         "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
@@ -594,10 +552,10 @@ def get_ua():
         "NOKIA5700/UCWEB7.0.2.37/28/999",
         "Openwave/UCWEB7.0.2.37/28/999",
         "Openwave/UCWEB7.0.2.37/28/999",
+
     ]
-    ua_index = random.randint(0, len(ua_list)-1)
-    ua = ua_list[ua_index]
     # print(f'Using User-Agent: {ua}')
+    ua = random.choice(ua_list)
     return ua
 
 
@@ -621,7 +579,7 @@ def get_proxy():
 
 
 def dlownload_file(target_url, store_path, file_name, check=False):
-    print(f'[bold 
+    print(f'[bold #96cbd7]Downloading {file_name}...')
     # Create a session
     s = requests.Session()
     download_success = False
@@ -632,14 +590,19 @@ def dlownload_file(target_url, store_path, file_name, check=False):
         if check_existing_file(filename):
             return
     clear_existing_file(filename)
-    print(f'Download_start_time: {datetime.datetime.now()}')
+    # print(f'Download_start_time: {datetime.datetime.now()}')
+    download_time_s = datetime.datetime.now()
     while not download_success:
+        if request_times >= 10:
+            # print(f'下载失败,已重试 {request_times} 次\n可先跳过,后续再试')
+            print(f'[bold #ffe5c0]Download failed after {request_times} times\nYou can skip it and try again later')
+            break
         if request_times > 0:
-            print(f'\r正在重试第 {request_times} 次', end="")
+            # print(f'\r正在重试第 {request_times} 次', end="")
+            print(f'[bold #ffe5c0]Retrying the {request_times} time...')
        # Try to download the file
         try:
-            headers = {
-                'User-Agent': get_ua()}
+            headers = {'User-Agent': get_ua()}
             response = s.get(target_url, headers=headers, timeout=5)
             response.raise_for_status()  # raises HTTPError if the response status is not 200
 
@@ -647,10 +610,12 @@ def dlownload_file(target_url, store_path, file_name, check=False):
             with open(filename, 'wb') as f:
                 f.write(response.content)
             # print(f'\r文件 {filename} 下载成功', end="")
-            # query_ncfile_time(filename) # 这个函数在linux上目前会出问题
             if os.path.exists(filename):
                 download_success = True
-
+                download_time_e = datetime.datetime.now()
+                download_delta = download_time_e - download_time_s
+                print(f'[#65b168]File [bold #dfff73]{filename} [#65b168]has been downloaded successfully, Time: [#39cbdd]{download_delta}')
+                # print(f'Download_end_time: {datetime.datetime.now()}')
 
         except requests.exceptions.HTTPError as errh:
             print(f"Http Error: {errh}")
@@ -663,7 +628,6 @@ def dlownload_file(target_url, store_path, file_name, check=False):
 
         time.sleep(3)
         request_times += 1
-    print(f'Download_end_time: {datetime.datetime.now()}')
 
 
 def check_hour_is_valid(ymdh_str):
@@ -675,53 +639,65 @@ def check_hour_is_valid(ymdh_str):
         return False
 
 
-def 
-
-
+def check_dataset_version(dataset_name, version_name, download_time, download_time_end=None):
+    if dataset_name is not None and version_name is not None:
+        just_ensure = ensure_time_in_specific_dataset_and_version(dataset_name, version_name, download_time, download_time_end)
+        if just_ensure:
+            return dataset_name, version_name
+        else:
+            return None, None
+
+    # Make sure the download time is a string
+    download_time_str = str(download_time)
+
+    if len(download_time_str) == 8:
+        download_time_str = download_time_str + '00'
+
+    # Check whether the hour is valid (if needed)
+    if download_time_end is None and not check_hour_is_valid(download_time_str):
         print('Please ensure the hour is 00, 03, 06, 09, 12, 15, 18, 21')
         raise ValueError('The hour is invalid')
-
-
-
-
-        print('Please ensure the dataset_name is not None')
-        print('If you do not add the dataset_name, both the dataset and version will be chosen according to the download_time.')
-        dataset_name, version_name = direct_choose_dataset_and_version(download_time)
-    elif dataset_name is not None and version_name is None:
-        print('Please ensure the version_name is not None')
-        print('If you do not add the version_name, both the dataset and version will be chosen according to the download_time.')
-        dataset_name, version_name = direct_choose_dataset_and_version(download_time)
+
+    # Set the time range depending on whether a whole day is requested
+    if download_time_end is not None:
+        if len(str(download_time_end)) == 8:
+            download_time_end = str(download_time_end) + '21'
+        have_data = check_time_in_dataset_and_version(download_time_str, download_time_end)
+        if have_data:
+            return direct_choose_dataset_and_version(download_time_str, download_time_end)
     else:
-
+        have_data = check_time_in_dataset_and_version(download_time_str)
+        if have_data:
+            return direct_choose_dataset_and_version(download_time_str)
 
-    return 
+    return None, None
 
 
 def get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time):
     year_str = str(download_time)[:4]
     if depth is not None and level_num is not None:
         print('Please ensure the depth or level_num is None')
-
-
-
+        print('Progress will use the depth')
+        which_mode = 'depth'
+    elif depth is not None and level_num is None:
+        print(f'Data of single depth (~{depth} m) will be downloaded...')
+        which_mode = 'depth'
+    elif level_num is not None and depth is None:
         print(f'Data of single level ({level_num}) will be downloaded...')
-
+        which_mode = 'level'
     else:
         print('Full depth or full level data will be downloaded...')
-
+        which_mode = 'full'
+    query_dict = get_query_dict(var, lon_min, lon_max, lat_min, lat_max, download_time, which_mode, depth, level_num)
     submit_url = get_submit_url(
         dataset_name, version_name, var, year_str, query_dict)
     return submit_url
 
 
-def 
+def direct_download_single_hour(var, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, download_time='2024083100', depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None, check=False):
+    print('[bold #ecdbfe]-'*150)
     download_time = str(download_time)
-    dataset_name, version_name = 
+    dataset_name, version_name = check_dataset_version(dataset_name, version_name, download_time)
     if dataset_name is None and version_name is None:
         return
 
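check_dataset_version is now the single selection entry point: if both names are supplied it only verifies them against the requested time, otherwise it resolves them from the download time, optionally over a whole-day range. A hedged usage sketch (the dataset and version strings are illustrative, taken from the examples in the download docstring elsewhere in this file):

    from oafuncs.oa_down.hycom_3hourly import check_dataset_version

    # let the module pick the dataset and version for one 3-hourly time point
    dataset_name, version_name = check_dataset_version(None, None, '2024083100')

    # verify a user-supplied pair against a whole-day range
    dataset_name, version_name = check_dataset_version('GLBv0.08', '53.X', '20240831', '2024083121')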
@@ -763,32 +739,10 @@ def direct_download_single_time(var, lon_min=0, lon_max=359.92, lat_min=-80, lat
     dlownload_file(submit_url, store_path, file_name, check)
 
 
-def check_dataset_version_whold_day(dataset_name, version_name, download_time):
-    download_time = str(download_time)
-    have_data = ymd_in_which_dataset_and_version(download_time)
-    if not have_data:
-        return None, None
-    if dataset_name is None and version_name is None:
-        print('The dataset_name and version_name are None, so the dataset and version will be chosen according to the download_time.\nIf there is more than one dataset and version in the time range, the first one will be chosen.')
-        print('If you wanna choose the dataset and version by yourself, please set the dataset_name and version_name together.')
-        dataset_name, version_name = direct_choose_dataset_and_version_whole_day(download_time)
-    elif dataset_name is None and version_name is not None:
-        print('Please ensure the dataset_name is not None')
-        print('If you do not add the dataset_name, both the dataset and version will be chosen according to the download_time.')
-        dataset_name, version_name = direct_choose_dataset_and_version_whole_day(download_time)
-    elif dataset_name is not None and version_name is None:
-        print('Please ensure the version_name is not None')
-        print('If you do not add the version_name, both the dataset and version will be chosen according to the download_time.')
-        dataset_name, version_name = direct_choose_dataset_and_version_whole_day(download_time)
-    else:
-        print('The dataset_name and version_name are both set by yourself.')
-
-    return dataset_name, version_name
-
-
 def direct_download_whole_day(var, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, download_time='20240831', depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None, check=False):
+    print('[bold #ecdbfe]-'*150)
     download_time = str(download_time)[:8]+'00'
-    dataset_name, version_name = 
+    dataset_name, version_name = check_dataset_version(dataset_name, version_name, download_time, str(download_time)[:8]+'21')
     if dataset_name is None and version_name is None:
         return
 
@@ -867,10 +821,10 @@ def download_task(var, time_str, lon_min, lon_max, lat_min, lat_max, depth, leve
     if len(time_str) == 8:
         direct_download_whole_day(var, lon_min, lon_max, lat_min, lat_max, time_str, depth, level, store_path, dataset_name, version_name, check)
     else:
-
+        direct_download_single_hour(var, lon_min, lon_max, lat_min, lat_max, time_str, depth, level, store_path, dataset_name, version_name, check)
 
 
-def download_single_hour(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False):
+def download_single_hour(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False):
     '''
     Description:
         Download the data of single time or a series of time
@@ -912,21 +866,24 @@ def download_single_hour(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min
     ymdh_time_s = str(time_s)
     if len(ymdh_time_s) == 8:
         ymdh_time_s += '00'
-
-
-
+    if time_e is None:
+        ymdh_time_e = ymdh_time_s[:]
+    else:
+        ymdh_time_e = str(time_e)
+        if len(ymdh_time_e) == 8:
+            ymdh_time_e += '21'
     if ymdh_time_s == ymdh_time_e:
-
+        direct_download_single_hour(var, lon_min, lon_max, lat_min, lat_max, ymdh_time_s, depth, level, store_path, dataset_name, version_name)
     elif int(ymdh_time_s) < int(ymdh_time_e):
         print('Downloading a series of files...')
-        time_list = 
+        time_list = get_time_list(ymdh_time_s, ymdh_time_e, 3, 'hour')
         with Progress() as progress:
             task = progress.add_task("[cyan]Downloading...", total=len(time_list))
             if num_workers is None or num_workers <= 1:
                 # Serial mode
-                for time_str in time_list:
-
-                    progress.update(task, advance=1)
+                for i, time_str in enumerate(time_list):
+                    direct_download_single_hour(var, lon_min, lon_max, lat_min, lat_max, time_str, depth, level, store_path, dataset_name, version_name, check)
+                    progress.update(task, advance=1, description=f'[cyan]Downloading... {i+1}/{len(time_list)}')
             else:
                 # Parallel mode
                 if num_workers > 10:
@@ -935,13 +892,13 @@ def download_single_hour(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min
                     num_workers = 10
                 with ThreadPoolExecutor(max_workers=num_workers) as executor:
                     futures = [executor.submit(download_task, var, time_str, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check) for time_str in time_list]
-                    for future in futures:
-                        future.add_done_callback(lambda _: progress.update(task, advance=1))
+                    for i, future in enumerate(futures):
+                        future.add_done_callback(lambda _: progress.update(task, advance=1, description=f'[cyan]Downloading... {i+1}/{len(time_list)}'))
     else:
         print('Please ensure the time_s is less than the time_e')
 
 
-def download_whole_day(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False):
+def download_whole_day(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False):
     '''
     Description:
         Download the data of single time or a series of time
@@ -980,20 +937,24 @@ def download_whole_day(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-
         print('The range of lon_min, lon_max is 0~359.92')
         print('The range of lat_min, lat_max is -80~90')
         raise ValueError('The lon or lat is invalid')
-    time_s
+    time_s = str(time_s)[:8]
+    if time_e is None:
+        time_e = time_s[:]
+    else:
+        time_e = str(time_e)[:8]
 
     if time_s == time_e:
         direct_download_whole_day(var, lon_min, lon_max, lat_min, lat_max, time_s, depth, level, store_path, dataset_name, version_name)
     elif int(time_s) < int(time_e):
         print('Downloading a series of files...')
-        time_list = 
+        time_list = get_time_list(time_s, time_e, 1, 'day')
         with Progress() as progress:
             task = progress.add_task("[cyan]Downloading...", total=len(time_list))
             if num_workers is None or num_workers <= 1:
                 # Serial mode
-                for time_str in time_list:
+                for i, time_str in enumerate(time_list):
                     direct_download_whole_day(var, lon_min, lon_max, lat_min, lat_max, time_str, depth, level, store_path, dataset_name, version_name, check)
-                    progress.update(task, advance=1)
+                    progress.update(task, advance=1, description=f'[cyan]Downloading... {i+1}/{len(time_list)}')
             else:
                 # Parallel mode
                 if num_workers > 10:
@@ -1002,13 +963,13 @@ def download_whole_day(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-
                     num_workers = 10
                 with ThreadPoolExecutor(max_workers=num_workers) as executor:
                     futures = [executor.submit(download_task, var, time_str, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check) for time_str in time_list]
-                    for future in futures:
-                        future.add_done_callback(lambda _: progress.update(task, advance=1))
+                    for i, future in enumerate(futures):
+                        future.add_done_callback(lambda _: progress.update(task, advance=1, description=f'[cyan]Downloading... {i+1}/{len(time_list)}'))
     else:
         print('Please ensure the time_s is less than the time_e')
 
 
-def 
+def download(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False, resolution='hour'):
     '''
     Description:
         Download the data of single time or a series of time
@@ -1031,72 +992,20 @@ def download_his1(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-80, l
     Returns:
         None
     '''
-
-
-
-    print('
-
-    print('
-
-    ymdh_time_s = str(time_s)
-    if len(ymdh_time_s) == 8:
-        ymdh_time_s += '00'
-    ymdh_time_e = str(time_e)
-    if len(ymdh_time_e) == 8:
-        ymdh_time_e += '21'
-    if ymdh_time_s == ymdh_time_e:
-        direct_download_single_time(var, lon_min, lon_max, lat_min, lat_max, ymdh_time_s, depth, level, store_path, dataset_name, version_name)
-    elif int(ymdh_time_s) < int(ymdh_time_e):
-        print('Downloading a series of files...')
-        time_list = get_hour_list(ymdh_time_s, ymdh_time_e, 3)
-        with Progress() as progress:
-            task = progress.add_task("[cyan]Downloading...", total=len(time_list))
-            if num_workers is None or num_workers <= 1:
-                # Serial mode
-                for time_str in time_list:
-                    direct_download_single_time(var, lon_min, lon_max, lat_min, lat_max, time_str, depth, level, store_path, dataset_name, version_name, check)
-                    progress.update(task, advance=1)
-            else:
-                # Parallel mode
-                if num_workers > 10:
-                    print('The number of workers is too large!')
-                    print('In order to avoid the server being blocked, the number of workers is set to 10')
-                    num_workers = 10
-                with ThreadPoolExecutor(max_workers=num_workers) as executor:
-                    futures = [executor.submit(download_task, var, time_str, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check) for time_str in time_list]
-                    for future in futures:
-                        future.add_done_callback(
-                            lambda _: progress.update(task, advance=1))
+    # Print information and handle the dataset and version names
+    if dataset_name is None and version_name is None:
+        print('The dataset_name and version_name are None, so the dataset and version will be chosen according to the download_time.\nIf there is more than one dataset and version in the time range, the first one will be chosen.')
+        print('If you wanna choose the dataset and version by yourself, please set the dataset_name and version_name together.')
+    elif dataset_name is None and version_name is not None:
+        print('Please ensure the dataset_name is not None')
+        print('If you do not add the dataset_name, both the dataset and version will be chosen according to the download_time.')
+    elif dataset_name is not None and version_name is None:
+        print('Please ensure the version_name is not None')
+        print('If you do not add the version_name, both the dataset and version will be chosen according to the download_time.')
     else:
-        print('
-
-
-def download(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False, resolution='hour'):
-    '''
-    Description:
-        Download the data of single time or a series of time
-
-    Parameters:
-        var: str, the variable name, such as 'u', 'v', 'temp', 'salt', 'ssh', 'u_b', 'v_b', 'temp_b', 'salt_b' or 'water_u', 'water_v', 'water_temp', 'salinity', 'surf_el', 'water_u_bottom', 'water_v_bottom', 'water_temp_bottom', 'salinity_bottom'
-        time_s: str, the start time, such as '2024110100' or '20241101', if add hour, the hour should be 00, 03, 06, 09, 12, 15, 18, 21
-        time_e: str, the end time, such as '2024110221' or '20241102', if add hour, the hour should be 00, 03, 06, 09, 12, 15, 18, 21
-        lon_min: float, the minimum longitude, default is 0
-        lon_max: float, the maximum longitude, default is 359.92
-        lat_min: float, the minimum latitude, default is -80
-        lat_max: float, the maximum latitude, default is 90
-        depth: float, the depth, default is None
-        level: int, the level number, default is None
-        store_path: str, the path to store the data, default is None
-        dataset_name: str, the dataset name, default is None, example: 'GLBv0.08', 'GLBu0.08', 'GLBy0.08'
-        version_name: str, the version name, default is None, example: '53.X', '56.3'
-        num_workers: int, the number of workers, default is None
+        print('The dataset_name and version_name are both set by yourself.')
+        print('Please ensure the dataset_name and version_name are correct.')
 
-    Returns:
-        None
-    '''
     if resolution == 'hour':
         download_single_hour(var, time_s, time_e, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, num_workers, check)
     elif resolution == 'day':
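After this refactor, download() is the single public entry point: resolution='hour' dispatches to download_single_hour and resolution='day' to download_whole_day, and time_e may now be omitted for a single time. A minimal usage sketch (the paths, times, and region are illustrative):

    from pathlib import Path
    from oafuncs.oa_down.hycom_3hourly import download

    # one day of 3-hourly 'u' fields over a regional box
    download(var='u', time_s='2024083100', time_e='2024083121',
             lon_min=105, lon_max=130, lat_min=15, lat_max=45,
             store_path=Path(r'G:\Data\HYCOM\3hourly'),
             num_workers=1, check=True, resolution='hour')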
@@ -1112,7 +1021,7 @@ def how_to_use():
     print('''
     # 1. Choose the dataset and version according to the time:
     # 1.1 Use function to query
-    You can use the function 
+    You can use the function check_time_in_dataset_and_version(time_input=20241101) to find the dataset and version according to the time.
     Then, you can see the dataset and version in the output.
     # 1.2 Draw a picture to see
     You can draw a picture to see the time range of each dataset and version.
@@ -1159,8 +1068,8 @@ def how_to_use():
 
 if __name__ == '__main__':
     # help(hycom3h.download)
-    time_s, time_e = '
-    merge_name = '
+    time_s, time_e = '2018070100', '2019123121'
+    merge_name = '2018_2024'
     root_path = r'G:\Data\HYCOM\3hourly'
     location_dict = {'west': 105, 'east': 130, 'south': 15, 'north': 45}
     download_dict = {
@@ -1184,7 +1093,7 @@ if __name__ == '__main__':
     # if you wanna download all depth or level, set both False
     depth = None  # or 0-4000 meters
     level = None  # or 1-40 levels
-    num_workers = 
+    num_workers = 1
 
     check = True
 
@@ -1200,7 +1109,7 @@ if __name__ == '__main__':
     download(var=var_list, time_s=time_s, time_e=time_e, store_path=Path(root_path), lon_min=location_dict['west'], lon_max=location_dict['east'], lat_min=location_dict['south'], lat_max=location_dict['north'], num_workers=num_workers, check=check, depth=depth, level=level)
 
     """ if combine_switch or copy_switch:
-        time_list = 
+        time_list = get_time_list(time_s, time_e, 3, 'hour')
         for var_name in var_list:
             file_list = []
             if single_var:
@@ -1218,6 +1127,7 @@
                 fname = 'surf_el'
             for time_str in time_list:
                 file_list.append(Path(root_path)/f'HYCOM_{fname}_{time_str}.nc')
+            merge_path_name = Path(root_path)/f'HYCOM_{fname}_{merge_name}.nc'
             if combine_switch:
                 merge5nc(file_list, var_name, 'time', merge_path_name)
             if copy_switch: