oafuncs 0.0.97.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oafuncs/__init__.py +54 -0
 - oafuncs/_script/__init__.py +27 -0
 - oafuncs/_script/plot_dataset.py +299 -0
 - oafuncs/data_store/OAFuncs.png +0 -0
 - oafuncs/data_store/hycom_3hourly.png +0 -0
 - oafuncs/oa_cmap.py +215 -0
 - oafuncs/oa_data.py +293 -0
 - oafuncs/oa_down/User_Agent-list.txt +6697 -0
 - oafuncs/oa_down/__init__.py +22 -0
 - oafuncs/oa_down/hycom_3hourly.py +1309 -0
 - oafuncs/oa_down/hycom_3hourly_20250129.py +1307 -0
 - oafuncs/oa_down/idm.py +50 -0
 - oafuncs/oa_down/literature.py +288 -0
 - oafuncs/oa_down/test_ua.py +151 -0
 - oafuncs/oa_down/user_agent.py +31 -0
 - oafuncs/oa_draw.py +326 -0
 - oafuncs/oa_file.py +413 -0
 - oafuncs/oa_help.py +144 -0
 - oafuncs/oa_model/__init__.py +19 -0
 - oafuncs/oa_model/roms/__init__.py +20 -0
 - oafuncs/oa_model/roms/test.py +19 -0
 - oafuncs/oa_model/wrf/__init__.py +18 -0
 - oafuncs/oa_model/wrf/little_r.py +186 -0
 - oafuncs/oa_nc.py +523 -0
 - oafuncs/oa_python.py +108 -0
 - oafuncs/oa_sign/__init__.py +21 -0
 - oafuncs/oa_sign/meteorological.py +168 -0
 - oafuncs/oa_sign/ocean.py +158 -0
 - oafuncs/oa_sign/scientific.py +139 -0
 - oafuncs/oa_tool/__init__.py +19 -0
 - oafuncs/oa_tool/email.py +114 -0
 - oafuncs/oa_tool/parallel.py +90 -0
 - oafuncs/oa_tool/time.py +22 -0
 - oafuncs-0.0.97.1.dist-info/LICENSE.txt +19 -0
 - oafuncs-0.0.97.1.dist-info/METADATA +106 -0
 - oafuncs-0.0.97.1.dist-info/RECORD +38 -0
 - oafuncs-0.0.97.1.dist-info/WHEEL +5 -0
 - oafuncs-0.0.97.1.dist-info/top_level.txt +1 -0
 
oafuncs/oa_down/hycom_3hourly_20250129.py
@@ -0,0 +1,1307 @@
#!/usr/bin/env python
# coding=utf-8
"""
Author: Liu Kun && 16031215@qq.com
Date: 2025-01-29 17:53:21
LastEditors: Liu Kun && 16031215@qq.com
LastEditTime: 2025-01-29 17:53:21
FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_down\\hycom_3hourly copy.py
Description:
EditPlatform: vscode
ComputerInfo: XPS 15 9510
SystemInfo: Windows 11
Python Version: 3.12
"""


import datetime
import os
import random
import re
import time
import warnings
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from threading import Lock

import matplotlib.pyplot as plt
import netCDF4 as nc
import numpy as np
import pandas as pd
import requests
import xarray as xr
from rich import print
from rich.progress import Progress

from oafuncs.oa_down.idm import downloader as idm_downloader
from oafuncs.oa_down.user_agent import get_ua
from oafuncs.oa_file import file_size, mean_size
from oafuncs.oa_nc import check as check_nc
from oafuncs.oa_nc import modify as modify_nc

warnings.filterwarnings("ignore", category=RuntimeWarning, message="Engine '.*' loading failed:.*")

__all__ = ["draw_time_range", "download", "how_to_use", "get_time_list"]

def _get_initial_data():
    global variable_info, data_info, var_group, single_var_group
    # ----------------------------------------------
    # variable
    variable_info = {
        "u": {"var_name": "water_u", "standard_name": "eastward_sea_water_velocity"},
        "v": {"var_name": "water_v", "standard_name": "northward_sea_water_velocity"},
        "temp": {"var_name": "water_temp", "standard_name": "sea_water_potential_temperature"},
        "salt": {"var_name": "salinity", "standard_name": "sea_water_salinity"},
        "ssh": {"var_name": "surf_el", "standard_name": "sea_surface_elevation"},
        "u_b": {"var_name": "water_u_bottom", "standard_name": "eastward_sea_water_velocity_at_sea_floor"},
        "v_b": {"var_name": "water_v_bottom", "standard_name": "northward_sea_water_velocity_at_sea_floor"},
        "temp_b": {"var_name": "water_temp_bottom", "standard_name": "sea_water_potential_temperature_at_sea_floor"},
        "salt_b": {"var_name": "salinity_bottom", "standard_name": "sea_water_salinity_at_sea_floor"},
    }
    # ----------------------------------------------
    # time resolution
    data_info = {"yearly": {}, "monthly": {}, "daily": {}, "hourly": {}}

    # hourly data
    # dataset: GLBv0.08, GLBu0.08, GLBy0.08
    data_info["hourly"]["dataset"] = {"GLBv0.08": {}, "GLBu0.08": {}, "GLBy0.08": {}, "ESPC_D": {}}

    # version
    # version of GLBv0.08: 53.X, 56.3, 57.2, 92.8, 57.7, 92.9, 93.0
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"] = {"53.X": {}, "56.3": {}, "57.2": {}, "92.8": {}, "57.7": {}, "92.9": {}, "93.0": {}}
    # version of GLBu0.08: 93.0
    data_info["hourly"]["dataset"]["GLBu0.08"]["version"] = {"93.0": {}}
    # version of GLBy0.08: 93.0
    data_info["hourly"]["dataset"]["GLBy0.08"]["version"] = {"93.0": {}}
    # version of ESPC_D: V02
    data_info["hourly"]["dataset"]["ESPC_D"]["version"] = {"V02": {}}

    # info details
    # time range
    # GLBv0.08
    # Submitting an out-of-range time on the website returns the dataset's actual time range, which was used to correct the ranges below.
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["time_range"] = {"time_start": "1994010112", "time_end": "2015123109"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["56.3"]["time_range"] = {"time_start": "2014070112", "time_end": "2016093009"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.2"]["time_range"] = {"time_start": "2016050112", "time_end": "2017020109"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.8"]["time_range"] = {"time_start": "2017020112", "time_end": "2017060109"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.7"]["time_range"] = {"time_start": "2017060112", "time_end": "2017100109"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.9"]["time_range"] = {"time_start": "2017100112", "time_end": "2018032009"}
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018010112", "time_end": "2020021909"}
    # GLBu0.08
    data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018091912", "time_end": "2018120909"}
    # GLBy0.08
    data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018120412", "time_end": "2024090509"}
    # ESPC-D
    data_info["hourly"]["dataset"]["ESPC_D"]["version"]["V02"]["time_range"] = {"time_start": "2024081012", "time_end": "2030010100"}

    # classification method
    # year_different: the data of different years is stored in different files
    # same_path: the data of different years is stored in the same file
    # var_different: the data of different variables is stored in different files
    # var_year_different: the data of different variables and years is stored in different files
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["classification"] = "year_different"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["56.3"]["classification"] = "same_path"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.2"]["classification"] = "same_path"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.8"]["classification"] = "var_different"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.7"]["classification"] = "same_path"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.9"]["classification"] = "var_different"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["93.0"]["classification"] = "var_different"
    data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["classification"] = "var_different"
    data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["classification"] = "var_year_different"
    data_info["hourly"]["dataset"]["ESPC_D"]["version"]["V02"]["classification"] = "single_var_year_different"

    # download info
    # base url
    # GLBv0.08 53.X
    url_53x = {}
    for y_53x in range(1994, 2016):
        # r'https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_53.X/data/2013?'
        url_53x[str(y_53x)] = rf"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_53.X/data/{y_53x}?"
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["url"] = url_53x
    # GLBv0.08 56.3
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["56.3"]["url"] = r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_56.3?"
    # GLBv0.08 57.2
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.2"]["url"] = r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_57.2?"
    # GLBv0.08 92.8
    url_928 = {
        "uv3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.8/uv3z?",
        "ts3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.8/ts3z?",
        "ssh": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.8/ssh?",
    }
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.8"]["url"] = url_928
    # GLBv0.08 57.7
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.7"]["url"] = r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_57.7?"
    # GLBv0.08 92.9
    url_929 = {
        "uv3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.9/uv3z?",
        "ts3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.9/ts3z?",
        "ssh": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_92.9/ssh?",
    }
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.9"]["url"] = url_929
    # GLBv0.08 93.0
    url_930_v = {
        "uv3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_93.0/uv3z?",
        "ts3z": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_93.0/ts3z?",
        "ssh": r"https://ncss.hycom.org/thredds/ncss/GLBv0.08/expt_93.0/ssh?",
    }
    data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["93.0"]["url"] = url_930_v
    # GLBu0.08 93.0
    url_930_u = {
        "uv3z": r"https://ncss.hycom.org/thredds/ncss/GLBu0.08/expt_93.0/uv3z?",
        "ts3z": r"https://ncss.hycom.org/thredds/ncss/GLBu0.08/expt_93.0/ts3z?",
        "ssh": r"https://ncss.hycom.org/thredds/ncss/GLBu0.08/expt_93.0/ssh?",
    }
    data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["url"] = url_930_u
    # GLBy0.08 93.0
    uv3z_930_y = {}
    ts3z_930_y = {}
    ssh_930_y = {}
    for y_930_y in range(2018, 2025):
        uv3z_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/uv3z/{y_930_y}?"
        ts3z_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/ts3z/{y_930_y}?"
        ssh_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/ssh/{y_930_y}?"
    # GLBy0.08 93.0 data time range in each year: year-01-01 12:00 to year+1-01-01 09:00
    url_930_y = {
        "uv3z": uv3z_930_y,
        "ts3z": ts3z_930_y,
        "ssh": ssh_930_y,
    }
    data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["url"] = url_930_y
    # ESPC-D-V02
    u3z_espc_d_v02_y = {}
    v3z_espc_d_v02_y = {}
    t3z_espc_d_v02_y = {}
    s3z_espc_d_v02_y = {}
    ssh_espc_d_v02_y = {}
    for y_espc_d_v02 in range(2024, 2030):
        u3z_espc_d_v02_y[str(y_espc_d_v02)] = rf"https://ncss.hycom.org/thredds/ncss/ESPC-D-V02/u3z/{y_espc_d_v02}?"
        v3z_espc_d_v02_y[str(y_espc_d_v02)] = rf"https://ncss.hycom.org/thredds/ncss/ESPC-D-V02/v3z/{y_espc_d_v02}?"
        t3z_espc_d_v02_y[str(y_espc_d_v02)] = rf"https://ncss.hycom.org/thredds/ncss/ESPC-D-V02/t3z/{y_espc_d_v02}?"
        s3z_espc_d_v02_y[str(y_espc_d_v02)] = rf"https://ncss.hycom.org/thredds/ncss/ESPC-D-V02/s3z/{y_espc_d_v02}?"
        ssh_espc_d_v02_y[str(y_espc_d_v02)] = rf"https://ncss.hycom.org/thredds/ncss/ESPC-D-V02/ssh/{y_espc_d_v02}?"
    url_espc_d_v02_y = {
        "u3z": u3z_espc_d_v02_y,
        "v3z": v3z_espc_d_v02_y,
        "t3z": t3z_espc_d_v02_y,
        "s3z": s3z_espc_d_v02_y,
        "ssh": ssh_espc_d_v02_y,
    }
    data_info["hourly"]["dataset"]["ESPC_D"]["version"]["V02"]["url"] = url_espc_d_v02_y
    # ----------------------------------------------
    var_group = {
        "uv3z": ["u", "v", "u_b", "v_b"],
        "ts3z": ["temp", "salt", "temp_b", "salt_b"],
        "ssh": ["ssh"],
    }
    # ----------------------------------------------
    single_var_group = {
        "u3z": ["u"],
        "v3z": ["v"],
        "t3z": ["temp"],
        "s3z": ["salt"],
        "ssh": ["ssh"],
    }

    return variable_info, data_info, var_group, single_var_group

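# Illustrative sketch (not part of the original module): once _get_initial_data()
# has run, the nested data_info dict can be walked dataset -> version -> detail,
# e.g. for GLBy0.08 93.0:
#     variable_info, data_info, var_group, single_var_group = _get_initial_data()
#     glby = data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]
#     glby["time_range"]            # {"time_start": "2018120412", "time_end": "2024090509"}
#     glby["url"]["uv3z"]["2024"]   # NCSS base URL for the u/v file group in 2024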
def draw_time_range(pic_save_folder=None):
    if pic_save_folder is not None:
        os.makedirs(pic_save_folder, exist_ok=True)
    # Converting the data into a format suitable for plotting
    data = []
    for dataset, versions in data_info["hourly"]["dataset"].items():
        for version, time_range in versions["version"].items():
            t_s = time_range["time_range"]["time_start"]
            t_e = time_range["time_range"]["time_end"]
            if len(t_s) == 8:
                t_s = t_s + "00"
            if len(t_e) == 8:
                t_e = t_e + "21"
            t_s, t_e = t_s + "0000", t_e + "0000"
            data.append(
                {
                    "dataset": dataset,
                    "version": version,
                    "start_date": pd.to_datetime(t_s),
                    "end_date": pd.to_datetime(t_e),
                }
            )

    # Creating a DataFrame
    df = pd.DataFrame(data)

    # Plotting with combined labels for datasets and versions on the y-axis
    plt.figure(figsize=(12, 6))

    # Combined labels for datasets and versions
    combined_labels = [f"{dataset}_{version}" for dataset, version in zip(df["dataset"], df["version"])]

    colors = plt.cm.viridis(np.linspace(0, 1, len(combined_labels)))

    # Assigning a color to each combined label
    label_colors = {label: colors[i] for i, label in enumerate(combined_labels)}

    # Plotting each time range
    k = 1
    for _, row in df.iterrows():
        plt.plot([row["start_date"], row["end_date"]], [k, k], color=label_colors[f"{row['dataset']}_{row['version']}"], linewidth=6)
        # plt.text(row['end_date'], k,
        #          f"{row['version']}", ha='right', color='black')
        ymdh_s = row["start_date"].strftime("%Y-%m-%d %H")
        ymdh_e = row["end_date"].strftime("%Y-%m-%d %H")
        # if k == 1 or k == len(combined_labels):
        if k == 1:
            plt.text(row["start_date"], k + 0.125, f"{ymdh_s}", ha="left", color="black")
            plt.text(row["end_date"], k + 0.125, f"{ymdh_e}", ha="right", color="black")
        else:
            plt.text(row["start_date"], k + 0.125, f"{ymdh_s}", ha="right", color="black")
            plt.text(row["end_date"], k + 0.125, f"{ymdh_e}", ha="left", color="black")
        k += 1

    # Setting the y-axis labels
    plt.yticks(range(1, len(combined_labels) + 1), combined_labels)
    plt.xlabel("Time")
    plt.ylabel("Dataset - Version")
    plt.title("Time Range of Different Versions of Datasets")
    plt.xticks(rotation=45)
    plt.grid(True)
    plt.tight_layout()
    if pic_save_folder:
        plt.savefig(Path(pic_save_folder) / "HYCOM_time_range.png")
        print(f"[bold green]HYCOM_time_range.png has been saved in {pic_save_folder}")
    else:
        plt.savefig("HYCOM_time_range.png")
        print("[bold green]HYCOM_time_range.png has been saved in the current folder")
        print(f"Current folder: {os.getcwd()}")
    # plt.show()
    plt.close()

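# Illustrative usage (not part of the original module); the output folder name
# is hypothetical, and data_info must have been populated first:
#     _get_initial_data()
#     draw_time_range(pic_save_folder="./figs")   # writes ./figs/HYCOM_time_range.png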
def get_time_list(time_s, time_e, delta, interval_type="hour"):
    """
    Description: get a list of time strings from time_s to time_e with a specified interval
    Args:
        time_s: start time string, e.g. '2023080203' for hours or '20230802' for days
        time_e: end time string, e.g. '2023080303' for hours or '20230803' for days
        delta: interval of hours or days
        interval_type: 'hour' for hour interval, 'day' for day interval
    Returns:
        dt_list: a list of time strings
    """
    time_s, time_e = str(time_s), str(time_e)
    if interval_type == "hour":
        time_format = "%Y%m%d%H"
        delta_type = "hours"
    elif interval_type == "day":
        time_format = "%Y%m%d"
        delta_type = "days"
        # Ensure time strings are in the correct format for days
        time_s = time_s[:8]
        time_e = time_e[:8]
    else:
        raise ValueError("interval_type must be 'hour' or 'day'")

    dt = datetime.datetime.strptime(time_s, time_format)
    dt_list = []
    while dt.strftime(time_format) <= time_e:
        dt_list.append(dt.strftime(time_format))
        dt += datetime.timedelta(**{delta_type: delta})
    return dt_list

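# Illustrative example (not part of the original module), using the timestamp
# format from the docstring above with a 3-hour step:
#     get_time_list("2023080203", "2023080215", 3, "hour")
#     # -> ['2023080203', '2023080206', '2023080209', '2023080212', '2023080215']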
def _transform_time(time_str):
    # old_time = '2023080203'
    # time_new = '2023-08-02T03%3A00%3A00Z'
    time_new = f"{time_str[:4]}-{time_str[4:6]}-{time_str[6:8]}T{time_str[8:10]}%3A00%3A00Z"
    return time_new

def _get_query_dict(var, lon_min, lon_max, lat_min, lat_max, time_str_ymdh, time_str_end=None, mode="single_depth", depth=None, level_num=None):
    query_dict = {
        "var": variable_info[var]["var_name"],
        "north": lat_max,
        "west": lon_min,
        "east": lon_max,
        "south": lat_min,
        "horizStride": 1,
        "time": None,
        "time_start": None,
        "time_end": None,
        "timeStride": None,
        "vertCoord": None,
        "vertStride": None,
        "addLatLon": "true",
        "accept": "netcdf4",
    }

    if time_str_end is not None:
        query_dict["time_start"] = _transform_time(time_str_ymdh)
        query_dict["time_end"] = _transform_time(time_str_end)
        query_dict["timeStride"] = 1
    else:
        query_dict["time"] = _transform_time(time_str_ymdh)

    def get_nearest_level_index(depth):
        level_depth = [0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0, 45.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 125.0, 150.0, 200.0, 250.0, 300.0, 350.0, 400.0, 500.0, 600.0, 700.0, 800.0, 900.0, 1000.0, 1250.0, 1500.0, 2000.0, 2500.0, 3000.0, 4000.0, 5000]
        return min(range(len(level_depth)), key=lambda i: abs(level_depth[i] - depth))

    if var not in ["ssh", "u_b", "v_b", "temp_b", "salt_b"] and var in ["u", "v", "temp", "salt"]:
        if mode == "depth":
            if depth < 0 or depth > 5000:
                print("Please ensure the depth is in the range of 0-5000 m")
            query_dict["vertCoord"] = get_nearest_level_index(depth) + 1
        elif mode == "level":
            if level_num < 1 or level_num > 40:
                print("Please ensure the level_num is in the range of 1-40")
            query_dict["vertCoord"] = max(1, min(level_num, 40))
        elif mode == "full":
            query_dict["vertStride"] = 1
        else:
            raise ValueError("Invalid mode. Choose from 'depth', 'level', or 'full'")

    query_dict = {k: v for k, v in query_dict.items() if v is not None}

    return query_dict

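# Illustrative sketch (not part of the original module): one way to turn the
# returned dict into an NCSS request URL. The geographic box and timestamp are
# example values; the time fields are already percent-encoded by _transform_time,
# so a plain join is enough here (data_info/variable_info must be populated first):
#     query = _get_query_dict("u", 105, 130, 0, 45, "2024010100", mode="full")
#     base_url = data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["url"]["uv3z"]["2024"]
#     request_url = base_url + "&".join(f"{k}={v}" for k, v in query.items())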
def _check_time_in_dataset_and_version(time_input, time_end=None):
    # Determine whether we are handling a single time point or a time range
    is_single_time = time_end is None

    # For a single time point, initialize the time range
    if is_single_time:
        time_start = int(time_input)
        time_end = time_start
        time_input_str = str(time_input)
    else:
        time_start = int(time_input)
        time_end = int(time_end)
        time_input_str = f"{time_input}-{time_end}"

    # Pad the time format according to its length
    if len(str(time_start)) == 8:
        time_start = str(time_start) + "00"
    if len(str(time_end)) == 8:
        time_end = str(time_end) + "21"
    time_start, time_end = int(time_start), int(time_end)

    d_list = []
    v_list = []
    trange_list = []
    have_data = False

    # Iterate over datasets and versions
    for dataset_name in data_info["hourly"]["dataset"].keys():
        for version_name in data_info["hourly"]["dataset"][dataset_name]["version"].keys():
            time_s, time_e = list(data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["time_range"].values())
            time_s, time_e = str(time_s), str(time_e)
            if len(time_s) == 8:
                time_s = time_s + "00"
            if len(time_e) == 8:
                time_e = time_e + "21"
            # Check whether the requested time lies within the dataset's time range
            if is_single_time:
                if time_start >= int(time_s) and time_start <= int(time_e):
                    d_list.append(dataset_name)
                    v_list.append(version_name)
                    trange_list.append(f"{time_s}-{time_e}")
                    have_data = True
            else:
                if time_start >= int(time_s) and time_end <= int(time_e):
                    d_list.append(dataset_name)
                    v_list.append(version_name)
                    trange_list.append(f"{time_s}-{time_e}")
                    have_data = True

    # Report the result
    print(f"[bold red]{time_input_str} is in the following dataset and version:")
    if have_data:
        for d, v, trange in zip(d_list, v_list, trange_list):
            print(f"[bold blue]{d} {v} {trange}")
        if is_single_time:
            return True
        else:
            base_url_s = _get_base_url(d_list[0], v_list[0], "u", str(time_start))
            base_url_e = _get_base_url(d_list[0], v_list[0], "u", str(time_end))
            if base_url_s == base_url_e:
                return True
            else:
                print(f"[bold red]{time_start} to {time_end} is in different datasets or versions, so you can't download them together")
                return False
    else:
        print(f"[bold red]{time_input_str} is not in any dataset and version")
        return False

| 
      
 439 
     | 
    
         
            +
def _ensure_time_in_specific_dataset_and_version(dataset_name, version_name, time_input, time_end=None):
    # Pad the time to the full ymdh format based on its length
    if len(str(time_input)) == 8:
        time_input = str(time_input) + "00"
    time_start = int(time_input)
    if time_end is not None:
        if len(str(time_end)) == 8:
            time_end = str(time_end) + "21"
        time_end = int(time_end)
    else:
        time_end = time_start

    # Check that the specified dataset and version exist
    if dataset_name not in data_info["hourly"]["dataset"]:
        print(f"[bold red]Dataset {dataset_name} not found.")
        return False
    if version_name not in data_info["hourly"]["dataset"][dataset_name]["version"]:
        print(f"[bold red]Version {version_name} not found in dataset {dataset_name}.")
        return False

    # Get the time range of the specified dataset and version
    time_range = data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["time_range"]
    time_s, time_e = list(time_range.values())
    time_s, time_e = str(time_s), str(time_e)
    if len(time_s) == 8:
        time_s = time_s + "00"
    if len(time_e) == 8:
        time_e = time_e + "21"
    time_s, time_e = int(time_s), int(time_e)

    # Check whether the requested time lies within that time range
    if time_start >= time_s and time_end <= time_e:
        print(f"[bold blue]Time {time_input} to {time_end} is within dataset {dataset_name} and version {version_name}.")
        return True
    else:
        print(f"[bold red]Time {time_input} to {time_end} is not within dataset {dataset_name} and version {version_name}.")
        return False

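The time checks above share one convention: an 8-digit date (yyyymmdd) is padded to a 10-digit yyyymmddhh string, "00" for a range start and "21" for a range end, and the result is compared as an integer. A minimal sketch of that convention with made-up values:

    # Pad yyyymmdd to yyyymmddhh and compare as plain integers
    time_s, time_e = "20180101", "20200101"   # hypothetical dataset time range
    time_s, time_e = int(time_s + "00"), int(time_e + "21")
    query = int("2019060512")                 # hypothetical requested time
    print(time_s <= query <= time_e)          # True
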
def _direct_choose_dataset_and_version(time_input, time_end=None):
    # data_info is assumed to be a dict describing the datasets and versions
    # Example structure: data_info['hourly']['dataset'][dataset_name]['version'][version_name]['time_range']

    if len(str(time_input)) == 8:
        time_input = str(time_input) + "00"

    # If time_end is None, fall back to time_input
    if time_end is None:
        time_end = time_input

    # Normalize the start and end times to the full ymdh format
    time_start, time_end = int(str(time_input)[:10]), int(str(time_end)[:10])

    dataset_name_out, version_name_out = None, None

    for dataset_name in data_info["hourly"]["dataset"].keys():
        for version_name in data_info["hourly"]["dataset"][dataset_name]["version"].keys():
            [time_s, time_e] = list(data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["time_range"].values())
            time_s, time_e = str(time_s), str(time_e)
            if len(time_s) == 8:
                time_s = time_s + "00"
            if len(time_e) == 8:
                time_e = time_e + "21"
            time_s, time_e = int(time_s), int(time_e)

            # Check whether the time falls within this dataset/version time range
            if time_start >= time_s and time_end <= time_e:
                # print(f'[bold purple]dataset: {dataset_name}, version: {version_name} is chosen')
                # return dataset_name, version_name
                dataset_name_out, version_name_out = dataset_name, version_name

    if dataset_name_out is not None and version_name_out is not None:
        print(f"[bold purple]dataset: {dataset_name_out}, version: {version_name_out} is chosen")

    # Returns None for both values if no matching dataset and version are found
    return dataset_name_out, version_name_out

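Both selection helpers walk the same nested data_info mapping. A rough sketch of the shape they assume; the dataset name, version name, inner key names, and URL below are illustrative placeholders, not values taken from the package:

    data_info = {
        "hourly": {
            "dataset": {
                "GLBv0.08": {  # hypothetical dataset name
                    "version": {
                        "53.X": {  # hypothetical version name
                            "time_range": {"time_start": "2018010112", "time_end": "2020021909"},  # key names assumed
                            "classification": "same_path",
                            "url": "https://example.invalid/thredds/ncss/GLBv0.08?",  # placeholder URL
                        }
                    }
                }
            }
        }
    }
    print(list(data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["time_range"].values()))
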
def _get_base_url(dataset_name, version_name, var, ymdh_str):
    year_str = int(ymdh_str[:4])
    url_dict = data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["url"]
    classification_method = data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["classification"]
    if classification_method == "year_different":
        base_url = url_dict[str(year_str)]
    elif classification_method == "same_path":
        base_url = url_dict
    elif classification_method == "var_different":
        base_url = None
        for key, value in var_group.items():
            if var in value:
                base_url = url_dict[key]
                break
        if base_url is None:
            print("Please ensure the var is in [u,v,temp,salt,ssh,u_b,v_b,temp_b,salt_b]")
    elif classification_method == "var_year_different":
        if dataset_name == "GLBy0.08" and version_name == "93.0":
            mdh_str = ymdh_str[4:]
            # GLBy0.08 93.0
            # data time range in each year: year-01-01 12:00 to year+1-01-01 09:00
            if mdh_str <= "010109":
                year_str = int(ymdh_str[:4]) - 1
        base_url = None
        for key, value in var_group.items():
            if var in value:
                base_url = url_dict[key][str(year_str)]
                break
        if base_url is None:
            print("Please ensure the var is in [u,v,temp,salt,ssh,u_b,v_b,temp_b,salt_b]")
    elif classification_method == "single_var_year_different":
        base_url = None
        for key, value in single_var_group.items():
            if var in value:
                base_url = url_dict[key][str(year_str)]
                break
        if base_url is None:
            print("Please ensure the var is in [u,v,temp,salt,ssh]")
    return base_url

def _get_submit_url(dataset_name, version_name, var, ymdh_str, query_dict):
    base_url = _get_base_url(dataset_name, version_name, var, ymdh_str)
    if isinstance(query_dict["var"], str):
        query_dict["var"] = [query_dict["var"]]
    target_url = base_url + "&".join(f"var={var}" for var in query_dict["var"]) + "&" + "&".join(f"{key}={value}" for key, value in query_dict.items() if key != "var")
    return target_url

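The submit URL is assembled by expanding the (possibly multiple) var entries first and then appending the remaining query parameters. A self-contained sketch of that string-building step, with a placeholder base URL and query dict:

    base_url = "https://example.invalid/thredds/ncss/GLBv0.08?"  # placeholder, not the real endpoint
    query_dict = {"var": ["water_u", "water_v"], "north": 90, "south": -80}
    target_url = base_url + "&".join(f"var={v}" for v in query_dict["var"]) + "&" + "&".join(f"{k}={v}" for k, v in query_dict.items() if k != "var")
    print(target_url)
    # https://example.invalid/thredds/ncss/GLBv0.08?var=water_u&var=water_v&north=90&south=-80
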
def _clear_existing_file(file_full_path):
    if os.path.exists(file_full_path):
        os.remove(file_full_path)
        print(f"{file_full_path} has been removed")

def _check_existing_file(file_full_path, avg_size):
    if os.path.exists(file_full_path):
        print(f"[bold #FFA54F]{file_full_path} exists")
        fsize = file_size(file_full_path)
        delta_size_ratio = (fsize - avg_size) / avg_size
        if abs(delta_size_ratio) > 0.025:
            if check_nc(file_full_path):
                # print(f"File size is abnormal but can be opened normally, file size: {fsize:.2f} KB")
                if not _check_ftime(file_full_path, if_print=True):
                    return False
                else:
                    return True
            else:
                print(f"File size is abnormal and cannot be opened, {file_full_path}: {fsize:.2f} KB")
                return False
        else:
            if not _check_ftime(file_full_path, if_print=True):
                return False
            else:
                return True
    else:
        return False

def _get_mean_size30(store_path, same_file):
    if same_file not in fsize_dict.keys():
        # print(f'Same file name: {same_file}')
        fsize_dict[same_file] = {"size": 0, "count": 0}

    if fsize_dict[same_file]["count"] < 30 or fsize_dict[same_file]["size"] == 0:
        # Update the reference size over the first 30 files; after that it is treated as representative of all files and is no longer updated, to save time
        fsize_mean = mean_size(store_path, same_file, max_num=30)
        set_min_size = fsize_mean * 0.95
        fsize_dict[same_file]["size"] = set_min_size
        fsize_dict[same_file]["count"] += 1
    else:
        set_min_size = fsize_dict[same_file]["size"]
    return set_min_size

def _get_mean_size_move(same_file, current_file):
    # Acquire the lock
    with fsize_dict_lock:  # global lock so that only one thread touches fsize_dict at a time
        # Initialize the entry if this file pattern is not in the dict yet
        if same_file not in fsize_dict.keys():
            fsize_dict[same_file] = {"size_list": [], "mean_size": 1.0}

        tolerance_ratio = 0.025  # tolerated relative deviation
        current_file_size = file_size(current_file)

        # If the list is not empty, compute the mean; otherwise keep it at 1
        if fsize_dict[same_file]["size_list"]:
            fsize_dict[same_file]["mean_size"] = sum(fsize_dict[same_file]["size_list"]) / len(fsize_dict[same_file]["size_list"])
            fsize_dict[same_file]["mean_size"] = max(fsize_dict[same_file]["mean_size"], 1.0)
        else:
            fsize_dict[same_file]["mean_size"] = 1.0

        size_difference_ratio = (current_file_size - fsize_dict[same_file]["mean_size"]) / fsize_dict[same_file]["mean_size"]

        if abs(size_difference_ratio) > tolerance_ratio:
            if check_nc(current_file):
                # print(f"File size is abnormal but can be opened normally, file size: {current_file_size:.2f} KB")
                # The file opens normally even though its size is unusual, so keep the current size as the new reference
                fsize_dict[same_file]["size_list"] = [current_file_size]
                fsize_dict[same_file]["mean_size"] = current_file_size
            else:
                _clear_existing_file(current_file)
                print(f"File size is abnormal, may need to be downloaded again, file size: {current_file_size:.2f} KB")
        else:
            # Append the current file size to the list and update the count
            fsize_dict[same_file]["size_list"].append(current_file_size)

    # Return the adjusted mean; by design this is the mean computed before the new value was appended
    return fsize_dict[same_file]["mean_size"]

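The moving check reduces to comparing the newest file against the running mean of previously accepted sizes, with a 2.5% tolerance. A standalone sketch of that comparison with made-up sizes:

    size_list = [1210.0, 1195.5, 1202.3]   # hypothetical accepted sizes (KB)
    mean_size = max(sum(size_list) / len(size_list), 1.0)
    current = 880.0                        # hypothetical newly downloaded file
    ratio = (current - mean_size) / mean_size
    print(abs(ratio) > 0.025)              # True, so the file is treated as suspicious and re-checked
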
def _check_ftime(nc_file, tname="time", if_print=False):
    if not os.path.exists(nc_file):
        return False
    nc_file = str(nc_file)
    try:
        ds = xr.open_dataset(nc_file)
        real_time = ds[tname].values[0]
        ds.close()
        real_time = str(real_time)[:13]
        real_time = real_time.replace("-", "").replace("T", "")
        # -----------------------------------------------------
        f_time = re.findall(r"\d{10}", nc_file)[0]
        if real_time == f_time:
            return True
        else:
            if if_print:
                print(f"[bold #daff5c]File time error, file/real time: [bold blue]{f_time}/{real_time}")
            return False
    except Exception as e:
        if if_print:
            print(f"[bold #daff5c]File time check failed, {nc_file}: {e}")
        return False

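Both timestamps end up in the same 10-digit yyyymmddhh form: the first 13 characters of the ISO datetime are stripped of "-" and "T", and the file name is expected to carry the same stamp. A small sketch of that comparison:

    import re

    real_time = str("2018-01-01T12:00:00.000000000")[:13].replace("-", "").replace("T", "")
    f_time = re.findall(r"\d{10}", "HYCOM_water_u_2018010112.nc")[0]
    print(real_time, f_time, real_time == f_time)  # 2018010112 2018010112 True
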
def _correct_time(nc_file):
    # Open the NetCDF file
    dataset = nc.Dataset(nc_file)

    # Read the time units
    time_units = dataset.variables["time"].units

    # Close the file
    dataset.close()

    # Parse the units string to get the time origin
    origin_str = time_units.split("since")[1].strip()
    origin_datetime = datetime.datetime.strptime(origin_str, "%Y-%m-%d %H:%M:%S")

    # Extract the date string from the file name
    given_date_str = re.findall(r"\d{10}", str(nc_file))[0]

    # Convert the extracted date string into a datetime object
    given_datetime = datetime.datetime.strptime(given_date_str, "%Y%m%d%H")

    # Compute the offset between the given date and the time origin, in the unit of the time variable
    time_difference = (given_datetime - origin_datetime).total_seconds()
    if "hours" in time_units:
        time_difference /= 3600
    elif "days" in time_units:
        time_difference /= 3600 * 24

    # Overwrite the time variable in the NetCDF file
    modify_nc(nc_file, "time", None, time_difference)

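The correction rewrites the time coordinate as the offset of the file-name timestamp from the origin declared in the units attribute, expressed in that attribute's own unit. A minimal sketch of the arithmetic; the units string here is a made-up example:

    import datetime

    time_units = "hours since 2000-01-01 00:00:00"  # hypothetical units attribute
    origin = datetime.datetime.strptime(time_units.split("since")[1].strip(), "%Y-%m-%d %H:%M:%S")
    given = datetime.datetime.strptime("2018010112", "%Y%m%d%H")
    offset = (given - origin).total_seconds() / 3600
    print(offset)  # 157812.0 hours between the origin and 2018-01-01 12:00
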
def _download_file(target_url, store_path, file_name, check=False):
    # Check if the file exists
    fname = Path(store_path) / file_name
    file_name_split = file_name.split("_")
    file_name_split = file_name_split[:-1]
    # same_file = f"{file_name_split[0]}_{file_name_split[1]}*nc"
    same_file = "_".join(file_name_split) + "*nc"

    if check:
        if same_file not in fsize_dict.keys():  # check the first file on its own, since there is no size to compare against yet
            check_nc(fname, delete_switch=True)

        # set_min_size = _get_mean_size30(store_path, same_file)  # old approach: average over the first 30 files only; cannot adapt if sizes change
        get_mean_size = _get_mean_size_move(same_file, fname)

        if _check_existing_file(fname, get_mean_size):
            count_dict["skip"] += 1
            return
    _clear_existing_file(fname)

    if not use_idm:
        # -----------------------------------------------
        print(f"[bold #f0f6d0]Requesting {file_name} ...")
        # Create a session
        s = requests.Session()
        download_success = False
        request_times = 0

        def calculate_wait_time(time_str, target_url):
            # Regular expression matching times in YYYYMMDDHH format
            time_pattern = r"\d{10}"

            # Example file names:
            # str1 = 'HYCOM_water_u_2018010100-2018010112.nc'
            # str2 = 'HYCOM_water_u_2018010100.nc'

            # Find the times with the regular expression
            times_in_str = re.findall(time_pattern, time_str)

            # Count how many times appear in the string
            num_times_str = len(times_in_str)

            if num_times_str > 1:
                delta_t = datetime.datetime.strptime(times_in_str[1], "%Y%m%d%H") - datetime.datetime.strptime(times_in_str[0], "%Y%m%d%H")
                delta_t = delta_t.total_seconds() / 3600
                delta_t = delta_t / 3 + 1
            else:
                delta_t = 1
            # Wait at most 5 minutes per variable: too short and the request may fail, too long and time is wasted
            num_var = int(target_url.count("var="))
            if num_var <= 0:
                num_var = 1
            return int(delta_t * 5 * 60 * num_var)

        max_timeout = calculate_wait_time(file_name, target_url)
        print(f"[bold #912dbc]Max timeout: {max_timeout} seconds")

        # print(f'Download_start_time: {datetime.datetime.now()}')
        download_time_s = datetime.datetime.now()
        order_list = ["1st", "2nd", "3rd", "4th", "5th", "6th", "7th", "8th", "9th", "10th"]
        while not download_success:
            if request_times >= 10:
                # print(f'Download failed, retried {request_times} times\nYou can skip it for now and try again later')
                print(f"[bold #ffe5c0]Download failed after {request_times} times\nYou can skip it and try again later")
                count_dict["fail"] += 1
                break
            if request_times > 0:
                # print(f'\rRetrying, attempt {request_times}', end="")
                print(f"[bold #ffe5c0]Retrying the {order_list[request_times - 1]} time...")
            # Try to download the file
            try:
                headers = {"User-Agent": get_ua()}
                """ response = s.get(target_url, headers=headers, timeout=random.randint(5, max_timeout))
                response.raise_for_status()  # raises HTTPError if the response status is not 200

                # Save the file
                with open(filename, 'wb') as f:
                    f.write(response.content) """

                response = s.get(target_url, headers=headers, stream=True, timeout=random.randint(5, max_timeout))  # enable streaming
                response.raise_for_status()  # raises HTTPError if the response status is not 200
                # Save the file
                with open(fname, "wb") as f:
                    print(f"[bold #96cbd7]Downloading {file_name} ...")
                    for chunk in response.iter_content(chunk_size=1024):
                        if chunk:
                            f.write(chunk)

                f.close()

                if not _check_ftime(fname, if_print=True):
                    if match_time:
                        _correct_time(fname)
                    else:
                        _clear_existing_file(fname)
                        # print(f"[bold #ffe5c0]File time error, {fname}")
                        count_dict["no_data"] += 1
                        break

                # print(f'\rFile {fname} downloaded successfully', end="")
                if os.path.exists(fname):
                    download_success = True
                    download_time_e = datetime.datetime.now()
                    download_delta = download_time_e - download_time_s
                    print(f"[#3dfc40]File [bold #dfff73]{fname} [#3dfc40]has been downloaded successfully, Time: [#39cbdd]{download_delta}")
                    count_dict["success"] += 1
                    # print(f'Download_end_time: {datetime.datetime.now()}')

            except requests.exceptions.HTTPError as errh:
                print(f"Http Error: {errh}")
            except requests.exceptions.ConnectionError as errc:
                print(f"Error Connecting: {errc}")
            except requests.exceptions.Timeout as errt:
                print(f"Timeout Error: {errt}")
            except requests.exceptions.RequestException as err:
                print(f"OOps: Something Else: {err}")

            time.sleep(3)
            request_times += 1
    else:
        idm_downloader(target_url, store_path, file_name, given_idm_engine)
        idm_download_list.append(fname)
        print(f"[bold #3dfc40]File [bold #dfff73]{fname} [#3dfc40]has been submit to IDM for downloading")

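As a concrete instance of the timeout rule in calculate_wait_time: a file named for the span 2018010100-2018010112 covers 12 hours, so delta_t = 12 / 3 + 1 = 5, and with a single var= parameter in the URL the cap is 5 * 5 * 60 = 1500 seconds. The same arithmetic as a standalone sketch:

    import datetime
    import re

    name = "HYCOM_water_u_2018010100-2018010112.nc"
    t0, t1 = re.findall(r"\d{10}", name)
    hours = (datetime.datetime.strptime(t1, "%Y%m%d%H") - datetime.datetime.strptime(t0, "%Y%m%d%H")).total_seconds() / 3600
    print(int((hours / 3 + 1) * 5 * 60))  # 1500, for one var= in the URL
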
def _check_hour_is_valid(ymdh_str):
    # hour should be 00, 03, 06, 09, 12, 15, 18, 21
    hh = int(str(ymdh_str[-2:]))
    if hh in [0, 3, 6, 9, 12, 15, 18, 21]:
        return True
    else:
        return False

def _check_dataset_version(dataset_name, version_name, download_time, download_time_end=None):
    if dataset_name is not None and version_name is not None:
        just_ensure = _ensure_time_in_specific_dataset_and_version(dataset_name, version_name, download_time, download_time_end)
        if just_ensure:
            return dataset_name, version_name
        else:
            return None, None

    # Make sure the download time is a string
    download_time_str = str(download_time)

    if len(download_time_str) == 8:
        download_time_str = download_time_str + "00"

    # Check that the hour is valid (only needed for a single time point)
    if download_time_end is None and not _check_hour_is_valid(download_time_str):
        print("Please ensure the hour is 00, 03, 06, 09, 12, 15, 18, 21")
        raise ValueError("The hour is invalid")

    # Set the time range depending on whether an end time was given
    if download_time_end is not None:
        if len(str(download_time_end)) == 8:
            download_time_end = str(download_time_end) + "21"
        have_data = _check_time_in_dataset_and_version(download_time_str, download_time_end)
        if have_data:
            return _direct_choose_dataset_and_version(download_time_str, download_time_end)
    else:
        have_data = _check_time_in_dataset_and_version(download_time_str)
        if have_data:
            return _direct_choose_dataset_and_version(download_time_str)

    return None, None

def _get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time, download_time_end=None):
    # year_str = str(download_time)[:4]
    ymdh_str = str(download_time)
    if depth is not None and level_num is not None:
        print("Please ensure the depth or level_num is None")
        print("Progress will use the depth")
        which_mode = "depth"
    elif depth is not None and level_num is None:
        print(f"Data of single depth (~{depth} m) will be downloaded...")
        which_mode = "depth"
    elif level_num is not None and depth is None:
        print(f"Data of single level ({level_num}) will be downloaded...")
        which_mode = "level"
    else:
        # print("Full depth or full level data will be downloaded...")
        which_mode = "full"
    query_dict = _get_query_dict(var, lon_min, lon_max, lat_min, lat_max, download_time, download_time_end, which_mode, depth, level_num)
    submit_url = _get_submit_url(dataset_name, version_name, var, ymdh_str, query_dict)
    return submit_url

def _prepare_url_to_download(var, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, download_time="2024083100", download_time_end=None, depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None, check=False):
    print("[bold #ecdbfe]-" * 160)
    download_time = str(download_time)
    if download_time_end is not None:
        download_time_end = str(download_time_end)
        dataset_name, version_name = _check_dataset_version(dataset_name, version_name, download_time, download_time_end)
    else:
        dataset_name, version_name = _check_dataset_version(dataset_name, version_name, download_time)
    if dataset_name is None and version_name is None:
        count_dict["no_data"] += 1
        if download_time_end is not None:
            count_dict["no_data_list"].append(f"{download_time}-{download_time_end}")
        else:
            count_dict["no_data_list"].append(download_time)
        return

    if isinstance(var, str):
        var = [var]

    if isinstance(var, list):
        if len(var) == 1:
            var = var[0]
            submit_url = _get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time, download_time_end)
            file_name = f"HYCOM_{variable_info[var]['var_name']}_{download_time}.nc"
            if download_time_end is not None:
                file_name = f"HYCOM_{variable_info[var]['var_name']}_{download_time}-{download_time_end}.nc"  # the two times must be joined with a hyphen, not an underscore, or later lookups of files for the same variable will break
            _download_file(submit_url, store_path, file_name, check)
        else:
            if download_time < "2024081012":
                varlist = [_ for _ in var]
                for key, value in var_group.items():
                    current_group = []
                    for v in varlist:
    
         
            +
                                    if v in value:
         
     | 
| 
      
 926 
     | 
    
         
            +
                                        current_group.append(v)
         
     | 
| 
      
 927 
     | 
    
         
            +
                                if len(current_group) == 0:
         
     | 
| 
      
 928 
     | 
    
         
            +
                                    continue
         
     | 
| 
      
 929 
     | 
    
         
            +
             
     | 
| 
      
 930 
     | 
    
         
            +
                                var = current_group[0]
         
     | 
| 
      
 931 
     | 
    
         
            +
                                submit_url = _get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time, download_time_end)
         
     | 
| 
      
 932 
     | 
    
         
            +
                                file_name = f"HYCOM_{variable_info[var]['var_name']}_{download_time}.nc"
         
     | 
| 
      
 933 
     | 
    
         
            +
                                old_str = f"var={variable_info[var]['var_name']}"
         
     | 
| 
      
 934 
     | 
    
         
            +
                                new_str = f"var={variable_info[var]['var_name']}"
         
     | 
| 
      
 935 
     | 
    
         
            +
                                if len(current_group) > 1:
         
     | 
| 
      
 936 
     | 
    
         
            +
                                    for v in current_group[1:]:
         
     | 
| 
      
 937 
     | 
    
         
            +
                                        new_str = f"{new_str}&var={variable_info[v]['var_name']}"
         
     | 
| 
      
 938 
     | 
    
         
            +
                                    submit_url = submit_url.replace(old_str, new_str)
         
     | 
| 
      
 939 
     | 
    
         
            +
                                    # file_name = f'HYCOM_{'-'.join([variable_info[v]["var_name"] for v in current_group])}_{download_time}.nc'
         
     | 
| 
      
 940 
     | 
    
         
            +
                                    file_name = f"HYCOM_{key}_{download_time}.nc"
         
     | 
| 
      
 941 
     | 
    
         
            +
                                    if download_time_end is not None:
         
     | 
| 
      
 942 
     | 
    
         
            +
                                        file_name = f"HYCOM_{key}_{download_time}-{download_time_end}.nc"  # 这里时间不能用下划线,不然后续处理查找同一变量文件会出问题
         
     | 
| 
      
 943 
     | 
    
         
            +
                                _download_file(submit_url, store_path, file_name, check)
         
     | 
| 
      
 944 
     | 
    
         
            +
                        else:
         
     | 
| 
      
 945 
     | 
    
         
            +
                            for v in var:
         
     | 
| 
      
 946 
     | 
    
         
            +
                                submit_url = _get_submit_url_var(v, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time, download_time_end)
         
     | 
| 
      
 947 
     | 
    
         
            +
                                file_name = f"HYCOM_{variable_info[v]['var_name']}_{download_time}.nc"
         
     | 
| 
      
 948 
     | 
    
         
            +
                                if download_time_end is not None:
         
     | 
| 
      
 949 
     | 
    
         
            +
                                    file_name = f"HYCOM_{variable_info[v]['var_name']}_{download_time}-{download_time_end}.nc"
         
     | 
| 
      
 950 
     | 
    
         
            +
                                _download_file(submit_url, store_path, file_name, check)
         
     | 
| 
      
 951 
     | 
    
         
            +
             
     | 
| 
      
 952 
     | 
    
         
            +
             
     | 
| 
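In the pre-2024081012 branch above, several variables of the same group are packed into one request by rewriting the single `var=` query parameter into a chain of `var=` parameters. A minimal, self-contained sketch of just that string manipulation (the `variable_info` subset and the URL below are made-up illustrations, not values from this module):

```python
# Made-up two-entry subset of the module's variable_info mapping and a fake NCSS URL.
variable_info = {"u": {"var_name": "water_u"}, "v": {"var_name": "water_v"}}
current_group = ["u", "v"]

submit_url = "https://example.invalid/thredds/ncss?var=water_u&north=45&south=15"

old_str = f"var={variable_info[current_group[0]]['var_name']}"
new_str = old_str
for v in current_group[1:]:
    new_str = f"{new_str}&var={variable_info[v]['var_name']}"

# A single request now asks for every variable in the group.
submit_url = submit_url.replace(old_str, new_str)
print(submit_url)  # ...ncss?var=water_u&var=water_v&north=45&south=15
```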
      
+def _convert_full_name_to_short_name(full_name):
+    for var, info in variable_info.items():
+        if full_name == info["var_name"] or full_name == info["standard_name"] or full_name == var:
+            return var
+    print("[bold #FFE4E1]Please ensure the var is in:\n[bold blue]u,v,temp,salt,ssh,u_b,v_b,temp_b,salt_b")
+    print("or")
+    print("[bold blue]water_u, water_v, water_temp, salinity, surf_el, water_u_bottom, water_v_bottom, water_temp_bottom, salinity_bottom")
+    return False
+
+
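The lookup above accepts the short key, the HYCOM variable name, or the standard name, and falls back to `False` when nothing matches. A toy illustration with a hypothetical two-entry `variable_info` (the `standard_name` strings are invented for the example):

```python
# Hypothetical two-entry stand-in for the module-level variable_info mapping.
variable_info = {
    "temp": {"var_name": "water_temp", "standard_name": "sea_water_temperature"},
    "ssh": {"var_name": "surf_el", "standard_name": "sea_surface_elevation"},
}

def to_short_name(full_name):
    # Same matching rule as _convert_full_name_to_short_name above.
    for var, info in variable_info.items():
        if full_name in (info["var_name"], info["standard_name"], var):
            return var
    return False

print(to_short_name("water_temp"))  # -> "temp"
print(to_short_name("ssh"))         # -> "ssh"
print(to_short_name("speed"))       # -> False
```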
      
+def _download_task(var, time_str, time_str_end, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check):
+    """
+    # Parallel download task
+    # This wrapper exists for parallel downloading and is required; calling direct_download directly in parallel causes problems
+
+    Task encapsulation: the data and operations each task needs are wrapped in one function, so every task is independent and does not interfere with the others.
+    Here, download_task wraps each download task together with all of the parameters it needs.
+    Every task therefore has its own parameters and data and never shares or modifies data with other tasks.
+    As a result, even when many tasks run at the same time, their data cannot get mixed up.
+    """
+
+    _prepare_url_to_download(var, lon_min, lon_max, lat_min, lat_max, time_str, time_str_end, depth, level, store_path, dataset_name, version_name, check)
+
+
+def _done_callback(future, progress, task, total, counter_lock):
+    """
+    # Callback for the parallel download tasks
+    # This wrapper exists for parallel downloading and is required; calling direct_download directly in parallel causes problems
+
+    Callback: when a task finishes, this function is called so the progress bar can be updated immediately to show how many tasks are done.
+    Here, done_callback advances the progress bar each time a task completes.
+    Even when many tasks run at the same time, the completion of each one is shown right away instead of only after all tasks have finished.
+    """
+
+    global parallel_counter
+    with counter_lock:
+        parallel_counter += 1
+        progress.update(task, advance=1, description=f"[cyan]Downloading... {parallel_counter}/{total}")
+
+
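The two helpers above implement a common pattern: wrap each download in a self-contained task and advance a lock-protected counter from a completion loop. A minimal, standalone sketch of that pattern, independent of this module (the `fake_task` function, file names, and timings are made up; it uses the same `rich` Progress API the module relies on):

```python
from concurrent.futures import ThreadPoolExecutor, as_completed
from threading import Lock
import random
import time

from rich.progress import Progress

counter = 0
counter_lock = Lock()

def fake_task(name):
    # Stand-in for one download; sleeps instead of fetching data.
    time.sleep(random.uniform(0.1, 0.3))
    return name

with Progress() as progress:
    names = [f"file_{i:02d}.nc" for i in range(10)]
    task = progress.add_task("[cyan]Downloading...", total=len(names))
    with ThreadPoolExecutor(max_workers=3) as executor:
        futures = [executor.submit(fake_task, n) for n in names]
        for future in as_completed(futures):
            with counter_lock:  # protect the shared counter across threads
                counter += 1
                progress.update(task, advance=1, description=f"[cyan]Downloading... {counter}/{len(names)}")
```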
      
+def _download_hourly_func(var, time_s, time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False, ftimes=1):
+    """
+    Description:
+    Download the data of a single time or a series of times
+
+    Parameters:
+    var: str, the variable name, such as 'u', 'v', 'temp', 'salt', 'ssh', 'u_b', 'v_b', 'temp_b', 'salt_b' or 'water_u', 'water_v', 'water_temp', 'salinity', 'surf_el', 'water_u_bottom', 'water_v_bottom', 'water_temp_bottom', 'salinity_bottom'
+    time_s: str, the start time, such as '2024110100' or '20241101'; if an hour is given, it should be 00, 03, 06, 09, 12, 15, 18 or 21
+    time_e: str, the end time, such as '2024110221' or '20241102'; if an hour is given, it should be 00, 03, 06, 09, 12, 15, 18 or 21
+    lon_min: float, the minimum longitude, default is 0
+    lon_max: float, the maximum longitude, default is 359.92
+    lat_min: float, the minimum latitude, default is -80
+    lat_max: float, the maximum latitude, default is 90
+    depth: float, the depth, default is None
+    level: int, the level number, default is None
+    store_path: str, the path to store the data, default is None
+    dataset_name: str, the dataset name, default is None, example: 'GLBv0.08', 'GLBu0.08', 'GLBy0.08'
+    version_name: str, the version name, default is None, example: '53.X', '56.3'
+    num_workers: int, the number of workers, default is None
+
+    Returns:
+    None
+    """
+    ymdh_time_s, ymdh_time_e = str(time_s), str(time_e)
+    if num_workers is not None and num_workers > 1:  # only needed for the progress bar when downloading with multiple threads
+        global parallel_counter
+        parallel_counter = 0
+        counter_lock = Lock()  # create a lock so the counter is thread-safe
+    if ymdh_time_s == ymdh_time_e:
+        _prepare_url_to_download(var, lon_min, lon_max, lat_min, lat_max, ymdh_time_s, None, depth, level, store_path, dataset_name, version_name, check)
+    elif int(ymdh_time_s) < int(ymdh_time_e):
+        print("Downloading a series of files...")
+        time_list = get_time_list(ymdh_time_s, ymdh_time_e, 3, "hour")
+        with Progress() as progress:
+            task = progress.add_task("[cyan]Downloading...", total=len(time_list))
+            if ftimes == 1:
+                if num_workers is None or num_workers <= 1:
+                    # serial mode
+                    for i, time_str in enumerate(time_list):
+                        _prepare_url_to_download(var, lon_min, lon_max, lat_min, lat_max, time_str, None, depth, level, store_path, dataset_name, version_name, check)
+                        progress.update(task, advance=1, description=f"[cyan]Downloading... {i + 1}/{len(time_list)}")
+                else:
+                    # parallel mode
+                    with ThreadPoolExecutor(max_workers=num_workers) as executor:
+                        futures = [executor.submit(_download_task, var, time_str, None, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check) for time_str in time_list]
+                        """ for i, future in enumerate(futures):
+                            future.add_done_callback(lambda _: progress.update(task, advance=1, description=f"[cyan]Downloading... {i+1}/{len(time_list)}")) """
+                        for feature in as_completed(futures):
+                            _done_callback(feature, progress, task, len(time_list), counter_lock)
+            else:
+                new_time_list = get_time_list(ymdh_time_s, ymdh_time_e, 3 * ftimes, "hour")
+                total_num = len(new_time_list)
+                if num_workers is None or num_workers <= 1:
+                    # serial mode
+                    for i, time_str in enumerate(new_time_list):
+                        time_str_end_index = int(min(len(time_list) - 1, int(i * ftimes + ftimes - 1)))
+                        time_str_end = time_list[time_str_end_index]
+                        _prepare_url_to_download(var, lon_min, lon_max, lat_min, lat_max, time_str, time_str_end, depth, level, store_path, dataset_name, version_name, check)
+                        progress.update(task, advance=1, description=f"[cyan]Downloading... {i + 1}/{total_num}")
+                else:
+                    # parallel mode
+                    with ThreadPoolExecutor(max_workers=num_workers) as executor:
+                        futures = [executor.submit(_download_task, var, new_time_list[i], time_list[int(min(len(time_list) - 1, int(i * ftimes + ftimes - 1)))], lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, check) for i in range(total_num)]
+                        """ for i, future in enumerate(futures):
+                            future.add_done_callback(lambda _: progress.update(task, advance=1, description=f"[cyan]Downloading... {i+1}/{total_num}")) """
+                        for feature in as_completed(futures):
+                            _done_callback(feature, progress, task, len(time_list), counter_lock)
+    else:
+        print("[bold red]Please ensure that time_s is not later than time_e")
+
+
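When `ftimes > 1`, each request covers a window of `ftimes` 3-hourly records: the window start times come from a coarser list (step `3 * ftimes` hours), while each window end is taken from the fine 3-hourly list, clamped at its last element. A small self-contained sketch of that index arithmetic, using plain hour integers in place of the module's `get_time_list` output:

```python
# Hypothetical stand-ins: 3-hourly steps for one day, and ftimes = 4 records per file.
time_list = list(range(0, 24, 3))                # fine list: [0, 3, 6, ..., 21]
ftimes = 4
new_time_list = list(range(0, 24, 3 * ftimes))   # coarse window starts: [0, 12]

for i, start in enumerate(new_time_list):
    # Same clamping as above: the last window may hold fewer than ftimes records.
    end_index = min(len(time_list) - 1, i * ftimes + ftimes - 1)
    end = time_list[end_index]
    print(f"window {i}: records from hour {start:02d} to hour {end:02d} go into one file")
```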
      
+def download(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level=None, store_path=None, dataset_name=None, version_name=None, num_workers=None, check=False, ftimes=1, idm_engine=None, fill_time=False):
+    """
+    Description:
+        Download the data of a single time or a series of times
+
+    Parameters:
+        var: str or list, the variable name, such as 'u', 'v', 'temp', 'salt', 'ssh', 'u_b', 'v_b', 'temp_b', 'salt_b' or 'water_u', 'water_v', 'water_temp', 'salinity', 'surf_el', 'water_u_bottom', 'water_v_bottom', 'water_temp_bottom', 'salinity_bottom'
+        time_s: str, the start time, such as '2024110100' or '20241101'; if an hour is given, it should be 00, 03, 06, 09, 12, 15, 18 or 21
+        time_e: str, the end time, such as '2024110221' or '20241102'; if an hour is given, it should be 00, 03, 06, 09, 12, 15, 18 or 21; default is None, in which case (or when equal to time_s) only the single time time_s is downloaded
+        lon_min: float, the minimum longitude, default is 0
+        lon_max: float, the maximum longitude, default is 359.92
+        lat_min: float, the minimum latitude, default is -80
+        lat_max: float, the maximum latitude, default is 90
+        depth: float, the depth, default is None; set it to download a single depth, suggested range [0, 5000]
+        level: int, the level number, default is None; set it to download a single level, suggested range [1, 40]
+        store_path: str, the path to store the data, default is None; if not set, the data is stored in the current working directory
+        dataset_name: str, the dataset name, default is None, example: 'GLBv0.08', 'GLBu0.08', 'GLBy0.08'; if not set, the dataset is chosen according to the download time
+        version_name: str, the version name, default is None, example: '53.X', '56.3'; if not set, the version is chosen according to the download time
+        num_workers: int, the number of workers, default is None; if not set, one worker is used; it is suggested not to set this too large
+        check: bool, whether to check existing files, default is False; if True, an existing valid file is not downloaded again, otherwise any existing file is overwritten
+        ftimes: int, the number of times per file, default is 1 (one time per file); the maximum is 8, in which case 8 times are downloaded into one file
+        idm_engine: str, the IDM engine, default is None; if set, IDM is used to download the data, example: "D:\\Programs\\Internet Download Manager\\IDMan.exe"
+        fill_time: bool, whether to correct the time, default is False. The real time of some downloaded data does not match the time in the file name (e.g. the requested time is 2024110100 but the file actually contains 2024110103). If True, the time inside the file is rewritten to match the file name; if False, such files are skipped. Since the rewritten file still does not contain data for the requested time, setting fill_time to True is not recommended.
+
+    Returns:
+        None
+    """
     | 
    
         
            +
                _get_initial_data()
         
     | 
| 
      
 1092 
     | 
    
         
            +
             
     | 
| 
      
 1093 
     | 
    
         
            +
                # 打印信息并处理数据集和版本名称
         
     | 
| 
      
 1094 
     | 
    
         
            +
                if dataset_name is None and version_name is None:
         
     | 
| 
      
 1095 
     | 
    
         
            +
                    print("The dataset_name and version_name are None, so the dataset and version will be chosen according to the download_time.\nIf there is more than one dataset and version in the time range, the first one will be chosen.")
         
     | 
| 
      
 1096 
     | 
    
         
            +
                    print("If you wanna choose the dataset and version by yourself, please set the dataset_name and version_name together.")
         
     | 
| 
      
 1097 
     | 
    
         
            +
                elif dataset_name is None and version_name is not None:
         
     | 
| 
      
 1098 
     | 
    
         
            +
                    print("Please ensure the dataset_name is not None")
         
     | 
| 
      
 1099 
     | 
    
         
            +
                    print("If you do not add the dataset_name, both the dataset and version will be chosen according to the download_time.")
         
     | 
| 
      
 1100 
     | 
    
         
            +
                elif dataset_name is not None and version_name is None:
         
     | 
| 
      
 1101 
     | 
    
         
            +
                    print("Please ensure the version_name is not None")
         
     | 
| 
      
 1102 
     | 
    
         
            +
                    print("If you do not add the version_name, both the dataset and version will be chosen according to the download_time.")
         
     | 
| 
      
 1103 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1104 
     | 
    
         
            +
                    print("The dataset_name and version_name are both set by yourself.")
         
     | 
| 
      
 1105 
     | 
    
         
            +
                    print("Please ensure the dataset_name and version_name are correct.")
         
     | 
| 
      
 1106 
     | 
    
         
            +
             
     | 
| 
      
 1107 
     | 
    
         
            +
                if isinstance(var, list):
         
     | 
| 
      
 1108 
     | 
    
         
            +
                    if len(var) == 1:
         
     | 
| 
      
 1109 
     | 
    
         
            +
                        var = _convert_full_name_to_short_name(var[0])
         
     | 
| 
      
 1110 
     | 
    
         
            +
                    else:
         
     | 
| 
      
 1111 
     | 
    
         
            +
                        var = [_convert_full_name_to_short_name(v) for v in var]
         
     | 
| 
      
 1112 
     | 
    
         
            +
                elif isinstance(var, str):
         
     | 
| 
      
 1113 
     | 
    
         
            +
                    var = _convert_full_name_to_short_name(var)
         
     | 
| 
      
 1114 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1115 
     | 
    
         
            +
                    raise ValueError("The var is invalid")
         
     | 
| 
      
 1116 
     | 
    
         
            +
                if var is False:
         
     | 
| 
      
 1117 
     | 
    
         
            +
                    raise ValueError("The var is invalid")
         
     | 
| 
      
 1118 
     | 
    
         
            +
                if lon_min < 0 or lon_min > 359.92 or lon_max < 0 or lon_max > 359.92 or lat_min < -80 or lat_min > 90 or lat_max < -80 or lat_max > 90:
         
     | 
| 
      
 1119 
     | 
    
         
            +
                    print("Please ensure the lon_min, lon_max, lat_min, lat_max are in the range")
         
     | 
| 
      
 1120 
     | 
    
         
            +
                    print("The range of lon_min, lon_max is 0~359.92")
         
     | 
| 
      
 1121 
     | 
    
         
            +
                    print("The range of lat_min, lat_max is -80~90")
         
     | 
| 
      
 1122 
     | 
    
         
            +
                    raise ValueError("The lon or lat is invalid")
         
     | 
| 
      
 1123 
     | 
    
         
            +
             
     | 
| 
      
 1124 
     | 
    
         
            +
                if ftimes != 1:
         
     | 
| 
      
 1125 
     | 
    
         
            +
                    print("Please ensure the ftimes is in [1, 8]")
         
     | 
| 
      
 1126 
     | 
    
         
            +
                    ftimes = max(min(ftimes, 8), 1)
         
     | 
| 
      
 1127 
     | 
    
         
            +
             
     | 
| 
      
 1128 
     | 
    
         
            +
                if store_path is None:
         
     | 
| 
      
 1129 
     | 
    
         
            +
                    store_path = str(Path.cwd())
         
     | 
| 
      
 1130 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1131 
     | 
    
         
            +
                    os.makedirs(str(store_path), exist_ok=True)
         
     | 
| 
      
 1132 
     | 
    
         
            +
             
     | 
| 
      
 1133 
     | 
    
         
            +
                if num_workers is not None:
         
     | 
| 
      
 1134 
     | 
    
         
            +
                    num_workers = max(min(num_workers, 10), 1)  # 暂时不限制最大值,再检查的时候可以多开一些线程
         
     | 
| 
      
 1135 
     | 
    
         
            +
                    # num_workers = int(max(num_workers, 1))
         
     | 
| 
      
 1136 
     | 
    
         
            +
                time_s = str(time_s)
         
     | 
| 
      
 1137 
     | 
    
         
            +
                if len(time_s) == 8:
         
     | 
| 
      
 1138 
     | 
    
         
            +
                    time_s += "00"
         
     | 
| 
      
 1139 
     | 
    
         
            +
                if time_e is None:
         
     | 
| 
      
 1140 
     | 
    
         
            +
                    time_e = time_s[:]
         
     | 
| 
      
 1141 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1142 
     | 
    
         
            +
                    time_e = str(time_e)
         
     | 
| 
      
 1143 
     | 
    
         
            +
                    if len(time_e) == 8:
         
     | 
| 
      
 1144 
     | 
    
         
            +
                        time_e += "21"
         
     | 
| 
      
 1145 
     | 
    
         
            +
             
     | 
| 
      
 1146 
     | 
    
         
            +
                global count_dict
         
     | 
| 
      
 1147 
     | 
    
         
            +
                count_dict = {"success": 0, "fail": 0, "skip": 0, "no_data": 0, "total": 0, "no_data_list": []}
         
     | 
| 
      
 1148 
     | 
    
         
            +
             
     | 
| 
      
 1149 
     | 
    
         
            +
                """ global current_platform
         
     | 
| 
      
 1150 
     | 
    
         
            +
                current_platform = platform.system() """
         
     | 
| 
      
 1151 
     | 
    
         
            +
             
     | 
| 
      
 1152 
     | 
    
         
            +
                global fsize_dict
         
     | 
| 
      
 1153 
     | 
    
         
            +
                fsize_dict = {}
         
     | 
| 
      
 1154 
     | 
    
         
            +
             
     | 
| 
      
 1155 
     | 
    
         
            +
                global fsize_dict_lock
         
     | 
| 
      
 1156 
     | 
    
         
            +
                fsize_dict_lock = Lock()
         
     | 
| 
      
 1157 
     | 
    
         
            +
             
     | 
| 
      
 1158 
     | 
    
         
            +
                global use_idm, given_idm_engine, idm_download_list
         
     | 
| 
      
 1159 
     | 
    
         
            +
                if idm_engine is not None:
         
     | 
| 
      
 1160 
     | 
    
         
            +
                    use_idm = True
         
     | 
| 
      
 1161 
     | 
    
         
            +
                    num_workers = 1
         
     | 
| 
      
 1162 
     | 
    
         
            +
                    given_idm_engine = idm_engine
         
     | 
| 
      
 1163 
     | 
    
         
            +
                    idm_download_list = []
         
     | 
| 
      
 1164 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1165 
     | 
    
         
            +
                    use_idm = False
         
     | 
| 
      
 1166 
     | 
    
         
            +
             
     | 
| 
      
 1167 
     | 
    
         
            +
                global match_time
         
     | 
| 
      
 1168 
     | 
    
         
            +
                if fill_time:
         
     | 
| 
      
 1169 
     | 
    
         
            +
                    match_time = True
         
     | 
| 
      
 1170 
     | 
    
         
            +
                else:
         
     | 
| 
      
 1171 
     | 
    
         
            +
                    match_time = False
         
     | 
| 
      
 1172 
     | 
    
         
            +
             
     | 
| 
      
+    _download_hourly_func(var, time_s, time_e, lon_min, lon_max, lat_min, lat_max, depth, level, store_path, dataset_name, version_name, num_workers, check, ftimes)
+
+    if idm_download_list:
+        for f in idm_download_list:
+            wait_success = 0
+            success = False
+            while not success:
+                if check_nc(f):
+                    if match_time:
+                        _correct_time(f)
+                        count_dict["success"] += 1
+                    else:
+                        if not _check_ftime(f):
+                            _clear_existing_file(f)
+                            count_dict["no_data"] += 1
+                            count_dict["no_data_list"].append(str(f).split("_")[-1].split(".")[0])
+                        else:
+                            count_dict["success"] += 1
+                    success = True
+                else:
+                    wait_success += 1
+                    time.sleep(3)
+                    if wait_success >= 20:
+                        success = True
+                        # print(f'{f} download failed')
+                        count_dict["fail"] += 1
+
+    count_dict["total"] = count_dict["success"] + count_dict["fail"] + count_dict["skip"] + count_dict["no_data"]
+
+    print("[bold #ecdbfe]-" * 160)
+    print(f"[bold #ff80ab]Total: {count_dict['total']}\nSuccess: {count_dict['success']}\nFail: {count_dict['fail']}\nSkip: {count_dict['skip']}")
+    if count_dict["fail"] > 0:
+        print("[bold #be5528]Please try to download the failed data again later")
+    if count_dict["no_data"] > 0:
+        if count_dict["no_data"] == 1:
+            print(f"[bold #f90000]There is {count_dict['no_data']} record that does not exist in any dataset or version")
+        else:
+            print(f"[bold #f90000]There are {count_dict['no_data']} records that do not exist in any dataset or version")
+        for no_data in count_dict["no_data_list"]:
+            print(f"[bold #d81b60]{no_data}")
+    print("[bold #ecdbfe]-" * 160)
+
+
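For orientation, a minimal call to the public `download()` function defined above might look like the following sketch. The import path assumes the released `oafuncs.oa_down.hycom_3hourly` module exposes the same function; the region, time range, and `store_path` are made-up values, not defaults from this package.

```python
from oafuncs.oa_down.hycom_3hourly import download

# Fetch 3-hourly surface currents for one day over a small box.
# All values below are illustrative; see the docstring above for the full parameter list.
download(
    var=["u", "v"],           # short names; full names such as "water_u" also work
    time_s="2024110100",      # start time, yyyymmddhh (hours 00, 03, ..., 21)
    time_e="2024110121",      # end time
    lon_min=105, lon_max=130,
    lat_min=15, lat_max=45,
    depth=0,                  # single-depth download (~0 m); omit for full depth
    store_path="./hycom_data",
    num_workers=2,            # small thread pool
    check=True,               # skip files that already exist and pass the check
)
```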
      
+def how_to_use():
+    print("""
+    # 1. Choose the dataset and version according to the time:
+    # 1.1 Use a function to query
+    You can use the function check_time_in_dataset_and_version(time_input=20241101) to find the dataset and version according to the time.
+    Then, you can see the dataset and version in the output.
+    # 1.2 Draw a picture to see
+    You can draw a picture to see the time range of each dataset and version.
+    Use the function draw_time_range(pic_save_folder=None) to draw the picture.
+
+    # 2. Get the base url according to the dataset, version, var and year:
+    # 2.1 The dataset and version were found in step 1
+    # 2.2 Var: u, v, temp, salt, ssh, u_b, v_b, temp_b, salt_b
+    # 2.3 Year: 1994-2024 (current year)
+
+    # 3. Get the query_dict according to the var, lon_min, lon_max, lat_min, lat_max, depth, level_num, time_str_ymdh:
+    # 3.1 Var: u, v, temp, salt, ssh, u_b, v_b, temp_b, salt_b
+    # 3.2 Lon_min, lon_max, lat_min, lat_max: float
+    # 3.3 Depth: 0-5000 m; set it if you want single-depth data
+    # 3.4 Level_num: 1-40; set it if you want single-level data
+    # 3.5 Time_str_ymdh: '2024110112'; the hour is normally 00, 03, 06, 09, 12, 15, 18 or 21, except for 1-hourly data
+    # 3.6 Use the function to get the query_dict
+    # 3.7 Note: for full-depth or full-level data, simply leave depth and level_num unset
+
+    # 4. Get the submit url according to the dataset, version, var, year, query_dict:
+    # 4.1 Use the function to get the submit url
+    # 4.2 You can use the submit url to download the data
+
+    # 5. Download the data according to the submit url:
+    # 5.1 Use the function to download the data
+    # 5.2 You can download the data of a single time or a series of times
+    # 5.3 Note: to download a series of data, set ymdh_time_s and ymdh_time_e to different values
+    # 5.4 Note: the time resolution is 3 hours
+
+    # 6. Directly download the data:
+    # 6.1 Use the function to directly download the data
+    # 6.2 You can set the dataset_name and version_name yourself
+    # 6.3 Note: if you do not set the dataset_name and version_name, the dataset and version will be chosen according to the download_time
+    # 6.4 Note: if you set the dataset_name and version_name, please ensure they are correct
+    # 6.5 Note: if you set only one of dataset_name and version_name, both the dataset and version will be chosen according to the download_time
+
+    # 7. Simple use:
+    # 7.1 You can use the function: download(var, ymdh_time_s, ymdh_time_e, lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None)
+    # 7.2 You can download the data of a single time or a series of times
+    # 7.3 The parameters you must set are var, ymdh_time_s, ymdh_time_e
+    # 7.4 Example: download('u', '2024110112', '2024110212', lon_min=0, lon_max=359.92, lat_min=-80, lat_max=90, depth=None, level_num=None, store_path=None, dataset_name=None, version_name=None)
+    """)
+
+
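The guide printed above refers to two query helpers defined earlier in this module. Assuming they are also exposed by the released `oafuncs.oa_down.hycom_3hourly` module, a quick interactive check (call forms taken verbatim from the guide) could be:

```python
from oafuncs.oa_down.hycom_3hourly import check_time_in_dataset_and_version, draw_time_range

# Which dataset/version covers this date?
check_time_in_dataset_and_version(time_input=20241101)

# Plot the time coverage of every dataset/version; pass a folder path to save the figure.
draw_time_range(pic_save_folder=None)
```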
      
+if __name__ == "__main__":
+    download_dict = {
+        "water_u": {"simple_name": "u", "download": 1},
+        "water_v": {"simple_name": "v", "download": 1},
+        "surf_el": {"simple_name": "ssh", "download": 1},
+        "water_temp": {"simple_name": "temp", "download": 1},
+        "salinity": {"simple_name": "salt", "download": 1},
+        "water_u_bottom": {"simple_name": "u_b", "download": 0},
+        "water_v_bottom": {"simple_name": "v_b", "download": 0},
+        "water_temp_bottom": {"simple_name": "temp_b", "download": 0},
+        "salinity_bottom": {"simple_name": "salt_b", "download": 0},
+    }
+
+    var_list = [var_name for var_name in download_dict.keys() if download_dict[var_name]["download"]]
+
+    single_var = False
+
+    # draw_time_range(pic_save_folder=r'I:\Delete')
+
+    options = {
+        "var": var_list,
+        "time_s": "2018010100",
+        "time_e": "2020123121",
+        "store_path": r"F:\Data\HYCOM\3hourly",
+        "lon_min": 105,
+        "lon_max": 130,
+        "lat_min": 15,
+        "lat_max": 45,
+        "num_workers": 3,
+        "check": True,
+        "depth": None,  # or 0-5000 meters
+        "level": None,  # or 1-40 levels
+        "ftimes": 1,
+        "idm_engine": r"D:\Programs\Internet Download Manager\IDMan.exe",  # not recommended to enable when only filling gaps in existing downloads
+        "fill_time": False,
+    }
+
+    if single_var:
+        for var_name in var_list:
+            options["var"] = var_name
+            download(**options)
+    else:
+        download(**options)