oafuncs 0.0.79__py2.py3-none-any.whl → 0.0.81__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. oafuncs/__init__.py +25 -7
  2. oafuncs/oa_cmap.py +31 -52
  3. oafuncs/oa_down/hycom_3hourly.py +68 -25
  4. oafuncs/oa_down/test_ua.py +151 -0
  5. oafuncs/oa_nc.py +120 -10
  6. oafuncs/oa_s/__init__.py +23 -0
  7. oafuncs/oa_s/oa_cmap.py +163 -0
  8. oafuncs/oa_s/oa_data.py +187 -0
  9. oafuncs/oa_s/oa_draw.py +451 -0
  10. oafuncs/oa_s/oa_file.py +332 -0
  11. oafuncs/oa_s/oa_help.py +39 -0
  12. oafuncs/oa_s/oa_nc.py +410 -0
  13. oafuncs/oa_s/oa_python.py +107 -0
  14. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/__init__.py" +26 -0
  15. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_cmap.py" +163 -0
  16. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_data.py" +187 -0
  17. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/__init__.py" +20 -0
  18. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/hycom_3hourly.py" +1176 -0
  19. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/literature.py" +332 -0
  20. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/test_ua.py" +151 -0
  21. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_draw.py" +451 -0
  22. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_file.py" +332 -0
  23. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_help.py" +39 -0
  24. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_nc.py" +410 -0
  25. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_python.py" +107 -0
  26. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/__init__.py" +21 -0
  27. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/meteorological.py" +168 -0
  28. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/ocean.py" +158 -0
  29. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/scientific.py" +139 -0
  30. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_tool/__init__.py" +18 -0
  31. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_tool/email.py" +114 -0
  32. {oafuncs-0.0.79.dist-info → oafuncs-0.0.81.dist-info}/METADATA +1 -2
  33. oafuncs-0.0.81.dist-info/RECORD +51 -0
  34. oafuncs-0.0.79.dist-info/RECORD +0 -24
  35. {oafuncs-0.0.79.dist-info → oafuncs-0.0.81.dist-info}/LICENSE.txt +0 -0
  36. {oafuncs-0.0.79.dist-info → oafuncs-0.0.81.dist-info}/WHEEL +0 -0
  37. {oafuncs-0.0.79.dist-info → oafuncs-0.0.81.dist-info}/top_level.txt +0 -0
oafuncs/__init__.py CHANGED
@@ -1,26 +1,44 @@
 #!/usr/bin/env python
 # coding=utf-8
-'''
+"""
 Author: Liu Kun && 16031215@qq.com
 Date: 2024-09-17 16:09:20
 LastEditors: Liu Kun && 16031215@qq.com
-LastEditTime: 2024-10-14 17:08:57
-FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\__init__.py
-Description:
+LastEditTime: 2024-12-13 12:31:06
+FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_s\\__init__.py
+Description:
 EditPlatform: vscode
 ComputerInfo: XPS 15 9510
 SystemInfo: Windows 11
-Python Version: 3.11
-'''
+Python Version: 3.12
+"""
+
 
 # Importing everything here would expose all functions at the OAFuncs top level, which breaks the modular design
+# from oafuncs.oa_s.oa_cmap import *
+# from oafuncs.oa_s.oa_data import *
+# from oafuncs.oa_s.oa_draw import *
+# from oafuncs.oa_s.oa_file import *
+# from oafuncs.oa_s.oa_help import *
+# from oafuncs.oa_s.oa_nc import *
+# from oafuncs.oa_s.oa_python import *
+
+# ------------------- 2024-12-13 12:31:06 -------------------
+# path: My_Funcs/OAFuncs/oafuncs/
 from .oa_cmap import *
 from .oa_data import *
-from .oa_down import *
 from .oa_draw import *
 from .oa_file import *
 from .oa_help import *
 from .oa_nc import *
 from .oa_python import *
+# ------------------- 2024-12-13 12:31:06 -------------------
+# path: My_Funcs/OAFuncs/oafuncs/oa_down/
+from .oa_down import *
+# ------------------- 2024-12-13 12:31:06 -------------------
+# path: My_Funcs/OAFuncs/oafuncs/oa_sign/
 from .oa_sign import *
+# ------------------- 2024-12-13 12:31:06 -------------------
+# path: My_Funcs/OAFuncs/oafuncs/oa_tool/
 from .oa_tool import *
+# ------------------- 2024-12-13 12:31:06 -------------------
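With the wildcard imports above retained, 0.0.81 still re-exports each submodule's public names at the package root. A minimal usage sketch under that assumption (choose_cmap and cmap2colors come from the renamed oa_cmap API shown in the next file):

    import oafuncs
    cmap = oafuncs.choose_cmap("diverging_1")   # re-exported via `from .oa_cmap import *`

    from oafuncs import oa_cmap                 # or address the submodule directly
    colors = oa_cmap.cmap2colors("viridis", 16)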
oafuncs/oa_cmap.py CHANGED
@@ -17,11 +17,9 @@ import matplotlib as mpl
 import matplotlib.pyplot as plt
 import numpy as np
 
-__all__ = ["show", "extract_colors", "create_custom", "create_diverging", "create_5rgb_txt", "my_cmap"]
+__all__ = ["show", "cmap2colors", "create_cmap", "create_cmap_rgbtxt", "choose_cmap"]
 
 # ** Visualize a cmap with a filled plot (adapted from the matplotlib docs)
-
-
 def show(colormaps: list):
     """
     Helper function to plot data with associated colormap.
@@ -40,28 +38,28 @@ def show(colormaps: list):
 
 
 # ** Convert a cmap to a list of colors
-def extract_colors(cmap, n=256):
+def cmap2colors(cmap, n=256):
     """
     cmap : colormap name
     n : number of colors to extract
     return : list of extracted colors
-    example : out_cmap = extract_colors('viridis', 256)
+    example : out_colors = cmap2colors('viridis', 256)
     """
     c_map = mpl.colormaps.get_cmap(cmap)
-    out_cmap = [c_map(i) for i in np.linspace(0, 1, n)]
-    return out_cmap
+    out_colors = [c_map(i) for i in np.linspace(0, 1, n)]
+    return out_colors
 
 
 # ** Build a custom multi-color cmap, optionally with node positions
-def create_custom(colors: list, nodes=None, under=None, over=None):  # quick palette from a list of colors
+def create_cmap(colors: list, nodes=None, under=None, over=None):  # quick palette from a list of colors
     """
     func : build a custom cmap; color positions are assigned automatically (evenly spaced)
     description : colors can be color names or hex color codes
     param {*} colors colors
     param {*} nodes color positions; evenly spaced if not provided
-    return {*} c_map
-    example : c_map = mk_cmap(['#C2B7F3','#B3BBF2','#B0CBF1','#ACDCF0','#A8EEED'])
-              c_map = mk_cmap(['aliceblue','skyblue','deepskyblue'],[0.0,0.5,1.0])
+    return {*} cmap
+    example : cmap = create_cmap(['#C2B7F3','#B3BBF2','#B0CBF1','#ACDCF0','#A8EEED'])
+              cmap = create_cmap(['aliceblue','skyblue','deepskyblue'],[0.0,0.5,1.0])
     """
     if nodes is None:  # assign node positions automatically
         cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", colors)
@@ -74,46 +72,27 @@ def create_custom(colors: list, nodes=None, under=None, over=None):
     return cmap_color
 
 
-# ** Build a diverging cmap; the middle defaults to white
-
-
-def create_diverging(colors: list):
-    """
-    func : build a two-sided cmap; with an even number of colors the middle is white, with an odd number the middle color stays in the center
-    description : colors can be color names or hex color codes
-    param {*} colors
-    return {*}
-    example : diverging_cmap = mk_cmap_diverging(["#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247"])
-    """
-    # custom color positions
-    n = len(colors)
-    nodes = np.linspace(0.0, 1.0, n + 1 if n % 2 == 0 else n)
-    newcolors = colors
-    if n % 2 == 0:
-        newcolors.insert(int(n / 2), "#ffffff")  # even number of colors: insert white in the middle
-    cmap_color = mpl.colors.LinearSegmentedColormap.from_list("mycmap", list(zip(nodes, newcolors)))
-    return cmap_color
-
-
 # ** Build a cmap from an RGB txt file (e.g. a GrADS palette)
-
-
-def create_5rgb_txt(rgb_txt_filepath: str):  # build a color table from an RGB txt file
+def create_cmap_rgbtxt(rgbtxt_file, split_mark=','):  # build a color table from an RGB txt file
     """
     func : build a cmap from an RGB txt file
-    description : rgb_txt_filepath='E:/python/colorbar/test.txt'
-    param {*} rgb_txt_filepath txt file path
+    description : rgbtxt_file='E:/python/colorbar/test.txt'
+    param {*} rgbtxt_file txt file path
     return {*} camp
-    example : cmap_color=dcmap(path)
+    example : cmap=create_cmap_rgbtxt(path,split_mark=',') #
+
+    txt example : 251,251,253
+                  225,125,25
+                  250,205,255
     """
-    with open(rgb_txt_filepath) as fid:
+    with open(rgbtxt_file) as fid:
         data = fid.readlines()
     n = len(data)
     rgb = np.zeros((n, 3))
    for i in np.arange(n):
-        rgb[i][0] = data[i].split(",")[0]
-        rgb[i][1] = data[i].split(",")[1]
-        rgb[i][2] = data[i].split(",")[2]
+        rgb[i][0] = data[i].split(split_mark)[0]
+        rgb[i][1] = data[i].split(split_mark)[1]
+        rgb[i][2] = data[i].split(split_mark)[2]
     max_rgb = np.max(rgb)
     if max_rgb > 2:  # rgb values above 2 are assumed to be 0-255 and are normalized
         rgb = rgb / 255.0
@@ -121,7 +100,7 @@ def create_5rgb_txt(rgb_txt_filepath: str):
     return icmap
 
 
-def my_cmap(cmap_name=None, query=False):
+def choose_cmap(cmap_name=None, query=False):
     """
     description: Choosing a colormap from the list of available colormaps or a custom colormap
     param {*} cmap_name:
@@ -130,9 +109,9 @@ def my_cmap(cmap_name=None, query=False):
     """
 
     my_cmap_dict = {
-        "diverging_1": create_custom(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
-        "cold_1": create_custom(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC"]),
-        "warm_1": create_custom(["#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
+        "diverging_1": create_cmap(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
+        "cold_1": create_cmap(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC"]),
+        "warm_1": create_cmap(["#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"]),
         # "land_1": create_custom(["#3E6436", "#678A59", "#91A176", "#B8A87D", "#D9CBB2"], under="#A6CEE3", over="#FFFFFF"),  # land colors from deep green to light brown: vegetation down to sand
         # "ocean_1": create_custom(["#126697", "#2D88B3", "#4EA1C9", "#78B9D8", "#A6CEE3"], under="#8470FF", over="#3E6436"),  # ocean colors from deep blue to light blue: deep sea up to shallow water
         # "ocean_land_1": create_custom(
@@ -150,7 +129,7 @@ def my_cmap(cmap_name=None, query=False):
        #         "#3E6436",  # deep green (high mountains)
        #     ]
        # ),
-        "colorful_1": create_custom(["#6d00db", "#9800cb", "#F2003C", "#ff4500", "#ff7f00", "#FE28A2", "#FFC0CB", "#DDA0DD", "#40E0D0", "#1a66f2", "#00f7fb", "#8fff88", "#E3FF00"]),
+        "colorful_1": create_cmap(["#6d00db", "#9800cb", "#F2003C", "#ff4500", "#ff7f00", "#FE28A2", "#FFC0CB", "#DDA0DD", "#40E0D0", "#1a66f2", "#00f7fb", "#8fff88", "#E3FF00"]),
     }
     if query:
         for key, _ in my_cmap_dict.items():
@@ -160,7 +139,7 @@ def my_cmap(cmap_name=None, query=False):
         return my_cmap_dict[cmap_name]
     else:
         try:
-            return mpl.cm.get_cmap(cmap_name)
+            return mpl.colormaps.get_cmap(cmap_name)
         except ValueError:
             raise ValueError(f"Unknown cmap name: {cmap_name}")
 
@@ -169,16 +148,16 @@ if __name__ == "__main__":
     # ** test the custom cmap
     colors = ["#C2B7F3", "#B3BBF2", "#B0CBF1", "#ACDCF0", "#A8EEED"]
     nodes = [0.0, 0.2, 0.4, 0.6, 1.0]
-    c_map = create_custom(colors, nodes)
+    c_map = create_cmap(colors, nodes)
     show([c_map])
 
     # ** test the custom diverging cmap
-    diverging_cmap = create_diverging(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"])
+    diverging_cmap = create_cmap(["#4e00b3", "#0000FF", "#00c0ff", "#a1d3ff", "#DCDCDC", "#FFD39B", "#FF8247", "#FF0000", "#FF5F9E"])
     show([diverging_cmap])
 
     # ** test building a cmap from an RGB txt file
     file_path = "E:/python/colorbar/test.txt"
-    cmap_color = create_5rgb_txt(file_path)
+    cmap_rgb = create_cmap_rgbtxt(file_path)
 
     # ** test converting a cmap to a list of colors
-    out_cmap = extract_colors("viridis", 256)
+    out_colors = cmap2colors("viridis", 256)
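Taken together, the oa_cmap changes are renames: extract_colors becomes cmap2colors, create_custom becomes create_cmap, create_5rgb_txt becomes create_cmap_rgbtxt, my_cmap becomes choose_cmap, and create_diverging is dropped in favour of create_cmap. A short sketch of the 0.0.81 names, based on the docstring examples above:

    from oafuncs import oa_cmap

    cmap = oa_cmap.create_cmap(["#C2B7F3", "#B3BBF2", "#B0CBF1", "#ACDCF0", "#A8EEED"])  # custom cmap, evenly spaced nodes
    colors = oa_cmap.cmap2colors("viridis", 256)                                         # 256 RGBA tuples from a named colormap
    warm = oa_cmap.choose_cmap("warm_1")                                                 # one of the predefined palettes
    oa_cmap.show([cmap, warm])                                                           # preview both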
oafuncs/oa_down/hycom_3hourly.py CHANGED
@@ -55,17 +55,17 @@ data_info["hourly"]["dataset"]["GLBy0.08"]["version"] = {"93.0": {}}
 # Submitting an out-of-range time on the website returns the dataset's actual time range, which was used to correct the ranges below
 # So far only the GLBv0.08 93.0 time range has been corrected, down to the hour
 # For the other datasets the hours default to 00 (start) and 21 (end) for now
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["time_range"] = {"time_start": "19940101", "time_end": "20151231"}
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["56.3"]["time_range"] = {"time_start": "20140701", "time_end": "20160430"}
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.2"]["time_range"] = {"time_start": "20160501", "time_end": "20170131"}
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.8"]["time_range"] = {"time_start": "20170201", "time_end": "20170531"}
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.7"]["time_range"] = {"time_start": "20170601", "time_end": "20170930"}
-data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.9"]["time_range"] = {"time_start": "20171001", "time_end": "20171231"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["53.X"]["time_range"] = {"time_start": "1994010112", "time_end": "2015123109"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["56.3"]["time_range"] = {"time_start": "2014070112", "time_end": "2016093009"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.2"]["time_range"] = {"time_start": "2016050112", "time_end": "2017020109"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.8"]["time_range"] = {"time_start": "2017020112", "time_end": "2017060109"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["57.7"]["time_range"] = {"time_start": "2017060112", "time_end": "2017100109"}
+data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["92.9"]["time_range"] = {"time_start": "2017100112", "time_end": "2018032009"}
 data_info["hourly"]["dataset"]["GLBv0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018010112", "time_end": "2020021909"}
 # GLBu0.08
-data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["time_range"] = {"time_start": "20180919", "time_end": "20181208"}
+data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018091912", "time_end": "2018120909"}
 # GLBy0.08
-data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["time_range"] = {"time_start": "20181204", "time_end": "20300904"}
+data_info["hourly"]["dataset"]["GLBy0.08"]["version"]["93.0"]["time_range"] = {"time_start": "2018120412", "time_end": "20300904"}
 
 # variable
 variable_info = {
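The corrected time ranges now carry an hour field (YYYYMMDDHH) rather than a bare date. A small, hypothetical helper, only to illustrate how such stamps can be parsed and compared:

    from datetime import datetime

    def parse_ymdh(stamp: str) -> datetime:
        # "2018010112" -> 2018-01-01 12:00
        return datetime.strptime(stamp, "%Y%m%d%H")

    assert parse_ymdh("1994010112") < parse_ymdh("2015123109")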
@@ -141,10 +141,11 @@ data_info["hourly"]["dataset"]["GLBu0.08"]["version"]["93.0"]["url"] = url_930_u
 uv3z_930_y = {}
 ts3z_930_y = {}
 ssh_930_y = {}
-for y_930_y in range(2018, 2025):
+for y_930_y in range(2018, 2030):
     uv3z_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/uv3z/{y_930_y}?"
     ts3z_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/ts3z/{y_930_y}?"
     ssh_930_y[str(y_930_y)] = rf"https://ncss.hycom.org/thredds/ncss/GLBy0.08/expt_93.0/ssh/{y_930_y}?"
+# GLBy0.08 93.0 data time range in each year: year-01-01 12:00 to year+1-01-01 09:00
 url_930_y = {
     "uv3z": uv3z_930_y,
     "ts3z": ts3z_930_y,
@@ -372,7 +373,16 @@ def check_time_in_dataset_and_version(time_input, time_end=None):
     if have_data:
         for d, v, trange in zip(d_list, v_list, trange_list):
             print(f"[bold blue]{d} {v} {trange}")
-        return True
+        if is_single_time:
+            return True
+        else:
+            base_url_s = get_base_url(d_list[0], v_list[0], "u", str(time_start))
+            base_url_e = get_base_url(d_list[0], v_list[0], "u", str(time_end))
+            if base_url_s == base_url_e:
+                return True
+            else:
+                print(f"[bold red]{time_start} to {time_end} is in different datasets or versions, so you can't download them together")
+                return False
     else:
         print(f"[bold red]{time_input_str} is not in any dataset and version")
         return False
@@ -456,7 +466,8 @@ def direct_choose_dataset_and_version(time_input, time_end=None):
     return dataset_name_out, version_name_out
 
 
-def get_base_url(dataset_name, version_name, var, year_str):
+def get_base_url(dataset_name, version_name, var, ymdh_str):
+    year_str = int(ymdh_str[:4])
     url_dict = data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["url"]
     classification_method = data_info["hourly"]["dataset"][dataset_name]["version"][version_name]["classification"]
     if classification_method == "year_different":
@@ -472,6 +483,12 @@
         if base_url is None:
             print("Please ensure the var is in [u,v,temp,salt,ssh,u_b,v_b,temp_b,salt_b]")
     elif classification_method == "var_year_different":
+        if dataset_name == "GLBy0.08" and version_name == "93.0":
+            mdh_str = ymdh_str[4:]
+            # GLBy0.08 93.0
+            # data time range in each year: year-01-01 12:00 to year+1-01-01 09:00
+            if mdh_str <= "010109":
+                year_str = int(ymdh_str[:4]) - 1
         base_url = None
         for key, value in var_group.items():
             if var in value:
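The new GLBy0.08 93.0 branch moves requests from the first hours of January back to the previous year's file, since each yearly file covers Jan 1 12:00 through the following Jan 1 09:00 (see the comment added above). A standalone sketch of that rollover rule, not the package's own helper:

    def glby_file_year(ymdh_str: str) -> int:
        # each GLBy0.08 93.0 yearly file spans year-01-01 12:00 .. (year+1)-01-01 09:00
        year = int(ymdh_str[:4])
        if ymdh_str[4:] <= "010109":  # on or before Jan 1 09:00 -> previous year's file
            year -= 1
        return year

    assert glby_file_year("2024010106") == 2023
    assert glby_file_year("2024010112") == 2024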
@@ -482,8 +499,8 @@
     return base_url
 
 
-def get_submit_url(dataset_name, version_name, var, year_str, query_dict):
-    base_url = get_base_url(dataset_name, version_name, var, year_str)
+def get_submit_url(dataset_name, version_name, var, ymdh_str, query_dict):
+    base_url = get_base_url(dataset_name, version_name, var, ymdh_str)
     if isinstance(query_dict["var"], str):
         query_dict["var"] = [query_dict["var"]]
     target_url = base_url + "&".join(f"var={var}" for var in query_dict["var"]) + "&" + "&".join(f"{key}={value}" for key, value in query_dict.items() if key != "var")
@@ -496,10 +513,37 @@ def clear_existing_file(file_full_path):
         print(f"{file_full_path} has been removed")
 
 
+def _get_file_size(file_path, unit="KB"):
+    # check that the file exists
+    if not os.path.exists(file_path):
+        return "文件不存在"
+
+    # file size in bytes
+    file_size = os.path.getsize(file_path)
+
+    # unit conversion table
+    unit_dict = {"PB": 1024**5, "TB": 1024**4, "GB": 1024**3, "MB": 1024**2, "KB": 1024}
+
+    # validate the requested unit
+    if unit not in unit_dict:
+        return "单位不合法,请选择PB、TB、GB、MB、KB中的一个"
+
+    # convert the size to the requested unit
+    converted_size = file_size / unit_dict[unit]
+
+    return converted_size
+
+
 def check_existing_file(file_full_path):
     if os.path.exists(file_full_path):
         print(f"[bold #FFA54F]{file_full_path} exists")
-        return True
+        fsize = _get_file_size(file_full_path)
+        if fsize < 5:
+            print(f"[bold #FFA54F]{file_full_path} may be incomplete\nFile size: {fsize:.2f} KB")
+            # clear_existing_file(file_full_path)
+            return False
+        else:
+            return True
     else:
         # print(f'{file_full_path} does not exist')
         return False
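check_existing_file now treats any file smaller than 5 KB as likely incomplete and returns False so it can be re-downloaded, rather than always trusting an existing file. A rough standalone sketch of the same check, mirroring the unit table in _get_file_size:

    import os

    UNIT = {"PB": 1024**5, "TB": 1024**4, "GB": 1024**3, "MB": 1024**2, "KB": 1024}

    def looks_complete(path: str, min_kb: float = 5.0) -> bool:
        # a missing or tiny file is treated as unusable, matching the new check above
        return os.path.exists(path) and os.path.getsize(path) / UNIT["KB"] >= min_kb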
@@ -567,11 +611,12 @@ def scrape_and_categorize_proxies(choose_protocol="http"):
 
     return proxies_list
 
+
 def get_proxy():
     ip_list = scrape_and_categorize_proxies(choose_protocol="http")
     choose_ip = random.choice(ip_list)
     proxies = {"http": f"http://{choose_ip}", "https": f"http://{choose_ip}"}
-    print(f'Using proxy: {proxies}')
+    print(f"Using proxy: {proxies}")
     return proxies
 
 
@@ -643,7 +688,7 @@ def download_file(target_url, store_path, file_name, check=False):
     # save the file
     with open(filename, 'wb') as f:
         f.write(response.content) """
-
+
     if find_proxy:
         proxies = get_proxy()
         response = s.get(target_url, headers=headers, proxies=proxies, stream=True, timeout=random.randint(5, max_timeout))
@@ -726,7 +771,8 @@ def check_dataset_version(dataset_name, version_name, download_time, download_ti
 
 
 def get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max, dataset_name, version_name, download_time, download_time_end=None):
-    year_str = str(download_time)[:4]
+    # year_str = str(download_time)[:4]
+    ymdh_str = str(download_time)
     if depth is not None and level_num is not None:
         print("Please ensure the depth or level_num is None")
         print("Progress will use the depth")
@@ -738,10 +784,10 @@ def get_submit_url_var(var, depth, level_num, lon_min, lon_max, lat_min, lat_max
         print(f"Data of single level ({level_num}) will be downloaded...")
         which_mode = "level"
     else:
-        print("Full depth or full level data will be downloaded...")
+        # print("Full depth or full level data will be downloaded...")
         which_mode = "full"
     query_dict = get_query_dict(var, lon_min, lon_max, lat_min, lat_max, download_time, download_time_end, which_mode, depth, level_num)
-    submit_url = get_submit_url(dataset_name, version_name, var, year_str, query_dict)
+    submit_url = get_submit_url(dataset_name, version_name, var, ymdh_str, query_dict)
     return submit_url
 
 
@@ -992,7 +1038,7 @@ def download(var, time_s, time_e=None, lon_min=0, lon_max=359.92, lat_min=-80, l
 
     """ global current_platform
    current_platform = platform.system() """
-
+
     global find_proxy
     find_proxy = False
 
@@ -1065,7 +1111,7 @@ def how_to_use():
 
 if __name__ == "__main__":
     # help(hycom3h.download)
-    time_s, time_e = "2018070100", "2019123121"
+    time_s, time_e = "2023010100", "2023123121"
     merge_name = f"{time_s}_{time_e}"  # name of the merged file
     root_path = r"G:\Data\HYCOM\3hourly"
     location_dict = {"west": 105, "east": 130, "south": 15, "north": 45}
@@ -1081,10 +1127,7 @@ if __name__ == "__main__":
         "salinity_bottom": {"simple_name": "salt_b", "download": 0},
     }
 
-    var_list = []
-    for var_name in download_dict.keys():
-        if download_dict[var_name]["download"] == 1:
-            var_list.append(var_name)
+    var_list = [var_name for var_name in download_dict.keys() if download_dict[var_name]["download"]]
 
     # set depth or level, only one can be True
     # if you wanna download all depth or level, set both False
oafuncs/oa_down/test_ua.py ADDED
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+# coding=utf-8
+"""
+Author: Liu Kun && 16031215@qq.com
+Date: 2024-12-01 19:32:25
+LastEditors: Liu Kun && 16031215@qq.com
+LastEditTime: 2024-12-10 11:16:36
+FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_down\\test.py
+Description:
+EditPlatform: vscode
+ComputerInfo: XPS 15 9510
+SystemInfo: Windows 11
+Python Version: 3.12
+"""
+
+import os
+import random
+import re
+
+
+def is_valid_user_agent(user_agent):
+    # A simple regular expression to check the User Agent format
+    # It checks whether the User Agent contains the usual browser information fields
+    pattern = re.compile(
+        r"^(?:(?:Mozilla|Opera|Chrome|Safari|Edg|OPR)/[\d.]+)"
+        r"(?:\s(?:\(.*?\)))?"
+        r"(?:\s(?:Gecko|AppleWebKit|KHTML, like Gecko|Version|Edge|OPR)/[\d.]+)?"
+        r"(?:\s.*?(?:rv:|Version/|Ubuntu|Macintosh|Windows|X11|Linux|CrOS|FreeBSD|OpenBSD|NetBSD|iPhone|iPad|iPod|Android|BlackBerry|BB10|Mobile|Symbian|Windows Phone|IEMobile|Opera Mini|Opera Mobi|UCBrowser|MQQBrowser|baiduboxapp|baidubrowser|Safari|Firefox|MSIE|Trident|Edge|EdgA|Chrome|CriOS|Vivaldi|Sleipnir|Midori|ELinks|Lynx|w3m|Arora|Epiphany|Konqueror|Dillo|Netscape|SeaMonkey|K-Meleon|Camino|Iceape|Galeon|GranParadiso|Iceweasel|Firefox|Fennec|Conkeror|PaleMoon|Uzbl|QupZilla|Otter|Waterfox|Basilisk|Cyberfox|PaleMoon|GNU IceCat|GNU IceWeasel|IceCat|IceWeasel|Seamonkey|Iceape|Firefox|Epiphany|Web|Safari|Android|Mobile|BlackBerry|BB10|Tablet|Silk|Kindle|FxiOS|Focus|SamsungBrowser|browser|AppleWebKit|Puffin|DuckDuckGo|YaBrowser|Yandex|Amigo|NokiaBrowser|OviBrowser|OneBrowser|Chrome|Firefox|Safari|OPR|Coast|Mercury|Silk|Skyfire|IEMobile|Bolt|Jasmine|NativeHost|Crosswalk|TizenBrowser|SailfishBrowser|SamsungBrowser|Silk-Accelerated|UCBrowser|Quark|XiaoMi|OnePlus|Vivo|Oppo|Realme|Meizu|Lenovo|Huawei|ZTE|Alcatel|Sony|Nokia|LG|HTC|Asus|Acer|Motorola|Samsung)/[\d.]+)?$"
+    )
+
+    # Match the User Agent string against the regex
+    if pattern.match(user_agent):
+        return True
+    else:
+        return False
+
+
+def get_ua():
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    ua_file_txt = os.path.join(current_dir, "User_Agent-list.txt")
+
+    with open(ua_file_txt, "r") as f:
+        ua_list = f.readlines()
+        # strip newlines and drop empty lines
+        ua_list = [line.strip() for line in ua_list if line.strip()]
+
+    return random.choice(ua_list)
+
+
+def get_ua_org():
+    ua_list = [
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60",
+        "Opera/8.0 (Windows NT 5.1; U; en)",
+        "Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50",
+        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50",
+        "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
+        "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
+        "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0",
+        "Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10",
+        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv,2.0.1) Gecko/20100101 Firefox/4.0.1",
+        "Mozilla/5.0 (Windows NT 6.1; rv,2.0.1) Gecko/20100101 Firefox/4.0.1",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2",
+        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36",
+        "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
+        "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36",
+        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
+        "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16",
+        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
+        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
+        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
+        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
+        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
+        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.3.4000 Chrome/30.0.1599.101 Safari/537.36",
+        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
+        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 UBrowser/6.2.4094.1 Safari/537.36",
+        "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
+        "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
+        "Mozilla/5.0 (Linux; U; Android 2.2.1; zh-cn; HTC_Wildfire_A3333 Build/FRG83D) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
+        "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
+        "MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
+        "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
+        "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
+        "Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
+        "Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
+        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
+        "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
+        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
+        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)",
+        "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
+        "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
+        "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
+        "UCWEB7.0.2.37/28/999",
+        "NOKIA5700/UCWEB7.0.2.37/28/999",
+        "Openwave/UCWEB7.0.2.37/28/999",
+        "Openwave/UCWEB7.0.2.37/28/999",
+    ]
+    with open(newtxtfile, "w") as f:
+        for line in ua_list:
+            f.write(line + "\n")
+    # print(f'Using User-Agent: {ua}')
+    ua = random.choice(ua_list)
+    return ua
+
+
+# get_ua_org()
+
+if __name__ == "__main__":
+    txtfile = r"E:\Code\Python\My_Funcs\OAFuncs\oafuncs\oa_down\User_Agent-list.txt"
+
+    with open(txtfile, "r") as f:
+        lines = f.readlines()
+        # strip newlines and drop empty lines
+        lines = [line.strip() for line in lines if line.strip()]
+    """ new_line = []
+    for i in range(len(lines)):
+        if '/' in lines[i]:
+            new_line.append(lines[i])
+        else:
+            print(lines[i]) """
+
+    new_line = []
+    for line in lines:
+        if is_valid_user_agent(line):
+            # print(line)
+            new_line.append(line)
+        else:
+            print(f"Invalid User-Agent: {line}")
+
+    newtxtfile = r"E:\Code\Python\My_Funcs\OAFuncs\oafuncs\oa_down\ua_list_new.txt"
+    with open(newtxtfile, "w") as f:
+        for line in new_line:
+            f.write(line + "\n")
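test_ua.py bundles a User-Agent validator, a random picker that reads User_Agent-list.txt from the module directory, and a one-off script that filters that list; note that get_ua_org writes to newtxtfile, which is only defined in the __main__ block, so it is not usable on its own. A minimal, hypothetical caller, assuming the wheel ships User_Agent-list.txt alongside the module:

    from oafuncs.oa_down.test_ua import get_ua, is_valid_user_agent

    ua = get_ua()                      # random line from User_Agent-list.txt
    if is_valid_user_agent(ua):
        headers = {"User-Agent": ua}   # e.g. passed to requests.get(url, headers=headers)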