oafuncs-0.0.98.4-py3-none-any.whl → oafuncs-0.0.98.6-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- oafuncs/oa_down/hycom_3hourly.py +132 -130
- oafuncs/oa_down/{hycom_3hourly_20250416.py → hycom_3hourly_proxy.py} +139 -100
- oafuncs/oa_down/read_proxy.py +108 -0
- {oafuncs-0.0.98.4.dist-info → oafuncs-0.0.98.6.dist-info}/METADATA +1 -1
- {oafuncs-0.0.98.4.dist-info → oafuncs-0.0.98.6.dist-info}/RECORD +8 -8
- oafuncs/oa_down/hycom_3hourly_20250407.py +0 -1295
- {oafuncs-0.0.98.4.dist-info → oafuncs-0.0.98.6.dist-info}/WHEEL +0 -0
- {oafuncs-0.0.98.4.dist-info → oafuncs-0.0.98.6.dist-info}/licenses/LICENSE.txt +0 -0
- {oafuncs-0.0.98.4.dist-info → oafuncs-0.0.98.6.dist-info}/top_level.txt +0 -0
@@ -2,9 +2,9 @@
 # coding=utf-8
 """
 Author: Liu Kun && 16031215@qq.com
-Date: 2025-04-
+Date: 2025-04-17 15:16:02
 LastEditors: Liu Kun && 16031215@qq.com
-LastEditTime: 2025-04-
+LastEditTime: 2025-04-17 15:16:04
 FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_down\\hycom_3hourly copy.py
 Description:
 EditPlatform: vscode
@@ -15,7 +15,9 @@ Python Version: 3.12
 
 
 
+import asyncio
 import datetime
+import logging
 import os
 import random
 import re
@@ -25,11 +27,11 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
 from pathlib import Path
 from threading import Lock
 
+import httpx
 import matplotlib.pyplot as plt
 import netCDF4 as nc
 import numpy as np
 import pandas as pd
-import requests
 import xarray as xr
 from rich import print
 from rich.progress import Progress
@@ -40,6 +42,9 @@ from oafuncs.oa_file import file_size
 from oafuncs.oa_nc import check as check_nc
 from oafuncs.oa_nc import modify as modify_nc
 
+logging.getLogger("httpx").setLevel(logging.WARNING)  # silence httpx INFO logs; show only WARNING and above
+
+
 warnings.filterwarnings("ignore", category=RuntimeWarning, message="Engine '.*' loading failed:.*")
 
 __all__ = ["draw_time_range", "download"]
@@ -416,13 +421,13 @@ def _check_time_in_dataset_and_version(time_input, time_end=None):
             trange_list.append(f"{time_s}-{time_e}")
             have_data = True
 
-    # print the result
-    if match_time is None:
-        print(f"[bold red]{time_input_str} is in the following dataset and version:")
     if have_data:
         if match_time is None:
+            print(f"[bold red]Time {time_input_str} included in:")
+            dv_num = 1
             for d, v, trange in zip(d_list, v_list, trange_list):
-                print(f"[bold blue]{d} {v} {trange}")
+                print(f"{dv_num} -> [bold blue]{d} - {v} : {trange}")
+                dv_num += 1
         if is_single_time:
             return True
         else:
@@ -434,7 +439,7 @@ def _check_time_in_dataset_and_version(time_input, time_end=None):
         print(f"[bold red]{time_start} to {time_end} is in different datasets or versions, so you can't download them together")
         return False
     else:
-        print(f"[bold red]{time_input_str}
+        print(f"[bold red]Time {time_input_str} has no data")
        return False
 
 
@@ -509,7 +514,8 @@ def _direct_choose_dataset_and_version(time_input, time_end=None):
 
     if dataset_name_out is not None and version_name_out is not None:
         if match_time is None:
-            print(f"[bold purple]dataset: {dataset_name_out}, version: {version_name_out} is chosen")
+            # print(f"[bold purple]dataset: {dataset_name_out}, version: {version_name_out} is chosen")
+            print(f"[bold purple]Chosen dataset: {dataset_name_out} - {version_name_out}")
 
     # returns None if no matching dataset and version are found
     return dataset_name_out, version_name_out
@@ -664,117 +670,145 @@ def _correct_time(nc_file):
     modify_nc(nc_file, "time", None, time_difference)
 
 
+class _HycomDownloader:
+    def __init__(self, tasks, delay_range=(3, 6), timeout_factor=120, max_var_count=5, max_retries=3):
+        """
+        :param tasks: List of (url, save_path)
+        """
+        self.tasks = tasks
+        self.delay_range = delay_range
+        self.timeout_factor = timeout_factor
+        self.max_var_count = max_var_count
+        self.max_retries = max_retries
+        self.count = {"success": 0, "fail": 0}
+
+    def user_agent(self):
+        return get_ua()
+
+    async def _download_one(self, url, save_path):
+        file_name = os.path.basename(save_path)
+        headers = {"User-Agent": self.user_agent()}
+        var_count = min(max(url.count("var="), 1), self.max_var_count)
+        timeout_max = self.timeout_factor * var_count
+        timeout = random.randint(timeout_max // 2, timeout_max)
+
+        retry = 0
+        while retry <= self.max_retries:
+            if proxy_txt_path:
+                from .read_proxy import get_valid_proxy
+
+                proxy_one = get_valid_proxy(proxy_txt_path)
+                if proxy_one:
+                    if proxy_one.startswith("http://"):
+                        proxy_one = proxy_one[7:]
+                    elif proxy_one.startswith("https://"):
+                        proxy_one = proxy_one[8:]
+            else:
+                proxy_one = None
+            if proxy_one:
+                proxy = f"http://{proxy_one}"
+                mounts = {
+                    "http://": httpx.AsyncHTTPTransport(proxy=proxy),
+                    "https://": httpx.AsyncHTTPTransport(proxy=proxy),
+                }
+            else:
+                proxy = None
+                mounts = None
+            try:
+                await asyncio.sleep(random.uniform(*self.delay_range))
+                start = datetime.datetime.now()
+
+                async with httpx.AsyncClient(
+                    timeout=httpx.Timeout(timeout),
+                    limits=httpx.Limits(max_connections=2, max_keepalive_connections=2),
+                    transport=httpx.AsyncHTTPTransport(retries=2),
+                    # proxy=proxy,
+                    mounts=mounts,
+                ) as client:
+                    print(f"[bold #f0f6d0]Requesting {file_name} (Attempt {retry + 1}) ...")
+                    response = await client.get(url, headers=headers, follow_redirects=True)
+                    response.raise_for_status()
+                    if not response.content:
+                        raise ValueError("Empty response received")
+
+                    print(f"[bold #96cbd7]Downloading {file_name} ...")
+                    with open(save_path, "wb") as f:
+                        async for chunk in response.aiter_bytes(32 * 1024):
+                            f.write(chunk)
+
+                    elapsed = datetime.datetime.now() - start
+                    print(f"[#3dfc40]File [bold #dfff73]{file_name} [#3dfc40]downloaded, Time: [#39cbdd]{elapsed}")
+                    self.count["success"] += 1
+                    count_dict["success"] += 1
+                    return
+
+            except Exception as e:
+                print(f"[bold red]Failed ({type(e).__name__}): {e}")
+                if retry < self.max_retries:
+                    backoff = 2**retry
+                    print(f"[yellow]Retrying in {backoff:.1f}s ...")
+                    await asyncio.sleep(backoff)
+                    retry += 1
+                else:
+                    print(f"[red]Giving up on {file_name}")
+                    self.count["fail"] += 1
+                    count_dict["fail"] += 1
+                    return
+
+    async def run(self):
+        print(f"📥 Starting download of {len(self.tasks)} files ...")
+        for url, save_path in self.tasks:
+            await self._download_one(url, save_path)
+
+        print("\n✅ All tasks completed.")
+        print(f"✔️ Success: {self.count['success']} | ❌ Fail: {self.count['fail']}")
+
+
 def _download_file(target_url, store_path, file_name, cover=False):
-
+    save_path = Path(store_path) / file_name
     file_name_split = file_name.split("_")
     file_name_split = file_name_split[:-1]
     same_file = "_".join(file_name_split) + "*nc"
 
     if match_time is not None:
-        if check_nc(
-            if not _check_ftime(
+        if check_nc(save_path, print_messages=False):
+            if not _check_ftime(save_path, if_print=True):
                 if match_time:
-                    _correct_time(
+                    _correct_time(save_path)
                     count_dict["skip"] += 1
                 else:
-                    _clear_existing_file(
+                    _clear_existing_file(save_path)
                     count_dict["no_data"] += 1
             else:
                 count_dict["skip"] += 1
                 print(f"[bold green]{file_name} is correct")
         return
 
-    if not cover and os.path.exists(
-        print(f"[bold #FFA54F]{
+    if not cover and os.path.exists(save_path):
+        print(f"[bold #FFA54F]{save_path} exists, skipping ...")
         count_dict["skip"] += 1
         return
 
     if same_file not in fsize_dict.keys():
-        check_nc(
+        check_nc(save_path, delete_if_invalid=True, print_messages=False)
 
-    get_mean_size = _get_mean_size_move(same_file,
+    get_mean_size = _get_mean_size_move(same_file, save_path)
 
-    if _check_existing_file(
+    if _check_existing_file(save_path, get_mean_size):
         count_dict["skip"] += 1
         return
 
-    _clear_existing_file(
+    _clear_existing_file(save_path)
 
     if not use_idm:
-
-
-
-        request_times = 0
-
-        def calculate_wait_time(time_str, target_url):
-            time_pattern = r"\d{10}"
-            times_in_str = re.findall(time_pattern, time_str)
-            num_times_str = len(times_in_str)
-
-            if num_times_str > 1:
-                delta_t = datetime.datetime.strptime(times_in_str[1], "%Y%m%d%H") - datetime.datetime.strptime(times_in_str[0], "%Y%m%d%H")
-                delta_t = delta_t.total_seconds() / 3600
-                delta_t = delta_t / 3 + 1
-            else:
-                delta_t = 1
-            num_var = int(target_url.count("var="))
-            if num_var <= 0:
-                num_var = 1
-            return int(delta_t * 5 * 60 * num_var)
-
-        max_timeout = calculate_wait_time(file_name, target_url)
-        print(f"[bold #912dbc]Max timeout: {max_timeout} seconds")
-
-        download_time_s = datetime.datetime.now()
-        order_list = ["1st", "2nd", "3rd", "4th", "5th", "6th", "7th", "8th", "9th", "10th"]
-        while not download_success:
-            if request_times >= 10:
-                print(f"[bold #ffe5c0]Download failed after {request_times} times\nYou can skip it and try again later")
-                count_dict["fail"] += 1
-                break
-            if request_times > 0:
-                print(f"[bold #ffe5c0]Retrying the {order_list[request_times - 1]} time...")
-            try:
-                referer_center = target_url.split("?")[0].split("ncss/")[-1]
-                headers = {
-                    "User-Agent": get_ua(),  # the remaining headers are optional; downloads still work without them
-                    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
-                    "Accept-Encoding": "gzip, deflate, br, zstd",
-                    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
-                    "Referer": rf"https://ncss.hycom.org/thredds/ncss/grid/{referer_center}/dataset.html",
-                }
-                response = s.get(target_url, headers=headers, stream=True, timeout=random.randint(5, max_timeout))
-                response.raise_for_status()
-                with open(fname, "wb") as f:
-                    print(f"[bold #96cbd7]Downloading {file_name} ...")
-                    for chunk in response.iter_content(chunk_size=1024):
-                        if chunk:
-                            f.write(chunk)
-
-                f.close()
-
-                if os.path.exists(fname):
-                    download_success = True
-                    download_time_e = datetime.datetime.now()
-                    download_delta = download_time_e - download_time_s
-                    print(f"[#3dfc40]File [bold #dfff73]{fname} [#3dfc40]has been downloaded successfully, Time: [#39cbdd]{download_delta}")
-                    count_dict["success"] += 1
-
-            except requests.exceptions.HTTPError as errh:
-                print(f"Http Error: {errh}")
-            except requests.exceptions.ConnectionError as errc:
-                print(f"Error Connecting: {errc}")
-            except requests.exceptions.Timeout as errt:
-                print(f"Timeout Error: {errt}")
-            except requests.exceptions.RequestException as err:
-                print(f"OOps: Something Else: {err}")
-
-            time.sleep(3)
-            request_times += 1
+        python_downloader = _HycomDownloader([(target_url, save_path)])
+        asyncio.run(python_downloader.run())
+        time.sleep(3 + random.uniform(0, 10))
     else:
         idm_downloader(target_url, store_path, file_name, given_idm_engine)
-        idm_download_list.append(
-        print(f"[bold #3dfc40]File [bold #dfff73]{
+        idm_download_list.append(save_path)
+        # print(f"[bold #3dfc40]File [bold #dfff73]{save_path} [#3dfc40]has been submit to IDM for downloading")
+        time.sleep(3 + random.uniform(0, 10))
 
 
 def _check_hour_is_valid(ymdh_str):
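The hunk above swaps the old synchronous `requests` retry loop for the async `_HycomDownloader`: each attempt optionally pulls a fresh proxy from `proxy_txt_path`, mounts it for both URL schemes via `httpx.AsyncHTTPTransport`, and retries with exponential backoff (`2**retry` seconds). A minimal standalone sketch of that pattern is below; the URL, proxy address, and function name are illustrative placeholders, not part of the package:

```python
import asyncio
import random

import httpx


async def fetch_with_backoff(url: str, save_path: str, proxy: str | None = None, max_retries: int = 3) -> bool:
    """Download url to save_path through an optional proxy, backing off exponentially on failure."""
    # Mirror the diff's mounts dict: route both http:// and https:// through the proxy.
    mounts = {"http://": httpx.AsyncHTTPTransport(proxy=proxy), "https://": httpx.AsyncHTTPTransport(proxy=proxy)} if proxy else None

    for retry in range(max_retries + 1):
        try:
            await asyncio.sleep(random.uniform(3, 6))  # polite delay, as in delay_range
            async with httpx.AsyncClient(timeout=httpx.Timeout(120), mounts=mounts) as client:
                response = await client.get(url, follow_redirects=True)
                response.raise_for_status()
                with open(save_path, "wb") as f:
                    f.write(response.content)
            return True
        except Exception as e:
            if retry == max_retries:
                print(f"Giving up after {retry + 1} attempts: {type(e).__name__}: {e}")
                return False
            await asyncio.sleep(2**retry)  # exponential backoff: 1 s, 2 s, 4 s, ...
    return False


# Hypothetical usage; example.org stands in for a real HYCOM NCSS request URL.
# asyncio.run(fetch_with_backoff("https://example.org/data.nc", "data.nc", proxy="http://127.0.0.1:8080"))
```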
@@ -962,6 +996,7 @@ def download(
     idm_path=None,
     validate_time=None,
     interval_hours=3,
+    proxy_txt=None,
 ):
     """
     Download data for a single time or a series of times.
@@ -1095,6 +1130,9 @@
     global mark_len
     mark_len = 100
 
+    global proxy_txt_path
+    proxy_txt_path = proxy_txt
+
     if validate_time is not None:
         workers = 1
     print("*" * mark_len)
@@ -1167,20 +1205,21 @@ if __name__ == "__main__":
 
     options = {
         "variables": var_list,
-        "start_time": "
-        "end_time": "
-        "output_dir": r"
+        "start_time": "2018010100",
+        "end_time": "2019063000",
+        "output_dir": r"G:\Data\HYCOM\china_sea\hourly_24",
         "lon_min": 105,
-        "lon_max":
-        "lat_min":
+        "lon_max": 135,
+        "lat_min": 10,
         "lat_max": 45,
         "workers": 1,
         "overwrite": False,
         "depth": None,
         "level": None,
-        "validate_time":
-        "idm_path": r"D:\Programs\Internet Download Manager\IDMan.exe",
-        "interval_hours":
+        "validate_time": None,
+        # "idm_path": r"D:\Programs\Internet Download Manager\IDMan.exe",
+        "interval_hours": 24,
+        "proxy_txt": None,
     }
 
     if single_var:
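Taken together, these hunks thread the new `proxy_txt` option from the public `download()` signature down to `_HycomDownloader` via the module-level `proxy_txt_path`. A hedged example call, with keyword names assumed from the keys of the `options` dict above (paths and the time range are placeholders):

```python
from oafuncs.oa_down.hycom_3hourly import download

# Placeholder values; "proxies.txt" would hold one proxy per line (see read_proxy.py below).
download(
    variables=["u", "v"],
    start_time="2018010100",
    end_time="2018010200",
    output_dir="./hycom_data",
    lon_min=105,
    lon_max=135,
    lat_min=10,
    lat_max=45,
    workers=1,
    interval_hours=24,
    proxy_txt=None,  # or "proxies.txt" to route requests through tested proxies
)
```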
oafuncs/oa_down/read_proxy.py (new file):

@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# coding=utf-8
+"""
+Author: Liu Kun && 16031215@qq.com
+Date: 2025-04-17 13:59:14
+LastEditors: Liu Kun && 16031215@qq.com
+LastEditTime: 2025-04-17 14:00:38
+FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_down\\read_proxy.py
+Description:
+EditPlatform: vscode
+ComputerInfo: XPS 15 9510
+SystemInfo: Windows 11
+Python Version: 3.12
+"""
+
+import threading
+from queue import Queue
+import random
+import requests
+import os
+
+
+# read the proxy list from a file
+def read_proxies_from_file(filename):
+    try:
+        with open(filename, "r") as file:
+            proxies = [line.strip() for line in file if line.strip()]
+        return proxies
+    except FileNotFoundError:
+        print(f"File not found: {filename}; please check that it exists.")
+        return []
+
+
+# test whether a single proxy works
+def test_single_proxy(proxy, test_url, working_proxies_queue):
+    try:
+        response = requests.get(test_url, proxies={"http": proxy, "https": proxy}, timeout=5)
+        if response.status_code == 200:
+            # print(f"Proxy {proxy} works, returned IP: {response.json()['origin']}")
+            working_proxies_queue.put(proxy)
+        else:
+            # print(f"Proxy {proxy} unavailable, status code: {response.status_code}")
+            pass
+    except Exception as e:  # noqa: F841
+        # print(f"Proxy {proxy} unavailable, error: {e}")
+        pass
+
+
+# test proxy availability (multithreaded)
+def test_proxies(proxies, test_url):
+    working_proxies_queue = Queue()
+    threads = []
+
+    # spawn one thread per proxy
+    for proxy in proxies:
+        thread = threading.Thread(target=test_single_proxy, args=(proxy, test_url, working_proxies_queue))
+        threads.append(thread)
+        thread.start()
+
+    # wait for all threads to finish
+    for thread in threads:
+        thread.join()
+
+    # drain every working proxy from the queue
+    working_proxies = []
+    while not working_proxies_queue.empty():
+        working_proxies.append(working_proxies_queue.get())
+
+    return working_proxies
+
+
+
+
+# main entry
+def read_test(input_filename=r"E:\Code\Python\Tools\Yccol\output\http.txt"):
+    # test URL
+    test_url = "http://httpbin.org/ip"
+
+    # read the proxy list
+    proxies = read_proxies_from_file(input_filename)
+    if not proxies:
+        print(f"No valid proxies found in '{input_filename}'.")
+        return
+
+    # print(f"Read {len(proxies)} proxies from '{input_filename}'; testing ...")
+
+    # test the proxies
+    working_proxies = test_proxies(proxies, test_url)
+
+    return working_proxies
+
+def get_valid_proxy(input_filename=r"E:\Code\Python\Tools\Yccol\output\http.txt"):
+    working_proxies = read_test(input_filename)
+    if not working_proxies:
+        print("No working proxies found.")
+        return None
+    choose_proxy = random.choice(working_proxies)
+    print(f"Randomly selected available proxy: {choose_proxy}")
+
+    # proxies = {"http": choose_proxy, "https": choose_proxy}
+    # return proxies
+
+    return choose_proxy
+
+
+if __name__ == "__main__":
+    pwd = os.path.dirname(os.path.abspath(__file__))
+    read_test()
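The new `read_proxy.py` validates every proxy in a plain-text list concurrently: one thread per proxy issues a 5-second request to http://httpbin.org/ip, responders are collected through a thread-safe `Queue`, and `get_valid_proxy` returns one survivor at random. A sketch of how the module is meant to be driven (the `proxies.txt` path is a placeholder, not the module's default):

```python
from oafuncs.oa_down.read_proxy import get_valid_proxy, read_test

# "proxies.txt": one proxy per line, e.g. 1.2.3.4:8080 or http://1.2.3.4:8080
working = read_test("proxies.txt")      # every proxy that answered within 5 s
proxy = get_valid_proxy("proxies.txt")  # one working proxy chosen at random, or None
if proxy:
    print(f"Routing downloads through {proxy}")
```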
oafuncs-0.0.98.6.dist-info/RECORD:

@@ -21,11 +21,11 @@ oafuncs/_script/plot_dataset.py,sha256=zkSEnO_-biyagorwWXPoihts_cwuvripzEt-l9bHJ
 oafuncs/_script/replace_file_content.py,sha256=eCFZjnZcwyRvy6b4mmIfBna-kylSZTyJRfgXd6DdCjk,5982
 oafuncs/oa_down/User_Agent-list.txt,sha256=pHaMlElMvZ8TG4vf4BqkZYKqe0JIGkr4kCN0lM1Y9FQ,514295
 oafuncs/oa_down/__init__.py,sha256=kRX5eTUCbAiz3zTaQM1501paOYS_3fizDN4Pa0mtNUA,585
-oafuncs/oa_down/hycom_3hourly.py,sha256=
-oafuncs/oa_down/
-oafuncs/oa_down/hycom_3hourly_20250416.py,sha256=X_fcV_xeJtYD-PB3GRdFWNHMPOVUYgh17MgWOtrdIbc,53493
+oafuncs/oa_down/hycom_3hourly.py,sha256=zU7eNFyg5oa-5MVrCeCVyZfoYHukY7MA0-DmvCj5yIk,54723
+oafuncs/oa_down/hycom_3hourly_proxy.py,sha256=1eaoJGI_m-7w4ZZ3n7NGxkZaeFdsm0d3U-hyw8RFNbc,54563
 oafuncs/oa_down/idm.py,sha256=4z5IvgfTyIKEI1kOtqXZwN7Jnfjwp6qDBOIoVyOLp0I,1823
 oafuncs/oa_down/literature.py,sha256=2bF9gSKQbzcci9LcKE81j8JEjIJwON7jbwQB3gDDA3E,11331
+oafuncs/oa_down/read_proxy.py,sha256=f5YidVA2ISRFNm-pgRsb_Omj0dySevrhcVoH6Y125UU,3237
 oafuncs/oa_down/test_ua.py,sha256=l8MCD6yU2W75zRPTDKUZTJhCWNF9lfk-MiSFqAqKH1M,1398
 oafuncs/oa_down/user_agent.py,sha256=TsPcAxFmMTYAEHRFjurI1bQBJfDhcA70MdHoUPwQmks,785
 oafuncs/oa_model/__init__.py,sha256=__ImltHkP1bSsIpsmKpDE8QwwA-2Z8K7mZUHGGcRdro,484
@@ -37,8 +37,8 @@ oafuncs/oa_sign/__init__.py,sha256=QKqTFrJDFK40C5uvk48GlRRbGFzO40rgkYwu6dYxatM,5
 oafuncs/oa_sign/meteorological.py,sha256=8091SHo2L8kl4dCFmmSH5NGVHDku5i5lSiLEG5DLnOQ,6489
 oafuncs/oa_sign/ocean.py,sha256=xrW-rWD7xBWsB5PuCyEwQ1Q_RDKq2KCLz-LOONHgldU,5932
 oafuncs/oa_sign/scientific.py,sha256=a4JxOBgm9vzNZKpJ_GQIQf7cokkraV5nh23HGbmTYKw,5064
-oafuncs-0.0.98.
-oafuncs-0.0.98.
-oafuncs-0.0.98.
-oafuncs-0.0.98.
-oafuncs-0.0.98.
+oafuncs-0.0.98.6.dist-info/licenses/LICENSE.txt,sha256=rMtLpVg8sKiSlwClfR9w_Dd_5WubTQgoOzE2PDFxzs4,1074
+oafuncs-0.0.98.6.dist-info/METADATA,sha256=g6aPVBYo1PEpyW-4-0o1HgXZiNJZKGg1p3YEP2skSAY,4242
+oafuncs-0.0.98.6.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+oafuncs-0.0.98.6.dist-info/top_level.txt,sha256=bgC35QkXbN4EmPHEveg_xGIZ5i9NNPYWqtJqaKqTPsQ,8
+oafuncs-0.0.98.6.dist-info/RECORD,,
|