oafuncs 0.0.59__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
oafuncs/oa_python.py ADDED
@@ -0,0 +1,106 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-10-11 21:02:07
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-11 21:47:19
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_python.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+ import os
17
+
18
+ __all__ = ['install_lib', 'upgrade_lib']
19
+
20
def install_lib(libs=None, python_exe='python'):
    '''
    Install Python libraries with pip, skipping any that are already installed.

    Parameters
    ----------
    libs : list of str, optional
        Library names to install. When None, a default list of commonly used
        scientific / oceanographic packages is installed.
    python_exe : str
        Python interpreter to invoke. On Windows you can copy python.exe to
        e.g. python312.exe and pass python_exe='python312' to target that
        specific interpreter.
    '''
    os.system(python_exe + " -m ensurepip")
    os.system(python_exe + " -m pip install --upgrade pip")
    if libs is None:
        libs = [
            # "OAFuncs",  # this package itself; not handled here to avoid errors
            "requests",  # web requests
            "xlwt",  # excel files
            "xlrd",  # excel files
            "openpyxl",  # excel files
            "netCDF4",  # nc files
            "numpy",  # arrays
            "pandas",  # tabular data
            "xarray",  # labelled arrays
            "scipy",  # scientific computing
            # "scikit-learn",  # machine learning
            "matplotlib",  # plotting
            # "seaborn",
            "imageio",  # images
            # "pylustrator",  # plotting
            "Cartopy",  # plotting; supports python 3.11 and installs via pip
            "seawater",  # ocean calculations
            "cmaps",  # colormaps
            "colorcet",  # colormaps
            "cmasher",  # colormaps
            "tqdm",  # progress bars
            # "taichi",  # acceleration
            "icecream",  # debug printing
            # "pyperclip",  # system clipboard
            "rich",  # rich terminal output
            # "stratify",  # vertical interpolation of atmosphere/ocean data
            "dask",  # parallel computing
            "bs4",  # web scraping
            "pathlib",  # paths
            "opencv-contrib-python",  # image processing
            # "pydap",  # network data download via xarray
            "gsw",  # ocean calculations
            "global_land_mask",  # land/ocean mask
            # "cfgrib",  # grib files
            # "ecmwflibs",  # grib files; both packages are needed
            "geopandas",  # vector data, shp files
            # "geopy",  # geolocation
            # "flask",  # web
            "cdsapi",  # data download (era5)
            # the following are less important
            "lxml",  # web parsing
            "keyboard",  # keyboard control
            "zhdate",  # Chinese lunar calendar
            "python-pptx",  # ppt
            "python-docx",  # word
            "ipywidgets",  # jupyter progress-bar widget
            "salem",  # map projections; partial wrf-python replacement
            "meteva",  # meteorological data processing (developed by CMA)
            "wget",  # downloads
            "pyautogui",  # mouse/keyboard automation scripts
        ]
    try:
        # Parse `pip list` output into a set of normalized package names.
        # The original code did a substring test against the raw text, which
        # could wrongly report "already installed" whenever a library name
        # happened to appear inside another package's name or a version column.
        pip_list_text = os.popen(python_exe + ' -m pip list').read()
        installed = {
            line.split()[0].lower().replace('_', '-')
            for line in pip_list_text.splitlines()[2:]  # skip the two header lines
            if line.split()
        }
        lib_num = len(libs)
        for i, lib in enumerate(libs):
            # pip treats names case-insensitively and '_' / '-' as equivalent,
            # so normalize before the membership test; skip if present.
            if lib.lower().replace('_', '-') in installed:
                print(lib, "早已安装")
                continue
            os.system(python_exe + " -m " + "pip install " + lib)
            print('-'*100)
            print("安装成功", lib, "({}/{})".format(i+1, lib_num))
            print('-'*100)
    except Exception as e:
        print("安装失败:", str(e))
95
+
96
+
97
def upgrade_lib(libs=None, python_exe='python'):
    '''
    Upgrade Python libraries with pip.

    Parameters
    ----------
    libs : list of str, optional
        Library names to upgrade. When None, every package reported by
        `pip list` is upgraded.
    python_exe : str
        Python interpreter to invoke (see install_lib).
    '''
    if libs is None:
        # Parse package names out of the `pip list` table, skipping the two
        # header lines. The original code assigned the raw text to `libs`,
        # so the loop below iterated over single *characters* and effectively
        # ran `pip install --upgrade <char>` once per character.
        pip_list_text = os.popen(python_exe + ' -m pip list').read()
        libs = [line.split()[0]
                for line in pip_list_text.splitlines()[2:]
                if line.split()]
    try:
        for lib in libs:
            os.system(python_exe + " -m " + "pip install --upgrade " + lib)
        print("升级成功")
    except Exception as e:
        print("升级失败:", str(e))
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-09-17 16:09:20
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-14 18:12:12
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\__init__.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+ # from .love_ocean import sign as love_ocean
17
+ # from .meteorological_home import sign as meteorological_home
18
+
19
+ from .ocean import *
20
+ from .meteorological import *
21
+ from .scientific import *
@@ -0,0 +1,175 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-10-14 16:14:50
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-14 18:38:57
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\meteorological.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+
17
+ import warnings
18
+ from rich import print
19
+ from bs4 import BeautifulSoup
20
+ import requests
21
+ import time
22
+ import hashlib
23
+ warnings.filterwarnings("ignore")
24
+
25
+ __all__ = ['sign_in_meteorological_home']
26
+
27
def sign_in_meteorological_home(email, password):
    '''
    Daily sign-in for the "Meteorological Home" forum: http://bbs.06climate.com/

    email: str, account email for the forum
    password: str, account password (plain text; it is MD5-hashed before posting)

    Flow: log in (scraping the dynamic loginhash/formhash tokens), perform the
    daily check-in plugin request, then print account/sign-in statistics.
    All network calls share one requests.Session `s` defined at the bottom.
    '''
    def get_login_hash():
        # Scrape the dynamic `loginhash` token from the login form's action URL.
        url = 'http://bbs.06climate.com/member.php?mod=logging&action=login'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        login_hash = soup.find('form', attrs={'name': 'login'})[
            'action'].split('loginhash=')[1]
        return login_hash

    def get_login_formhash():
        # Fetch the anti-CSRF `formhash` hidden input from the login page.
        url = 'http://bbs.06climate.com/member.php?mod=logging&action=login'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        formhash = soup.find('input', attrs={'name': 'formhash'})['value']
        return formhash

    def get_check_formhash():
        # Fetch the `formhash` token from the forum home page (used by check-in).
        url = 'http://bbs.06climate.com/'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        formhash = soup.find('input', attrs={'name': 'formhash'})['value']
        return formhash

    def write_response(response, default_path=r'F:\response_气象家园.txt'):
        # Debug helper: dump a timestamped copy of a response body to disk.
        # NOTE(review): currently unused; kept for manual debugging.
        with open(default_path, 'w', encoding='utf-8') as f:
            f.write('-'*350+'\n')
            f.write(time.strftime(
                '%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
            f.write(response.text)
            f.write('-'*350+'\n')

    def login():
        # Post the login form; returns the session cookies as a dict on
        # success, or None on failure (only a message is printed).
        url = 'http://bbs.06climate.com/member.php?'
        # The site expects the password as its MD5 hex digest
        # (e.g. 216fc900fb57c27dd3c5e3dfbcac1849), so hash it in place.
        mydata['password'] = hashlib.md5(
            mydata['password'].encode()).hexdigest()
        credentials = {
            'password': mydata['password'],
        }
        # Log in by email (index 1) rather than by username.
        choose_login_ways = ['username', 'email']
        choose_login = choose_login_ways[1]
        credentials['selecti'] = choose_login_ways.index(choose_login)
        credentials['username'] = mydata[choose_login]
        # NOTE(review): `credentials` is built but never sent; the actual
        # payload is `from_data` below — confirm whether it can be removed.
        query_params = {
            'mod': 'logging',
            'action': 'login',
            'loginsubmit': 'yes',
            'loginhash': get_login_hash(),
            'inajax': '1',
        }
        from_data = {
            'formhash': get_login_formhash(),
            'referer': 'http://bbs.06climate.com/',
            'loginfield': choose_login,
            'username': mydata[choose_login],
            'password': mydata['password'],
            'questionid': '0',
            'answer': '',
        }
        head = {
            'Host': 'bbs.06climate.com',
            'Origin': 'http://bbs.06climate.com',
            'Referer': 'http://bbs.06climate.com/member.php?mod=logging&action=login',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
        }
        response = s.post(url, params=query_params,
                          data=from_data, headers=head)
        # The success page contains a welcome ('欢迎') message.
        if '欢迎' in response.text:
            print(' [bold green]登录成功')
            try:
                rescookie = response.cookies
                cookie = requests.utils.dict_from_cookiejar(rescookie)
                return cookie
            except Exception as e:
                print('cookie 获取失败:', str(e))
        else:
            print(' [bold red]登录失败')

    def check_in():
        # Trigger the dsu_amupper daily sign-in plugin via its AJAX endpoint.
        url = 'http://bbs.06climate.com/plugin.php?'
        query_params = {
            'id': 'dsu_amupper',
            'ppersubmit': 'true',
            'formhash': get_check_formhash(),
            'infloat': 'yes',
            'handlekey': 'dsu_amupper',
            'inajax': '1',
            'ajaxtarget': 'fwin_content_dsu_amupper'
        }
        head = {'X-Requested-With': 'XMLHttpRequest'}
        # `cookie` is the dict returned by login(); None when login failed.
        if cookie is not None:
            s.cookies.update(cookie)
        response = s.get(url, params=query_params, headers=head)
        response.raise_for_status()
        # Any of these phrases in the reply indicates the sign-in succeeded
        # (or had already been done today).
        success_indicators = ['累计签到', '连续签到', '特奖励',
                              '明日签到', '另奖励', '连续签到', '再连续签到', '奖励', '签到完毕']
        if any(indicator in response.text for indicator in success_indicators):
            print(' [bold green]签到完毕')
        else:
            print(' [bold red]签到失败')

    def get_info():
        # Scrape and print credit, user group and sign-in statistics from the
        # forum home page (assumes the session is logged in).
        url = 'http://bbs.06climate.com/'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        credit = soup.find('a', attrs={'id': 'extcreditmenu'}).text
        user_group = soup.find('a', attrs={'id': 'g_upmine'}).text
        # The three .times cells are, in page order: cumulative sign-ins,
        # consecutive sign-ins, last sign-in time — presumed from usage here;
        # confirm against the live page if the layout changes.
        cumulate = soup.select('.pperwbm .times')[0].text
        continuous = soup.select('.pperwbm .times')[1].text
        last_sign = soup.select('.pperwbm .times')[2].text
        info = {credit.split(': ')[0]: credit.split(
            ':')[1], user_group.split(': ')[0]: user_group.split(':')[1], '累计签到': cumulate+'次', '连续签到': continuous+'次', '上次签到': last_sign}

        print('[bold blue]-----------签到信息-----------')
        for k, v in info.items():
            # Normalize the scraped labels/values for display.
            if '积分' in k:
                k = '当前积分'
                v = v.split(' ')[-1]
            if '用户组' in k:
                k = '现用户组'
                v = v.split(' ')[-1]
            print(f'[bold blue]{k}: [bold green]{v}')
        print('[bold blue]------------------------------')

    # Shared state for the nested helpers above; do not rename the keys —
    # they match the forum's form field semantics.
    mydata = {'username': None,
              'email': email, 'password': password}
    s = requests.Session()
    print('[bold purple]-----------气象家园-----------')
    cookie = login()
    check_in()
    get_info()
    s.close()
168
+
169
+
170
+
171
if __name__ == '__main__':
    # Example usage, left disabled so importing this module has no side effects:
    # email = '16031215@qq.com'
    # password = 'xxxxx'
    # sign(email=email, password=password)
    # NOTE(review): the commented example calls `sign`, which is not defined in
    # this module — presumably it should be sign_in_meteorological_home; confirm.
    pass
@@ -0,0 +1,163 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-10-14 16:59:26
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-14 18:28:11
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\ocean.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+
17
+ import hashlib
18
+ import time
19
+ import warnings
20
+
21
+ import requests
22
+ from bs4 import BeautifulSoup
23
+ from rich import print
24
+
25
+ warnings.filterwarnings("ignore")
26
+
27
+ __all__ = ['sign_in_love_ocean']
28
+
29
+
30
def sign_in_love_ocean(email, password):
    '''
    Daily check-in for the "52ocean" (吾爱海洋) forum: https://www.52ocean.cn/

    email: str, account email for the forum
    password: str, account password (plain text; it is MD5-hashed before posting)

    Flow: log in (scraping the dynamic loginhash/formhash tokens), trigger the
    zqlj_sign check-in plugin, then print check-in statistics. All network
    calls share one requests.Session `s` defined at the bottom.
    '''
    def _get_login_hash():
        # Scrape the dynamic `loginhash` token from the login form's action URL.
        url = 'https://www.52ocean.cn/member.php?'
        para_login = {'mod': 'logging', 'action': 'login', 'infloat': 'yes',
                      'handlekey': 'login', 'inajax': '1', 'ajaxtarget': 'fwin_content_login'}
        response = s.get(url, params=para_login)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        login_hash = soup.find('form', attrs={'name': 'login'})[
            'action'].split('loginhash=')[1]
        return login_hash

    def _get_login_formhash():
        # Fetch the anti-CSRF `formhash` hidden input from the login popup.
        url = 'https://www.52ocean.cn/member.php?'
        para_login = {'mod': 'logging', 'action': 'login', 'infloat': 'yes',
                      'handlekey': 'login', 'inajax': '1', 'ajaxtarget': 'fwin_content_login'}
        response = s.get(url, params=para_login)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        formhash = soup.find('input', attrs={'name': 'formhash'})['value']
        return formhash

    def _get_check_formhash():
        # Fetch the `formhash` token from the site home page (used by check-in).
        url = 'https://www.52ocean.cn/'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        formhash = soup.find('input', attrs={'name': 'formhash'})['value']
        return formhash

    def write_response(response, default_path=r'F:\response_吾爱海洋.txt'):
        # Debug helper: dump a timestamped copy of a response body to disk.
        # NOTE(review): currently unused; kept for manual debugging.
        with open(default_path, 'w', encoding='utf-8') as f:
            f.write('-'*350+'\n')
            f.write(time.strftime(
                '%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
            f.write(response.text)
            f.write('-'*350+'\n')

    def _login():
        # Post the login form; returns the session cookies as a dict on
        # success, or None on failure (only a message is printed).
        url = 'https://www.52ocean.cn/member.php?'
        # The site expects the password as its MD5 hex digest; hash in place.
        mydata['password'] = hashlib.md5(
            mydata['password'].encode()).hexdigest()
        credentials = {
            'password': mydata['password'],
        }
        # Log in by email (index 1) rather than by username.
        choose_login_ways = ['username', 'email']
        choose_login = choose_login_ways[1]
        credentials['selecti'] = choose_login_ways.index(choose_login)
        credentials['username'] = mydata[choose_login]
        # NOTE(review): `credentials` is built but never sent; the actual
        # payload is `from_data` below — confirm whether it can be removed.
        query_params = {
            'mod': 'logging',
            'action': 'login',
            'loginsubmit': 'yes',
            'handlekey': 'login',
            'loginhash': _get_login_hash(),
            'inajax': '1',
        }
        from_data = {
            'formhash': _get_login_formhash(),
            'referer': 'https://www.52ocean.cn/',
            'loginfield': choose_login,
            'username': mydata[choose_login],
            'password': mydata['password'],
            'questionid': '0',
            'answer': '',
        }
        head = {
            'Origin': 'https://www.52ocean.cn',
            'Referer': 'https://www.52ocean.cn/member.php?',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36'
        }
        response = s.post(url, params=query_params,
                          data=from_data, headers=head)
        # The success page contains a welcome ('欢迎') message.
        if '欢迎' in response.text:
            print(' [bold green]登录成功')
            try:
                rescookie = response.cookies
                cookie = requests.utils.dict_from_cookiejar(rescookie)
                return cookie
            except Exception as e:
                print('cookie 获取失败:', str(e))
        else:
            print(' [bold red]登录失败')

    def _check_in():
        # Trigger the zqlj_sign daily check-in plugin; the `sign` query
        # parameter carries the formhash token scraped from the home page.
        url = 'https://www.52ocean.cn/plugin.php?id=zqlj_sign'
        query_params = {
            'sign': _get_check_formhash(),
        }
        head = {'X-Requested-With': 'XMLHttpRequest'}
        # `cookie` is the dict returned by _login(); None when login failed.
        if cookie is not None:
            s.cookies.update(cookie)
        response = s.get(url, params=query_params, headers=head)
        response.raise_for_status()
        # Any of these phrases indicates success or an already-done check-in.
        success_indicators = ['恭喜您,打卡成功!', '今日已打卡', '已经打过卡']
        if any(indicator in response.text for indicator in success_indicators):
            print(' [bold green]打卡完毕')
        else:
            print(' [bold red]打卡失败')

    def _get_info():
        # Scrape and print the check-in statistics list from the plugin page.
        url = 'https://www.52ocean.cn/plugin.php?id=zqlj_sign'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        sign_info = soup.find('ul', class_='xl xl1').find_all('li')
        print('[bold blue]-----------打卡动态-----------')
        for info in sign_info:
            # Each <li> is a "label:value" pair (full-width colon separator).
            k, v = info.get_text().split(':')
            if '当前' in k:
                # Strip the leading "当前" prefix from the label for display.
                k = k[2:]
            print(f'[bold blue]{k}: [bold green]{v}')
        print('[bold blue]------------------------------')

    # Shared state for the nested helpers above; do not rename the keys —
    # they match the forum's form field semantics.
    mydata = {'username': None,
              'email': email, 'password': password}
    s = requests.Session()
    print('[bold purple]-----------吾爱海洋-----------')
    cookie = _login()
    _check_in()
    _get_info()
    s.close()
+ s.close()
157
+
158
+
159
if __name__ == '__main__':
    # Example usage, left disabled so importing this module has no side effects:
    # email = '16031215@qq.com'
    # password = 'xxxxx'
    # sign(email=email, password=password)
    # NOTE(review): the commented example calls `sign`, which is not defined in
    # this module — presumably it should be sign_in_love_ocean; confirm.
    pass
@@ -0,0 +1,139 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-10-14 18:29:52
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-10-14 18:57:39
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\scientific.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.11
14
+ '''
15
+
16
+
17
+ import time
18
+
19
+ import requests
20
+ from bs4 import BeautifulSoup
21
+ from rich import print
22
+
23
+ __all__ = ['sign_in_scientific_research']
24
+
25
+
26
def sign_in_scientific_research(email, password):
    '''
    Daily sign-in for the "AbleSci" (科研通) site: https://www.ablesci.com/

    email: str, account email for the site
    password: str, account password (sent as-is over the login form)

    Flow: log in (scraping the CSRF token), hit the sign-in endpoint, then
    print credit / streak statistics. All network calls share one
    requests.Session `s` defined at the bottom.
    '''
    def get_login_csrf():
        # Scrape the CSRF token from the login page's <meta name="csrf-token">.
        url = 'https://www.ablesci.com/site/login'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        csrf = soup.find('meta', attrs={'name': 'csrf-token'})['content']
        return csrf

    def write_response(response, default_path=r'F:\response_科研通.txt'):
        # Debug helper: dump a timestamped copy of a response body to disk.
        # NOTE(review): currently unused; kept for manual debugging.
        with open(default_path, 'w', encoding='utf-8') as f:
            f.write('-'*350+'\n')
            f.write(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
            f.write(response.text)
            f.write('-'*350+'\n')

    def login():
        # Post the login form; returns the session cookies as a dict on
        # success, or None on failure (only a message is printed).
        url = 'https://www.ablesci.com/site/login'
        from_data = {
            '_csrf': get_login_csrf(),
            'email': mydata['email'],
            'password': mydata['password'],
            'remember': 'on'
        }
        head = {
            'Origin': 'https://www.ablesci.com',
            'Referer': 'https://www.ablesci.com/site/login',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
            'X-Requested-With': 'XMLHttpRequest'
        }
        response = s.post(url, data=from_data, headers=head)
        # The AJAX reply contains '登录成功' ("login succeeded") on success.
        if '登录成功' in response.text:
            print(' [bold green]已登录')
            try:
                rescookie = response.cookies
                cookie = requests.utils.dict_from_cookiejar(rescookie)
                return cookie
            except Exception as e:
                print('cookie 获取失败:', str(e))
        else:
            print(' [bold red]未登录')

    def check_in():
        # Hit the sign-in endpoint first; if no success phrase comes back,
        # fall back to checking the home page for an existing streak banner
        # (covers the "already signed in today" case).
        url = 'https://www.ablesci.com/user/sign'
        # `cookie` is the dict returned by login(); None when login failed.
        if cookie is not None:
            s.cookies.update(cookie)
        response = s.get(url)
        response.raise_for_status()
        success_indicators = ['签到成功', '已连续签到', '本次获得']
        if any(indicator in response.text for indicator in success_indicators):
            print(' [bold green]已签到')
        else:
            url = 'https://www.ablesci.com/'
            response = s.get(url)
            response.raise_for_status()
            if '已连续签到' in response.text:
                print(' [bold green]已签到')
            else:
                print(' [bold red]未签到')

    def check_in_r():
        # Reverse-order variant of check_in(): look at the home page first to
        # see whether today's sign-in already happened, and only hit the
        # sign-in endpoint when it has not.
        # NOTE(review): currently unused — the bottom of the function calls
        # check_in(); confirm whether this variant should be kept.
        url = 'https://www.ablesci.com/'
        if cookie is not None:
            s.cookies.update(cookie)
        response = s.get(url)
        response.raise_for_status()
        if '已连续签到' in response.text:
            print(' [bold green]已签到')
        else:
            url = 'https://www.ablesci.com/user/sign'
            response = s.get(url)
            response.raise_for_status()
            success_indicators = ['签到成功', '已连续签到', '本次获得']
            if any(indicator in response.text for indicator in success_indicators):
                print(' [bold green]已签到')
            else:
                print(' [bold red]未签到')

    def get_info():
        # Scrape and print current credit and the consecutive sign-in streak
        # from the home page (assumes the session is logged in).
        url = 'https://www.ablesci.com/'
        response = s.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        credit = soup.find('a', attrs={'class': 'signin-points'}).text
        continuous = soup.find('span', attrs={'class': 'signin-days'}).text
        # The fixed slices strip the leading label text from the scraped
        # strings — presumed to match the current page wording; verify if
        # the site layout changes.
        info = {'积分': credit[4:], '连续签到': continuous[5:]}
        print('[bold blue]-----------签到录-----------')
        for k, v in info.items():
            if '积分' in k:
                k = '当前积分'
                v = v.split(' ')[-1]
            print(f'[bold blue]{k}: [bold green]{v}')
        print('[bold blue]----------------------------')

    # Shared state for the nested helpers above; do not rename the keys —
    # they match the site's login form field names.
    mydata = {'email': email, 'password': password}
    s = requests.Session()
    print('[bold purple]-----------科研通-----------')
    cookie = login()
    check_in()
    get_info()
    s.close()
133
+
134
+
135
if __name__ == '__main__':
    # Example usage, left disabled so importing this module has no side effects:
    # email = '16031215@qq.com'
    # password = 'xxxxx'
    # sign_in_research_connect(email, password)
    # NOTE(review): the commented example calls `sign_in_research_connect`,
    # which is not defined in this module — presumably it should be
    # sign_in_scientific_research; confirm.
    pass
@@ -0,0 +1,18 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ '''
4
+ Author: Liu Kun && 16031215@qq.com
5
+ Date: 2024-11-21 09:48:00
6
+ LastEditors: Liu Kun && 16031215@qq.com
7
+ LastEditTime: 2024-11-21 10:18:33
8
+ FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_tool\\__init__.py
9
+ Description:
10
+ EditPlatform: vscode
11
+ ComputerInfo: XPS 15 9510
12
+ SystemInfo: Windows 11
13
+ Python Version: 3.12
14
+ '''
15
+
16
+
17
+ # 会导致OAFuncs直接导入所有函数,不符合模块化设计
18
+ from .email import *