oafuncs 0.0.81__py2.py3-none-any.whl → 0.0.83__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. oafuncs/data_store/OAFuncs.png +0 -0
  2. oafuncs/oa_cmap.py +1 -0
  3. oafuncs/oa_data.py +107 -28
  4. oafuncs/oa_down/__init__.py +5 -4
  5. oafuncs/oa_down/hycom_3hourly.py +152 -35
  6. oafuncs/oa_down/user_agent.py +34 -0
  7. oafuncs/oa_draw.py +165 -103
  8. oafuncs/oa_file.py +66 -53
  9. oafuncs/oa_help.py +19 -16
  10. oafuncs/oa_nc.py +82 -114
  11. oafuncs-0.0.83.dist-info/METADATA +91 -0
  12. oafuncs-0.0.83.dist-info/RECORD +26 -0
  13. oafuncs/oa_down/test.py +0 -151
  14. oafuncs/oa_s/__init__.py +0 -23
  15. oafuncs/oa_s/oa_cmap.py +0 -163
  16. oafuncs/oa_s/oa_data.py +0 -187
  17. oafuncs/oa_s/oa_draw.py +0 -451
  18. oafuncs/oa_s/oa_file.py +0 -332
  19. oafuncs/oa_s/oa_help.py +0 -39
  20. oafuncs/oa_s/oa_nc.py +0 -410
  21. oafuncs/oa_s/oa_python.py +0 -107
  22. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/__init__.py" +0 -26
  23. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_cmap.py" +0 -163
  24. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_data.py" +0 -187
  25. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/__init__.py" +0 -20
  26. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/hycom_3hourly.py" +0 -1176
  27. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/literature.py" +0 -332
  28. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_down/test_ua.py" +0 -151
  29. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_draw.py" +0 -451
  30. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_file.py" +0 -332
  31. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_help.py" +0 -39
  32. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_nc.py" +0 -410
  33. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_python.py" +0 -107
  34. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/__init__.py" +0 -21
  35. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/meteorological.py" +0 -168
  36. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/ocean.py" +0 -158
  37. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_sign/scientific.py" +0 -139
  38. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_tool/__init__.py" +0 -18
  39. oafuncs - /321/205/320/231/320/277/321/206/320/254/320/274/oa_tool/email.py" +0 -114
  40. oafuncs-0.0.81.dist-info/METADATA +0 -918
  41. oafuncs-0.0.81.dist-info/RECORD +0 -51
  42. {oafuncs-0.0.81.dist-info → oafuncs-0.0.83.dist-info}/LICENSE.txt +0 -0
  43. {oafuncs-0.0.81.dist-info → oafuncs-0.0.83.dist-info}/WHEEL +0 -0
  44. {oafuncs-0.0.81.dist-info → oafuncs-0.0.83.dist-info}/top_level.txt +0 -0
@@ -1,168 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding=utf-8
3
- '''
4
- Author: Liu Kun && 16031215@qq.com
5
- Date: 2024-10-14 16:14:50
6
- LastEditors: Liu Kun && 16031215@qq.com
7
- LastEditTime: 2024-10-14 18:38:57
8
- FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\meteorological.py
9
- Description:
10
- EditPlatform: vscode
11
- ComputerInfo: XPS 15 9510
12
- SystemInfo: Windows 11
13
- Python Version: 3.11
14
- '''
15
-
16
-
17
- import warnings
18
- from rich import print
19
- from bs4 import BeautifulSoup
20
- import requests
21
- import time
22
- import hashlib
23
- warnings.filterwarnings("ignore")
24
-
25
- __all__ = ['sign_in_meteorological_home']
26
-
27
- def sign_in_meteorological_home(email, password):
28
- '''
29
- 气象家园:http://bbs.06climate.com/
30
- email: str, 气象家园的邮箱
31
- password: str, 气象家园的密码
32
- '''
33
- def get_login_hash():
34
- url = 'http://bbs.06climate.com/member.php?mod=logging&action=login'
35
- response = s.get(url)
36
- response.raise_for_status()
37
- soup = BeautifulSoup(response.text, 'lxml')
38
- login_hash = soup.find('form', attrs={'name': 'login'})['action'].split('loginhash=')[1]
39
- return login_hash
40
-
41
- def get_login_formhash():
42
- url = 'http://bbs.06climate.com/member.php?mod=logging&action=login'
43
- response = s.get(url)
44
- response.raise_for_status()
45
- soup = BeautifulSoup(response.text, 'lxml')
46
- formhash = soup.find('input', attrs={'name': 'formhash'})['value']
47
- return formhash
48
-
49
- def get_check_formhash():
50
- url = 'http://bbs.06climate.com/'
51
- response = s.get(url)
52
- response.raise_for_status()
53
- soup = BeautifulSoup(response.text, 'lxml')
54
- formhash = soup.find('input', attrs={'name': 'formhash'})['value']
55
- return formhash
56
-
57
- def write_response(response, default_path=r'F:\response_气象家园.txt'):
58
- with open(default_path, 'w', encoding='utf-8') as f:
59
- f.write('-'*350+'\n')
60
- f.write(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
61
- f.write(response.text)
62
- f.write('-'*350+'\n')
63
-
64
- def login():
65
- url = 'http://bbs.06climate.com/member.php?'
66
- # 登录密码需要转码为 216fc900fb57c27dd3c5e3dfbcac1849
67
- mydata['password'] = hashlib.md5(mydata['password'].encode()).hexdigest()
68
- credentials = {
69
- 'password': mydata['password'],
70
- }
71
- choose_login_ways = ['username', 'email']
72
- choose_login = choose_login_ways[1]
73
- credentials['selecti'] = choose_login_ways.index(choose_login)
74
- credentials['username'] = mydata[choose_login]
75
- query_params = {
76
- 'mod': 'logging',
77
- 'action': 'login',
78
- 'loginsubmit': 'yes',
79
- 'loginhash': get_login_hash(),
80
- 'inajax': '1',
81
- }
82
- from_data = {
83
- 'formhash': get_login_formhash(),
84
- 'referer': 'http://bbs.06climate.com/',
85
- 'loginfield': choose_login,
86
- 'username': mydata[choose_login],
87
- 'password': mydata['password'],
88
- 'questionid': '0',
89
- 'answer': '',
90
- }
91
- head = {
92
- 'Host': 'bbs.06climate.com',
93
- 'Origin': 'http://bbs.06climate.com',
94
- 'Referer': 'http://bbs.06climate.com/member.php?mod=logging&action=login',
95
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
96
- }
97
- response = s.post(url, params=query_params, data=from_data, headers=head)
98
- if '欢迎' in response.text:
99
- print(' [bold green]登录成功')
100
- try:
101
- rescookie = response.cookies
102
- cookie = requests.utils.dict_from_cookiejar(rescookie)
103
- return cookie
104
- except Exception as e:
105
- print('cookie 获取失败:', str(e))
106
- else:
107
- print(' [bold red]登录失败')
108
-
109
- def check_in():
110
- url = 'http://bbs.06climate.com/plugin.php?'
111
- query_params = {
112
- 'id': 'dsu_amupper',
113
- 'ppersubmit': 'true',
114
- 'formhash': get_check_formhash(),
115
- 'infloat': 'yes',
116
- 'handlekey': 'dsu_amupper',
117
- 'inajax': '1',
118
- 'ajaxtarget': 'fwin_content_dsu_amupper'
119
- }
120
- head = {'X-Requested-With': 'XMLHttpRequest'}
121
- if cookie is not None:
122
- s.cookies.update(cookie)
123
- response = s.get(url, params=query_params, headers=head)
124
- response.raise_for_status()
125
- success_indicators = ['累计签到', '连续签到', '特奖励', '明日签到', '另奖励', '连续签到', '再连续签到', '奖励', '签到完毕']
126
- if any(indicator in response.text for indicator in success_indicators):
127
- print(' [bold green]签到完毕')
128
- else:
129
- print(' [bold red]签到失败')
130
-
131
- def get_info():
132
- url = 'http://bbs.06climate.com/'
133
- response = s.get(url)
134
- response.raise_for_status()
135
- soup = BeautifulSoup(response.text, 'lxml')
136
- credit = soup.find('a', attrs={'id': 'extcreditmenu'}).text
137
- user_group = soup.find('a', attrs={'id': 'g_upmine'}).text
138
- cumulate = soup.select('.pperwbm .times')[0].text
139
- continuous = soup.select('.pperwbm .times')[1].text
140
- last_sign = soup.select('.pperwbm .times')[2].text
141
- info = {credit.split(': ')[0]: credit.split(':')[1], user_group.split(': ')[0]: user_group.split(':')[1], '累计签到': cumulate+'次', '连续签到': continuous+'次', '上次签到': last_sign}
142
-
143
- print('[bold blue]-----------签到信息-----------')
144
- for k, v in info.items():
145
- if '积分' in k:
146
- k = '当前积分'
147
- v = v.split(' ')[-1]
148
- if '用户组' in k:
149
- k = '现用户组'
150
- v = v.split(' ')[-1]
151
- print(f'[bold blue]{k}: [bold green]{v}')
152
- print('[bold blue]------------------------------')
153
-
154
- mydata = {'username': None, 'email': email, 'password': password}
155
- s = requests.Session()
156
- print('[bold purple]-----------气象家园-----------')
157
- cookie = login()
158
- check_in()
159
- get_info()
160
- s.close()
161
-
162
-
163
-
164
- if __name__ == '__main__':
165
- # email = '16031215@qq.com'
166
- # password = 'xxxxx'
167
- # sign(email=email, password=password)
168
- pass
@@ -1,158 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding=utf-8
3
- '''
4
- Author: Liu Kun && 16031215@qq.com
5
- Date: 2024-10-14 16:59:26
6
- LastEditors: Liu Kun && 16031215@qq.com
7
- LastEditTime: 2024-11-21 13:16:14
8
- FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_sign\\ocean.py
9
- Description:
10
- EditPlatform: vscode
11
- ComputerInfo: XPS 15 9510
12
- SystemInfo: Windows 11
13
- Python Version: 3.11
14
- '''
15
-
16
-
17
- import hashlib
18
- import time
19
- import warnings
20
-
21
- import requests
22
- from bs4 import BeautifulSoup
23
- from rich import print
24
-
25
- warnings.filterwarnings("ignore")
26
-
27
- __all__ = ['sign_in_love_ocean']
28
-
29
-
30
- def sign_in_love_ocean(email, password):
31
- '''
32
- 吾爱海洋:https://www.52ocean.cn/
33
- email: str, 吾爱海洋的邮箱
34
- password: str, 吾爱海洋的密码
35
- '''
36
- def _get_login_hash():
37
- url = 'https://www.52ocean.cn/member.php?'
38
- para_login = {'mod': 'logging', 'action': 'login', 'infloat': 'yes',
39
- 'handlekey': 'login', 'inajax': '1', 'ajaxtarget': 'fwin_content_login'}
40
- response = s.get(url, params=para_login)
41
- response.raise_for_status()
42
- soup = BeautifulSoup(response.text, 'lxml')
43
- login_hash = soup.find('form', attrs={'name': 'login'})['action'].split('loginhash=')[1]
44
- return login_hash
45
-
46
- def _get_login_formhash():
47
- url = 'https://www.52ocean.cn/member.php?'
48
- para_login = {'mod': 'logging', 'action': 'login', 'infloat': 'yes', 'handlekey': 'login', 'inajax': '1', 'ajaxtarget': 'fwin_content_login'}
49
- response = s.get(url, params=para_login)
50
- response.raise_for_status()
51
- soup = BeautifulSoup(response.text, 'lxml')
52
- formhash = soup.find('input', attrs={'name': 'formhash'})['value']
53
- return formhash
54
-
55
- def _get_check_formhash():
56
- url = 'https://www.52ocean.cn/'
57
- response = s.get(url)
58
- response.raise_for_status()
59
- soup = BeautifulSoup(response.text, 'lxml')
60
- formhash = soup.find('input', attrs={'name': 'formhash'})['value']
61
- return formhash
62
-
63
- def write_response(response, default_path=r'F:\response_吾爱海洋.txt'):
64
- with open(default_path, 'w', encoding='utf-8') as f:
65
- f.write('-'*350+'\n')
66
- f.write(time.strftime(
67
- '%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
68
- f.write(response.text)
69
- f.write('-'*350+'\n')
70
-
71
- def _login():
72
- url = 'https://www.52ocean.cn/member.php?'
73
- mydata['password'] = hashlib.md5(mydata['password'].encode()).hexdigest()
74
- credentials = {
75
- 'password': mydata['password'],
76
- }
77
- choose_login_ways = ['username', 'email']
78
- choose_login = choose_login_ways[1]
79
- credentials['selecti'] = choose_login_ways.index(choose_login)
80
- credentials['username'] = mydata[choose_login]
81
- query_params = {
82
- 'mod': 'logging',
83
- 'action': 'login',
84
- 'loginsubmit': 'yes',
85
- 'handlekey': 'login',
86
- 'loginhash': _get_login_hash(),
87
- 'inajax': '1',
88
- }
89
- from_data = {
90
- 'formhash': _get_login_formhash(),
91
- 'referer': 'https://www.52ocean.cn/',
92
- 'loginfield': choose_login,
93
- 'username': mydata[choose_login],
94
- 'password': mydata['password'],
95
- 'questionid': '0',
96
- 'answer': '',
97
- }
98
- head = {
99
- 'Origin': 'https://www.52ocean.cn',
100
- 'Referer': 'https://www.52ocean.cn/member.php?',
101
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36'
102
- }
103
- response = s.post(url, params=query_params, data=from_data, headers=head)
104
- if '欢迎' in response.text:
105
- print(' [bold green]登录成功')
106
- try:
107
- rescookie = response.cookies
108
- cookie = requests.utils.dict_from_cookiejar(rescookie)
109
- return cookie
110
- except Exception as e:
111
- print('cookie 获取失败:', str(e))
112
- else:
113
- print(' [bold red]登录失败')
114
-
115
- def _check_in():
116
- url = 'https://www.52ocean.cn/plugin.php?id=zqlj_sign'
117
- query_params = {
118
- 'sign': _get_check_formhash(),
119
- }
120
- head = {'X-Requested-With': 'XMLHttpRequest'}
121
- if cookie is not None:
122
- s.cookies.update(cookie)
123
- response = s.get(url, params=query_params, headers=head)
124
- response.raise_for_status()
125
- success_indicators = ['恭喜您,打卡成功!', '今日已打卡', '已经打过卡']
126
- if any(indicator in response.text for indicator in success_indicators):
127
- print(' [bold green]打卡完毕')
128
- else:
129
- print(' [bold red]打卡失败')
130
-
131
- def _get_info():
132
- url = 'https://www.52ocean.cn/plugin.php?id=zqlj_sign'
133
- response = s.get(url)
134
- response.raise_for_status()
135
- soup = BeautifulSoup(response.text, 'html.parser')
136
- sign_info = soup.find('ul', class_='xl xl1').find_all('li')
137
- print('[bold blue]-----------打卡动态-----------')
138
- for info in sign_info:
139
- k, v = info.get_text().split(':')
140
- if '当前' in k:
141
- k = k[2:]
142
- print(f'[bold blue]{k}: [bold green]{v}')
143
- print('[bold blue]------------------------------')
144
-
145
- mydata = {'username': None, 'email': email, 'password': password} # 不要修改关键字
146
- s = requests.Session()
147
- print('[bold purple]-----------吾爱海洋-----------')
148
- cookie = _login()
149
- _check_in()
150
- _get_info()
151
- s.close()
152
-
153
-
154
- if __name__ == '__main__':
155
- # email = '16031215@qq.com'
156
- # password = 'xxxxx'
157
- # sign(email=email, password=password)
158
- pass
@@ -1,139 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding=utf-8
3
- '''
4
- Author: Liu Kun && 16031215@qq.com
5
- Date: 2024-10-14 18:29:52
6
- LastEditors: Liu Kun && 16031215@qq.com
7
- LastEditTime: 2024-10-14 18:57:39
8
- FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_sign\\scientific.py
9
- Description:
10
- EditPlatform: vscode
11
- ComputerInfo: XPS 15 9510
12
- SystemInfo: Windows 11
13
- Python Version: 3.11
14
- '''
15
-
16
-
17
- import time
18
-
19
- import requests
20
- from bs4 import BeautifulSoup
21
- from rich import print
22
-
23
- __all__ = ['sign_in_scientific_research']
24
-
25
-
26
- def sign_in_scientific_research(email, password):
27
- '''
28
- 科研通:https://www.ablesci.com/
29
- email: str, 科研通的邮箱
30
- password: str, 科研通的密码
31
- '''
32
- def get_login_csrf():
33
- url = 'https://www.ablesci.com/site/login'
34
- response = s.get(url)
35
- response.raise_for_status()
36
- soup = BeautifulSoup(response.text, 'lxml')
37
- csrf = soup.find('meta', attrs={'name': 'csrf-token'})['content']
38
- return csrf
39
-
40
- def write_response(response, default_path=r'F:\response_科研通.txt'):
41
- with open(default_path, 'w', encoding='utf-8') as f:
42
- f.write('-'*350+'\n')
43
- f.write(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '\n')
44
- f.write(response.text)
45
- f.write('-'*350+'\n')
46
-
47
- def login():
48
- url = 'https://www.ablesci.com/site/login'
49
- from_data = {
50
- '_csrf': get_login_csrf(),
51
- 'email': mydata['email'],
52
- 'password': mydata['password'],
53
- 'remember': 'on'
54
- }
55
- head = {
56
- 'Origin': 'https://www.ablesci.com',
57
- 'Referer': 'https://www.ablesci.com/site/login',
58
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
59
- 'X-Requested-With': 'XMLHttpRequest'
60
- }
61
- response = s.post(url, data=from_data, headers=head)
62
- if '登录成功' in response.text:
63
- print(' [bold green]已登录')
64
- try:
65
- rescookie = response.cookies
66
- cookie = requests.utils.dict_from_cookiejar(rescookie)
67
- return cookie
68
- except Exception as e:
69
- print('cookie 获取失败:', str(e))
70
- else:
71
- print(' [bold red]未登录')
72
-
73
- def check_in():
74
- url = 'https://www.ablesci.com/user/sign'
75
- if cookie is not None:
76
- s.cookies.update(cookie)
77
- response = s.get(url)
78
- response.raise_for_status()
79
- success_indicators = ['签到成功', '已连续签到', '本次获得']
80
- if any(indicator in response.text for indicator in success_indicators):
81
- print(' [bold green]已签到')
82
- else:
83
- url = 'https://www.ablesci.com/'
84
- response = s.get(url)
85
- response.raise_for_status()
86
- if '已连续签到' in response.text:
87
- print(' [bold green]已签到')
88
- else:
89
- print(' [bold red]未签到')
90
-
91
- def check_in_r():
92
- # 先检查是否已经签到
93
- url = 'https://www.ablesci.com/'
94
- if cookie is not None:
95
- s.cookies.update(cookie)
96
- response = s.get(url)
97
- response.raise_for_status()
98
- if '已连续签到' in response.text:
99
- print(' [bold green]已签到')
100
- else:
101
- url = 'https://www.ablesci.com/user/sign'
102
- response = s.get(url)
103
- response.raise_for_status()
104
- success_indicators = ['签到成功', '已连续签到', '本次获得']
105
- if any(indicator in response.text for indicator in success_indicators):
106
- print(' [bold green]已签到')
107
- else:
108
- print(' [bold red]未签到')
109
-
110
- def get_info():
111
- url = 'https://www.ablesci.com/'
112
- response = s.get(url)
113
- response.raise_for_status()
114
- soup = BeautifulSoup(response.text, 'lxml')
115
- credit = soup.find('a', attrs={'class': 'signin-points'}).text
116
- continuous = soup.find('span', attrs={'class': 'signin-days'}).text
117
- info = {'积分': credit[4:], '连续签到': continuous[5:]}
118
- print('[bold blue]-----------签到录-----------')
119
- for k, v in info.items():
120
- if '积分' in k:
121
- k = '当前积分'
122
- v = v.split(' ')[-1]
123
- print(f'[bold blue]{k}: [bold green]{v}')
124
- print('[bold blue]----------------------------')
125
-
126
- mydata = {'email': email, 'password': password} # 不要修改关键字
127
- s = requests.Session()
128
- print('[bold purple]-----------科研通-----------')
129
- cookie = login()
130
- check_in()
131
- get_info()
132
- s.close()
133
-
134
-
135
- if __name__ == '__main__':
136
- # email = '16031215@qq.com'
137
- # password = 'xxxxx'
138
- # sign_in_research_connect(email, password)
139
- pass
@@ -1,18 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding=utf-8
3
- '''
4
- Author: Liu Kun && 16031215@qq.com
5
- Date: 2024-11-21 09:48:00
6
- LastEditors: Liu Kun && 16031215@qq.com
7
- LastEditTime: 2024-11-21 10:18:33
8
- FilePath: \\Python\\My_Funcs\\OAFuncs\\OAFuncs\\oa_tool\\__init__.py
9
- Description:
10
- EditPlatform: vscode
11
- ComputerInfo: XPS 15 9510
12
- SystemInfo: Windows 11
13
- Python Version: 3.12
14
- '''
15
-
16
-
17
- # 会导致OAFuncs直接导入所有函数,不符合模块化设计
18
- from .email import *
@@ -1,114 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding=utf-8
3
- '''
4
- Author: Liu Kun && 16031215@qq.com
5
- Date: 2024-11-21 09:47:41
6
- LastEditors: Liu Kun && 16031215@qq.com
7
- LastEditTime: 2024-11-29 20:54:43
8
- FilePath: \\Python\\My_Funcs\\OAFuncs\\oafuncs\\oa_tool\\email.py
9
- Description:
10
- EditPlatform: vscode
11
- ComputerInfo: XPS 15 9510
12
- SystemInfo: Windows 11
13
- Python Version: 3.12
14
- '''
15
-
16
-
17
- import random
18
- import smtplib
19
- from email.header import Header
20
- from email.mime.multipart import MIMEMultipart
21
- from email.mime.text import MIMEText
22
-
23
- from rich import print
24
-
25
- __all__ = ['send']
26
-
27
-
28
- def _email_info():
29
- email_dict = {
30
- 'liukun0312@vip.qq.com': [4, 13, -10, 2, -10, 4, -7, -8, 8, -1, 3, -2, -11, -6, -9, -7],
31
- '756866877@qq.com': [4, -2, -3, 13, 12, 8, -6, 9, -12, 13, -10, -12, -11, -12, -4, -11],
32
- '99138763@qq.com': [0, 6, 12, 2, 9, 9, 4, -1, 11, -7, -12, 6, -11, -3, -5, -11],
33
- '1031260@qq.com': [-1, -5, -9, 9, -3, 4, -8, -7, -12, -2, 0, -9, -11, -3, -7, -10],
34
- }
35
- keys = list(email_dict.keys())
36
- choose_email = random.randint(0, len(keys)-1)
37
- msg_from = keys[choose_email]
38
- return msg_from, email_dict[msg_from]
39
-
40
-
41
- def _decode_password(password):
42
- return ''.join([chr(i+109) for i in password])
43
-
44
-
45
- def _send_message(title, content, msg_to):
46
- # 1. 连接邮箱服务器
47
- con = smtplib.SMTP_SSL('smtp.qq.com', 465)
48
-
49
- # 2. 登录邮箱
50
- msg_from, password = _email_info()
51
- con.login(msg_from, _decode_password(password))
52
-
53
- # 3. 准备数据
54
- # 创建邮件对象
55
- msg = MIMEMultipart()
56
-
57
- # 设置邮件主题
58
- subject = Header(title, 'utf-8').encode()
59
- msg['Subject'] = subject
60
-
61
- # 设置邮件发送者
62
- msg['From'] = msg_from
63
-
64
- # 设置邮件接受者
65
- msg['To'] = msg_to
66
-
67
- # # 添加html内容
68
- # content = """
69
- # <h2>我是正文中的标题</h2>
70
- # <p>邮件正文描述性文字1</p>
71
- # <p>邮件正文描述性文字2</p>
72
- # <img src='https://www.baidu.com/img/bd_logo1.png'>
73
- # <center>百度图片</center>
74
- # <a href='https://www.baidu.com'>百度一下</a>
75
- # """
76
- # html = MIMEText(content, 'html', 'utf-8')
77
- # msg.attach(html)
78
-
79
- # or
80
- # content = '发送内容'
81
- msg.attach(MIMEText(content, 'plain', 'utf-8'))
82
-
83
- # 4.发送邮件
84
- con.sendmail(msg_from, msg_to, msg.as_string())
85
- con.quit()
86
-
87
- print(f'已通过{msg_from}成功向{msg_to}发送邮件!')
88
- print('发送内容为:\n{}\n\n'.format(content))
89
-
90
-
91
- def send(title='Title', content=None, send_to='16031215@qq.com'):
92
- '''
93
- Description: 发送邮件
94
-
95
- Args:
96
- title: 邮件标题
97
- content: 邮件内容
98
- send_to: 发送对象
99
-
100
- Returns:
101
- None
102
-
103
- Example:
104
- send(title='Title', content='Content', '123@qq.com')
105
- '''
106
- if content is None:
107
- # 避免发送空邮件,或有人误调用
108
- return
109
- else:
110
- _send_message(title, content, send_to)
111
-
112
-
113
- if __name__ == "__main__":
114
- send()