mns-common 1.3.9.9__py3-none-any.whl → 1.6.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mns-common might be problematic.

Files changed (65)
  1. mns_common/__init__.py +1 -0
  2. mns_common/api/akshare/__init__.py +0 -1
  3. mns_common/api/akshare/k_line_api.py +19 -2
  4. mns_common/api/akshare/stock_bid_ask_api.py +21 -14
  5. mns_common/api/akshare/stock_zb_pool.py +2 -0
  6. mns_common/api/akshare/stock_zt_pool_api.py +1 -1
  7. mns_common/api/em/gd/east_money_stock_gdfx_free_top_10_api.py +62 -7
  8. mns_common/api/em/real_time/__init__.py +1 -1
  9. mns_common/api/em/real_time/east_money_debt_api.py +168 -71
  10. mns_common/api/em/real_time/east_money_etf_api.py +165 -27
  11. mns_common/api/em/real_time/east_money_stock_a_api.py +37 -38
  12. mns_common/api/em/real_time/east_money_stock_a_v2_api.py +97 -53
  13. mns_common/api/em/real_time/east_money_stock_common_api.py +174 -0
  14. mns_common/api/em/real_time/east_money_stock_hk_api.py +252 -271
  15. mns_common/api/em/real_time/east_money_stock_hk_gtt_api.py +291 -0
  16. mns_common/api/em/real_time/east_money_stock_multi_thread_api_v3.py +154 -0
  17. mns_common/api/em/real_time/east_money_stock_us_api.py +210 -82
  18. mns_common/api/em/real_time/real_time_quotes_repeat_api.py +195 -0
  19. mns_common/api/foreign_exchange/foreign_exchange_api.py +38 -0
  20. mns_common/api/k_line/stock_k_line_data_api.py +11 -1
  21. mns_common/api/kpl/common/__init__.py +3 -2
  22. mns_common/api/kpl/common/kpl_common_api.py +35 -0
  23. mns_common/api/kpl/symbol/symbol_his_quotes_api.py +1 -1
  24. mns_common/api/kpl/theme/kpl_theme_api.py +69 -0
  25. mns_common/api/kpl/yidong/__init__.py +7 -0
  26. mns_common/api/kpl/yidong/stock_bid_yi_dong_api.py +52 -0
  27. mns_common/api/proxies/liu_guan_proxy_api.py +55 -5
  28. mns_common/api/ths/company/company_product_area_industry_index_query.py +46 -0
  29. mns_common/api/ths/company/ths_company_info_api.py +2 -1
  30. mns_common/api/ths/company/ths_company_info_web.py +159 -0
  31. mns_common/api/ths/concept/app/ths_concept_index_app.py +3 -1
  32. mns_common/api/ths/wen_cai/ths_wen_cai_api.py +1 -1
  33. mns_common/api/ths/zt/ths_stock_zt_pool_api.py +20 -1
  34. mns_common/api/ths/zt/ths_stock_zt_pool_v2_api.py +105 -29
  35. mns_common/api/ths/zt/ths_stock_zt_reason_web_api.py +100 -0
  36. mns_common/api/us/ths_us_company_info_api.py +131 -0
  37. mns_common/api/xueqiu/xue_qiu_k_line_api.py +16 -5
  38. mns_common/component/common_service_fun_api.py +28 -8
  39. mns_common/component/company/company_common_service_new_api.py +2 -0
  40. mns_common/component/cookie/cookie_enum.py +16 -0
  41. mns_common/component/cookie/cookie_info_service.py +18 -8
  42. mns_common/component/data/data_init_api.py +13 -8
  43. mns_common/component/deal/deal_service_api.py +70 -8
  44. mns_common/component/deal/deal_service_v2_api.py +167 -0
  45. mns_common/component/em/em_stock_info_api.py +12 -3
  46. mns_common/component/main_line/__init__.py +7 -0
  47. mns_common/component/main_line/main_line_zt_reason_service.py +257 -0
  48. mns_common/component/proxies/proxy_common_api.py +169 -109
  49. mns_common/component/tfp/stock_tfp_api.py +82 -12
  50. mns_common/component/us/__init__.py +7 -0
  51. mns_common/component/us/us_stock_etf_info_api.py +130 -0
  52. mns_common/constant/db_name_constant.py +75 -26
  53. mns_common/constant/extra_income_db_name.py +94 -17
  54. mns_common/constant/strategy_classify.py +72 -0
  55. mns_common/db/MongodbUtil.py +3 -0
  56. mns_common/db/MongodbUtilLocal.py +3 -0
  57. {mns_common-1.3.9.9.dist-info → mns_common-1.6.1.4.dist-info}/METADATA +1 -1
  58. {mns_common-1.3.9.9.dist-info → mns_common-1.6.1.4.dist-info}/RECORD +62 -47
  59. mns_common/api/ths/concept/web/ths_company_info_web.py +0 -163
  60. mns_common/component/qmt/qmt_buy_service.py +0 -172
  61. mns_common/component/task/real_time_data_sync_check.py +0 -97
  62. /mns_common/{component/qmt → api/foreign_exchange}/__init__.py +0 -0
  63. /mns_common/{component/task → api/kpl/theme}/__init__.py +0 -0
  64. {mns_common-1.3.9.9.dist-info → mns_common-1.6.1.4.dist-info}/WHEEL +0 -0
  65. {mns_common-1.3.9.9.dist-info → mns_common-1.6.1.4.dist-info}/top_level.txt +0 -0
@@ -12,79 +12,89 @@ import mns_common.utils.data_frame_util as data_frame_util
  from mns_common.db.MongodbUtil import MongodbUtil
  import mns_common.constant.db_name_constant as db_name_constant
  import datetime
- import requests
  import time
  from loguru import logger
  from functools import lru_cache
+ import mns_common.api.em.real_time.east_money_stock_a_api as east_money_stock_a_api
+ import threading

  mongodb_util = MongodbUtil('27017')

+ IP_POOL = 'ip_pool'
+ ONE_IP = 'one_ip'
+ query_one = {'ip_type': ONE_IP}
+ query_pool = {'ip_type': IP_POOL}
+

  def query_liu_guan_proxy_ip():
-     ip_proxy_pool = mongodb_util.find_all_data(db_name_constant.IP_PROXY_POOL)
+     ip_proxy_pool = mongodb_util.find_query_data(db_name_constant.IP_PROXY_POOL, query_one)
      return ip_proxy_pool


- def remove_proxy_ip():
-     mongodb_util.remove_data({}, db_name_constant.IP_PROXY_POOL)
+ def remove_one_proxy_ip():
+     mongodb_util.remove_data(query_one, db_name_constant.IP_PROXY_POOL)


  def check_valid(ip_proxy_pool):
      effect_time = list(ip_proxy_pool['effect_time'])[0]
-
      now_date = datetime.datetime.now()
-
      str_now_date = now_date.strftime('%Y-%m-%d %H:%M:%S')
-
      if effect_time > str_now_date:
          return True
      else:
-         remove_proxy_ip()
+         remove_one_proxy_ip()
          return False


  @lru_cache(maxsize=None)
  def get_account_cache():
-     query = {"type": "liu_guan_proxy", }
+     query = {"type": "liu_guan_proxy"}
      return mongodb_util.find_query_data(db_name_constant.STOCK_ACCOUNT_INFO, query)


  def generate_proxy_ip_api(minutes):
-     stock_account_info = get_account_cache()
-     order_id = list(stock_account_info['password'])[0]
-     secret = list(stock_account_info['account'])[0]
-     # 获取10分钟动态ip
-     ip = liu_guan_proxy_api.get_proxy_api(order_id, secret, str(60 * minutes))
-     return ip
+     try_numer = 3
+     while try_numer > 0:
+         try:
+             stock_account_info = get_account_cache()
+             order_id = list(stock_account_info['password'])[0]
+             secret = list(stock_account_info['account'])[0]
+             # 获取10分钟动态ip
+             liu_guan_ip = liu_guan_proxy_api.get_proxy_api(order_id, secret, str(60 * minutes))
+             try_numer = try_numer
+             logger.info("生成新的ip:{}", liu_guan_ip)
+             return liu_guan_ip
+         except BaseException as e:
+             logger.error("获取ip失败:{}", str(e))
+             time.sleep(1)
+             continue


  def generate_proxy_ip(minutes):
-     ip_proxy_pool = mongodb_util.find_all_data(db_name_constant.IP_PROXY_POOL)
+     ip_proxy_pool = query_liu_guan_proxy_ip()
      if data_frame_util.is_not_empty(ip_proxy_pool):
          return list(ip_proxy_pool['ip'])[0]
      else:
-         remove_proxy_ip()
+         remove_one_proxy_ip()
          now_date = datetime.datetime.now()
          # 加上分钟
          time_to_add = datetime.timedelta(minutes=minutes)
          new_date = now_date + time_to_add
          str_now_date = new_date.strftime('%Y-%m-%d %H:%M:%S')
+         try:
+             ip_proxy = generate_proxy_ip_api(minutes)

-         # 获取10分钟动态ip
-         while True:
-             ip = generate_proxy_ip_api(minutes)
-             if check_proxy(ip, timeout=2):
-                 break
-             else:
-                 time.sleep(0.5)
-         result_dict = {"_id": ip,
-                        'effect_time': str_now_date,
-                        'ip': ip}
-         result_df = pd.DataFrame(result_dict, index=[1])
+             result_dict = {"_id": ip_proxy,
+                            'ip_type': ONE_IP,
+                            'effect_time': str_now_date,
+                            'ip': ip_proxy}
+             result_df = pd.DataFrame(result_dict, index=[1])

-         mongodb_util.insert_mongo(result_df, db_name_constant.IP_PROXY_POOL)
+             mongodb_util.insert_mongo(result_df, db_name_constant.IP_PROXY_POOL)
+         except BaseException as e:
+             logger.error("获取ip失败:{}", str(e))

-         return ip
+         return ip_proxy


  def get_proxy_ip(minutes):
@@ -98,95 +108,145 @@ def get_proxy_ip(minutes):
      return generate_proxy_ip(minutes)


- def check_baidu_proxy(proxy_ip, timeout=2):
-     """
-     检测代理IP是否能访问百度
-     :param proxy_ip: 代理IP地址
-     :param proxy_port: 代理端口
-     :param timeout: 超时时间()
-     :return: (是否可用, 响应时间, 检测结果信息)
-     """
-     # 构造代理地址
+ #
+ # def check_proxy(proxy_ip):
+ #     try:
+ #         # 两秒超时
+ #         test_df = call_with_timeout(get_em_real_time_data, proxy_ip, timeout=2)
+ #         if data_frame_util.is_not_empty(test_df):
+ #             logger.info("可用代理ip:{}", proxy_ip)
+ #             return True
+ #         else:
+ #             return False
+ #     except Exception as e:
+ #         logger.error("代理ip不可用:{},{}", proxy_ip, e)
+ #         return False
+
+
+ # 查询ip池子
+ def query_liu_guan_proxy_ip_pool():
+     ip_proxy_pool = mongodb_util.find_query_data(db_name_constant.IP_PROXY_POOL, query_pool)
+     return ip_proxy_pool

-     # 设置代理参数
-     proxies = {
-         "http": proxy_ip,
-         "https": proxy_ip
-     }

-     # 模拟浏览器请求头
-     headers = {
-         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-         "Accept-Language": "zh-CN,zh;q=0.9",
-         "Connection": "keep-alive"
-     }
+ def remove_proxy_ip_pool():
+     mongodb_util.remove_data(query_pool, db_name_constant.IP_PROXY_POOL)
+
+
+ def generate_proxy_ip_pool_api(minutes, ip_num):
+     stock_account_info = get_account_cache()
+     order_id = list(stock_account_info['password'])[0]
+     secret = list(stock_account_info['account'])[0]
+     # 获取10分钟动态ip
+     ip_pool = liu_guan_proxy_api.get_proxy_pool_api(order_id, secret, str(60 * minutes), ip_num)
+     return ip_pool

-     try:
-         # 记录开始时间
-         start_time = time.time()
-
-         # 发送请求到百度
-         response = requests.get(
-             url="https://www.baidu.com",
-             proxies=proxies,
-             headers=headers,
-             timeout=timeout,
-             allow_redirects=True  # 允许重定向
-         )
-
-         # 计算响应时间
-         response_time = round((time.time() - start_time) * 1000)  # 毫秒
-         # 检查响应状态和内容
-         if response.status_code == 200:
-             # 验证是否返回百度页面
-             if "百度一下" in response.text and "baidu.com" in response.text:
-                 logger.info("代理ip可用:{},响应时间:{}", proxy_ip, response_time)
-                 return True
-             else:
-                 logger.error("代理ip不可用:{},响应时间:{}", proxy_ip, response_time)
-                 return False
+
+ def get_proxy_ip_pool(minutes, seconds, ip_num):
+     ip_proxy_pool = query_liu_guan_proxy_ip_pool()
+     if data_frame_util.is_empty(ip_proxy_pool):
+         return generate_proxy_ip_pool(minutes, seconds, ip_num)
+     else:
+         if check_valid(ip_proxy_pool):
+             ip_pool = list(ip_proxy_pool['ip_pool'])[0]
+             effect_time = list(ip_proxy_pool['effect_time'])[0]
+             result = {'ip_pool': ip_pool,
+                       'effect_time': effect_time}
+             return result
          else:
-             logger.error("代理ip不可用:{},响应时间:{},HTTP状态码异常:{}", proxy_ip, response_time, response.status_code)
-             return False
-     except requests.exceptions.ConnectTimeout:
-         logger.error("代理ip不可用:{},连接超时", proxy_ip, response_time)
-         return False
-     except requests.exceptions.ProxyError:
-         logger.error("代理ip不可用:{},代理拒绝连接", proxy_ip, response_time)
-         return False
-     except requests.exceptions.SSLError:
-         logger.error("代理ip不可用:{},SSL证书错误", proxy_ip, response_time)
-         return False
-     except requests.exceptions.RequestException as e:
-         logger.error("代理ip不可用:{},网络错误:{}", proxy_ip, str(e))
-         return False
+             # 已经失效 移除ip pool
+             remove_proxy_ip_pool()
+             # 重新生成
+             return generate_proxy_ip_pool(minutes, seconds, ip_num)
+
+
+ # seconds 有效秒数,minutes 需要减1
+ def generate_proxy_ip_pool(minutes, seconds, ip_num):
+     ip_proxy_pool = query_liu_guan_proxy_ip_pool()
+     if data_frame_util.is_not_empty(ip_proxy_pool):
+         ip_pool = list(ip_proxy_pool['ip_pool'])[0]
+         effect_time = list(ip_proxy_pool['effect_time'])[0]
+
+
+     else:
+         remove_proxy_ip_pool()
+         now_date = datetime.datetime.now()
+         # 加上分钟 少10秒
+         time_to_add = datetime.timedelta(minutes=minutes - 1, seconds=seconds)
+         new_date = now_date + time_to_add
+         effect_time = new_date.strftime('%Y-%m-%d %H:%M:%S')
+         ip_pool = generate_proxy_ip_pool_api(minutes, ip_num)
+         result_dict = {
+             "_id": [IP_POOL],
+             'ip_type': [IP_POOL],
+             'effect_time': [effect_time],
+             'ip_pool': [ip_pool]  # 每个字段都包装成列表
+         }
+         result_df = pd.DataFrame(result_dict)
+
+         mongodb_util.insert_mongo(result_df, db_name_constant.IP_PROXY_POOL)
+     result = {'ip_pool': ip_pool,
+               'effect_time': effect_time}
+     return result


- def check_proxy(proxy_ip, timeout=2):
+ def get_em_real_time_data(proxy_ip):
      proxies = {
          "http": proxy_ip,
          "https": proxy_ip
      }
-     try:
-         # 测试请求(httpbin.org 返回请求的IP)
-         response = requests.get(
-             "http://httpbin.org/ip",
-             proxies=proxies,
-             timeout=timeout  # 超时时间
-         )
-         if response.status_code == 200:
-             return True
-         else:
-             logger.error("代理ip不可用:{}", proxy_ip)
-             return False
-     except Exception as e:
-         logger.error("代理ip不可用:{},{}", proxy_ip, e)
-         return False
+     return east_money_stock_a_api.get_stock_page_data(1, proxies, 20, 10)
+
+
+ # 定义一个带超时的函数调用
+ def call_with_timeout(func, *args, timeout=2, **kwargs):
+     # 用于存储函数执行结果
+     result = None
+     exception = None
+
+     # 定义一个线程目标函数
+     def target():
+         nonlocal result, exception
+         try:
+             result = func(*args, **kwargs)
+         except Exception as e:
+             exception = e
+
+     # 创建线程并启动
+     thread = threading.Thread(target=target)
+     thread.start()
+
+     # 等待线程完成,最多等待 timeout 秒
+     thread.join(timeout)
+
+     # 如果线程仍然存活,说明函数超时了
+     if thread.is_alive():
+         raise TimeoutError(f"Function exceeded timeout of {timeout} seconds")
+
+     # 如果函数抛出了异常,重新抛出
+     if exception is not None:
+         raise exception
+     return result
+
+
+ @lru_cache(maxsize=None)
+ def query_province_and_city_info():
+     return mongodb_util.find_all_data(db_name_constant.IP_PROXY_CITY_PROVINCE)
+
+
+ def import_province_and_city():
+     # 设置文件夹路径
+     folder_path = r'E:\province-and-city.xlsx'
+     df = pd.read_excel(folder_path)
+     df['_id'] = df['cid']
+
+     mongodb_util.save_mongo(df, db_name_constant.IP_PROXY_CITY_PROVINCE)
+     return df


  if __name__ == "__main__":
-     target_ip = "112.28.228.67:35528"  # Google DNS
-     if check_proxy(target_ip, 2):
-         print(f"{target_ip} 可以访问")
-     else:
-         print(f"{target_ip} 无法访问")
+     stock_account_info_test = get_account_cache()
+     order_id_test = list(stock_account_info_test['password'])[0]
+     secret_test = list(stock_account_info_test['account'])[0]
+     # 获取10分钟动态ip
+     ip = liu_guan_proxy_api.get_proxy_api(order_id_test, secret_test, str(60 * 1))
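
For orientation, a minimal usage sketch of the reworked proxy helpers follows. It is not part of the diff: the import path assumes these two hunks belong to mns_common/component/proxies/proxy_common_api.py (the proxies module with a change of this size in the file list above), and the argument values are placeholders.

import mns_common.component.proxies.proxy_common_api as proxy_common_api

# Single dynamic IP: reuse the cached 'one_ip' document until effect_time expires,
# then regenerate via liu_guan_proxy_api and re-insert it into IP_PROXY_POOL.
proxy_ip = proxy_common_api.get_proxy_ip(10)  # minutes; placeholder value
proxies = {"http": proxy_ip, "https": proxy_ip}

# IP pool variant: cached under ip_type='ip_pool'; returns {'ip_pool': ..., 'effect_time': ...}.
pool = proxy_common_api.get_proxy_ip_pool(10, 50, 5)  # minutes, seconds, ip_num; placeholders

# call_with_timeout runs a callable in a worker thread and raises TimeoutError
# if it does not return within `timeout` seconds.
quotes_df = proxy_common_api.call_with_timeout(
    proxy_common_api.get_em_real_time_data, proxy_ip, timeout=2)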
@@ -5,7 +5,6 @@ file_path = os.path.abspath(__file__)
  end = file_path.index('mns') + 16
  project_path = file_path[0:end]
  sys.path.append(project_path)
- import akshare as ak
  import mns_common.utils.date_handle_util as date_handle_util
  from loguru import logger
  import mns_common.utils.data_frame_util as data_frame_util
@@ -15,20 +14,87 @@ import mns_common.constant.db_name_constant as db_name_constant

  mongodb_util = MongodbUtil('27017')

+ """
+ Date: 2024/4/29 15:00
+ Desc: 东方财富网-数据中心-特色数据-停复牌信息
+ https://data.eastmoney.com/tfpxx/
+ """
+
+ import pandas as pd
+ import requests
+
+
+ def stock_tfp_em(date: str = "20240426") -> pd.DataFrame:
+     """
+     东方财富网-数据中心-特色数据-停复牌信息
+     https://data.eastmoney.com/tfpxx/
+     :param date: specific date as "2020-03-19"
+     :type date: str
+     :return: 停复牌信息表
+     :rtype: pandas.DataFrame
+     """
+     url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
+     params = {
+         "sortColumns": "SUSPEND_START_DATE",
+         "sortTypes": "-1",
+         "pageSize": "500",
+         "pageNumber": "1",
+         "reportName": "RPT_CUSTOM_SUSPEND_DATA_INTERFACE",
+         "columns": "ALL",
+         "source": "WEB",
+         "client": "WEB",
+         "filter": f"""(MARKET="全部")(DATETIME='{"-".join([date[:4], date[4:6], date[6:]])}')""",
+     }
+     r = requests.get(url, params=params)
+     data_json = r.json()
+     total_page = data_json["result"]["pages"]
+     big_df = pd.DataFrame()
+     for page in range(1, total_page + 1):
+         params.update({"pageNumber": page})
+         r = requests.get(url, params=params)
+         data_json = r.json()
+         temp_df = pd.DataFrame(data_json["result"]["data"])
+         big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
+
+     big_df.reset_index(inplace=True)
+
+     big_df["SUSPEND_START_TIME"] = pd.to_datetime(big_df["SUSPEND_START_TIME"], errors="coerce").dt.date
+     big_df["SUSPEND_END_TIME"] = pd.to_datetime(
+         big_df["SUSPEND_END_TIME"], errors="coerce"
+     ).dt.date
+
+     big_df["SUSPEND_START_DATE"] = pd.to_datetime(
+         big_df["SUSPEND_START_DATE"], errors="coerce"
+     ).dt.date
+     big_df["PREDICT_RESUME_DATE"] = pd.to_datetime(
+         big_df["PREDICT_RESUME_DATE"], errors="coerce"
+     ).dt.date
+
+     big_df = big_df[['index', 'SECURITY_CODE', 'SECURITY_NAME_ABBR', 'SUSPEND_START_TIME',
+                      'SUSPEND_END_TIME', 'SUSPEND_EXPIRE', 'SUSPEND_REASON', 'TRADE_MARKET',
+                      'SUSPEND_START_DATE',
+                      'PREDICT_RESUME_DATE'
+                      ]]
+
+     return big_df
+

  def get_stock_tfp_by_day(str_day):
-     stock_tfp_em_df = ak.stock_tfp_em(date_handle_util.no_slash_date(str_day))
+     stock_tfp_em_df = stock_tfp_em(date_handle_util.no_slash_date(str_day))
      stock_tfp_em_df = stock_tfp_em_df.rename(
-         columns={'序号': 'index',
-                  '代码': 'symbol',
-                  '名称': 'name',
-                  '停牌时间': 'sus_begin_time',
-                  '停牌截止时间': 'sus_end_time',
-                  '停牌截止时间': 'sus_end_time',
-                  '停牌期限': 'sus_period',
-                  '停牌原因': 'sus_reason',
-                  '所属市场': 'market',
-                  '预计复牌时间': 'resume_time'
+         columns={'index': 'index',
+                  'SECURITY_CODE': 'symbol',
+                  'SECURITY_NAME_ABBR': 'name',
+                  'SUSPEND_START_TIME': 'sus_begin_time',
+                  'SUSPEND_END_TIME': 'sus_end_time',
+
+                  'SUSPEND_START_DATE': 'sus_begin_date',
+                  'PREDICT_RESUME_DATE': 'resume_time',
+
+                  'SUSPEND_EXPIRE': 'sus_period',
+                  'SUSPEND_REASON': 'sus_reason',
+                  'TRADE_MARKET': 'market',
+
                  })
      return stock_tfp_em_df

@@ -60,3 +126,7 @@ def get_stock_tfp_symbol_from_db(str_day):
      except BaseException as e:
          logger.error("获取停牌信息异常:{}", e)
          return ['666666']
+
+
+ if __name__ == '__main__':
+     get_stock_tfp_symbol_list_by_day('2025-06-21')
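
A brief usage sketch of the reworked suspend/resume helpers, for orientation only and not part of the diff. It assumes these hunks belong to mns_common/component/tfp/stock_tfp_api.py from the file list and that the module is importable under that path; the dates are placeholders.

import mns_common.component.tfp.stock_tfp_api as stock_tfp_api

# Raw Eastmoney suspend/resume table for one day, columns as returned by the API.
raw_df = stock_tfp_api.stock_tfp_em(date="20240426")

# Same data with the renamed columns from the diff: symbol, name, sus_begin_time,
# sus_end_time, sus_begin_date, resume_time, sus_period, sus_reason, market.
tfp_df = stock_tfp_api.get_stock_tfp_by_day('2024-04-26')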
@@ -0,0 +1,7 @@
+ import sys
+ import os
+
+ file_path = os.path.abspath(__file__)
+ end = file_path.index('mns') + 16
+ project_path = file_path[0:end]
+ sys.path.append(project_path)
@@ -0,0 +1,130 @@
+ import sys
+ import os
+
+ file_path = os.path.abspath(__file__)
+ end = file_path.index('mns') + 21
+ project_path = file_path[0:end]
+ sys.path.append(project_path)
+ from loguru import logger
+ import csv
+ import requests
+ import pandas as pd
+ import mns_common.component.em.em_stock_info_api as em_stock_info_api
+ from functools import lru_cache
+ import mns_common.utils.data_frame_util as data_frame_util
+
+
+ @lru_cache()
+ def get_us_stock_info():
+     # 东财美股列表
+     em_us_stock_info_df = em_stock_info_api.get_us_stock_info()
+     em_us_stock_info_df['symbol'] = em_us_stock_info_df['symbol'].str.replace('_', '-')
+     em_us_stock_info_df = em_us_stock_info_df.loc[em_us_stock_info_df['total_mv'] != 0]
+
+     if data_frame_util.is_not_empty(em_us_stock_info_df):
+         em_us_stock_info_df.fillna({'list_date': 10000101}, inplace=True)
+         em_us_stock_info_df = em_us_stock_info_df[['symbol', 'name', 'list_date']]
+
+     # alpha 股票名单
+     alpha_us_stock_info = get_us_alpha_stock_list()
+     alpha_us_stock_info = alpha_us_stock_info.loc[alpha_us_stock_info['assetType'] == 'Stock']
+     if data_frame_util.is_not_empty(alpha_us_stock_info):
+         alpha_us_stock_info.fillna({'list_date': '1000-01-01'}, inplace=True)
+         alpha_us_stock_info = alpha_us_stock_info[['symbol', 'name', 'list_date']]
+
+     alpha_us_stock_info['list_date'] = alpha_us_stock_info['list_date'].astype(str).str.replace('-', '').astype(int)
+
+     us_stock_result_df = pd.concat([alpha_us_stock_info, em_us_stock_info_df])
+     us_stock_result_df.drop_duplicates(subset=['symbol'], inplace=True)
+
+     return us_stock_result_df
+
+
+ @lru_cache()
+ def get_us_etf_info():
+     us_etf_info_df = em_stock_info_api.get_us_etf_info()
+     if data_frame_util.is_not_empty(us_etf_info_df):
+         us_etf_info_df.fillna({'list_date': 10000101}, inplace=True)
+         us_etf_info_df = us_etf_info_df[['symbol', 'name', 'list_date']]
+
+     # alpha ETF名单
+     alpha_us_etf_info = get_us_alpha_stock_list()
+     alpha_us_etf_info = alpha_us_etf_info.loc[alpha_us_etf_info['assetType'] == 'ETF']
+     if data_frame_util.is_not_empty(alpha_us_etf_info):
+         alpha_us_etf_info.fillna({'list_date': '1000-01-01'}, inplace=True)
+         alpha_us_etf_info = alpha_us_etf_info[['symbol', 'name', 'list_date']]
+
+     alpha_us_etf_info['list_date'] = alpha_us_etf_info['list_date'].astype(str).str.replace('-', '').astype(int)
+     us_etf_result_df = pd.concat([us_etf_info_df, alpha_us_etf_info])
+     us_etf_result_df.drop_duplicates(subset=['symbol'], inplace=True)
+
+     return us_etf_result_df
+
+
+ # 退市 https://www.alphavantage.co/query?function=LISTING_STATUS&date=2012-07-10&state=delisted&apikey=QODR3TBYB2U4M9YR
+ @lru_cache()
+ def get_us_alpha_stock_list(apikey):
+     try:
+         # replace the "demo" apikey below with your own key from https://www.alphavantage.co/support/#api-key
+         CSV_URL = 'https://www.alphavantage.co/query?function=LISTING_STATUS&apikey=' + apikey
+         with requests.Session() as s:
+             download = s.get(CSV_URL)
+             decoded_content = download.content.decode('utf-8')
+             cr = csv.reader(decoded_content.splitlines(), delimiter=',')
+             my_list = list(cr)
+             # 提取列名(第1行)
+             columns = my_list[0]
+             # 提取数据(第2行及以后)
+             values = my_list[1:]
+
+             # 转换为 DataFrame
+             df = pd.DataFrame(values, columns=columns)
+             df = df.rename(columns={'ipoDate': 'list_date'})
+             if data_frame_util.is_not_empty(df):
+                 df.to_csv(r'D:\mns\mns-common\mns_common\component\us\listing_status.csv', index=False, encoding='gbk')
+             else:
+                 df = pd.read_csv(r'D:\mns\mns-common\mns_common\component\us\listing_status.csv', encoding='utf-8')
+             return df
+     except BaseException as e:
+         logger.error("下载出现异常:{},", e)
+         df = pd.read_csv(r'D:\mns\mns-common\mns_common\component\us\listing_status.csv', encoding='utf-8')
+         df = df.rename(columns={'ipoDate': 'list_date'})
+         return df
+
+
+ def get_us_alpha_stock_de_list(apikey):
+     try:
+         # replace the "demo" apikey below with your own key from https://www.alphavantage.co/support/#api-key
+         CSV_URL = 'https://www.alphavantage.co/query?function=LISTING_STATUS&state=delisted&apikey=' + apikey
+         with requests.Session() as s:
+             download = s.get(CSV_URL)
+             decoded_content = download.content.decode('utf-8')
+             cr = csv.reader(decoded_content.splitlines(), delimiter=',')
+             my_list = list(cr)
+             # 提取列名(第1行)
+             columns = my_list[0]
+             # 提取数据(第2行及以后)
+             values = my_list[1:]
+
+             # 转换为 DataFrame
+             df = pd.DataFrame(values, columns=columns)
+             df = df.rename(columns={'ipoDate': 'list_date'})
+             if data_frame_util.is_not_empty(df):
+                 df.to_csv(r'D:\mns\mns-common\mns_common\component\us\de_list_status.csv', index=False, encoding='gbk')
+             else:
+                 df = pd.read_csv(r'D:\mns\mns-common\mns_common\component\us\de_list_status.csv', encoding='utf-8')
+             return df
+     except BaseException as e:
+         logger.error("下载出现异常:{},", e)
+         df = pd.read_csv(r'D:\mns\mns-common\mns_common\component\us\de_list_status.csv', encoding='utf-8')
+         df = df.rename(columns={'ipoDate': 'list_date'})
+         return df
+
+
+ if __name__ == '__main__':
+     # get_us_alpha_stock_de_list()
+     get_us_alpha_stock_de_list()
+     df_test = get_us_stock_info()
+     df_test.drop_duplicates(subset=['symbol'], inplace=True)
+     print(df_test)
+     get_us_alpha_stock_de_list()
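
A short usage sketch of the new US listing helpers, for orientation only and not part of the diff. The +130-line hunk above matches mns_common/component/us/us_stock_etf_info_api.py in the file list; the import path is inferred from that entry, and the Alpha Vantage key below is a placeholder you would supply yourself.

import mns_common.component.us.us_stock_etf_info_api as us_stock_etf_info_api

api_key = "YOUR_ALPHAVANTAGE_KEY"  # placeholder; see https://www.alphavantage.co/support/#api-key

# Active listings (stocks and ETFs) from the LISTING_STATUS endpoint, cached by lru_cache
# and written to a local CSV that serves as a fallback when the download fails.
listing_df = us_stock_etf_info_api.get_us_alpha_stock_list(api_key)

# Delisted symbols via the same endpoint with state=delisted.
delisted_df = us_stock_etf_info_api.get_us_alpha_stock_de_list(api_key)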