mns-common 1.5.1.7__py3-none-any.whl → 1.5.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mns-common might be problematic.
- {mns_common-1.5.1.7.dist-info → mns_common-1.5.1.8.dist-info}/METADATA +1 -1
- {mns_common-1.5.1.7.dist-info → mns_common-1.5.1.8.dist-info}/RECORD +4 -23
- mns_common/api/akshare/__init__.py +0 -7
- mns_common/api/akshare/k_line_api.py +0 -123
- mns_common/api/akshare/stock_bid_ask_api.py +0 -94
- mns_common/api/akshare/stock_dt_pool.py +0 -47
- mns_common/api/akshare/stock_zb_pool.py +0 -48
- mns_common/api/akshare/stock_zt_pool_api.py +0 -47
- mns_common/api/akshare/yjyg_sync_api.py +0 -98
- mns_common/api/em/concept/__init__.py +0 -7
- mns_common/api/em/concept/em_concept_index_api.py +0 -230
- mns_common/api/em/gd/__init__.py +0 -7
- mns_common/api/em/gd/east_money_stock_gdfx_free_top_10_api.py +0 -252
- mns_common/api/em/real_time/__init__.py +0 -7
- mns_common/api/em/real_time/east_money_debt_api.py +0 -306
- mns_common/api/em/real_time/east_money_etf_api.py +0 -374
- mns_common/api/em/real_time/east_money_stock_a_api.py +0 -303
- mns_common/api/em/real_time/east_money_stock_a_v2_api.py +0 -296
- mns_common/api/em/real_time/east_money_stock_hk_api.py +0 -337
- mns_common/api/em/real_time/east_money_stock_us_api.py +0 -234
- mns_common/api/em/real_time/real_time_quotes_repeat_api.py +0 -363
- {mns_common-1.5.1.7.dist-info → mns_common-1.5.1.8.dist-info}/WHEEL +0 -0
- {mns_common-1.5.1.7.dist-info → mns_common-1.5.1.8.dist-info}/top_level.txt +0 -0

mns_common/api/em/real_time/east_money_stock_a_api.py
@@ -1,303 +0,0 @@
-import os
-import sys
-from loguru import logger
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-from mns_common.db.MongodbUtil import MongodbUtil
-import requests
-import json
-import pandas as pd
-from concurrent.futures import ThreadPoolExecutor
-import datetime
-import mns_common.utils.data_frame_util as data_frame_util
-
-mongodb_util = MongodbUtil('27017')
-fields = ("f352,f2,f3,f5,f6,f8,f10,f11,f22,f12,f14,f15,f16,f17,"
-          "f18,f20,f21,f26,f33,f34,f35,f62,f66,f69,f72,f100,f184,f211,f212"),
-fs = "m:0 t:6,m:0 t:80,m:1 t:2,m:1 t:23,m:0 t:81 s:2048"
-
-# maximum number of rows returned
-max_number = 5800
-# minimum number of rows returned
-min_number = 5600
-# page size
-PAGE_SIZE = 100
-
-
-def get_stock_page_data(pn, proxies, page_size):
-    """
-    Fetch a single page of stock data
-    """
-    # get the current date and time
-    current_time = datetime.datetime.now()
-
-    # convert the current time to a timestamp in milliseconds
-    current_timestamp_ms = int(current_time.timestamp() * 1000)
-
-    url = "https://33.push2.eastmoney.com/api/qt/clist/get"
-    params = {
-        "cb": "jQuery1124046660442520420653_" + str(current_timestamp_ms),
-        "pn": str(pn),
-        "pz": str(page_size),  # at most 200 rows per page
-        "po": "0",
-        "np": "3",
-        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
-        "fltt": "2",
-        "invt": "2",
-        "wbp2u": "|0|0|0|web",
-        "fid": "f12",
-        "fs": fs,
-        "fields": fields,
-        "_": current_timestamp_ms
-    }
-    try:
-        if proxies is None:
-            r = requests.get(url, params)
-        else:
-            r = requests.get(url, params, proxies=proxies)
-
-        data_text = r.text
-        if pn == 1:
-            try:
-                begin_index_total = data_text.index('"total":')
-
-                end_index_total = data_text.index('"diff"')
-                global max_number
-                max_number = int(data_text[begin_index_total + 8:end_index_total - 1])
-            except Exception as e:
-                # logger.error(f"获取第{pn}页股票列表异常: {e}")
-                return pd.DataFrame()
-
-        begin_index = data_text.index('[')
-        end_index = data_text.index(']')
-        data_json = data_text[begin_index:end_index + 1]
-        data_json = json.loads(data_json)
-        if data_json is None:
-            return pd.DataFrame()
-        else:
-            result_df = pd.DataFrame(data_json)
-            result_df['page_number'] = pn
-            return result_df
-    except Exception as e:
-        logger.error("获取第{}页股票列表异常:{}", pn, str(e))
-        return pd.DataFrame()
-
-
-def all_stock_ticker_data_new(proxies) -> pd.DataFrame:
-    """
-    Fetch all stock data with a thread pool
-    """
-
-    per_page = PAGE_SIZE
-    total_pages = (max_number + per_page - 1) // per_page  # round up
-
-    # create the thread pool
-    with ThreadPoolExecutor(max_workers=10) as executor:
-        # submit one task per page
-        futures = [executor.submit(get_stock_page_data, pn, proxies, PAGE_SIZE)
-                   for pn in range(1, total_pages + 1)]
-
-        # collect the results
-        results = []
-        for future in futures:
-            result = future.result()
-            if not result.empty:
-                results.append(result)
-
-    # concatenate the data from all pages
-    if results:
-        return pd.concat(results, ignore_index=True)
-    else:
-        return pd.DataFrame()
-
-
-def get_real_time_quotes_all_stocks(proxies):
-    page_df = all_stock_ticker_data_new(proxies)
-    page_df = rename_real_time_quotes_df(page_df)
-    page_df.drop_duplicates('symbol', keep='last', inplace=True)
-    return page_df
-
-
-# real-time quotes for all stocks; f33 is wei_bi (bid-ask commission ratio)
-def rename_real_time_quotes_df(temp_df):
-    temp_df = temp_df.rename(columns={
-        "f2": "now_price",
-        "f3": "chg",
-        "f5": "volume",
-        "f6": "amount",
-        "f8": "exchange",
-        "f10": "quantity_ratio",
-        "f22": "up_speed",
-        "f11": "up_speed_05",
-        "f12": "symbol",
-        "f14": "name",
-        "f15": "high",
-        "f16": "low",
-        "f17": "open",
-        "f18": "yesterday_price",
-        "f20": "total_mv",
-        "f21": "flow_mv",
-        "f26": "list_date",
-        "f33": "wei_bi",
-        "f34": "outer_disk",
-        "f35": "inner_disk",
-        "f62": "today_main_net_inflow",
-        "f66": "super_large_order_net_inflow",
-        "f69": "super_large_order_net_inflow_ratio",
-        "f72": "large_order_net_inflow",
-        # "f78": "medium_order_net_inflow",
-        # "f84": "small_order_net_inflow",
-        "f100": "industry",
-        # "f103": "concept",
-        "f184": "today_main_net_inflow_ratio",
-        "f352": "average_price",
-        "f211": "buy_1_num",
-        "f212": "sell_1_num"
-    })
-    if data_frame_util.is_empty(temp_df):
-        return pd.DataFrame()
-    else:
-        temp_df.loc[temp_df['buy_1_num'] == '-', 'buy_1_num'] = 0
-        temp_df.loc[temp_df['sell_1_num'] == '-', 'sell_1_num'] = 0
-        temp_df.loc[temp_df['up_speed_05'] == '-', 'up_speed_05'] = 0
-        temp_df.loc[temp_df['up_speed'] == '-', 'up_speed'] = 0
-        temp_df.loc[temp_df['average_price'] == '-', 'average_price'] = 0
-        temp_df.loc[temp_df['wei_bi'] == '-', 'wei_bi'] = 0
-        temp_df.loc[temp_df['yesterday_price'] == '-', 'yesterday_price'] = 0
-        temp_df.loc[temp_df['now_price'] == '-', 'now_price'] = 0
-        temp_df.loc[temp_df['chg'] == '-', 'chg'] = 0
-        temp_df.loc[temp_df['volume'] == '-', 'volume'] = 0
-        temp_df.loc[temp_df['amount'] == '-', 'amount'] = 0
-        temp_df.loc[temp_df['exchange'] == '-', 'exchange'] = 0
-        temp_df.loc[temp_df['quantity_ratio'] == '-', 'quantity_ratio'] = 0
-        temp_df.loc[temp_df['high'] == '-', 'high'] = 0
-        temp_df.loc[temp_df['low'] == '-', 'low'] = 0
-        temp_df.loc[temp_df['open'] == '-', 'open'] = 0
-        temp_df.loc[temp_df['total_mv'] == '-', 'total_mv'] = 0
-        temp_df.loc[temp_df['flow_mv'] == '-', 'flow_mv'] = 0
-        temp_df.loc[temp_df['inner_disk'] == '-', 'inner_disk'] = 0
-        temp_df.loc[temp_df['outer_disk'] == '-', 'outer_disk'] = 0
-        temp_df.loc[temp_df['today_main_net_inflow_ratio'] == '-', 'today_main_net_inflow_ratio'] = 0
-        temp_df.loc[temp_df['today_main_net_inflow'] == '-', 'today_main_net_inflow'] = 0
-        temp_df.loc[temp_df['super_large_order_net_inflow'] == '-', 'super_large_order_net_inflow'] = 0
-        temp_df.loc[temp_df['super_large_order_net_inflow_ratio'] == '-', 'super_large_order_net_inflow_ratio'] = 0
-        temp_df.loc[temp_df['large_order_net_inflow'] == '-', 'large_order_net_inflow'] = 0
-        # temp_df.loc[temp_df['medium_order_net_inflow'] == '-', 'medium_order_net_inflow'] = 0
-        # temp_df.loc[temp_df['small_order_net_inflow'] == '-', 'small_order_net_inflow'] = 0
-
-        temp_df["list_date"] = pd.to_numeric(temp_df["list_date"], errors="coerce")
-        temp_df["wei_bi"] = pd.to_numeric(temp_df["wei_bi"], errors="coerce")
-        temp_df["average_price"] = pd.to_numeric(temp_df["average_price"], errors="coerce")
-        temp_df["yesterday_price"] = pd.to_numeric(temp_df["yesterday_price"], errors="coerce")
-        temp_df["now_price"] = pd.to_numeric(temp_df["now_price"], errors="coerce")
-        temp_df["chg"] = pd.to_numeric(temp_df["chg"], errors="coerce")
-        temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
-        temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
-        temp_df["exchange"] = pd.to_numeric(temp_df["exchange"], errors="coerce")
-        temp_df["quantity_ratio"] = pd.to_numeric(temp_df["quantity_ratio"], errors="coerce")
-        temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
-        temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
-        temp_df["open"] = pd.to_numeric(temp_df["open"], errors="coerce")
-        temp_df["total_mv"] = pd.to_numeric(temp_df["total_mv"], errors="coerce")
-        temp_df["flow_mv"] = pd.to_numeric(temp_df["flow_mv"], errors="coerce")
-        temp_df["outer_disk"] = pd.to_numeric(temp_df["outer_disk"], errors="coerce")
-        temp_df["inner_disk"] = pd.to_numeric(temp_df["inner_disk"], errors="coerce")
-        temp_df["today_main_net_inflow"] = pd.to_numeric(temp_df["today_main_net_inflow"], errors="coerce")
-        temp_df["super_large_order_net_inflow"] = pd.to_numeric(temp_df["super_large_order_net_inflow"],
-                                                                errors="coerce")
-        temp_df["super_large_order_net_inflow_ratio"] = pd.to_numeric(temp_df["super_large_order_net_inflow_ratio"],
-                                                                      errors="coerce")
-        temp_df["large_order_net_inflow"] = pd.to_numeric(temp_df["large_order_net_inflow"],
-                                                          errors="coerce")
-        # temp_df["medium_order_net_inflow"] = pd.to_numeric(temp_df["medium_order_net_inflow"],
-        #                                                    errors="coerce")
-        # temp_df["small_order_net_inflow"] = pd.to_numeric(temp_df["small_order_net_inflow"], errors="coerce")
-
-        # large-order ratio
-        temp_df['large_order_net_inflow_ratio'] = round((temp_df['large_order_net_inflow'] / temp_df['amount']) * 100,
-                                                        2)
-
-        # outer disk as a multiple of the inner disk
-        temp_df['disk_ratio'] = round((temp_df['outer_disk'] - temp_df['inner_disk']) / temp_df['inner_disk'], 2)
-        # only outer disk, no inner disk
-        temp_df.loc[temp_df["inner_disk"] == 0, ['disk_ratio']] = 1688
-        temp_df = temp_df.sort_values(by=['chg'], ascending=False)
-        return temp_df
-
-
-# northbound/southbound capital flows; northbound disclosure has been discontinued
-def get_sum_north_south_net_buy_amt():
-    # request headers
-    headers = {
-        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
-    }
-
-    # request URL
-    url = 'http://push2.eastmoney.com/api/qt/kamt/get?fields1=f1,f2,f3,f4&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64,f65,f66,f67,f68,f69,f70&ut=b2884a393a59ad640022ce1e1e78431c&deviceid=0&cb=jsonp_1622790712837&_=1622790712926'
-
-    # send the HTTP request
-    response = requests.get(url, headers=headers, params={"type": "json"})
-
-    # parse the JSON data
-    data = json.loads(response.text.lstrip('jsonp_1622790712837(').rstrip(');'))
-
-    # process the data
-
-    # unit: 10,000 CNY
-    # dayNetAmtIn     net capital inflow for the day
-    # dayAmtRemain    remaining quota for the day
-    # dayAmtThreshold daily quota limit
-    # monthNetAmtIn   net inflow for the month
-    # yearNetAmtIn    net inflow for the year
-    # allNetAmtIn     total net inflow
-    # buyAmt          buy amount for the day
-    # sellAmt         sell amount for the day
-    # buySellAmt      total buy and sell amount for the day
-    # netBuyAmt       net buy amount
-
-    # Hongkong to Shanghai
-    hk2sh = data['data']['hk2sh']
-    hk2sh_df = pd.DataFrame(hk2sh, index=[0])
-    # Hongkong to ShenZhen
-    hk2sz = data['data']['hk2sz']
-    hk2sz_df = pd.DataFrame(hk2sz, index=[0])
-
-    # Shanghai to Hongkong
-    sh2hk = data['data']['sh2hk']
-    sh2hk_df = pd.DataFrame(sh2hk, index=[0])
-
-    # ShenZhen to Hongkong
-    sz2hk = data['data']['sz2hk']
-    sz2hk_df = pd.DataFrame(sz2hk, index=[0])
-    # northbound total
-    sum_north_netBuyAmt = hk2sh_df['netBuyAmt'] + hk2sz_df['netBuyAmt']
-
-    sum_south_netBuyAmt = sh2hk_df['netBuyAmt'] + sz2hk_df['netBuyAmt']
-
-    df = pd.DataFrame([[
-        list(hk2sh_df['netBuyAmt'])[0],
-        list(hk2sz_df['netBuyAmt'])[0],
-        list(sum_north_netBuyAmt)[0],
-        list(sh2hk_df['netBuyAmt'])[0],
-        list(sz2hk_df['netBuyAmt'])[0],
-        list(sum_south_netBuyAmt)[0]]],
-        columns=['sh_netBuyAmt', 'sz_netBuyAmt', 'sum_north_netBuyAmt',
-                 'sh_hk_netBuyAmt', 'sz_hk_netBuyAmt', 'sum_south_netBuyAmt'])
-
-    # print the result
-    return df
-
-
-import mns_common.component.proxies.proxy_common_api as proxy_common_api
-
-if __name__ == '__main__':
-
-    proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
-    proxies = {"https": proxy_ip,
-               "http": proxy_ip}
-    while True:
-        result = all_stock_ticker_data_new(proxies)
-        print(result)
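
The removed east_money_stock_a_api.py above wraps East Money's clist endpoint: it builds the query parameters, strips the JSONP callback wrapper from the response text, and loads the bracketed JSON array into a DataFrame. Below is a minimal sketch of just that request-and-parse step, assuming the endpoint, ut token, fs market filter and field codes still behave as in the removed code; the fetch_clist_page name and the reduced field list are ours, and the pagination, threading, proxy and MongoDB parts are omitted.

# Illustrative sketch only; parameters are taken from the removed get_stock_page_data above.
import datetime
import json

import pandas as pd
import requests


def fetch_clist_page(pn, page_size=100):
    ts_ms = int(datetime.datetime.now().timestamp() * 1000)
    params = {
        "cb": "jQuery_" + str(ts_ms),  # JSONP callback name echoed back in the response
        "pn": str(pn),                 # page number
        "pz": str(page_size),          # page size
        "po": "0",
        "np": "3",
        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
        "fltt": "2",
        "invt": "2",
        "fid": "f12",
        "fs": "m:0 t:6,m:0 t:80,m:1 t:2,m:1 t:23,m:0 t:81 s:2048",
        "fields": "f12,f14,f2,f3,f5,f6",  # symbol, name, price, chg, volume, amount
        "_": ts_ms,
    }
    text = requests.get("https://33.push2.eastmoney.com/api/qt/clist/get",
                        params=params, timeout=10).text
    # The body is JSONP; keep only the JSON array between the brackets,
    # exactly as the removed code did.
    payload = text[text.index('['):text.index(']') + 1]
    return pd.DataFrame(json.loads(payload))


# Example: first page of A-share quotes
# print(fetch_clist_page(1).head())

Callers that relied on the deleted get_real_time_quotes_all_stocks would still need the pagination, column renaming and deduplication logic shown in the hunk above.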

mns_common/api/em/real_time/east_money_stock_a_v2_api.py
@@ -1,296 +0,0 @@
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-
-import requests
-import json
-import pandas as pd
-from concurrent.futures import ThreadPoolExecutor
-import datetime
-from loguru import logger
-
-#
-# fields_02 = "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13,f14,f15,f16,f17,f18,f19,f20,f21,f22,f23,f24,f25,f26,f27,f28,f29,f30,f31,f32,f33,f34,f35,f36,f37,f38,f39,f40,f41,f42,f43,f44,f45,f46,f47,f48,f49,f50,f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64,f65,f66,f67,f68,f69,f70,f71,f72,f73,f74,f75,f76,f77,f78,f79,f80,f81,f82,f83,f84,f85,f86,f87,f88,f89,f90,f91,f92,f93,f94,f95,f96,f97,f98,f99,f100,f101,f102,f103,f104,f105,f106,f107,f108" \
-#             ",f109,f110,f111,f112,f113,f114,f115,f116,f117,f118,f119,f120,f121,f122,f123,f124,f125,f126,f127,f128,f129,f130,f131,f132,f133,f134,f135,f136,f137,f138,f139,f140,f141,f142,f143,f144,f145,f146,f147,f148,f149,f150,f151,f152,f153,f154,f155,f156,f157,f158,f159,f160,f161,f162,f163,f164,f165,f166,f167,f168,f169,f170,f171,f172,f173,f174,f175,f176,f177,f178,f179,f180,f181,f182,f183,f184,f185,f186,f187,f188,f189,f190,f191,f192,f193,f194,f195,f196,f197,f198,f199,f200" \
-#             ",f209,f210,f212,f213,f214,f215,f216,f217,f218,f219,f220,f221,f222,f223,f224,f225,f226,f227,f228,f229,f230,f231,f232,f233,f234,f235,f236,f237,f238,f239,f240,f241,f242,f243,f244,f245,f246,f247,f248,f249,f250,f251,f252,f253,f254,f255,f256,f257,f258,f259,f260,f261,f262,f263,f264,f265,f266,f267,f268,f269,f270,f271,f272,f273,f274,f275,f276,f277,f278,f279,f280,f281,f282,f283,f284,f285,f286,f287,f288,f289,f290,f291,f292,f293,f294,f295,f296,f297,f298,f299,f300" \
-#             ",f309,f310,f312,f313,f314,f315,f316,f317,f318,f319,f320,f321,f322,f323,f324,f325,f326,f327,f328,f329,f330,f331,f332,f333,f334,f335,f336,f337,f338,f339,f340,f341,f342,f343,f344,f345,f346,f347,f348,f349,f350,f351,f352,f353,f354,f355,f356,f357,f358,f359,f360,f361,f362,f363,f364,f365,f366,f367,f368,f369,f370,f371,f372,f373,f374,f375,f376,f377,f378,f379,f380,f381,f382,f383,f384,f385,f386,f387,f388,f389,f390,f391,f392,f393,f394,f395,f396,f397,f398,f399,f401"
-
-
-fields = ("f2,f3,f5,f6,f8,"
-          "f9,f10,f22,f12,f13,"
-          "f14,f15,f16,f17,f18,"
-          "f20,f21,f23,f26,f33,"
-          "f34,f35,f37,f38,f39,"
-          "f62,f64,f65,f67,f68,"
-          "f66,f69,f70,f71,f72,"
-          "f76,f77,f78,f82,f83,"
-          "f84,f102,f184,f100,f103,"
-          "f352,f191,f193,f24,f25")
-
-# maximum number of rows returned
-max_number = 5800
-# minimum number of rows returned
-min_number = 5600
-# page size
-page_number = 100
-
-
-def get_stock_page_data(pn, fields, fs, proxies):
-    """
-    Fetch a single page of stock data
-    """
-    # get the current date and time
-    current_time = datetime.datetime.now()
-
-    # convert the current time to a timestamp in milliseconds
-    current_timestamp_ms = int(current_time.timestamp() * 1000)
-
-    url = "https://13.push2.eastmoney.com/api/qt/clist/get"
-    params = {
-        "cb": "jQuery1124046660442520420653_" + str(current_timestamp_ms),
-        "pn": str(pn),
-        "pz": "10000",  # at most 200 rows per page
-        "po": "1",
-        "np": "3",
-        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
-        "fltt": "2",
-        "invt": "2",
-        "wbp2u": "|0|0|0|web",
-        "fid": "f3",
-        "fs": fs,
-        "fields": fields,
-        "_": current_timestamp_ms
-    }
-    try:
-        if proxies is None:
-            r = requests.get(url, params)
-        else:
-            r = requests.get(url, params, proxies=proxies)
-        data_text = r.text
-        begin_index = data_text.index('[')
-        end_index = data_text.index(']')
-        data_json = data_text[begin_index:end_index + 1]
-        data_json = json.loads(data_json)
-        if data_json is None:
-            return pd.DataFrame()
-        else:
-            return pd.DataFrame(data_json)
-    except Exception as e:
-        logger.error(f"获取第{pn}页股票列表异常: {e}")
-        return pd.DataFrame()
-
-
-def all_stock_ticker_data_new(fields, fs, proxies) -> pd.DataFrame:
-    """
-    Fetch all stock data with a thread pool
-    """
-
-    per_page = page_number
-    total_pages = (max_number + per_page - 1) // per_page  # round up
-
-    # create the thread pool
-    with ThreadPoolExecutor(max_workers=10) as executor:
-        # submit one task per page
-        futures = [executor.submit(get_stock_page_data, pn, fields, fs, proxies)
-                   for pn in range(1, total_pages + 1)]
-
-        # collect the results
-        results = []
-        for future in futures:
-            result = future.result()
-            if not result.empty:
-                results.append(result)
-
-    # concatenate the data from all pages
-    if results:
-        return pd.concat(results, ignore_index=True)
-    else:
-        return pd.DataFrame()
-
-
-def get_all_real_time_quotes(proxies):
-    fs = "m:0 t:6,m:0 t:80,m:1 t:2,m:1 t:23,m:0 t:81 s:2048"
-    # fetch the first page
-    page_one_df = get_stock_page_data(1, fields, fs, proxies)
-    # when the endpoint is healthy it returns more than 5600 rows
-    if page_one_df.shape[0] > min_number:
-        page_one_df = rename_real_time_quotes_df(page_one_df)
-        return page_one_df
-    else:
-        page_df = all_stock_ticker_data_new(fields, fs, proxies)
-        page_df = rename_real_time_quotes_df(page_df)
-        return page_df
-
-
-# real-time quotes for all stocks; f33 is wei_bi (bid-ask commission ratio)
-def rename_real_time_quotes_df(temp_df):
-    temp_df = temp_df.rename(columns={
-        "f2": "now_price",
-        "f3": "chg",
-        "f5": "volume",
-        "f6": "amount",
-        "f8": "exchange",
-        "f9": "pe_ttm",
-        "f10": "quantity_ratio",
-        "f22": "up_speed",
-        "f12": "symbol",
-        "f13": "sz_sh",
-        "f14": "name",
-        "f15": "high",
-        "f16": "low",
-        "f17": "open",
-        "f18": "yesterday_price",
-        "f20": "total_mv",
-        "f21": "flow_mv",
-        "f23": "pb",
-        "f26": "list_date",
-        "f33": "wei_bi",
-        "f34": "outer_disk",
-        "f35": "inner_disk",
-        "f37": "ROE",
-        "f38": "total_share",
-        "f39": "flow_share",
-        "f62": "today_main_net_inflow",
-        "f64": "super_large_order_inflow",
-        "f65": "super_large_order_outflow",
-        "f67": "super_large_order_inflow_ratio",
-        "f68": "super_large_order_outflow_ratio",
-
-        "f66": "super_large_order_net_inflow",
-        "f69": "super_large_order_net_inflow_ratio",
-        "f70": "large_order_inflow",
-        "f71": "large_order_outflow",
-        "f72": "large_order_net_inflow",
-
-        "f76": "medium_order_inflow",
-        "f77": "medium_order_outflow",
-        "f78": "medium_order_net_inflow",
-        "f82": "small_order_inflow",
-        "f83": "small_order_outflow",
-
-        "f84": "small_order_net_inflow",
-        "f102": "area",
-        "f184": "today_main_net_inflow_ratio",
-        "f100": "industry",
-        "f103": "concept",
-
-        "f352": "average_price",
-        "f191": "hk_stock_code",
-        "f193": "hk_stock_name",
-        "f24": "sixty_day_chg",
-        "f25": "now_year_chg",
-    })
-    temp_df.loc[temp_df['sixty_day_chg'] == '-', 'total_share'] = 0
-
-    temp_df.loc[temp_df['now_year_chg'] == '-', 'now_year_chg'] = 0
-    temp_df.loc[temp_df['total_share'] == '-', 'total_share'] = 0
-    temp_df.loc[temp_df['flow_share'] == '-', 'flow_share'] = 0
-    temp_df.loc[temp_df['pe_ttm'] == '-', 'pe_ttm'] = 0
-    temp_df.loc[temp_df['up_speed'] == '-', 'up_speed'] = 0
-    temp_df.loc[temp_df['average_price'] == '-', 'average_price'] = 0
-    temp_df.loc[temp_df['wei_bi'] == '-', 'wei_bi'] = 0
-    temp_df.loc[temp_df['yesterday_price'] == '-', 'yesterday_price'] = 0
-    temp_df.loc[temp_df['now_price'] == '-', 'now_price'] = 0
-    temp_df.loc[temp_df['chg'] == '-', 'chg'] = 0
-    temp_df.loc[temp_df['volume'] == '-', 'volume'] = 0
-    temp_df.loc[temp_df['amount'] == '-', 'amount'] = 0
-    temp_df.loc[temp_df['exchange'] == '-', 'exchange'] = 0
-    temp_df.loc[temp_df['quantity_ratio'] == '-', 'quantity_ratio'] = 0
-    temp_df.loc[temp_df['high'] == '-', 'high'] = 0
-    temp_df.loc[temp_df['low'] == '-', 'low'] = 0
-    temp_df.loc[temp_df['open'] == '-', 'open'] = 0
-    temp_df.loc[temp_df['total_mv'] == '-', 'total_mv'] = 0
-    temp_df.loc[temp_df['flow_mv'] == '-', 'flow_mv'] = 0
-    temp_df.loc[temp_df['inner_disk'] == '-', 'inner_disk'] = 0
-    temp_df.loc[temp_df['outer_disk'] == '-', 'outer_disk'] = 0
-    temp_df.loc[temp_df['today_main_net_inflow_ratio'] == '-', 'today_main_net_inflow_ratio'] = 0
-    temp_df.loc[temp_df['today_main_net_inflow'] == '-', 'today_main_net_inflow'] = 0
-    temp_df.loc[temp_df['super_large_order_inflow'] == '-', 'super_large_order_inflow'] = 0
-    temp_df.loc[temp_df['super_large_order_outflow'] == '-', 'super_large_order_outflow'] = 0
-    temp_df.loc[temp_df['super_large_order_net_inflow'] == '-', 'super_large_order_net_inflow'] = 0
-    temp_df.loc[temp_df['super_large_order_inflow_ratio'] == '-', 'super_large_order_inflow_ratio'] = 0
-    temp_df.loc[temp_df['super_large_order_outflow_ratio'] == '-', 'super_large_order_outflow_ratio'] = 0
-    temp_df.loc[temp_df['super_large_order_net_inflow_ratio'] == '-', 'super_large_order_net_inflow_ratio'] = 0
-
-    temp_df.loc[temp_df['large_order_net_inflow'] == '-', 'large_order_net_inflow'] = 0
-    temp_df.loc[temp_df['large_order_inflow'] == '-', 'large_order_inflow'] = 0
-    temp_df.loc[temp_df['large_order_outflow'] == '-', 'large_order_outflow'] = 0
-
-    temp_df.loc[temp_df['medium_order_net_inflow'] == '-', 'medium_order_net_inflow'] = 0
-    temp_df.loc[temp_df['medium_order_outflow'] == '-', 'medium_order_outflow'] = 0
-    temp_df.loc[temp_df['medium_order_inflow'] == '-', 'medium_order_inflow'] = 0
-
-    temp_df.loc[temp_df['small_order_inflow'] == '-', 'small_order_inflow'] = 0
-    temp_df.loc[temp_df['small_order_outflow'] == '-', 'small_order_outflow'] = 0
-    temp_df.loc[temp_df['small_order_net_inflow'] == '-', 'small_order_net_inflow'] = 0
-
-    temp_df["list_date"] = pd.to_numeric(temp_df["list_date"], errors="coerce")
-    temp_df["wei_bi"] = pd.to_numeric(temp_df["wei_bi"], errors="coerce")
-    temp_df["average_price"] = pd.to_numeric(temp_df["average_price"], errors="coerce")
-    temp_df["yesterday_price"] = pd.to_numeric(temp_df["yesterday_price"], errors="coerce")
-    temp_df["now_price"] = pd.to_numeric(temp_df["now_price"], errors="coerce")
-    temp_df["chg"] = pd.to_numeric(temp_df["chg"], errors="coerce")
-    temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
-    temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
-    temp_df["exchange"] = pd.to_numeric(temp_df["exchange"], errors="coerce")
-    temp_df["quantity_ratio"] = pd.to_numeric(temp_df["quantity_ratio"], errors="coerce")
-    temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
-    temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
-    temp_df["open"] = pd.to_numeric(temp_df["open"], errors="coerce")
-    temp_df["total_mv"] = pd.to_numeric(temp_df["total_mv"], errors="coerce")
-    temp_df["flow_mv"] = pd.to_numeric(temp_df["flow_mv"], errors="coerce")
-    temp_df["outer_disk"] = pd.to_numeric(temp_df["outer_disk"], errors="coerce")
-    temp_df["inner_disk"] = pd.to_numeric(temp_df["inner_disk"], errors="coerce")
-    temp_df["today_main_net_inflow"] = pd.to_numeric(temp_df["today_main_net_inflow"], errors="coerce")
-    temp_df["super_large_order_net_inflow"] = pd.to_numeric(temp_df["super_large_order_net_inflow"],
-                                                            errors="coerce")
-    temp_df["super_large_order_net_inflow_ratio"] = pd.to_numeric(temp_df["super_large_order_net_inflow_ratio"],
-                                                                  errors="coerce")
-    temp_df["large_order_net_inflow"] = pd.to_numeric(temp_df["large_order_net_inflow"],
-                                                      errors="coerce")
-    temp_df["medium_order_net_inflow"] = pd.to_numeric(temp_df["medium_order_net_inflow"],
-                                                       errors="coerce")
-
-    temp_df["small_order_net_inflow"] = pd.to_numeric(temp_df["small_order_net_inflow"], errors="coerce")
-
-    temp_df["pe_ttm"] = pd.to_numeric(temp_df["pe_ttm"], errors="coerce")
-    temp_df["total_share"] = pd.to_numeric(temp_df["total_share"], errors="coerce")
-    temp_df["flow_share"] = pd.to_numeric(temp_df["flow_share"], errors="coerce")
-
-    temp_df["super_large_order_inflow"] = pd.to_numeric(temp_df["super_large_order_inflow"], errors="coerce")
-    temp_df["super_large_order_outflow"] = pd.to_numeric(temp_df["super_large_order_outflow"], errors="coerce")
-
-    temp_df["super_large_order_inflow_ratio"] = pd.to_numeric(temp_df["super_large_order_inflow_ratio"],
-                                                              errors="coerce")
-    temp_df["super_large_order_outflow_ratio"] = pd.to_numeric(temp_df["super_large_order_outflow_ratio"],
-                                                               errors="coerce")
-
-    temp_df["super_large_order_net_inflow"] = pd.to_numeric(temp_df["super_large_order_net_inflow"], errors="coerce")
-    temp_df["super_large_order_net_inflow_ratio"] = pd.to_numeric(temp_df["super_large_order_net_inflow_ratio"],
-                                                                  errors="coerce")
-
-    temp_df["medium_order_inflow"] = pd.to_numeric(temp_df["medium_order_inflow"], errors="coerce")
-    temp_df["medium_order_outflow"] = pd.to_numeric(temp_df["medium_order_outflow"], errors="coerce")
-
-    temp_df["small_order_inflow"] = pd.to_numeric(temp_df["small_order_inflow"], errors="coerce")
-    temp_df["small_order_outflow"] = pd.to_numeric(temp_df["small_order_outflow"], errors="coerce")
-
-    outer_disk = temp_df['outer_disk']
-    inner_disk = temp_df['inner_disk']
-    disk_ratio = (outer_disk - inner_disk) / inner_disk
-    temp_df['disk_ratio'] = round(disk_ratio, 2)
-    return temp_df
-
-
-# example invocation
-if __name__ == "__main__":
-    number = 1
-    while True:
-        df = get_all_real_time_quotes(None)
-        zt_df = df.loc[df['wei_bi'] == 100]
-        logger.info("同步次数,{}", number)
-        number = number + 1
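
Both removed files spend most of rename_real_time_quotes_df on two repetitive passes: replacing the placeholder '-' with 0 column by column, then coercing each column with pd.to_numeric. The sketch below shows the same cleanup expressed once over a column list; the clean_numeric_columns helper and the column subset are illustrative and not part of mns-common.

# Compact equivalent of the per-column cleanup above (sketch only).
import pandas as pd


def clean_numeric_columns(df, columns):
    # East Money returns '-' for missing values; map it to 0 as the removed
    # code does, then coerce whatever remains to numbers (NaN on failure).
    df[columns] = (df[columns]
                   .replace('-', 0)
                   .apply(pd.to_numeric, errors="coerce"))
    return df


# Example:
# quotes_df = clean_numeric_columns(quotes_df, ["now_price", "chg", "volume", "amount"])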