mns-common 1.5.1.5__py3-none-any.whl → 1.5.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release: this version of mns-common might be problematic.

@@ -5,4 +5,3 @@ file_path = os.path.abspath(__file__)
  end = file_path.index('mns') + 14
  project_path = file_path[0:end]
  sys.path.append(project_path)
-
@@ -1,6 +1,6 @@
- import sys
  import os
- import time
+ import sys
+ from loguru import logger

  file_path = os.path.abspath(__file__)
  end = file_path.index('mns') + 16
@@ -12,7 +12,6 @@ import json
  import pandas as pd
  from concurrent.futures import ThreadPoolExecutor
  import datetime
- from loguru import logger
  import mns_common.utils.data_frame_util as data_frame_util

  mongodb_util = MongodbUtil('27017')
@@ -69,7 +68,7 @@ def get_stock_page_data(pn, proxies, page_size):
                  global max_number
                  max_number = int(data_text[begin_index_total + 8:end_index_total - 1])
              except Exception as e:
-                 logger.error(f"Exception fetching stock list page {pn}: {e}")
+                 # logger.error(f"Exception fetching stock list page {pn}: {e}")
                  return pd.DataFrame()

          begin_index = data_text.index('[')
@@ -79,9 +78,11 @@ def get_stock_page_data(pn, proxies, page_size):
          if data_json is None:
              return pd.DataFrame()
          else:
-             return pd.DataFrame(data_json)
+             result_df = pd.DataFrame(data_json)
+             result_df['page_number'] = pn
+             return result_df
      except Exception as e:
-         logger.error(f"Exception fetching stock list page {pn}: {e}")
+         logger.error("Exception fetching stock list page {}: {}", pn, str(e))
          return pd.DataFrame()


@@ -290,16 +291,13 @@ def get_sum_north_south_net_buy_amt():
      return df


- # example call
- if __name__ == "__main__":
+ import mns_common.component.proxies.proxy_common_api as proxy_common_api
+
+ if __name__ == '__main__':
+
+     proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+     proxies = {"https": proxy_ip,
+                "http": proxy_ip}
      while True:
-         ip_proxy_pool = mongodb_util.find_all_data('ip_proxy_pool')
-         if data_frame_util.is_not_empty(ip_proxy_pool):
-             proxy_ip = list(ip_proxy_pool['ip'])[0]
-             proxy = {
-                 "https": proxy_ip}
-             df = get_real_time_quotes_all_stocks(proxy)
-             logger.info("limit-up data,{}", 1)
-         else:
-             time.sleep(1)
-             logger.error("ip is empty")
+         result = all_stock_ticker_data_new(proxies)
+         print(result)
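
The new `__main__` block above builds a requests-style proxies mapping from a single leased address. A minimal sketch of that pattern, with a placeholder ip:port standing in for whatever generate_proxy_ip_api(1) actually returns:

    import requests

    proxy_ip = "127.0.0.1:8888"  # placeholder; assumed "ip:port" format
    proxies = {"https": proxy_ip, "http": proxy_ip}

    try:
        # both schemes are routed through the same proxy endpoint
        r = requests.get("https://example.com", proxies=proxies, timeout=5)
        print(r.status_code)
    except requests.exceptions.RequestException as e:
        print("proxy failed:", e)
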
@@ -0,0 +1,363 @@
+ import requests
+
+ import mns_common.utils.data_frame_util as data_frame_util
+ import json
+ import datetime
+
+ import threading
+ from concurrent.futures import ThreadPoolExecutor
+ import mns_common.component.proxies.proxy_common_api as proxy_common_api
+ from loguru import logger
+ import concurrent.futures
+ import pandas as pd
+ import time
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ from threading import Lock
+
+ fields = ("f352,f2,f3,f5,f6,f8,f10,f11,f22,f12,f14,f15,f16,f17,"
+           "f18,f20,f21,f26,f33,f34,f35,f62,f66,f69,f72,f100,f184,f211,f212")
+ fs = "m:0 t:6,m:0 t:80,m:1 t:2,m:1 t:23,m:0 t:81 s:2048"
+
+ # maximum number of rows returned
+ max_number = 5800
+ # minimum number of rows returned
+ min_number = 5600
+ # page size
+ PAGE_SIZE = 100
+
+
+ def get_stock_page_data_time_out(pn, proxies, page_size, time_out):
+     """
+     Fetch a single page of stock data.
+     """
+     # get the current date and time
+     current_time = datetime.datetime.now()
+
+     # convert the current time to a timestamp in milliseconds
+     current_timestamp_ms = int(current_time.timestamp() * 1000)
+
+     url = "https://33.push2.eastmoney.com/api/qt/clist/get"
+     params = {
+         "cb": "jQuery1124046660442520420653_" + str(current_timestamp_ms),
+         "pn": str(pn),
+         "pz": str(page_size),  # at most 200 rows per page
+         "po": "0",
+         "np": "3",
+         "ut": "bd1d9ddb04089700cf9c27f6f7426281",
+         "fltt": "2",
+         "invt": "2",
+         "wbp2u": "|0|0|0|web",
+         "fid": "f12",
+         "fs": fs,
+         "fields": fields,
+         "_": current_timestamp_ms
+     }
+     try:
+         if proxies is None:
+             r = requests.get(url, params, timeout=time_out)
+         else:
+             r = requests.get(url, params, proxies=proxies, timeout=time_out)
+
+         data_text = r.text
+         if pn == 1:
+             try:
+                 begin_index_total = data_text.index('"total":')
+
+                 end_index_total = data_text.index('"diff"')
+                 global max_number
+                 max_number = int(data_text[begin_index_total + 8:end_index_total - 1])
+             except Exception as e:
+                 logger.error("Exception fetching stock list page {}: {}", pn, str(e))
+                 return pd.DataFrame()
+
+         begin_index = data_text.index('[')
+         end_index = data_text.index(']')
+         data_json = data_text[begin_index:end_index + 1]
+         data_json = json.loads(data_json)
+         if data_json is None:
+             return pd.DataFrame()
+         else:
+             result_df = pd.DataFrame(data_json)
+             result_df['page_number'] = pn
+             return result_df
+     except Exception as e:
+         logger.error("Exception fetching stock list page {}: {}", pn, str(e))
+         return pd.DataFrame()
+
+
+ def repeated_acquisition_ask(per_page, max_number, time_out, max_workers=5):
+     total_pages = (max_number + per_page - 1) // per_page  # ceiling division
+     result_df = pd.DataFrame()
+     df_lock = Lock()  # lock for thread-safe DataFrame merging
+
+     def fetch_pages(page_nums):
+         """One thread handles a group of pages, reusing its proxy IP until it fails."""
+         proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+         proxies = {"https": proxy_ip, "http": proxy_ip}
+         thread_results = []  # per-thread result buffer
+
+         for page_num in page_nums:
+             while True:  # retry loop (reusing the current IP)
+                 try:
+                     page_df = get_stock_page_data_time_out(
+                         page_num, proxies, per_page, time_out
+                     )
+                     if data_frame_util.is_not_empty(page_df):
+                         logger.info("Thread {} fetched page {} (reusing IP)",
+                                     threading.get_ident(), page_num)
+                         thread_results.append(page_df)
+                         break  # on success, keep the current IP for the next page
+                     else:
+                         logger.warning("Page {} returned no data, retrying...", page_num)
+                         # empty data, switch IP
+                         proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+                         proxies = {"https": proxy_ip, "http": proxy_ip}
+                         time.sleep(0.2)
+                 except BaseException as e:
+                     logger.error("Thread {} failed on page {} [{}], switching IP and retrying",
+                                  threading.get_ident(), page_num, str(e))
+                     # exception occurred, switch IP
+                     proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+                     proxies = {"https": proxy_ip, "http": proxy_ip}
+                     time.sleep(1)
+         return thread_results
+
+     # page allocation: split pages evenly across threads
+     def split_pages(total, workers):
+         pages = list(range(1, total + 1))
+         avg = total // workers
+         remainder = total % workers
+         split = []
+         start = 0
+         for i in range(workers):
+             end = start + avg + (1 if i < remainder else 0)
+             split.append(pages[start:end])
+             start = end
+         return split
+
+     # assign the page groups
+     page_groups = split_pages(total_pages, max_workers)
+
+     # run with multiple threads
+     with ThreadPoolExecutor(max_workers=max_workers) as executor:
+         futures = [executor.submit(fetch_pages, group) for group in page_groups]
+
+         # merge the results
+         for future in as_completed(futures):
+             try:
+                 thread_dfs = future.result()
+                 if thread_dfs:
+                     with df_lock:
+                         result_df = pd.concat([result_df] + thread_dfs, ignore_index=True)
+             except Exception as e:
+                 logger.error("Failed to process thread result: {}", str(e))
+
+     return result_df
+
+
+ def repeated_acquisition_ask_sync(time_out):
+     per_page = PAGE_SIZE
+     total_pages = (max_number + per_page - 1) // per_page  # ceiling division
+     result_df = pd.DataFrame()
+     now_page = 1
+     proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+     while now_page <= total_pages:
+         proxies = {"https": proxy_ip,
+                    "http": proxy_ip}
+         try:
+             page_df = get_stock_page_data_time_out(now_page, proxies, PAGE_SIZE, time_out)
+             if data_frame_util.is_not_empty(page_df):
+                 result_df = pd.concat([page_df, result_df])
+                 logger.info("Fetched page data: {}", now_page)
+                 now_page = now_page + 1
+             else:
+                 time.sleep(0.2)
+                 proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+                 logger.info("Failed to fetch page data: {}", now_page)
+         except BaseException as e:
+             time.sleep(1)
+             proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+     return result_df
+
+
+ def repeated_acquisition_ask_async(time_out, max_number, num_threads, pages_per_thread):
+     per_page = PAGE_SIZE
+     total_pages = (max_number + per_page - 1) // per_page  # ceiling division
+     result_df = pd.DataFrame()
+
+     # lock to keep DataFrame merging thread-safe
+     df_lock = Lock()
+
+     # each thread processes its own range of pages
+     def process_page_range(start_page, end_page, thread_id):
+         nonlocal result_df
+         local_df = pd.DataFrame()
+         current_page = start_page
+         proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+
+         while current_page <= end_page and current_page <= total_pages:
+             proxies = {"https": proxy_ip, "http": proxy_ip}
+             try:
+                 page_df = get_stock_page_data_time_out(current_page, proxies, PAGE_SIZE, time_out)
+                 if data_frame_util.is_not_empty(page_df):
+                     local_df = pd.concat([local_df, page_df])
+                     logger.info("Thread {} fetched page data: {}", thread_id, current_page)
+                     current_page += 1
+                 else:
+                     time.sleep(0.2)
+                     proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+                     logger.info("Thread {} failed to fetch page data: {}", thread_id, current_page)
+             except BaseException as e:
+                 time.sleep(1)
+                 proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+                 logger.error("Thread {} hit an error on page {}: {}", thread_id, current_page, e)
+
+         with df_lock:
+             result_df = pd.concat([result_df, local_df])
+         return len(local_df)
+
+     # compute each thread's page range
+     page_ranges = []
+     for i in range(num_threads):
+         start_page = i * pages_per_thread + 1
+         end_page = (i + 1) * pages_per_thread
+         if start_page > total_pages:
+             break
+         page_ranges.append((start_page, end_page, i + 1))
+
+     # run the tasks in a thread pool
+     with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor:
+         # submit all tasks
+         futures = [
+             executor.submit(process_page_range, start, end, tid)
+             for start, end, tid in page_ranges
+         ]
+
+         # wait for all tasks to finish and collect the results
+         results = []
+         for future in concurrent.futures.as_completed(futures):
+             try:
+                 result = future.result()
+                 results.append(result)
+             except Exception as e:
+                 logger.error("Thread execution error: {}", e)
+
+     return rename_real_time_quotes_df(result_df)
+
+
+ def rename_real_time_quotes_df(temp_df):
+     temp_df = temp_df.rename(columns={
+         "f2": "now_price",
+         "f3": "chg",
+         "f5": "volume",
+         "f6": "amount",
+         "f8": "exchange",
+         "f10": "quantity_ratio",
+         "f22": "up_speed",
+         "f11": "up_speed_05",
+         "f12": "symbol",
+         "f14": "name",
+         "f15": "high",
+         "f16": "low",
+         "f17": "open",
+         "f18": "yesterday_price",
+         "f20": "total_mv",
+         "f21": "flow_mv",
+         "f26": "list_date",
+         "f33": "wei_bi",
+         "f34": "outer_disk",
+         "f35": "inner_disk",
+         "f62": "today_main_net_inflow",
+         "f66": "super_large_order_net_inflow",
+         "f69": "super_large_order_net_inflow_ratio",
+         "f72": "large_order_net_inflow",
+         # "f78": "medium_order_net_inflow",
+         # "f84": "small_order_net_inflow",
+         "f100": "industry",
+         # "f103": "concept",
+         "f184": "today_main_net_inflow_ratio",
+         "f352": "average_price",
+         "f211": "buy_1_num",
+         "f212": "sell_1_num"
+     })
+     if data_frame_util.is_empty(temp_df):
+         return pd.DataFrame()
+     else:
+         temp_df.loc[temp_df['buy_1_num'] == '-', 'buy_1_num'] = 0
+         temp_df.loc[temp_df['sell_1_num'] == '-', 'sell_1_num'] = 0
+         temp_df.loc[temp_df['up_speed_05'] == '-', 'up_speed_05'] = 0
+         temp_df.loc[temp_df['up_speed'] == '-', 'up_speed'] = 0
+         temp_df.loc[temp_df['average_price'] == '-', 'average_price'] = 0
+         temp_df.loc[temp_df['wei_bi'] == '-', 'wei_bi'] = 0
+         temp_df.loc[temp_df['yesterday_price'] == '-', 'yesterday_price'] = 0
+         temp_df.loc[temp_df['now_price'] == '-', 'now_price'] = 0
+         temp_df.loc[temp_df['chg'] == '-', 'chg'] = 0
+         temp_df.loc[temp_df['volume'] == '-', 'volume'] = 0
+         temp_df.loc[temp_df['amount'] == '-', 'amount'] = 0
+         temp_df.loc[temp_df['exchange'] == '-', 'exchange'] = 0
+         temp_df.loc[temp_df['quantity_ratio'] == '-', 'quantity_ratio'] = 0
+         temp_df.loc[temp_df['high'] == '-', 'high'] = 0
+         temp_df.loc[temp_df['low'] == '-', 'low'] = 0
+         temp_df.loc[temp_df['open'] == '-', 'open'] = 0
+         temp_df.loc[temp_df['total_mv'] == '-', 'total_mv'] = 0
+         temp_df.loc[temp_df['flow_mv'] == '-', 'flow_mv'] = 0
+         temp_df.loc[temp_df['inner_disk'] == '-', 'inner_disk'] = 0
+         temp_df.loc[temp_df['outer_disk'] == '-', 'outer_disk'] = 0
+         temp_df.loc[temp_df['today_main_net_inflow_ratio'] == '-', 'today_main_net_inflow_ratio'] = 0
+         temp_df.loc[temp_df['today_main_net_inflow'] == '-', 'today_main_net_inflow'] = 0
+         temp_df.loc[temp_df['super_large_order_net_inflow'] == '-', 'super_large_order_net_inflow'] = 0
+         temp_df.loc[temp_df['super_large_order_net_inflow_ratio'] == '-', 'super_large_order_net_inflow_ratio'] = 0
+         temp_df.loc[temp_df['large_order_net_inflow'] == '-', 'large_order_net_inflow'] = 0
+         # temp_df.loc[temp_df['medium_order_net_inflow'] == '-', 'medium_order_net_inflow'] = 0
+         # temp_df.loc[temp_df['small_order_net_inflow'] == '-', 'small_order_net_inflow'] = 0
+
+         temp_df["list_date"] = pd.to_numeric(temp_df["list_date"], errors="coerce")
+         temp_df["wei_bi"] = pd.to_numeric(temp_df["wei_bi"], errors="coerce")
+         temp_df["average_price"] = pd.to_numeric(temp_df["average_price"], errors="coerce")
+         temp_df["yesterday_price"] = pd.to_numeric(temp_df["yesterday_price"], errors="coerce")
+         temp_df["now_price"] = pd.to_numeric(temp_df["now_price"], errors="coerce")
+         temp_df["chg"] = pd.to_numeric(temp_df["chg"], errors="coerce")
+         temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
+         temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
+         temp_df["exchange"] = pd.to_numeric(temp_df["exchange"], errors="coerce")
+         temp_df["quantity_ratio"] = pd.to_numeric(temp_df["quantity_ratio"], errors="coerce")
+         temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
+         temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
+         temp_df["open"] = pd.to_numeric(temp_df["open"], errors="coerce")
+         temp_df["total_mv"] = pd.to_numeric(temp_df["total_mv"], errors="coerce")
+         temp_df["flow_mv"] = pd.to_numeric(temp_df["flow_mv"], errors="coerce")
+         temp_df["outer_disk"] = pd.to_numeric(temp_df["outer_disk"], errors="coerce")
+         temp_df["inner_disk"] = pd.to_numeric(temp_df["inner_disk"], errors="coerce")
+         temp_df["today_main_net_inflow"] = pd.to_numeric(temp_df["today_main_net_inflow"], errors="coerce")
+         temp_df["super_large_order_net_inflow"] = pd.to_numeric(temp_df["super_large_order_net_inflow"],
+                                                                 errors="coerce")
+         temp_df["super_large_order_net_inflow_ratio"] = pd.to_numeric(temp_df["super_large_order_net_inflow_ratio"],
+                                                                       errors="coerce")
+         temp_df["large_order_net_inflow"] = pd.to_numeric(temp_df["large_order_net_inflow"],
+                                                           errors="coerce")
+         # temp_df["medium_order_net_inflow"] = pd.to_numeric(temp_df["medium_order_net_inflow"],
+         #                                                    errors="coerce")
+         # temp_df["small_order_net_inflow"] = pd.to_numeric(temp_df["small_order_net_inflow"], errors="coerce")
+
+         # large-order ratio
+         temp_df['large_order_net_inflow_ratio'] = round((temp_df['large_order_net_inflow'] / temp_df['amount']) * 100,
+                                                         2)
+
+         # outer disk as a multiple of inner disk
+         temp_df['disk_ratio'] = round((temp_df['outer_disk'] - temp_df['inner_disk']) / temp_df['inner_disk'], 2)
+         # only outer disk, no inner disk
+         temp_df.loc[temp_df["inner_disk"] == 0, ['disk_ratio']] = 1688
+         temp_df = temp_df.sort_values(by=['chg'], ascending=False)
+         return temp_df
+
+
+ if __name__ == '__main__':
+
+     while True:
+         # proxy_ip = proxy_common_api.generate_proxy_ip_api(1)
+         # proxies = {"https": proxy_ip,
+         #            "http": proxy_ip}
+         time_out = 10  # Set the timeout value
+         result = repeated_acquisition_ask_async(time_out, 5800, 6, 10)
+         print(result)
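
The new module fetches roughly max_number rows in PAGE_SIZE chunks, so the page count is a ceiling division, and repeated_acquisition_ask hands each worker an even share of pages. A standalone sketch of that allocation arithmetic, using the module's default numbers (5800 rows, 100 per page, 5 workers):

    # ceiling division for the page count, then an even split across workers
    # (the first `remainder` workers each get one extra page)
    def split_pages(total, workers):
        pages = list(range(1, total + 1))
        avg, remainder = divmod(total, workers)
        groups, start = [], 0
        for i in range(workers):
            end = start + avg + (1 if i < remainder else 0)
            groups.append(pages[start:end])
            start = end
        return groups

    max_number, per_page = 5800, 100
    total_pages = (max_number + per_page - 1) // per_page  # -> 58
    groups = split_pages(total_pages, 5)
    print(total_pages, [len(g) for g in groups])  # 58 [12, 12, 12, 11, 11]
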
@@ -10,6 +10,13 @@ import requests
  import time
  import hashlib
  import json
+ from mns_common.db.MongodbUtil import MongodbUtil
+ from functools import lru_cache
+ import mns_common.constant.db_name_constant as db_name_constant
+
+ mongodb_util = MongodbUtil('27017')
+
+ import random

  # extract order
  """
@@ -26,8 +33,49 @@ import json
  """


+ @lru_cache(maxsize=None)
+ def query_province_and_city_info():
+     return mongodb_util.find_all_data(db_name_constant.IP_PROXY_CITY_PROVINCE)
+
+
  def get_proxy_api(order_id, secret, unbind_time):
+     province_and_city_info_df = query_province_and_city_info()
+     random_row = province_and_city_info_df.sample(n=1)
+     cid = str(list(random_row['cid'])[0])
+     pid = str(list(random_row['pid'])[0])
+
      num = "1"
+
+     noDuplicate = "1"
+     lineSeparator = "0"
+     singleIp = "0"
+     time_str = str(int(time.time()))  # timestamp
+
+     # compute the sign
+     txt = "orderId=" + order_id + "&" + "secret=" + secret + "&" + "time=" + time_str
+     sign = hashlib.md5(txt.encode()).hexdigest()
+     # request the URL to obtain an IP
+     url = (
+             "http://api.hailiangip.com:8422/api/getIp?type=1" + "&num=" + num + "&pid=" + pid
+             + "&unbindTime=" + unbind_time + "&cid=" + cid
+             + "&orderId=" + order_id + "&time=" + time_str + "&sign=" + sign + "&dataType=0"
+             + "&lineSeparator=" + lineSeparator + "&noDuplicate=" + noDuplicate + "&singleIp=" + singleIp)
+     my_response = requests.get(url).content
+     js_res = json.loads(my_response)
+     for dic in js_res["data"]:
+         try:
+             ip = dic["ip"]
+             port = dic["port"]
+             ip_port = ip + ":" + str(port)
+             return ip_port
+         except BaseException as e:
+             logger.error("Exception getting ip address: {}", e)
+             return None
+
+
+ # thread pool
+ def get_proxy_pool_api(order_id, secret, unbind_time, ip_num):
+     num = str(ip_num)
      pid = "-1"
      cid = ""
      noDuplicate = "1"
@@ -46,20 +94,22 @@ def get_proxy_api(order_id, secret, unbind_time):
              + "&lineSeparator=" + lineSeparator + "&noDuplicate=" + noDuplicate + "&singleIp=" + singleIp)
      my_response = requests.get(url).content
      js_res = json.loads(my_response)
+     ip_pool_list = []
      for dic in js_res["data"]:
          try:
              ip = dic["ip"]
              port = dic["port"]
              ip_port = ip + ":" + str(port)
-             return ip_port
+             ip_pool_list.append(ip_port)
          except BaseException as e:
              logger.error("Exception getting ip address: {}", e)
              return None
+     return ip_pool_list


  if __name__ == '__main__':
-     order_id = ''
-     secret = ''
-     unbind_time = str(60 * 10)
-     ip = get_proxy_api(order_id, secret, unbind_time)
+     order_id_test = ''
+     secret_test = ''
+     unbind_time_test = str(60 * 10)
+     ip = get_proxy_api(order_id_test, secret_test, unbind_time_test)
      print(ip)
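
get_proxy_api and get_proxy_pool_api both sign their request the same way: an MD5 hex digest over the concatenated orderId, secret, and timestamp. A runnable sketch of just the signing step, with dummy credentials in place of the real ones stored in Mongo:

    import hashlib
    import time

    order_id = "demo-order"   # placeholder, not a real credential
    secret = "demo-secret"    # placeholder, not a real credential
    time_str = str(int(time.time()))

    txt = "orderId=" + order_id + "&" + "secret=" + secret + "&" + "time=" + time_str
    sign = hashlib.md5(txt.encode()).hexdigest()
    print(sign)  # 32-char lowercase hex string appended to the getIp URL
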
@@ -12,7 +12,6 @@ import mns_common.utils.data_frame_util as data_frame_util
  from mns_common.db.MongodbUtil import MongodbUtil
  import mns_common.constant.db_name_constant as db_name_constant
  import datetime
- import requests
  import time
  from loguru import logger
  from functools import lru_cache
@@ -21,23 +20,25 @@ import threading

  mongodb_util = MongodbUtil('27017')

+ IP_POOL = 'ip_pool'
+ ONE_IP = 'one_ip'
+ query_one = {'ip_type': ONE_IP}
+ query_pool = {'ip_type': IP_POOL}
+

  def query_liu_guan_proxy_ip():
-     ip_proxy_pool = mongodb_util.find_all_data(db_name_constant.IP_PROXY_POOL)
+     ip_proxy_pool = mongodb_util.find_query_data(db_name_constant.IP_PROXY_POOL, query_one)
      return ip_proxy_pool


  def remove_proxy_ip():
-     mongodb_util.remove_data({}, db_name_constant.IP_PROXY_POOL)
+     mongodb_util.remove_data(query_one, db_name_constant.IP_PROXY_POOL)


  def check_valid(ip_proxy_pool):
      effect_time = list(ip_proxy_pool['effect_time'])[0]
-
      now_date = datetime.datetime.now()
-
      str_now_date = now_date.strftime('%Y-%m-%d %H:%M:%S')
-
      if effect_time > str_now_date:
          return True
      else:
@@ -47,17 +48,24 @@ def check_valid(ip_proxy_pool):

  @lru_cache(maxsize=None)
  def get_account_cache():
-     query = {"type": "liu_guan_proxy", }
+     query = {"type": "liu_guan_proxy"}
      return mongodb_util.find_query_data(db_name_constant.STOCK_ACCOUNT_INFO, query)


  def generate_proxy_ip_api(minutes):
-     stock_account_info = get_account_cache()
-     order_id = list(stock_account_info['password'])[0]
-     secret = list(stock_account_info['account'])[0]
-     # get a 10-minute dynamic ip
-     ip = liu_guan_proxy_api.get_proxy_api(order_id, secret, str(60 * minutes))
-     return ip
+     try_number = 3
+     while try_number > 0:
+         try:
+             stock_account_info = get_account_cache()
+             order_id = list(stock_account_info['password'])[0]
+             secret = list(stock_account_info['account'])[0]
+             # get a 10-minute dynamic ip
+             ip = liu_guan_proxy_api.get_proxy_api(order_id, secret, str(60 * minutes))
+             return ip
+         except BaseException as e:
+             try_number = try_number - 1
+             time.sleep(1)
+             continue


  def generate_proxy_ip(minutes):
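
As rendered above, generate_proxy_ip_api retries up to three times and falls through (returning None) once the attempts are exhausted, so callers should guard for None before building a proxies dict. A hypothetical caller sketch, not part of the package:

    def lease_proxies(minutes=1):
        # assumes the surrounding module; generate_proxy_ip_api may return None
        ip = generate_proxy_ip_api(minutes)
        if ip is None:
            raise RuntimeError("no proxy ip available after retries")
        return {"https": ip, "http": ip}
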
@@ -77,6 +85,7 @@ def generate_proxy_ip(minutes):
      ip = generate_proxy_ip_api(minutes)
      if check_proxy(ip):
          result_dict = {"_id": ip,
+                        'ip_type': ONE_IP,
                         'effect_time': str_now_date,
                         'ip': ip}
          result_df = pd.DataFrame(result_dict, index=[1])
@@ -113,6 +122,73 @@ def check_proxy(proxy_ip):
      return False


+ # query the ip pool
+ def query_liu_guan_proxy_ip_pool():
+     ip_proxy_pool = mongodb_util.find_query_data(db_name_constant.IP_PROXY_POOL, query_pool)
+     return ip_proxy_pool
+
+
+ def remove_proxy_ip_pool():
+     mongodb_util.remove_data(query_pool, db_name_constant.IP_PROXY_POOL)
+
+
+ def generate_proxy_ip_pool_api(minutes, ip_num):
+     stock_account_info = get_account_cache()
+     order_id = list(stock_account_info['password'])[0]
+     secret = list(stock_account_info['account'])[0]
+     # get a 10-minute dynamic ip
+     ip_pool = liu_guan_proxy_api.get_proxy_pool_api(order_id, secret, str(60 * minutes), ip_num)
+     return ip_pool
+
+
+ def get_proxy_ip_pool(minutes, seconds, ip_num):
+     ip_proxy_pool = query_liu_guan_proxy_ip_pool()
+     if data_frame_util.is_empty(ip_proxy_pool):
+         return generate_proxy_ip_pool(minutes, seconds, ip_num)
+     else:
+         if check_valid(ip_proxy_pool):
+             ip_pool = list(ip_proxy_pool['ip_pool'])[0]
+             effect_time = list(ip_proxy_pool['effect_time'])[0]
+             result = {'ip_pool': ip_pool,
+                       'effect_time': effect_time}
+             return result
+         else:
+             # already expired, remove the ip pool
+             remove_proxy_ip_pool()
+             # regenerate
+             return generate_proxy_ip_pool(minutes, seconds, ip_num)
+
+
+ # seconds: validity in seconds; minutes needs to be reduced by 1
+ def generate_proxy_ip_pool(minutes, seconds, ip_num):
+     ip_proxy_pool = query_liu_guan_proxy_ip_pool()
+     if data_frame_util.is_not_empty(ip_proxy_pool):
+         ip_pool = list(ip_proxy_pool['ip_pool'])[0]
+         effect_time = list(ip_proxy_pool['effect_time'])[0]
+
+
+     else:
+         remove_proxy_ip_pool()
+         now_date = datetime.datetime.now()
+         # add the minutes, ten seconds short
+         time_to_add = datetime.timedelta(minutes=minutes - 1, seconds=seconds)
+         new_date = now_date + time_to_add
+         effect_time = new_date.strftime('%Y-%m-%d %H:%M:%S')
+         ip_pool = generate_proxy_ip_pool_api(minutes, ip_num)
+         result_dict = {
+             "_id": [IP_POOL],
+             'ip_type': [IP_POOL],
+             'effect_time': [effect_time],
+             'ip_pool': [ip_pool]  # wrap every field in a list
+         }
+         result_df = pd.DataFrame(result_dict)
+
+         mongodb_util.insert_mongo(result_df, db_name_constant.IP_PROXY_POOL)
+     result = {'ip_pool': ip_pool,
+               'effect_time': effect_time}
+     return result
+
+
  def get_em_real_time_data(proxy_ip):
      proxies = {
          "http": proxy_ip,
@@ -152,5 +228,21 @@ def call_with_timeout(func, *args, timeout=2, **kwargs):
      return result


+ @lru_cache(maxsize=None)
+ def query_province_and_city_info():
+     return mongodb_util.find_all_data(db_name_constant.IP_PROXY_CITY_PROVINCE)
+
+
+ def import_province_and_city():
+     # set the data file path
+     folder_path = r'E:\province-and-city.xlsx'
+     df = pd.read_excel(folder_path)
+     df['_id'] = df['cid']
+
+     mongodb_util.save_mongo(df, db_name_constant.IP_PROXY_CITY_PROVINCE)
+     return df
+
+
  if __name__ == "__main__":
-     generate_proxy_ip(1)
+     import_province_and_city()
+     # get_proxy_ip_pool(1, 50, 2)
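
query_province_and_city_info and get_account_cache take no arguments, so @lru_cache(maxsize=None) memoizes a single Mongo result for the life of the process; a stale read persists until cache_clear() is called. A self-contained sketch of that behavior with a stand-in query:

    from functools import lru_cache

    calls = {"n": 0}

    @lru_cache(maxsize=None)
    def cached_query():
        calls["n"] += 1
        return {"rows": []}  # stand-in for the Mongo result

    cached_query(); cached_query()
    print(calls["n"])        # 1 -- the query ran once
    cached_query.cache_clear()
    cached_query()
    print(calls["n"])        # 2 -- clearing the cache triggers a fresh query
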
@@ -24,6 +24,9 @@ EM_US_STOCK_INFO = 'em_us_stock_info'
  # ip proxy pool
  IP_PROXY_POOL = 'ip_proxy_pool'

+ # ip proxy city/province info
+ IP_PROXY_CITY_PROVINCE = 'ip_proxy_city_province'
+
  # big-deal sync table
  BIG_DEAL_NAME = "ths_big_deal_fund"
  # big-deal selection table
@@ -151,7 +154,6 @@ COMPANY_HOLDING_INFO = 'company_holding_info'
  # company business composition
  COMPANY_BUSINESS_INFO = 'company_business_info'

-
  # company announcement info
  COMPANY_ANNOUNCE_INFO = 'company_announce_info'

@@ -1,4 +1,4 @@
  Metadata-Version: 2.1
  Name: mns-common
- Version: 1.5.1.5
+ Version: 1.5.1.7

@@ -1,6 +1,6 @@
  mns_common/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
  mns_common/api/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
- mns_common/api/akshare/__init__.py,sha256=xu36nA6MJTauswUWPfKIKH0E-lpOAHTw2TL5QI_6TeY,165
+ mns_common/api/akshare/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
  mns_common/api/akshare/k_line_api.py,sha256=IfqJDG2e-_Ry1f_MqjIff6jad9IIC3TjnpmaJ9H_pbk,4290
  mns_common/api/akshare/stock_bid_ask_api.py,sha256=Af9t6Pv_-p7PJJ_7rF_JVaGBomkvePMMqALwuBh2Gfw,4139
  mns_common/api/akshare/stock_dt_pool.py,sha256=sKedOTzqsBZprJHJEr2sRYa8xbeSK7tRenqBE3wOdUc,2245
@@ -15,10 +15,11 @@ mns_common/api/em/gd/east_money_stock_gdfx_free_top_10_api.py,sha256=I2-JjFjTjvO
  mns_common/api/em/real_time/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
  mns_common/api/em/real_time/east_money_debt_api.py,sha256=jMvMZtlrDfExl_4jZ1hepHX8rUoeVLoLSOIhRBjkUGk,14753
  mns_common/api/em/real_time/east_money_etf_api.py,sha256=tCyH4fNx-KfVRFuNGkgM8d_xkvR0oAfr8T3e7_XrjTM,14414
- mns_common/api/em/real_time/east_money_stock_a_api.py,sha256=6xYcNJyMhFFL2eNxQ9c0TbQ53wgumNovVHisXgQX7bs,12737
+ mns_common/api/em/real_time/east_money_stock_a_api.py,sha256=-FtOGAsR4HtdqkKrm2JE65yTsUnvxAq5ACTp-VvSaSQ,12654
  mns_common/api/em/real_time/east_money_stock_a_v2_api.py,sha256=mL4uuL6sVsC2Vnl09826AUnzxePGAUhlZ7I5BBFw8Ks,14530
  mns_common/api/em/real_time/east_money_stock_hk_api.py,sha256=KFIYUZ3N4ULrataeCIXwZPo775O7joKgMF466uwVDdY,15154
  mns_common/api/em/real_time/east_money_stock_us_api.py,sha256=RiTrdZDuDgTOtiMSD1Ba9aQAx4vghM66pEp_LicH3Ps,11632
+ mns_common/api/em/real_time/real_time_quotes_repeat_api.py,sha256=kihvxJCUqNr5rQR7CkK8ECWqYd65lcRfvvZoB_HYKH8,16090
  mns_common/api/hk/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
  mns_common/api/hk/ths_hk_company_info_api.py,sha256=Cxlbuccopa0G1s8o0uTnnyLn2QaxOvbDpJQJOj7J8a8,5360
  mns_common/api/k_line/__init__.py,sha256=itoGlqKhsx7EVXQoD1vchDKQ5GPB16vDjofTSuQtrXg,161
@@ -43,7 +44,7 @@ mns_common/api/kpl/symbol/symbol_his_quotes_api.py,sha256=5F9L8V2UI_YUYe2dO6FbVK
  mns_common/api/msg/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
  mns_common/api/msg/push_msg_api.py,sha256=z8jDqFWygfxnCFFfQp4K-llgg27nRLv7Mx72lOddBH0,1390
  mns_common/api/proxies/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
- mns_common/api/proxies/liu_guan_proxy_api.py,sha256=VASqcWnKyAEmF4UYNY39Jazh45qugVMYblRrdKfQFuE,1923
+ mns_common/api/proxies/liu_guan_proxy_api.py,sha256=lULS2ejxmVuM6t6PHBczvH-HjMJxiCYEDrCUAtci-t4,3730
  mns_common/api/qmt/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
  mns_common/api/qmt/qmt_minunte_tick_data.py,sha256=uwSw_AkA9RaD3pXPKzxqi4TKEkpglmFUwtYl9r5E6G8,3019
  mns_common/api/ths/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
@@ -116,7 +117,7 @@ mns_common/component/k_line/patterns/pattern_Enum.py,sha256=bl8cH1H3BWdj_deVO124
  mns_common/component/price/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
  mns_common/component/price/trade_price_service_api.py,sha256=0loBjbOt__o-ngc2Q4n5lF8_0x2WINRpL-cH1341Uaw,4396
  mns_common/component/proxies/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
- mns_common/component/proxies/proxy_common_api.py,sha256=N2VaiEQ15KycjSNmCHAguR1xiASZzJu1y24NhFCr7BA,4663
+ mns_common/component/proxies/proxy_common_api.py,sha256=knTYLnVhBg1UIXVrqzyFhb7BH9UKhQlyOzY8BmKRwAY,7984
  mns_common/component/qmt/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
  mns_common/component/qmt/qmt_buy_service.py,sha256=tLTgrSxCcxuMhADRBBrW4ZWR_3MdbMZvvMdH5hbwyJU,7190
  mns_common/component/real_time/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
@@ -138,7 +139,7 @@ mns_common/component/zt/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3Hi
  mns_common/component/zt/zt_common_service_api.py,sha256=6pHRLLJjKcLLBA-xXkAU8SE6DZ5dgVFBRVjJmhkL0II,11945
  mns_common/constant/__init__.py,sha256=xu36nA6MJTauswUWPfKIKH0E-lpOAHTw2TL5QI_6TeY,165
  mns_common/constant/black_list_classify_enum.py,sha256=I8U_DcltzYvlWjgn-TFLImgVgPuO0lxMnEJAQJBljdo,3995
- mns_common/constant/db_name_constant.py,sha256=RkSnmESDHf2yNrrwoa7Ox7SVVa2roq_RDwn1Lx0kP_k,4958
+ mns_common/constant/db_name_constant.py,sha256=lDb4WD7ZsoMofLuzZU2R-B-6pjU185_9pBKeeQH_-78,5033
  mns_common/constant/east_money_stock_api.py,sha256=mW0b8sEgkf8WJtars2frOQYzsWgjIl4FDYEwcCcCSZY,7557
  mns_common/constant/extra_income_db_name.py,sha256=aXPuJSEgX7F3zpf4zal6wxejkxPbVmou_LMVlfms1SY,2701
  mns_common/constant/price_enum.py,sha256=nhcPxk0AFdQAp8IsNr5EP9xURLqqJuSl6ljIzTp7Wyo,1093
@@ -158,7 +159,7 @@ mns_common/utils/date_handle_util.py,sha256=XS-MyA8_7k35LOCFAYOHgVcVkMft_Kc4Wa9U
  mns_common/utils/db_util.py,sha256=hSmfNAN4vEeEaUva6_cicZEhb2jSnib-Gvk2reke1vc,2590
  mns_common/utils/file_util.py,sha256=egWu6PenGPRp_ixrNTHKarT4dAnOT6FETR82EHUZJnQ,1042
  mns_common/utils/ip_util.py,sha256=UTcYfz_uytB__6nlBf7T-izuI7hi4XdB6ET0sJgEel4,969
- mns_common-1.5.1.5.dist-info/METADATA,sha256=YLPOLO_99ZymHoEEaKi2LjFOg3DPzburAquYZHCVRLs,61
- mns_common-1.5.1.5.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- mns_common-1.5.1.5.dist-info/top_level.txt,sha256=ZC58kAR-8Hvc6U2xhYNBNLAh3mb6sZazbdj5nZpvEkQ,11
- mns_common-1.5.1.5.dist-info/RECORD,,
+ mns_common-1.5.1.7.dist-info/METADATA,sha256=u0T_v08wnex8MtMRI8qfYjKUI8JWaZt7cgl3PzSP0fk,61
+ mns_common-1.5.1.7.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ mns_common-1.5.1.7.dist-info/top_level.txt,sha256=ZC58kAR-8Hvc6U2xhYNBNLAh3mb6sZazbdj5nZpvEkQ,11
+ mns_common-1.5.1.7.dist-info/RECORD,,