mns_common-1.3.2.2-py3-none-any.whl → mns_common-1.3.2.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.

Potentially problematic release.


This version of mns-common might be problematic.

@@ -33,7 +33,7 @@ def fund_etf_spot_em() -> pd.DataFrame:
         "fields": (
             "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,"
             "f12,f13,f14,f15,f16,f17,f18,f20,f21,"
-            "f23,f24,f25,f22,f11,f30,f31,f32,f33,"
+            "f23,f24,f25,f26,f22,f11,f30,f31,f32,f33,"
             "f34,f35,f38,f62,f63,f64,f65,f66,f69,"
             "f72,f75,f78,f81,f84,f87,f115,f124,f128,"
             "f136,f152,f184,f297,f402,f441"
@@ -45,6 +45,7 @@ def fund_etf_spot_em() -> pd.DataFrame:
     temp_df = pd.DataFrame(data_json["data"]["diff"])
     temp_df.rename(
         columns={
+            "f26": "上市时间",
             "f12": "代码",
             "f14": "名称",
             "f2": "最新价",
@@ -125,7 +126,8 @@ def fund_etf_spot_em() -> pd.DataFrame:
             "总市值",
             "数据日期",
             "更新时间",
-            "market"
+            "market",
+            "上市时间"
         ]
     ]
     temp_df["最新价"] = pd.to_numeric(temp_df["最新价"], errors="coerce")
@@ -196,6 +198,7 @@ def fund_etf_spot_em() -> pd.DataFrame:
 def get_etf_real_time_quotes():
     fund_etf_spot_em_df = fund_etf_spot_em()
     fund_etf_spot_em_df = fund_etf_spot_em_df.rename(columns={
+        "上市时间": "list_date",
        "最新价": "now_price",
        "涨跌幅": "chg",
        "基金折价率": "fund_discount_rate",
@@ -262,7 +265,8 @@ def get_etf_real_time_quotes():
         "latest_share",
         "data_time",
         "update_time",
-        "market"
+        "market",
+        'list_date'
     ]]
 
     fund_etf_spot_em_df['disk_ratio'] = round(
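
Taken together, the five hunks above (which, judging by the RECORD changes later in this diff, appear to belong to mns_common/api/em/east_money_etf_api.py) request the extra Eastmoney field f26, expose it in fund_etf_spot_em as the 上市时间 (listing date) column, and carry it through get_etf_real_time_quotes as list_date. Below is a minimal consumer sketch; it is not part of the diff and assumes mns-common 1.3.2.3 is installed, the Eastmoney endpoint is reachable, and f26 arrives as a YYYYMMDD-style value.

import pandas as pd

from mns_common.api.em.east_money_etf_api import get_etf_real_time_quotes

# Pull the real-time ETF snapshot; list_date is the renamed 上市时间 (f26) column.
etf_df = get_etf_real_time_quotes()

# Assumption: the raw f26 value is a YYYYMMDD-style date; coerce it so recently
# listed ETFs can be filtered.
etf_df["list_date"] = pd.to_datetime(
    etf_df["list_date"].astype(str), format="%Y%m%d", errors="coerce"
)
recently_listed = etf_df[etf_df["list_date"] >= pd.Timestamp("2024-01-01")]
print(recently_listed.sort_values("list_date", ascending=False).head())
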
@@ -0,0 +1,11 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import akshare as ak
+
+stock_zh_a_hist_df = ak.stock_zh_a_hist(symbol="000001", period="daily", start_date="20170301", end_date='20240528', adjust="")
+print(stock_zh_a_hist_df)
@@ -0,0 +1,106 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import pandas as pd
+import requests
+
+
+def stock_k_line_hist(
+    symbol: str = "000001",
+    period: str = "daily",
+    start_date: str = "19700101",
+    end_date: str = "20500101",
+    adjust: str = "",
+    timeout: float = None,
+) -> pd.DataFrame:
+    """
+    东方财富网-行情首页-沪深京 A 股-每日行情
+    https://quote.eastmoney.com/concept/sh603777.html?from=classic
+    :param symbol: 股票代码
+    :type symbol: str
+    :param period: choice of {'daily', 'weekly', 'monthly'}
+    :type period: str
+    :param start_date: 开始date
+    :type start_date: str
+    :param end_date: 结束date
+    :type end_date: str
+    :param adjust: choice of {"qfq": "前复权", "hfq": "后复权", "": "不复权"}
+    :type adjust: str
+    :param timeout: choice of None or a positive float number
+    :type timeout: float
+    :return: 每日行情
+    :rtype: pandas.DataFrame
+    """
+    adjust_dict = {"qfq": "1", "hfq": "2", "": "0"}
+    period_dict = {"daily": "101", "weekly": "102", "monthly": "103"}
+    url = "https://push2his.eastmoney.com/api/qt/stock/kline/get"
+    params = {
+        "fields1": "f1,f2,f3,f4,f5,f6",
+        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f116",
+        "ut": "7eea3edcaed734bea9cbfc24409ed989",
+        "klt": period_dict[period],
+        "fqt": adjust_dict[adjust],
+        "secid": symbol,
+        "beg": start_date,
+        "end": end_date,
+        "_": "1623766962675",
+    }
+    r = requests.get(url, params=params, timeout=timeout)
+    data_json = r.json()
+    if not (data_json["data"] and data_json["data"]["klines"]):
+        return pd.DataFrame()
+    temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]["klines"]])
+    temp_df.columns = [
+        "date",
+        "open",
+        "close",
+        "high",
+        "low",
+        "volume",
+        "amount",
+        "pct_chg",
+        "chg",
+        "change_price",
+        "exchange",
+    ]
+    temp_df["date"] = pd.to_datetime(temp_df["date"], errors="coerce").dt.date
+    temp_df["open"] = pd.to_numeric(temp_df["open"], errors="coerce")
+    temp_df["close"] = pd.to_numeric(temp_df["close"], errors="coerce")
+    temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
+    temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
+    temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
+    temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
+    temp_df["pct_chg"] = pd.to_numeric(temp_df["pct_chg"], errors="coerce")
+    temp_df["chg"] = pd.to_numeric(temp_df["chg"], errors="coerce")
+    temp_df["change_price"] = pd.to_numeric(temp_df["change_price"], errors="coerce")
+    temp_df["exchange"] = pd.to_numeric(temp_df["exchange"], errors="coerce")
+    temp_df = temp_df[
+        [
+            "date",
+            "open",
+            "close",
+            "high",
+            "low",
+            "volume",
+            "amount",
+            "pct_chg",
+            "chg",
+            "change_price",
+            "exchange",
+        ]
+    ]
+    return temp_df
+
+
+if __name__ == '__main__':
+    stock_k_line_hist(
+        "1.513180",
+        "daily",
+        "19700101",
+        "20500101",
+        "",
+        None)
@@ -0,0 +1,145 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import pandas as pd
+import requests
+
+# symbol 代码
+# 北交所:0 深圳:0 上海:1
+"""
+获取分钟数据
+"""
+
+
+def get_minute_data(symbol, start_date, end_date, period,
+                    adjust) -> pd.DataFrame:
+    """
+    东方财富网-行情首页-沪深京 A 股-每日分时行情
+    https://quote.eastmoney.com/concept/sh603777.html?from=classic
+    :param symbol: 股票代码
+    :type symbol: str
+    :param start_date: 开始日期
+    :type start_date: str
+    :param end_date: 结束日期
+    :type end_date: str
+    :param period: choice of {'1', '5', '15', '30', '60'}
+    :type period: str
+    :param adjust: choice of {'', 'qfq', 'hfq'}
+    :type adjust: str
+    :return: 每日分时行情
+    :rtype: pandas.DataFrame
+    """
+    adjust_map = {
+        "": "0",
+        "qfq": "1",
+        "hfq": "2",
+    }
+    if period == "1":
+        url = "https://push2his.eastmoney.com/api/qt/stock/trends2/get"
+        params = {
+            "fields1": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13",
+            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58",
+            "ut": "7eea3edcaed734bea9cbfc24409ed989",
+            "ndays": "5",
+            "iscr": "0",
+            "secid": symbol,
+            "_": "1623766962675",
+        }
+        r = requests.get(url, timeout=15, params=params)
+        data_json = r.json()
+        temp_df = pd.DataFrame(
+            [item.split(",") for item in data_json["data"]["trends"]]
+        )
+        temp_df.columns = [
+            "time",
+            "open",
+            "close",
+            "high",
+            "low",
+            "amount",
+            "volume",
+            "ava_price",
+        ]
+        temp_df.index = pd.to_datetime(temp_df["time"])
+        temp_df = temp_df[start_date:end_date]
+        temp_df.reset_index(drop=True, inplace=True)
+        temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
+        temp_df["close"] = pd.to_numeric(temp_df["close"], errors="coerce")
+        temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
+        temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
+        temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
+        temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
+        temp_df["ava_price"] = pd.to_numeric(temp_df["ava_price"], errors="coerce")
+        temp_df["time"] = pd.to_datetime(temp_df["time"]).astype(str)
+        return temp_df
+    else:
+        url = "https://push2his.eastmoney.com/api/qt/stock/kline/get"
+        params = {
+            "fields1": "f1,f2,f3,f4,f5,f6",
+            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
+            "ut": "7eea3edcaed734bea9cbfc24409ed989",
+            "klt": period,
+            "fqt": adjust_map[adjust],
+            "secid": symbol,
+            "beg": "0",
+            "end": "20500000",
+            "_": "1630930917857",
+        }
+        r = requests.get(url, timeout=15, params=params)
+        data_json = r.json()
+        temp_df = pd.DataFrame(
+            [item.split(",") for item in data_json["data"]["klines"]]
+        )
+        temp_df.columns = [
+            "time",
+            "open",
+            "close",
+            "high",
+            "low",
+            "amount",
+            "volume",
+            "pct_chg",
+            "chg",
+            "change",
+            "exchange",
+        ]
+        temp_df.index = pd.to_datetime(temp_df["time"])
+        temp_df = temp_df[start_date:end_date]
+        temp_df.reset_index(drop=True, inplace=True)
+        temp_df["open"] = pd.to_numeric(temp_df["open"], errors="coerce")
+        temp_df["close"] = pd.to_numeric(temp_df["close"], errors="coerce")
+        temp_df["high"] = pd.to_numeric(temp_df["high"], errors="coerce")
+        temp_df["low"] = pd.to_numeric(temp_df["low"], errors="coerce")
+        temp_df["amount"] = pd.to_numeric(temp_df["amount"], errors="coerce")
+        temp_df["volume"] = pd.to_numeric(temp_df["volume"], errors="coerce")
+        temp_df["pct_chg"] = pd.to_numeric(temp_df["pct_chg"], errors="coerce")
+        temp_df["chg"] = pd.to_numeric(temp_df["chg"], errors="coerce")
+        temp_df["change"] = pd.to_numeric(temp_df["change"], errors="coerce")
+        temp_df["exchange"] = pd.to_numeric(temp_df["exchange"], errors="coerce")
+        temp_df["time"] = pd.to_datetime(temp_df["time"]).astype(str)
+        temp_df = temp_df[
+            [
+                "time",
+                "volume",
+                "close",
+                "high",
+                "low",
+                "chg",
+                "change",
+                "amount",
+                "volume",
+                "pct_chg",
+                "exchange",
+            ]
+        ]
+        return temp_df
+
+
+if __name__ == '__main__':
+    test_df = get_minute_data('0.899050', start_date="2025-03-03 09:30:00",
+                              end_date="2025-03-07 15:00:00", period="1", adjust="")
+    print(test_df)
@@ -0,0 +1,7 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
@@ -0,0 +1,82 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+
+from xtquant import xtdata
+import time
+
+
+def my_download(stock_list: list, period: str, start_date='', end_date=''):
+    '''
+    用于显示下载进度
+    '''
+    import string
+
+    if [i for i in ["d", "w", "mon", "q", "y", ] if i in period]:
+        period = "1d"
+    elif "m" in period:
+        numb = period.translate(str.maketrans("", "", string.ascii_letters))
+        if int(numb) < 5:
+            period = "1m"
+        else:
+            period = "5m"
+    elif "tick" == period:
+        pass
+    else:
+        raise KeyboardInterrupt("周期传入错误")
+
+    n = 1
+    num = len(stock_list)
+    for i in stock_list:
+        print(f"当前正在下载 {period} {n}/{num}")
+
+        xtdata.download_history_data(i, period, start_date, end_date)
+        n += 1
+    print("下载任务结束")
+
+
+def do_subscribe_quote(stock_list: list, period: str):
+    for i in stock_list:
+        xtdata.subscribe_quote(i, period=period)
+    time.sleep(1)  # 等待订阅完成
+
+
+if __name__ == "__main__":
+
+    start_date = '20220101'  # 格式"YYYYMMDD",开始下载的日期,date = ""时全量下载
+    end_date = ""
+    period = "1m"
+
+    need_download = 1  # 取数据是空值时,将need_download赋值为1,确保正确下载了历史数据
+
+    code_list = ["588000.SH", "600519.SH"]  # 股票列表
+
+    if need_download:  # 判断要不要下载数据, gmd系列函数都是从本地读取历史数据,从服务器订阅获取最新数据
+        my_download(code_list, period, start_date, end_date)
+
+    ############ 仅获取历史行情 #####################
+    count = -1  # 设置count参数,使gmd_ex返回全部数据
+    data1 = xtdata.get_market_data_ex([], code_list, period=period, start_time=start_date, end_time=end_date)
+
+    ############ 仅获取最新行情 #####################
+    do_subscribe_quote(code_list, period)  # 设置订阅参数,使gmd_ex取到最新行情
+    count = 1  # 设置count参数,使gmd_ex仅返回最新行情数据
+    data2 = xtdata.get_market_data_ex([], code_list, period=period, start_time=start_date, end_time=end_date,
+                                      count=1)  # count 设置为1,使返回值只包含最新行情
+
+    ############ 获取历史行情+最新行情 #####################
+    do_subscribe_quote(code_list, period)  # 设置订阅参数,使gmd_ex取到最新行情
+    count = -1  # 设置count参数,使gmd_ex返回全部数据
+    data3 = xtdata.get_market_data_ex([], code_list, period=period, start_time=start_date, end_time=end_date,
+                                      count=-1)  # count 设置为1,使返回值只包含最新行情
+
+    print(data1[code_list[0]].tail())  # 行情数据查看
+    print(data2[code_list[0]].tail())
+    print(data3[code_list[0]].tail())
+
+
+
@@ -161,3 +161,24 @@ THS_INDUSTRY_LIST = 'ths_industry_list'
 
 # 同花顺行业股票详情
 THS_STOCK_INDUSTRY_DETAIL = 'ths_stock_industry_detail'
+
+# 创业板分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_C = 'one_minute_k_line_bfq_c'
+
+# 北交所分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_BJ = 'one_minute_k_line_bfq_bj'
+
+# 上海主板分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_H = 'one_minute_k_line_bfq_h'
+
+# 科创板分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_K = 'one_minute_k_line_bfq_k'
+
+# 深圳主板分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_K = 'one_minute_k_line_bfq_s'
+
+# 可转债分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_KZZ = 'one_minute_k_line_bfq_kzz'
+
+# ETF分钟集合数据
+ONE_MINUTE_K_LINE_BFQ_ETF = 'one_minute_k_line_bfq_etf'
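
These new constants name the per-board MongoDB collections that hold unadjusted (不复权, bfq) one-minute bars. Note that the 深圳主板 (Shenzhen main board) entry re-binds ONE_MINUTE_K_LINE_BFQ_K to 'one_minute_k_line_bfq_s' rather than introducing a separate _S name, so after import that attribute holds the Shenzhen value. The routing helper below is purely illustrative and not part of the package; the classification letters are an assumption based on values that appear elsewhere in this diff.

import mns_common.constant.db_name_constant as db_name_constant

# Hypothetical mapping from a board classification to the new collection names.
MINUTE_COLLECTIONS = {
    "C": db_name_constant.ONE_MINUTE_K_LINE_BFQ_C,      # 创业板 / ChiNext
    "BJ": db_name_constant.ONE_MINUTE_K_LINE_BFQ_BJ,    # 北交所 / Beijing Stock Exchange
    "H": db_name_constant.ONE_MINUTE_K_LINE_BFQ_H,      # 上海主板 / Shanghai main board
    "K": db_name_constant.ONE_MINUTE_K_LINE_BFQ_K,      # see the duplicate-name note above
    "KZZ": db_name_constant.ONE_MINUTE_K_LINE_BFQ_KZZ,  # 可转债 / convertible bonds
    "ETF": db_name_constant.ONE_MINUTE_K_LINE_BFQ_ETF,
}


def minute_collection_for(classification: str) -> str:
    """Return the collection name holding unadjusted 1-minute bars for a board."""
    return MINUTE_COLLECTIONS[classification]


print(minute_collection_for("ETF"))  # one_minute_k_line_bfq_etf
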
@@ -0,0 +1,320 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 14
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import pandas as pd
+import pymongo
+from mns_common.utils.async_fun import async_fun
+from loguru import logger
+import mns_common.utils.ip_util as ip_util
+import warnings
+
+warnings.filterwarnings("ignore")
+
+
+class MongodbUtilV2:
+    def __init__(self, port, db_name):
+        self.port = port
+        self.db_name = db_name
+
+    def get_db(self):
+        port = self.port
+        db_name = self.db_name
+        mac_address = ip_util.get_mac_address()
+        if port == "remote":
+            client = pymongo.MongoClient("mongodb://100.87.2.149:" + '27017/' + db_name)
+        elif port == "remote1":
+            client = pymongo.MongoClient("mongodb://100.87.2.149:" + '27019/' + db_name)
+        # 家里
+        elif mac_address is not None and mac_address == ip_util.WINDOWS_MAC_ADDRESS_CD:
+            client = pymongo.MongoClient("mongodb://127.0.0.1:" + port + "/" + db_name)
+        elif '192.168.1' in ip_util.get_host_ip() and mac_address is not None and mac_address == ip_util.APPLE_AIR_MAC_ADDRESS:
+            client = pymongo.MongoClient("mongodb://127.0.0.1:" + port + "/" + db_name)
+        else:
+            client = pymongo.MongoClient("mongodb://127.0.0.1:" + port + "/" + db_name)
+        return client.patience
+
+    def group(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        data = collection.aggregate(query)
+        return pd.DataFrame(list(data))
+
+    def remove_data(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.delete_many(query)
+
+    def exist_data_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count(query, limit=1) > 0
+
+    def find_one(self, coll_name, _id):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.find_one({'_id': _id})
+
+    def find_one_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(collection.find_one(query), index=[0])
+
+    def find_all_data(self, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find({})
+        df = pd.DataFrame([basic for basic in rows])
+        return df
+
+    def find_query_data(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_choose_field(self, coll_name, query, query_field):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query, query_field)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def remove_all_data(self, database):
+        db = self.get_db()
+        collection = db[database]
+        query = {"_id": {"$ne": "null"}}
+        collection.delete_many(query)
+
+    def drop_collection(self, database):
+        db = self.get_db()
+        collection = db[database]
+        collection.drop()
+
+    def ascend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(num)));
+
+    def descend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def count(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count_documents(query)
+
+    def query_max(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def query_min(self, query, coll_name, field):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(1)));
+
+    def insert_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # 格式转换
+        try:
+            # df = df.T.drop_duplicates().T
+            records = df.to_dict('records')
+            collection.insert_many(records)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def insert_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        # 格式转换
+        try:
+            collection.insert_many(json)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def save_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        for record in json:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{}", e)
+
+    def save_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换
+        records = df.to_dict('records')
+        for record in records:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{},{}", record, e)
+
+    def save_mongo_no_catch_exception(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换
+        records = df.to_dict('records')
+        for record in records:
+            collection.save(record)
+
+    def update_one(self, df, database):
+        db = self.get_db()
+        condition = {'_id': list(df['_id'])[0]}
+        if len(df) == 0:
+            return
+        collection = db[database]
+        collection.update(condition, df)
+
+    def update_many(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update_many(query, new_values)
+        return x
+
+    @async_fun
+    def update_one_query(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update(query, new_values)
+        return x
+
+    def distinct_field(self, database, field, query):
+        db = self.get_db()
+        collection = db[database]
+        return collection.distinct(field, query)
+
+    def create_index(self, database, index):
+        db = self.get_db()
+        collection = db[database]
+        collection.create_index(
+            index)
+
+    def aggregate(self, pipeline, database):
+        db = self.get_db()
+        collection = db[database]
+        data = collection.aggregate(pipeline)
+        return pd.DataFrame(list(data))
+
+    def get_col_keys(self, database):
+        db = self.get_db()
+        collection = db[database]
+        keys = collection.find_one().keys()
+        return keys
+
+    # 分页查询 descend 是否降序
+    def find_page_skip_data(self, coll_name, page_query, page, page_number, field, descend):
+        db = self.get_db()
+        collection = db[coll_name]
+        if descend:
+            sort_tag = -1
+        else:
+            sort_tag = 1
+        rows = collection.find(page_query).sort(field, sort_tag).skip((page - 1) * page_number).limit(page_number)
+        df = pd.DataFrame(list(rows))
+        return df
+
+
+# if __name__ == '__main__':
+#     symbol = '002992'
+#     query = {'symbol': symbol,
+#              '$and': [{'str_day': {'$gte': '2022-07-06'}}, {'str_day': {'$lte': '2022-11-06'}}]}
+#     mongodb_util = MongodbUtil('27017')
+#     # num = mongodb_util.count(query, 'stock_zt_pool')
+#     # print(num)
+#     key = mongodb_util.get_col_keys('stock_zt_pool')
+#     print(key)
+#
+#     # num = mongodb_util.count(query, 'stock_zt_pool')
+#     # print(num)
+#
+#     pipeline = [
+#         {'$match': {
+#             "classification": {'$in': ["K", "C"]},
+#             "str_day": {'$gte': "2022-03-16"}}},
+#         {'$group': {'_id': "$flow_mv_level", 'count': {'$sum': 1}}}
+#     ]
+#     result = mongodb_util.aggregate(pipeline, 'realtime_quotes_now_zt_new_kc_open')
+#
+#     result = result.sort_values(by=['_id'], ascending=True)
+#     print(result)
+from io import StringIO
+import re
+
+if __name__ == '__main__':
+    mongodb_util = MongodbUtil('27017')
+    #
+    # kpl_best_choose_index_df = mongodb_util.find_page_skip_data('kpl_best_choose_index', {"index_class": "sub_index"},
+    #                                                             1, 100, 'create_time', True)
+    key_word = '高速连接'
+    EXCLUDE_INFO_KEY = '股东人数'
+    # query = {
+    #     "$or": [{'question': {"$regex": re.compile(key_word, re.IGNORECASE)}},
+    #             {'answer_content': {"$regex": re.compile(key_word, re.IGNORECASE)}}],
+    #     "$and": [{'question': {"$not": re.compile(EXCLUDE_INFO_KEY, re.IGNORECASE)}},
+    #              {'answer_content': {"$not": re.compile(EXCLUDE_INFO_KEY, re.IGNORECASE)}}],
+    # }
+    #
+    # pipeline = [
+    #     {'$match': query},
+    #     {'$group': {'_id': "$symbol", 'count': {'$sum': 1}}}
+    # ]
+    # result = mongodb_util.aggregate(pipeline, 'stock_interactive_question')
+    #
+    # result = result.sort_values(by=['_id'], ascending=True)
+    # print(result)
+    #
+    # # ths_new_concept = mongodb_util.find_all_data('ths_new_concept')
+    # key = mongodb_util.get_col_keys('company_info')
+    # print(key)
+
+    # mongodb_util.create_index('realtime_quotes_now_open', [("number", 1)])
+    # mongodb_util.create_index('realtime_quotes_now_open', [("symbol", 1), ("number", 1)])
+    # mongodb_util.create_index('realtime_quotes_now_open', [("str_day", 1)])
+    # update_query = {"str_day": "2023-06-30"}
+    # mongodb_util.update_many(update_query, {"$set": {"number": 1}}, "realtime_quotes_now_open")
+    # query = {"symbol": "000617"}
+    # company_info_base = mongodb_util.find_query_data('company_info_base', query)
+    # ths_stock_concept_detail = mongodb_util.find_query_data('ths_stock_concept_detail', query)
+    # ths_stock_concept_detail = ths_stock_concept_detail[[
+    #     'concept_code',
+    #     'concept_name',
+    #     'str_now_time',
+    #     'concept_create_day']]
+    # # 去除空格
+    # ths_stock_concept_detail['concept_name'] = ths_stock_concept_detail['concept_name'].str.replace(' ', '')
+    # company_info_base.loc[:, 'ths_concept_list_info'] = ths_stock_concept_detail.to_string(index=False)
+    # for company_one in company_info_base.itertuples():
+    #     ths_concept_list_info = company_one.ths_concept_list_info
+    #     ths_concept_list_info_df = pd.read_csv(StringIO(ths_concept_list_info), delim_whitespace=True)
+    #     print(ths_concept_list_info_df)
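
MongodbUtilV2 differs from the existing MongodbUtil mainly in accepting a database name alongside the port alias, although get_db() returns client.patience on every branch, so db_name currently only affects the connection URI; the trailing __main__ block also still instantiates MongodbUtil, which this module does not import. Below is a minimal usage sketch, not part of the diff; it assumes a local mongod on 27017 and that minute-bar documents carry a 'symbol' field.

import mns_common.constant.db_name_constant as db_name_constant
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2

# Port alias '27017' selects the local-MongoDB branch inside get_db().
mongodb_util_v2 = MongodbUtilV2('27017', 'patience')

# Count and fetch unadjusted 1-minute ETF bars for one fund (field name assumed).
query = {'symbol': '513180'}
print(mongodb_util_v2.count(query, db_name_constant.ONE_MINUTE_K_LINE_BFQ_ETF))
etf_minute_df = mongodb_util_v2.find_query_data(
    db_name_constant.ONE_MINUTE_K_LINE_BFQ_ETF, query)
print(etf_minute_df.head())
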
@@ -0,0 +1,7 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
@@ -1,4 +1,4 @@
 Metadata-Version: 2.1
 Name: mns_common
-Version: 1.3.2.2
+Version: 1.3.2.3
 
@@ -9,7 +9,7 @@ mns_common/api/akshare/stock_zt_pool_api.py,sha256=_4PG_Wd88S4zweKNwZprwxaMMHBF4
 mns_common/api/akshare/yjyg_sync_api.py,sha256=cvk50_XhJWUqduOiC15SYvQTCQqECt6td_L2Hvnl7Jg,4108
 mns_common/api/em/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/api/em/east_money_debt_api.py,sha256=lToffwPlS0S05FxZp6QHhLxCJDFAnvorgqjYXDW5p8I,12429
-mns_common/api/em/east_money_etf_api.py,sha256=AvfmOZKTOq01kgh59EJbsrD0xdBQDhKT_k38z1k0VYA,11578
+mns_common/api/em/east_money_etf_api.py,sha256=GuFHflQOCbfIdKQawrAFE2TimyLGedne662fBUs4Vr0,11707
 mns_common/api/em/east_money_stock_api.py,sha256=kG4a_xcnQMYPkUec74f_7TtpmNb6GPVS23Xgr4Cq2og,8392
 mns_common/api/em/east_money_stock_api_develop.py,sha256=IijcfCwqzuwd7okAjOnFoWiWSvlRttSg1vGzkDXljeM,9888
 mns_common/api/em/east_money_stock_gdfx_free_top_10_api.py,sha256=jVy3fNdrkLq3ri7yUwXWt0ItB8LCHzt9CPz91Fj8sPA,9198
@@ -19,6 +19,9 @@ mns_common/api/em/em_concept_index_api.py,sha256=PP87ES8a_y0o3SKLzBsPrc7DCPI3MBC
 mns_common/api/em/self_choose/__init__.py,sha256=vAy9qYgUgZL9Y0w3BBbqmZ9zES46pPnlJjO2hdtotp0,2673
 mns_common/api/hk/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_common/api/hk/ths_hk_company_info_api.py,sha256=Cxlbuccopa0G1s8o0uTnnyLn2QaxOvbDpJQJOj7J8a8,5360
+mns_common/api/k_line/__init__.py,sha256=NmNsR8WLULfHv9AvBmhURHvtY-wLDkSwCNcsvSRJ2XY,341
+mns_common/api/k_line/stock_k_line_data_api.py,sha256=v27p4qU0bhBGp2t8bZDnyMz20a71GduhmK7uylt6GH4,3496
+mns_common/api/k_line/stock_minute_data_api.py,sha256=xoYhX23j7h9xCYjGopb83ZNTg7wuloGNJTRkcBUvcl0,5247
 mns_common/api/kpl/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/api/kpl/common/__init__.py,sha256=8b2PuXJM5fLoq71cWPXp695czQuaRtyR6OVHajGjDPc,161
 mns_common/api/kpl/common/kpl_common_api.py,sha256=DP8RqJXGGB804G3ykYgyYh3fHtv8E2v0BWiEGPXSSBc,4272
@@ -37,6 +40,8 @@ mns_common/api/kpl/symbol/kpl_symbol_common_field_constant.py,sha256=EijxWFjOb18
 mns_common/api/kpl/symbol/symbol_his_quotes_api.py,sha256=r3n7U2F7MZUDZFQgnx-JI4sb8MiRTIwVeh21iehbFwE,4210
 mns_common/api/msg/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/api/msg/push_msg_api.py,sha256=z8jDqFWygfxnCFFfQp4K-llgg27nRLv7Mx72lOddBH0,1390
+mns_common/api/qmt/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_common/api/qmt/qmt_minunte_tick_data.py,sha256=uwSw_AkA9RaD3pXPKzxqi4TKEkpglmFUwtYl9r5E6G8,3019
 mns_common/api/ths/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/api/ths/big_deal/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/api/ths/big_deal/ths_big_deal_api.py,sha256=gxtIUbowxx8gDJZfT2RISrhXVmvsgbdclYbxUiZsTlY,3644
@@ -113,13 +118,15 @@ mns_common/component/zt/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3Hi
 mns_common/component/zt/zt_common_service_api.py,sha256=6pHRLLJjKcLLBA-xXkAU8SE6DZ5dgVFBRVjJmhkL0II,11945
 mns_common/constant/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
 mns_common/constant/black_list_classify_enum.py,sha256=I8U_DcltzYvlWjgn-TFLImgVgPuO0lxMnEJAQJBljdo,3995
-mns_common/constant/db_name_constant.py,sha256=5j_dfmZi8t92B5SbsXRIsps6r9rs0TspPlqLAAcqCjQ,4140
+mns_common/constant/db_name_constant.py,sha256=bxzUSZh2jYvbSZd8Guz7knVVGAwDscmMIAECiqQj4EE,4752
 mns_common/constant/price_enum.py,sha256=nhcPxk0AFdQAp8IsNr5EP9xURLqqJuSl6ljIzTp7Wyo,1093
 mns_common/constant/redis_msg_constant.py,sha256=fMtI_WbJ2IkMX4qGwvR5MkMO0NqU8XgUUZqQzHIRscU,501
 mns_common/constant/self_choose_constant.py,sha256=Xnzp9cn5x5_UdqF4gozSDErkzafnbCYBjDH8YpTEU84,777
 mns_common/db/MongodbUtil.py,sha256=zefStY9z67HqkvzmshsSV6kG4RgZSVe3fnIp8m0zWYU,11706
 mns_common/db/MongodbUtilLocal.py,sha256=xLUTeeZ83BbqPtUEwy7QmPtkSfwPyNB_5ggkn-eMpBQ,7560
 mns_common/db/__init__.py,sha256=2U9DiKslxsWwLLEcZKjS8UiQPN1QgALvnK3HiJNIZE0,163
+mns_common/db/v2/MongodbUtilV2.py,sha256=looRsWjz3_wrqnUE9reR7nxJHut_LV1Z9h1KIE5vJmc,11781
+mns_common/db/v2/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_common/utils/__init__.py,sha256=xu36nA6MJTauswUWPfKIKH0E-lpOAHTw2TL5QI_6TeY,165
 mns_common/utils/async_fun.py,sha256=YTxjAtCmOz38OHNC8L_5thjM_uFBmh3arx0eJwILi_A,352
 mns_common/utils/cmd_util.py,sha256=fIS17OpuJNpDTmpIF0JcAgXCw4CZo7xO8xUwGxITL5w,4963
@@ -128,7 +135,7 @@ mns_common/utils/date_handle_util.py,sha256=P4WJUmoDpo4IoCrt2z4keyr7pqXHKmCZBVod
 mns_common/utils/db_util.py,sha256=hSmfNAN4vEeEaUva6_cicZEhb2jSnib-Gvk2reke1vc,2590
 mns_common/utils/file_util.py,sha256=egWu6PenGPRp_ixrNTHKarT4dAnOT6FETR82EHUZJnQ,1042
 mns_common/utils/ip_util.py,sha256=UTcYfz_uytB__6nlBf7T-izuI7hi4XdB6ET0sJgEel4,969
-mns_common-1.3.2.2.dist-info/METADATA,sha256=-RVvjR18Ff2I9Cpqiz8E14yX-p5Fy7miDoyoljEsMYA,61
-mns_common-1.3.2.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-mns_common-1.3.2.2.dist-info/top_level.txt,sha256=ZC58kAR-8Hvc6U2xhYNBNLAh3mb6sZazbdj5nZpvEkQ,11
-mns_common-1.3.2.2.dist-info/RECORD,,
+mns_common-1.3.2.3.dist-info/METADATA,sha256=n3cd8Wy0qFoZ9Bzo9PCai9lM5dl6I0fxvcOv9SPZrhs,61
+mns_common-1.3.2.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+mns_common-1.3.2.3.dist-info/top_level.txt,sha256=ZC58kAR-8Hvc6U2xhYNBNLAh3mb6sZazbdj5nZpvEkQ,11
+mns_common-1.3.2.3.dist-info/RECORD,,