mns-scheduler 1.1.6.5-py3-none-any.whl → 1.1.6.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mns-scheduler might be problematic.
- mns_scheduler/concept/clean/ths_concept_clean_api.py +6 -1
- mns_scheduler/concept/ths/common/ths_concept_sync_common_api.py +10 -2
- mns_scheduler/db/script/sync/__init__.py +7 -0
- mns_scheduler/db/script/sync/local_mongo_util.py +231 -0
- mns_scheduler/db/script/sync/remote_data_sync_to_local.py +52 -0
- mns_scheduler/db/script/sync/remote_mongo_util.py +306 -0
- mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py +1 -1
- {mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/METADATA +1 -1
- {mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/RECORD +11 -7
- {mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/WHEEL +0 -0
- {mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/top_level.txt +0 -0
mns_scheduler/concept/clean/ths_concept_clean_api.py

@@ -77,7 +77,12 @@ def update_one_concept_relevance_industry(concept_code):
     ths_stock_concept_detail_df['second_relevance_industry'] = second_relevance_industry

     mongodb_util.save_mongo(ths_concept_list_one_df, db_name_constant.THS_CONCEPT_LIST)
-
+
+    ths_stock_concept_detail_df['grade'] = 1
+    # 详细标识 (detailed marker)
+    ths_stock_concept_detail_df['remark'] = ''
+    # 简单标识 (simple marker)
+    ths_stock_concept_detail_df['remark_flag'] = ''
     mongodb_util.save_mongo(ths_stock_concept_detail_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)


mns_scheduler/concept/ths/common/ths_concept_sync_common_api.py

@@ -135,7 +135,11 @@ def save_ths_concept_detail(new_concept_symbol_df,
     query_detail = {"concept_code": int(concept_code)}
     exist_concept_detail = mongodb_util.find_query_data(db_name_constant.THS_STOCK_CONCEPT_DETAIL, query_detail)
     if exist_concept_detail is None or exist_concept_detail.shape[0] == 0:
-        new_concept_symbol_df['
+        new_concept_symbol_df['grade'] = 1
+        # 详细标识 (detailed marker)
+        new_concept_symbol_df['remark'] = ''
+        # 简单标识 (simple marker)
+        new_concept_symbol_df['remark_flag'] = ''
         mongodb_util.save_mongo(new_concept_symbol_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)
         # 保存到当日新增概念列表 (save into the day's newly added concept list)
         new_concept_symbol_df['concept_type'] = 'ths'

@@ -145,7 +149,11 @@ def save_ths_concept_detail(new_concept_symbol_df,
         new_concept_symbol_df = new_concept_symbol_df.loc[~(
             new_concept_symbol_df['symbol'].isin(exist_concept_detail_symbol_list))]
         if new_concept_symbol_df.shape[0] > 0:
-            new_concept_symbol_df['
+            new_concept_symbol_df['grade'] = 1
+            # 详细标识 (detailed marker)
+            new_concept_symbol_df['remark'] = ''
+            # 简单标识 (simple marker)
+            new_concept_symbol_df['remark_flag'] = ''
             mongodb_util.save_mongo(new_concept_symbol_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)
             # 保存到当日新增概念列表 (save into the day's newly added concept list)
             new_concept_symbol_df['concept_type'] = 'ths'
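All three hunks above make the same change: before each write to ths_stock_concept_detail, the sync code now initializes three new columns, grade (default 1), remark (详细标识, detailed marker) and remark_flag (简单标识, simple marker). A minimal sketch of the repeated block as one shared helper, assuming a plain pandas DataFrame (the helper name is hypothetical, not part of the package):

import pandas as pd

def with_concept_defaults(df: pd.DataFrame) -> pd.DataFrame:
    # Initialize the columns introduced in 1.1.6.7 before persisting.
    df['grade'] = 1         # default concept grade
    df['remark'] = ''       # detailed marker, empty by default
    df['remark_flag'] = ''  # simple marker, empty by default
    return df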
mns_scheduler/db/script/sync/local_mongo_util.py (new file)

@@ -0,0 +1,231 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 14
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import pandas as pd
+import pymongo
+from mns_common.utils.async_fun import async_fun
+from loguru import logger
+import warnings
+
+warnings.filterwarnings("ignore")
+
+
+class LocalMongodbUtil:
+    def __init__(self, port):
+        self.port = port
+
+    def get_db(self):
+        client = pymongo.MongoClient("mongodb://127.0.0.1:" + '27017' + "/patience")
+        return client.patience
+
+    def group(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        data = collection.aggregate(query)
+        return pd.DataFrame(list(data))
+
+    def remove_data(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.delete_many(query)
+
+    def exist_data_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count(query, limit=1) > 0
+
+    def find_one(self, coll_name, _id):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.find_one({'_id': _id})
+
+    def find_one_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(collection.find_one(query), index=[0])
+
+    def find_all_data(self, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find({})
+        df = pd.DataFrame([basic for basic in rows])
+        return df
+
+    def find_query_data(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_choose_field(self, coll_name, query, query_field):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query, query_field)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def remove_all_data(self, database):
+        db = self.get_db()
+        collection = db[database]
+        query = {"_id": {"$ne": "null"}}
+        return collection.delete_many(query)
+
+    def drop_collection(self, database):
+        db = self.get_db()
+        collection = db[database]
+        collection.drop()
+
+    def ascend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(num)));
+
+    def descend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def count(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count_documents(query)
+
+    def query_max(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def query_min(self, query, coll_name, field):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(1)));
+
+    def insert_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # 格式转换 (format conversion)
+        try:
+            df = df.drop_duplicates()
+            # df = df.T.drop_duplicates().T
+            records = df.to_dict('records')
+            collection.insert_many(records)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def insert_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        # 格式转换 (format conversion)
+        try:
+            collection.insert_many(json)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def save_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        for record in json:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{}", e)
+
+    def save_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换 (format conversion)
+        records = df.to_dict('records')
+        for record in records:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{},{}", record, e)
+
+    def save_mongo_no_catch_exception(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换 (format conversion)
+        records = df.to_dict('records')
+        for record in records:
+            collection.save(record)
+
+    def update_one(self, df, database):
+        db = self.get_db()
+        condition = {'_id': list(df['_id'])[0]}
+        if len(df) == 0:
+            return
+        collection = db[database]
+        collection.update(condition, df)
+
+    def update_many(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update_many(query, new_values)
+        return x
+
+    @async_fun
+    def update_one_query(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update(query, new_values)
+        return x
+
+    def distinct_field(self, database, field, query):
+        db = self.get_db()
+        collection = db[database]
+        return collection.distinct(field, query)
+
+    def create_index(self, database, index):
+        db = self.get_db()
+        collection = db[database]
+        collection.create_index(
+            index)
+
+    def aggregate(self, pipeline, database):
+        db = self.get_db()
+        collection = db[database]
+        data = collection.aggregate(pipeline)
+        return pd.DataFrame(list(data))
+
+    def get_col_keys(self, database):
+        db = self.get_db()
+        collection = db[database]
+        keys = collection.find_one().keys()
+        return keys
+
+    # 分页查询 descend 是否降序 (paged query; descend = whether to sort in descending order)
+    def find_page_skip_data(self, coll_name, page_query, page, page_number, field, descend):
+        db = self.get_db()
+        collection = db[coll_name]
+        if descend:
+            sort_tag = -1
+        else:
+            sort_tag = 1
+        rows = collection.find(page_query).sort(field, sort_tag).skip((page - 1) * page_number).limit(page_number)
+        df = pd.DataFrame(list(rows))
+        return df
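LocalMongodbUtil assumes a PyMongo 3.x client: Collection.save(), Collection.update() and the count(query, limit=1) call used above were all removed in PyMongo 4.0. Note also that get_db() hard-codes 127.0.0.1:27017 (the port passed to __init__ is never used), and that find_query_data_list is defined twice, so the second definition silently replaces the first. For anyone running this against PyMongo 4.x, a hedged sketch of the per-record upsert that save_mongo performs, assuming each record carries an _id:

def save_records(collection, records):
    # PyMongo 4.x replacement for the removed Collection.save():
    # replace the document with the same _id, inserting it if absent.
    for record in records:
        collection.replace_one({'_id': record['_id']}, record, upsert=True)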
mns_scheduler/db/script/sync/remote_data_sync_to_local.py (new file)

@@ -0,0 +1,52 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import mns_common.utils.data_frame_util as data_frame_util
+from mns_scheduler.db.script.sync.remote_mongo_util import RemoteMongodbUtil
+from mns_scheduler.db.script.sync.local_mongo_util import LocalMongodbUtil
+from loguru import logger
+
+remote_mongodb_util = RemoteMongodbUtil('27017')
+local_mongodb_util = LocalMongodbUtil('27017')
+
+col_list = [
+    'company_remark_info',
+    'company_holding_info',
+    'industry_concept_remark',
+    'trade_date_list',
+    'company_info',
+    'de_list_stock',
+    'kpl_best_choose_index',
+    'kpl_best_choose_index_detail',
+    'realtime_quotes_now_zt_new_kc_open',
+    'industry_concept_remark',
+    'self_black_stock',
+    'self_choose_plate',
+    'self_choose_stock',
+    'stock_account_info',
+    'ths_concept_list',
+    'stock_zt_pool',
+    'ths_stock_concept_detail'
+]
+
+
+def remote_data():
+    for col in col_list:
+        try:
+            col_df = remote_mongodb_util.find_all_data(col)
+            if data_frame_util.is_not_empty(col_df):
+                result = local_mongodb_util.remove_all_data(col)
+                if result.acknowledged:
+                    local_mongodb_util.save_mongo(col_df, col)
+
+            logger.info("同步集合完成:{}", col)
+        except BaseException as e:
+            logger.error("同步失败:{},{}", e, col)
+
+
+if __name__ == '__main__':
+    remote_data()
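remote_data_sync_to_local.py performs a full refresh: each collection in col_list (note that 'industry_concept_remark' appears twice, harmlessly) is read whole from the remote instance, the local copy is wiped, and the DataFrame is re-saved record by record. For large collections, a cursor-batched copy avoids holding everything in memory at once; a sketch against the same two util objects (the function is illustrative, not part of the package):

def copy_collection_batched(col, batch_size=1000):
    # Stream remote documents in fixed-size batches instead of
    # materializing the whole collection as a DataFrame first.
    remote = remote_mongodb_util.get_db()[col]
    local = local_mongodb_util.get_db()[col]
    local.delete_many({})
    batch = []
    for doc in remote.find({}):
        batch.append(doc)
        if len(batch) >= batch_size:
            local.insert_many(batch)
            batch = []
    if batch:
        local.insert_many(batch)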
mns_scheduler/db/script/sync/remote_mongo_util.py (new file)

@@ -0,0 +1,306 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 14
+project_path = file_path[0:end]
+sys.path.append(project_path)
+import pandas as pd
+import pymongo
+from mns_common.utils.async_fun import async_fun
+from loguru import logger
+import warnings
+
+warnings.filterwarnings("ignore")
+
+
+class RemoteMongodbUtil:
+    def __init__(self, port):
+        self.port = port
+
+    def get_db(self):
+        client = pymongo.MongoClient("mongodb://100.87.2.149:" + '27017' + "/patience")
+        return client.patience
+
+    def group(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        data = collection.aggregate(query)
+        return pd.DataFrame(list(data))
+
+    def remove_data(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.delete_many(query)
+
+    def exist_data_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count(query, limit=1) > 0
+
+    def find_one(self, coll_name, _id):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.find_one({'_id': _id})
+
+    def find_one_query(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(collection.find_one(query), index=[0])
+
+    def find_all_data(self, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find({})
+        df = pd.DataFrame([basic for basic in rows])
+        return df
+
+    def find_query_data(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_choose_field(self, coll_name, query, query_field):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query, query_field)
+        df = pd.DataFrame(list(rows))
+        return df
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def find_query_data_list(self, coll_name, query):
+        db = self.get_db()
+        collection = db[coll_name]
+        rows = collection.find(query)
+        return list(rows)
+
+    def remove_all_data(self, database):
+        db = self.get_db()
+        collection = db[database]
+        query = {"_id": {"$ne": "null"}}
+        collection.delete_many(query)
+
+    def drop_collection(self, database):
+        db = self.get_db()
+        collection = db[database]
+        collection.drop()
+
+    def ascend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(num)));
+
+    def descend_query(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def count(self, query, coll_name):
+        db = self.get_db()
+        collection = db[coll_name]
+        return collection.count_documents(query)
+
+    def query_max(self, query, coll_name, field, num):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)));
+
+    def query_min(self, query, coll_name, field):
+        db = self.get_db()
+        collection = db[coll_name]
+        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(1)));
+
+    def insert_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # 格式转换 (format conversion)
+        try:
+            df = df.drop_duplicates()
+            # df = df.T.drop_duplicates().T
+            records = df.to_dict('records')
+            collection.insert_many(records)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def insert_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        # 格式转换 (format conversion)
+        try:
+            collection.insert_many(json)
+        except BaseException as e:
+            logger.error("插入数据异常:{}", e)
+
+    def save_mongo_json(self, json, database):
+        db = self.get_db()
+        collection = db[database]
+        for record in json:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{}", e)
+
+    def save_mongo(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换 (format conversion)
+        records = df.to_dict('records')
+        for record in records:
+            try:
+                collection.save(record)
+            except BaseException as e:
+                logger.error("保存数据出现异常:{},{}", record, e)
+
+    def save_mongo_no_catch_exception(self, df, database):
+        db = self.get_db()
+        if df is None or len(df) == 0:
+            return
+        collection = db[database]
+        # df = df.T.drop_duplicates().T
+        # 格式转换 (format conversion)
+        records = df.to_dict('records')
+        for record in records:
+            collection.save(record)
+
+    def update_one(self, df, database):
+        db = self.get_db()
+        condition = {'_id': list(df['_id'])[0]}
+        if len(df) == 0:
+            return
+        collection = db[database]
+        collection.update(condition, df)
+
+    def update_many(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update_many(query, new_values)
+        return x
+
+    @async_fun
+    def update_one_query(self, query, new_values, database):
+        db = self.get_db()
+        collection = db[database]
+        x = collection.update(query, new_values)
+        return x
+
+    def distinct_field(self, database, field, query):
+        db = self.get_db()
+        collection = db[database]
+        return collection.distinct(field, query)
+
+    def create_index(self, database, index):
+        db = self.get_db()
+        collection = db[database]
+        collection.create_index(
+            index)
+
+    def aggregate(self, pipeline, database):
+        db = self.get_db()
+        collection = db[database]
+        data = collection.aggregate(pipeline)
+        return pd.DataFrame(list(data))
+
+    def get_col_keys(self, database):
+        db = self.get_db()
+        collection = db[database]
+        keys = collection.find_one().keys()
+        return keys
+
+    # 分页查询 descend 是否降序 (paged query; descend = whether to sort in descending order)
+    def find_page_skip_data(self, coll_name, page_query, page, page_number, field, descend):
+        db = self.get_db()
+        collection = db[coll_name]
+        if descend:
+            sort_tag = -1
+        else:
+            sort_tag = 1
+        rows = collection.find(page_query).sort(field, sort_tag).skip((page - 1) * page_number).limit(page_number)
+        df = pd.DataFrame(list(rows))
+        return df
+
+
+# if __name__ == '__main__':
+#     symbol = '002992'
+#     query = {'symbol': symbol,
+#              '$and': [{'str_day': {'$gte': '2022-07-06'}}, {'str_day': {'$lte': '2022-11-06'}}]}
+#     mongodb_util = MongodbUtil('27017')
+#     # num = mongodb_util.count(query, 'stock_zt_pool')
+#     # print(num)
+#     key = mongodb_util.get_col_keys('stock_zt_pool')
+#     print(key)
+#
+#     # num = mongodb_util.count(query, 'stock_zt_pool')
+#     # print(num)
+#
+#     pipeline = [
+#         {'$match': {
+#             "classification": {'$in': ["K", "C"]},
+#             "str_day": {'$gte': "2022-03-16"}}},
+#         {'$group': {'_id': "$flow_mv_level", 'count': {'$sum': 1}}}
+#     ]
+#     result = mongodb_util.aggregate(pipeline, 'realtime_quotes_now_zt_new_kc_open')
+#
+#     result = result.sort_values(by=['_id'], ascending=True)
+#     print(result)
+from io import StringIO
+import re
+
+if __name__ == '__main__':
+    mongodb_util = RemoteMongodbUtil('27017')
+    #
+    # kpl_best_choose_index_df = mongodb_util.find_page_skip_data('kpl_best_choose_index', {"index_class": "sub_index"},
+    #                                                             1, 100, 'create_time', True)
+    key_word = '高速连接'
+    EXCLUDE_INFO_KEY = '股东人数'
+    # query = {
+    #     "$or": [{'question': {"$regex": re.compile(key_word, re.IGNORECASE)}},
+    #             {'answer_content': {"$regex": re.compile(key_word, re.IGNORECASE)}}],
+    #     "$and": [{'question': {"$not": re.compile(EXCLUDE_INFO_KEY, re.IGNORECASE)}},
+    #              {'answer_content': {"$not": re.compile(EXCLUDE_INFO_KEY, re.IGNORECASE)}}],
+    # }
+    #
+    # pipeline = [
+    #     {'$match': query},
+    #     {'$group': {'_id': "$symbol", 'count': {'$sum': 1}}}
+    # ]
+    # result = mongodb_util.aggregate(pipeline, 'stock_interactive_question')
+    #
+    # result = result.sort_values(by=['_id'], ascending=True)
+    # print(result)
+    #
+    # # ths_new_concept = mongodb_util.find_all_data('ths_new_concept')
+    # key = mongodb_util.get_col_keys('company_info')
+    # print(key)
+
+    # mongodb_util.create_index('realtime_quotes_now_open', [("number", 1)])
+    # mongodb_util.create_index('realtime_quotes_now_open', [("symbol", 1), ("number", 1)])
+    # mongodb_util.create_index('realtime_quotes_now_open', [("str_day", 1)])
+    # update_query = {"str_day": "2023-06-30"}
+    # mongodb_util.update_many(update_query, {"$set": {"number": 1}}, "realtime_quotes_now_open")
+    # query = {"symbol": "000617"}
+    # company_info_base = mongodb_util.find_query_data('company_info_base', query)
+    # ths_stock_concept_detail = mongodb_util.find_query_data('ths_stock_concept_detail', query)
+    # ths_stock_concept_detail = ths_stock_concept_detail[[
+    #     'concept_code',
+    #     'concept_name',
+    #     'str_now_time',
+    #     'concept_create_day']]
+    # # 去除空格 (strip spaces)
+    # ths_stock_concept_detail['concept_name'] = ths_stock_concept_detail['concept_name'].str.replace(' ', '')
+    # company_info_base.loc[:, 'ths_concept_list_info'] = ths_stock_concept_detail.to_string(index=False)
+    # for company_one in company_info_base.itertuples():
+    #     ths_concept_list_info = company_one.ths_concept_list_info
+    #     ths_concept_list_info_df = pd.read_csv(StringIO(ths_concept_list_info), delim_whitespace=True)
+    #     print(ths_concept_list_info_df)
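RemoteMongodbUtil is a near-verbatim copy of LocalMongodbUtil; the functional differences are the hard-coded host (100.87.2.149 instead of 127.0.0.1) and remove_all_data not returning the delete result, which is why remote_data_sync_to_local.py checks result.acknowledged only on the local side. A sketch of collapsing the two classes into one parameterized utility (a hypothetical consolidation, not what the package ships):

import pymongo

class MongodbUtil:
    def __init__(self, host, port='27017', db_name='patience'):
        self.host = host
        self.port = port
        self.db_name = db_name

    def get_db(self):
        # Build the connection URI from arguments instead of hard-coding it.
        uri = 'mongodb://{}:{}/{}'.format(self.host, self.port, self.db_name)
        return pymongo.MongoClient(uri)[self.db_name]

# local_mongodb_util = MongodbUtil('127.0.0.1')
# remote_mongodb_util = MongodbUtil('100.87.2.149')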
{mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/RECORD

@@ -18,10 +18,10 @@ mns_scheduler/company_info/remark/company_remark_info_sync.py,sha256=hzQ8uBK4-od
 mns_scheduler/concept/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/concept/clean/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/concept/clean/kpl_concept_clean_api.py,sha256=xxIIgrXLI6xLf10t4unJa7aMB_QeKeki1HJVeXsntkY,4512
-mns_scheduler/concept/clean/ths_concept_clean_api.py,sha256=
+mns_scheduler/concept/clean/ths_concept_clean_api.py,sha256=AB-BV-wEcO-Q7p6GJrkqWYV-kh5PttSwvHIHCqa327c,6142
 mns_scheduler/concept/ths/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/concept/ths/common/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
-mns_scheduler/concept/ths/common/ths_concept_sync_common_api.py,sha256=
+mns_scheduler/concept/ths/common/ths_concept_sync_common_api.py,sha256=H94aJNMVbOm5e8rqlAdAo9hQsQI22rVURpcVoA86xeY,8915
 mns_scheduler/concept/ths/common/ths_concept_update_common_api.py,sha256=4BQT3A9t-nDIyCpILgRZF7ZOgK1oabp-gJl5nyWvKWc,4418
 mns_scheduler/concept/ths/detaill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mns_scheduler/concept/ths/detaill/ths_concept_detail_api.py,sha256=DND0wPAbxXSSu8D9dJe6FaamAnnDftDMfLc9H6UmmSg,10885

@@ -36,6 +36,10 @@ mns_scheduler/db/db_status.py,sha256=e5eW5ZSm5J7tHvmxxhFmFdbZb2_oB_SAcdcFqc4KDmw
 mns_scheduler/db/real_time_task_check.py,sha256=R8-ZmS7wzC_zZPGgDwv6kx2v25hBrYOTvXlU3cP2BVQ,3167
 mns_scheduler/db/script/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/db/script/col_move_script.py,sha256=0WNv0xbnPPKTME2_WMEPSGNT0DUWC7hS4mJ2VeNgc08,1163
+mns_scheduler/db/script/sync/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/db/script/sync/local_mongo_util.py,sha256=MFG-S7aUBLKe4tkhKNklUzpiZef2n078YXd39dfOMy0,7540
+mns_scheduler/db/script/sync/remote_data_sync_to_local.py,sha256=WKJPo4u1Z6HeTz7aqlmDy81B1pOx6-sxxOWxix62-iQ,1558
+mns_scheduler/db/script/sync/remote_mongo_util.py,sha256=-BCR2zeQ9z0zeZg6wO0aCS4bGnsGIohFRH7QR8XXJSo,10966
 mns_scheduler/debt/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/debt/kzz_bond_info_sync.py,sha256=3o0Y4FBxP3AOXwf7Z7jVO1N_DcqxeOVqcgMM3y7E4uo,1336
 mns_scheduler/dt/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163

@@ -127,12 +131,12 @@ mns_scheduler/zt/script/kcx_high_chg_open_his_data_handle.py,sha256=aTrYgshcccoH
 mns_scheduler/zt/script/sync_high_chg_pool_his_data.py,sha256=dtREQdNpoDM4KLTvdXuiEhXmdjpc1dScMDgR3fbEbww,1685
 mns_scheduler/zt/script/sync_now_higt_chg_zt.py,sha256=bhoIGDWTI3w0YKVfIAVNHWpUrYqJYDMGPt-1i3d_Zmw,1850
 mns_scheduler/zt/zt_pool/__init__.py,sha256=Tyvi_iQlv3jz59EdH67Mycnt9CSixcWPQoJwu55bOq0,165
-mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py,sha256=
+mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py,sha256=Am5N29FStM2NaNPdwhDymFP61Y42YrwONFRcmwfkl2Y,7070
 mns_scheduler/zt/zt_pool/ths_zt_pool_sync_api.py,sha256=Sy39T-yFwLSIIoSZqQzS-6-W1RlaFWvYpksEXKQVFdI,10456
 mns_scheduler/zt/zt_pool/update_null_zt_reason_api.py,sha256=OuklSKUhZMj1aKcwDLpZilKIqFbHY3hvvIuA_UFnPqA,2135
 mns_scheduler/zz_task/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/zz_task/data_sync_task.py,sha256=dOcYkNEnziop-9fERFib2qdOUsu0eyKZd7ye_MZtuWo,20770
-mns_scheduler-1.1.6.
-mns_scheduler-1.1.6.
-mns_scheduler-1.1.6.
-mns_scheduler-1.1.6.
+mns_scheduler-1.1.6.7.dist-info/METADATA,sha256=RcaNoYI6q73UhwABhhDxNbIHHOijUpTrW1S5vplPcM8,64
+mns_scheduler-1.1.6.7.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+mns_scheduler-1.1.6.7.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
+mns_scheduler-1.1.6.7.dist-info/RECORD,,

{mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/WHEEL: file without changes
{mns_scheduler-1.1.6.5.dist-info → mns_scheduler-1.1.6.7.dist-info}/top_level.txt: file without changes
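For reference when reading the RECORD hunks above: each row is path,sha256=<digest>,<size-in-bytes>, where the digest is the urlsafe-base64 SHA-256 of the file with trailing '=' padding stripped (PEP 376/PEP 427), and the RECORD file itself carries no hash or size, hence the trailing ',,'. A sketch for recomputing one row's digest:

import base64
import hashlib

def record_digest(path):
    # RECORD-style digest: urlsafe base64 of the SHA-256, padding stripped.
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode()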