mns-scheduler 1.2.7.3__py3-none-any.whl → 1.2.7.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mns-scheduler might be problematic.
- mns_scheduler/extraIncome/one_minute/common/db_create_index.py +9 -0
- mns_scheduler/extraIncome/one_minute/common/symbol_handle_util.py +41 -0
- mns_scheduler/extraIncome/one_minute/etf/etf_one_minute_sync_task.py +87 -0
- mns_scheduler/extraIncome/one_minute/index/main_index_sync_task.py +74 -0
- mns_scheduler/extraIncome/one_minute/kzz/__init__.py +7 -0
- mns_scheduler/extraIncome/one_minute/kzz/kzz_one_minute_sync_task.py +87 -0
- mns_scheduler/extraIncome/one_minute/one_minute_sync_task.py +34 -0
- mns_scheduler/extraIncome/one_minute/stock/__init__.py +7 -0
- mns_scheduler/extraIncome/one_minute/stock/stock_one_minute_sync_task.py +89 -0
- mns_scheduler/extraIncome/temp/__init__.py +7 -0
- mns_scheduler/extraIncome/temp/tu_share_data_etf_sync.py +103 -0
- mns_scheduler/extraIncome/temp/tu_share_data_kzz_sync.py +113 -0
- mns_scheduler/extraIncome/{stock/sync_stock_one_minute_now_api.py → temp/tu_share_data_stock_sync.py} +27 -56
- mns_scheduler/extraIncome/temp/tu_share_zhi_shu_sync_api.py +107 -0
- mns_scheduler/zz_task/data_sync_task.py +9 -5
- {mns_scheduler-1.2.7.3.dist-info → mns_scheduler-1.2.7.5.dist-info}/METADATA +1 -1
- {mns_scheduler-1.2.7.3.dist-info → mns_scheduler-1.2.7.5.dist-info}/RECORD +23 -10
- /mns_scheduler/extraIncome/{etf → one_minute}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{kzz → one_minute/common}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{stock → one_minute/etf}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{stock/one_minute_k_line_sync.py → one_minute/index/__init__.py} +0 -0
- {mns_scheduler-1.2.7.3.dist-info → mns_scheduler-1.2.7.5.dist-info}/WHEEL +0 -0
- {mns_scheduler-1.2.7.3.dist-info → mns_scheduler-1.2.7.5.dist-info}/top_level.txt +0 -0
mns_scheduler/extraIncome/one_minute/common/db_create_index.py (new file)
@@ -0,0 +1,9 @@
def create_index(db_util, col_name):
    index_create = [('symbol', 1), ('time', 1)]
    db_util.create_index(col_name, index_create)
    index_create_01 = [('time', 1)]
    db_util.create_index(col_name, index_create_01)
    index_create_02 = [('symbol', 1)]
    db_util.create_index(col_name, index_create_02)
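As a point of reference, the sketch below expresses the same three indexes against the plain pymongo driver. It assumes `MongodbUtilV2.create_index(col_name, keys)` ultimately wraps the standard `Collection.create_index` call, which this diff does not show; the connection string and collection name are illustrative, not taken from the package.

from pymongo import MongoClient, ASCENDING

client = MongoClient('mongodb://localhost:27019/')  # assumed host/port, mirroring the '27019' util above
col = client['extraIncome']['one_minute_k_line_bfq_etf_2025']  # example yearly collection name

col.create_index([('symbol', ASCENDING), ('time', ASCENDING)])  # compound (symbol, time) index
col.create_index([('time', ASCENDING)])
col.create_index([('symbol', ASCENDING)])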
mns_scheduler/extraIncome/one_minute/common/symbol_handle_util.py (new file)
@@ -0,0 +1,41 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import mns_common.constant.extra_income_db_name as extra_income_db_name
import pandas as pd
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2

mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)


def symbol_add_prefix(symbol):
    symbol_simple = symbol[0:6]
    suffix = symbol[7:9]
    if suffix in ['SH']:
        return '1.' + symbol_simple
    elif suffix in ['SZ']:
        return '0.' + symbol_simple
    elif suffix in ['BJ']:
        return '0.' + symbol_simple


# col_name: the collection the data is saved to
def save_fail_data(now_day, symbol_prefix, col_name):
    fail_dict = {'begin_date': now_day,
                 'end_date': now_day,
                 'symbol': symbol_prefix,
                 'col_name': col_name,
                 'type': 'kzz',
                 'sync_day': now_day,
                 'valid': True,
                 }
    fail_df = pd.DataFrame(fail_dict, index=[1])
    mongodbUtilV2_27019.insert_mongo(fail_df, extra_income_db_name.ONE_MINUTE_SYNC_FAIL)


if __name__ == '__main__':
    symbol_add_prefix('000001.SZ')
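For reference, a minimal sketch of the mapping `symbol_add_prefix` produces for the exchange-suffixed codes these tasks use. The sample codes below are illustrative only; the function is repeated verbatim so the sketch runs standalone.

def symbol_add_prefix(symbol):
    # same logic as the helper above
    symbol_simple = symbol[0:6]
    suffix = symbol[7:9]
    if suffix in ['SH']:
        return '1.' + symbol_simple
    elif suffix in ['SZ']:
        return '0.' + symbol_simple
    elif suffix in ['BJ']:
        return '0.' + symbol_simple

# Illustrative sample codes (not taken from the package):
assert symbol_add_prefix('600000.SH') == '1.600000'  # Shanghai -> '1.' prefix
assert symbol_add_prefix('000001.SZ') == '0.000001'  # Shenzhen -> '0.' prefix
assert symbol_add_prefix('832566.BJ') == '0.832566'  # Beijing  -> '0.' prefix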
mns_scheduler/extraIncome/one_minute/etf/etf_one_minute_sync_task.py (new file)
@@ -0,0 +1,87 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)

import mns_common.api.em.east_money_etf_api as east_money_etf_api
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import mns_scheduler.extraIncome.one_minute.common.symbol_handle_util as symbol_handle_util
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name
import mns_common.api.k_line.stock_minute_data_api as stock_minute_data_api
from datetime import datetime

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)


def sync_etf_one_minute(data_tag):
    etf_real_time_quotes = east_money_etf_api.get_etf_real_time_quotes()
    etf_real_time_quotes = classify_symbol(etf_real_time_quotes)
    etf_real_time_quotes['symbol'] = etf_real_time_quotes.apply(
        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
        else row['symbol'] + '.BJ' if row['classification'] in ['X']
        else row['symbol'] + '.SH',
        axis=1
    )
    # assumes the numeric date format is YYYYMMDD
    # debt_real_time_quotes['list_date'] = pd.to_datetime(debt_real_time_quotes['list_date'],
    #                                                     format='%Y%m%d').dt.strftime('%Y-%m-%d')

    etf_real_time_quotes = etf_real_time_quotes.loc[etf_real_time_quotes['amount'] != 0]

    now_date = datetime.now()
    now_day = now_date.strftime('%Y-%m-%d')
    year = now_date.strftime('%Y')
    col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_ETF + '_' + str(year)
    # create indexes
    db_create_index.create_index(mongodbUtilV2_27019, col_name)

    for stock_one in etf_real_time_quotes.itertuples():

        symbol = stock_one.symbol
        symbol_prefix = symbol_handle_util.symbol_add_prefix(symbol)
        try:
            one_min_df = stock_minute_data_api.get_minute_data(symbol_prefix, now_day, now_day, '1', '')
            one_min_df['symbol'] = symbol
            one_min_df['_id'] = one_min_df['symbol'] + '_' + one_min_df['time']
            if data_frame_util.is_empty(one_min_df) or one_min_df.shape[0] < 241:
                symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
                logger.error("当前ETF分钟数据同步异常:{}", symbol)
                continue
            else:
                del one_min_df['ava_price']
                if data_tag:
                    mongodbUtilV2_27019.insert_mongo(one_min_df, col_name)
                else:
                    mongodbUtilV2_27019.save_mongo(one_min_df, col_name)
        except BaseException as e:
            time.sleep(2)
            symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
            logger.error("同步ETF分钟数据出现异常:{},{},{}", e, symbol, now_day)
        logger.info("同步完ETF分钟数据:{},{}", stock_one.symbol, stock_one.name)


def classify_symbol(etf_real_time_quotes):
    etf_real_time_quotes['classification'] = etf_real_time_quotes['market'].apply(
        lambda market: classify_symbol_one(market))
    return etf_real_time_quotes


# classify a single symbol
def classify_symbol_one(market):
    if market == 0:
        return 'S'
    else:
        return 'H'


if __name__ == '__main__':
    sync_etf_one_minute(False)
mns_scheduler/extraIncome/one_minute/index/main_index_sync_task.py (new file)
@@ -0,0 +1,74 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)

from loguru import logger
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name
from datetime import datetime
import mns_common.api.k_line.stock_minute_data_api as stock_minute_data_api
import mns_scheduler.extraIncome.one_minute.common.symbol_handle_util as symbol_handle_util

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)

main_index_list = [
    "000001.SH",
    "000016.SH",
    "000010.SH",
    "000009.SH",
    "000015.SH",
    "399001.SZ",
    "399004.SZ",
    "399005.SZ",
    "399006.SZ",
    "000300.SH",
    "000905.SH",
    "000688.SH",
    "000903.SH",
    "000906.SH",
    "000852.SH",
    "000932.SH",
    "000933.SH",
    "980017.SZ",
    "399808.SZ",
    "399997.SZ",
]


def sync_main_index_one_minute(data_tag):
    now_date = datetime.now()
    now_day = now_date.strftime('%Y-%m-%d')
    year = now_date.strftime('%Y')
    col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_MAIN_INDEX + '_' + str(year)
    # create indexes
    db_create_index.create_index(mongodbUtilV2_27019, col_name)
    for symbol in main_index_list:
        try:
            symbol_prefix = symbol_handle_util.symbol_add_prefix(symbol)
            one_min_df = stock_minute_data_api.get_minute_data(symbol_prefix, now_day, now_day, '1', '')
            one_min_df['symbol'] = symbol
            one_min_df['_id'] = one_min_df['symbol'] + '_' + one_min_df['time']
            if data_frame_util.is_empty(one_min_df) or one_min_df.shape[0] < 241:
                logger.error("当前沪深指数分钟数据同步异常:{}", symbol)
                continue
            else:
                del one_min_df['ava_price']
                if data_tag:
                    mongodbUtilV2_27019.insert_mongo(one_min_df, col_name)
                else:
                    mongodbUtilV2_27019.save_mongo(one_min_df, col_name)
        except BaseException as e:
            logger.error("沪深指数分钟数据同步异常:{},{}", symbol, e)
        logger.info("沪深指数分钟数据同步完成:{}", symbol)


if __name__ == '__main__':
    sync_main_index_one_minute(False)
mns_scheduler/extraIncome/one_minute/kzz/kzz_one_minute_sync_task.py (new file)
@@ -0,0 +1,87 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)

import mns_common.api.em.east_money_debt_api as east_money_debt_api
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import mns_scheduler.extraIncome.one_minute.common.symbol_handle_util as symbol_handle_util
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name
import mns_common.api.k_line.stock_minute_data_api as stock_minute_data_api
from datetime import datetime

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)


def sync_kzz_one_minute(data_tag):
    debt_real_time_quotes = east_money_debt_api.get_debt_real_time_quotes()
    debt_real_time_quotes = classify_symbol(debt_real_time_quotes)
    debt_real_time_quotes['symbol'] = debt_real_time_quotes.apply(
        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
        else row['symbol'] + '.BJ' if row['classification'] in ['X']
        else row['symbol'] + '.SH',
        axis=1
    )
    # assumes the numeric date format is YYYYMMDD
    # debt_real_time_quotes['list_date'] = pd.to_datetime(debt_real_time_quotes['list_date'],
    #                                                     format='%Y%m%d').dt.strftime('%Y-%m-%d')

    debt_real_time_quotes = debt_real_time_quotes.loc[debt_real_time_quotes['amount'] != 0]

    now_date = datetime.now()
    now_day = now_date.strftime('%Y-%m-%d')
    year = now_date.strftime('%Y')
    col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_KZZ + '_' + str(year)
    # create indexes
    db_create_index.create_index(mongodbUtilV2_27019, col_name)

    for stock_one in debt_real_time_quotes.itertuples():

        symbol = stock_one.symbol
        symbol_prefix = symbol_handle_util.symbol_add_prefix(symbol)
        try:
            one_min_df = stock_minute_data_api.get_minute_data(symbol_prefix, now_day, now_day, '1', '')
            one_min_df['symbol'] = symbol
            one_min_df['_id'] = one_min_df['symbol'] + '_' + one_min_df['time']
            if data_frame_util.is_empty(one_min_df) or one_min_df.shape[0] < 241:
                symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
                logger.error("当前可转债分钟数据同步异常:{}", symbol)
                continue
            else:
                del one_min_df['ava_price']
                if data_tag:
                    mongodbUtilV2_27019.insert_mongo(one_min_df, col_name)
                else:
                    mongodbUtilV2_27019.save_mongo(one_min_df, col_name)
        except BaseException as e:
            time.sleep(2)
            symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
            logger.error("同步可转债分钟数据出现异常:,{},{},{}", e, symbol, now_day)
        logger.info("同步完可转债分钟数据:{},{}", stock_one.symbol, stock_one.name)


def classify_symbol(debt_real_time_quotes_df):
    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
        lambda market: classify_symbol_one(market))
    return debt_real_time_quotes_df


# classify a single symbol
def classify_symbol_one(market):
    if market == 0:
        return 'S'
    else:
        return 'H'


if __name__ == '__main__':
    sync_kzz_one_minute(False)
mns_scheduler/extraIncome/one_minute/one_minute_sync_task.py (new file)
@@ -0,0 +1,34 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import mns_scheduler.extraIncome.one_minute.index.main_index_sync_task as main_index_sync_task
import mns_scheduler.extraIncome.one_minute.etf.etf_one_minute_sync_task as etf_one_minute_sync_task
import mns_scheduler.extraIncome.one_minute.kzz.kzz_one_minute_sync_task as kzz_one_minute_sync_task
import mns_scheduler.extraIncome.one_minute.stock.stock_one_minute_sync_task as stock_one_minute_sync_task
from datetime import datetime
import mns_common.component.trade_date.trade_date_common_service_api as trade_date_common_service_api


def sync_one_minute_data():
    now_date = datetime.now()
    hour = now_date.hour
    now_day = now_date.strftime('%Y-%m-%d')
    if trade_date_common_service_api.is_trade_day(now_day):
        if 15 < hour < 20:
            main_index_sync_task.sync_main_index_one_minute(True)
            etf_one_minute_sync_task.sync_etf_one_minute(True)
            kzz_one_minute_sync_task.sync_kzz_one_minute(True)
            stock_one_minute_sync_task.sync_stock_one_minute(True)
        elif hour < 9 or hour >= 20:
            main_index_sync_task.sync_main_index_one_minute(False)
            etf_one_minute_sync_task.sync_etf_one_minute(False)
            kzz_one_minute_sync_task.sync_kzz_one_minute(False)
            stock_one_minute_sync_task.sync_stock_one_minute(False)


if __name__ == '__main__':
    sync_one_minute_data()
mns_scheduler/extraIncome/one_minute/stock/stock_one_minute_sync_task.py (new file)
@@ -0,0 +1,89 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import mns_common.component.common_service_fun_api as common_service_fun_api
import mns_common.api.em.east_money_stock_api as east_money_stock_api
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import mns_scheduler.extraIncome.one_minute.common.symbol_handle_util as symbol_handle_util
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name
import mns_common.api.k_line.stock_minute_data_api as stock_minute_data_api
from datetime import datetime

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)


def sync_stock_one_minute(data_tag):
    real_time_quotes_all_stocks = east_money_stock_api.get_real_time_quotes_all_stocks()
    real_time_quotes_all_stocks = common_service_fun_api.classify_symbol(real_time_quotes_all_stocks)
    real_time_quotes_all_stocks['symbol'] = real_time_quotes_all_stocks.apply(
        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
        else row['symbol'] + '.BJ' if row['classification'] in ['X']
        else row['symbol'] + '.SH',
        axis=1)
    # assumes the numeric date format is YYYYMMDD
    # debt_real_time_quotes['list_date'] = pd.to_datetime(debt_real_time_quotes['list_date'],
    #                                                     format='%Y%m%d').dt.strftime('%Y-%m-%d')

    real_time_quotes_all_stocks = real_time_quotes_all_stocks.loc[real_time_quotes_all_stocks['amount'] != 0]

    now_date = datetime.now()
    now_day = now_date.strftime('%Y-%m-%d')
    year = now_date.strftime('%Y')

    # create indexes
    db_create_index.create_index(mongodbUtilV2_27019, extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_C + '_' + str(year))
    db_create_index.create_index(mongodbUtilV2_27019, extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_BJ + '_' + str(year))
    db_create_index.create_index(mongodbUtilV2_27019, extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_H + '_' + str(year))
    db_create_index.create_index(mongodbUtilV2_27019, extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_K + '_' + str(year))
    db_create_index.create_index(mongodbUtilV2_27019, extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_S + '_' + str(year))

    for stock_one in real_time_quotes_all_stocks.itertuples():
        classification = stock_one.classification

        if classification == 'X':
            col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_BJ
        elif classification == 'S':
            col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_S
        elif classification == 'H':
            col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_H
        elif classification == 'K':
            col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_K
        elif classification == 'C':
            col_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_C
        col_name = col_name + '_' + str(year)

        symbol = stock_one.symbol
        symbol_prefix = symbol_handle_util.symbol_add_prefix(symbol)
        try:
            one_min_df = stock_minute_data_api.get_minute_data(symbol_prefix, now_day, now_day, '1', '')
            one_min_df['symbol'] = symbol
            one_min_df['_id'] = one_min_df['symbol'] + '_' + one_min_df['time']
            if data_frame_util.is_empty(one_min_df) or one_min_df.shape[0] < 241:
                symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
                logger.error("当前股票分钟数据同步异常:{}", symbol)
                continue
            else:
                del one_min_df['ava_price']
                if data_tag:
                    mongodbUtilV2_27019.insert_mongo(one_min_df, col_name)
                else:
                    mongodbUtilV2_27019.save_mongo(one_min_df, col_name)
        except BaseException as e:
            time.sleep(2)
            symbol_handle_util.save_fail_data(now_day, symbol_prefix, col_name)
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_day)
        logger.info("同步股票分钟票数据完整:{},{}", stock_one.symbol, stock_one.name)


if __name__ == '__main__':
    sync_stock_one_minute(False)
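One detail worth noting in the routing above: if a row ever carries a classification outside X/S/H/K/C, `col_name` silently keeps the value from the previous iteration. A dictionary lookup with an explicit failure is one equivalent way to express the same routing; this is only a sketch, with literal collection names standing in for the `extra_income_db_name` constants.

# Sketch only: classification -> collection routing as a dict lookup.
COLLECTION_BY_CLASSIFICATION = {
    'X': 'one_minute_k_line_bfq_bj',
    'S': 'one_minute_k_line_bfq_s',
    'H': 'one_minute_k_line_bfq_h',
    'K': 'one_minute_k_line_bfq_k',
    'C': 'one_minute_k_line_bfq_c',
}


def resolve_collection(classification, year):
    base = COLLECTION_BY_CLASSIFICATION.get(classification)
    if base is None:
        # unknown classification: fail loudly instead of reusing the previous col_name
        raise ValueError('unexpected classification: ' + repr(classification))
    return base + '_' + str(year)


print(resolve_collection('S', 2025))  # -> one_minute_k_line_bfq_s_2025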
mns_scheduler/extraIncome/temp/tu_share_data_etf_sync.py (new file)
@@ -0,0 +1,103 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import mns_common.api.em.east_money_etf_api as east_money_etf_api
import pandas as pd
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import tushare as ts
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')


def get_minute_data(symbol, freq, start_date, end_date):
    # fetch historical minute bars for one symbol (e.g. SPDB 600000.SH)
    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
    return df


def sync_etf_one_minute(trade_date_list_df):
    etf_real_time_quotes_df = east_money_etf_api.get_etf_real_time_quotes()
    etf_real_time_quotes_df = classify_symbol(etf_real_time_quotes_df)
    etf_real_time_quotes_df['symbol'] = etf_real_time_quotes_df.apply(
        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
        else row['symbol'] + '.BJ' if row['classification'] in ['X']
        else row['symbol'] + '.SH',
        axis=1
    )

    db_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_ETF
    db_create_index.create_index(mongodbUtilV2_27019, db_name)
    for stock_one in etf_real_time_quotes_df.itertuples():
        trade_date_list_df_copy = trade_date_list_df.copy()
        symbol = stock_one.symbol
        for i in range(0, len(trade_date_list_df_copy), 28):
            try:
                new_df = trade_date_list_df_copy.iloc[i:i + 28]
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'

                df = get_minute_data(symbol, '1min', begin_date, end_date)
                if data_frame_util.is_not_empty(df):
                    df = df.rename(columns={
                        "trade_time": "time",
                        "ts_code": "symbol",
                        "vol": "volume",
                    })
                    df['time_tick'] = df['time'].str[11:19]
                    df = df.loc[df['time_tick'] <= '15:00:00']
                    del df['time_tick']
                    df['_id'] = df['symbol'] + '_' + df['time']
                    mongodbUtilV2_27019.insert_mongo(df, db_name)
            except BaseException as e:
                time.sleep(2)
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'
                fail_dict = {'begin_date': begin_date,
                             'end_date': end_date,
                             'symbol': symbol,
                             'db_name': db_name
                             }
                fail_df = pd.DataFrame(fail_dict, index=[1])
                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')

                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
        logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)

    return etf_real_time_quotes_df


def classify_symbol(debt_real_time_quotes_df):
    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
        lambda market: classify_symbol_one(market))
    return debt_real_time_quotes_df


# classify a single symbol
def classify_symbol_one(market):
    if market == 0:
        return 'S'
    else:
        return 'H'


if __name__ == '__main__':
    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)

    sync_etf_one_minute(trade_date_list_df_all)
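The temp scripts walk history in windows of 28 trade dates per TuShare request (the `range(0, len(df), 28)` loop above), presumably to keep each `stk_mins` call within the provider's per-request limits. A minimal standalone sketch of that windowing, assuming a plain list of trade-date strings instead of the MongoDB-backed DataFrame used above:

# Sketch only: chunk trade dates into 28-day request windows.
trade_dates = ['2025-03-10', '2025-03-11', '2025-03-12', '2025-03-13', '2025-03-14']  # illustrative

windows = []
for i in range(0, len(trade_dates), 28):
    chunk = trade_dates[i:i + 28]
    begin_date = chunk[0] + ' 09:20:00'   # first trade day of the window
    end_date = chunk[-1] + ' 15:00:00'    # last trade day of the window
    windows.append((begin_date, end_date))

print(windows)  # one (begin, end) pair per window, ready to pass to get_minute_data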
mns_scheduler/extraIncome/temp/tu_share_data_kzz_sync.py (new file)
@@ -0,0 +1,113 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)

import mns_common.api.em.east_money_debt_api as east_money_debt_api
import pandas as pd
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import tushare as ts
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
import mns_common.constant.extra_income_db_name as extra_income_db_name
mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')


def get_minute_data(symbol, freq, start_date, end_date):
    # fetch historical minute bars for one symbol (e.g. SPDB 600000.SH)
    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
    return df


def sync_kzz_one_minute(trade_date_list_df):
    debt_real_time_quotes = east_money_debt_api.get_debt_real_time_quotes()
    debt_real_time_quotes = classify_symbol(debt_real_time_quotes)
    debt_real_time_quotes['symbol'] = debt_real_time_quotes.apply(
        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
        else row['symbol'] + '.BJ' if row['classification'] in ['X']
        else row['symbol'] + '.SH',
        axis=1
    )
    # assumes the numeric date format is YYYYMMDD
    # debt_real_time_quotes['list_date'] = pd.to_datetime(debt_real_time_quotes['list_date'],
    #                                                     format='%Y%m%d').dt.strftime('%Y-%m-%d')

    db_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_KZZ
    db_create_index.create_index(mongodbUtilV2_27019, db_name)

    for stock_one in debt_real_time_quotes.itertuples():

        trade_date_list_df_copy = trade_date_list_df.copy()

        # list_date = stock_one.list_date
        # trade_date_list_df_copy = trade_date_list_df_copy.loc[trade_date_list_df_copy['_id'] >= list_date]

        symbol = stock_one.symbol

        for i in range(0, len(trade_date_list_df_copy), 28):
            try:
                new_df = trade_date_list_df_copy.iloc[i:i + 28]
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'

                df = get_minute_data(symbol, '1min', begin_date, end_date)
                if data_frame_util.is_not_empty(df):
                    df = df.rename(columns={
                        "trade_time": "time",
                        "ts_code": "symbol",
                        "vol": "volume",
                    })
                    df['time_tick'] = df['time'].str[11:19]
                    df = df.loc[df['time_tick'] <= '15:00:00']
                    del df['time_tick']
                    df['_id'] = df['symbol'] + '_' + df['time']
                    mongodbUtilV2_27019.insert_mongo(df, db_name)
            except BaseException as e:
                time.sleep(2)
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'
                fail_dict = {'begin_date': begin_date,
                             'end_date': end_date,
                             'symbol': symbol,
                             'db_name': db_name
                             }
                fail_df = pd.DataFrame(fail_dict, index=[1])
                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')

                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
        logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)

    return debt_real_time_quotes


def classify_symbol(debt_real_time_quotes_df):
    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
        lambda market: classify_symbol_one(market))
    return debt_real_time_quotes_df


# classify a single symbol
def classify_symbol_one(market):
    if market == 0:
        return 'S'
    else:
        return 'H'


if __name__ == '__main__':
    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)

    sync_kzz_one_minute(trade_date_list_df_all)
mns_scheduler/extraIncome/stock/sync_stock_one_minute_now_api.py → mns_scheduler/extraIncome/temp/tu_share_data_stock_sync.py (renamed and modified; some removed lines are truncated in the diff viewer)
@@ -5,40 +5,30 @@ file_path = os.path.abspath(__file__)
 end = file_path.index('mns') + 16
 project_path = file_path[0:end]
 sys.path.append(project_path)
-
+
+import tushare as ts
 from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
-
+from mns_common.db.MongodbUtil import MongodbUtil
+import mns_common.component.common_service_fun_api as common_service_fun_api
+
 import pandas as pd
 from loguru import logger
-import mns_common.component.common_service_fun_api as common_service_fun_api
 import mns_common.utils.data_frame_util as data_frame_util
+import mns_common.api.em.east_money_stock_api as east_money_stock_api
 import time
-import mns_common.api.k_line.stock_minute_data_api as stock_minute_data_api
-import mns_common.constant.db_name_constant as db_name_constant
 
 mongodb_util_27017 = MongodbUtil('27017')
-
-mongodbUtilV2 = MongodbUtilV2('27017', 'extraIncome')
+mongodbUtilV2 = MongodbUtilV2('27019', 'extraIncome')
 
-
-def create_index(db_util, db_name):
-    index_create = [('symbol', 1), ('time', 1)]
-    db_util.create_index(db_name, index_create)
-    index_create_01 = [('time', 1)]
-    db_util.create_index(db_name, index_create_01)
-    index_create_02 = [('symbol', 1)]
-    db_util.create_index(db_name, index_create_02)
+pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')
 
 
 def get_minute_data(symbol, freq, start_date, end_date):
     # fetch historical minute bars for one symbol (e.g. SPDB 600000.SH)
-    df =
-    '')
+    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
     return df
 
 
-# 287
-
 def sync_all_stock(trade_date_list_df):
     de_list_stock_df = mongodb_util_27017.find_all_data('de_list_stock')
     de_list_stock_df = common_service_fun_api.classify_symbol(de_list_stock_df)
@@ -60,7 +50,6 @@ def sync_all_stock(trade_date_list_df):
 
     # format the date as a string (YYYY-MM-DD)
     real_time_quotes_all_stocks_df['list_date'] = real_time_quotes_all_stocks_df['list_date'].dt.strftime('%Y-%m-%d')
-
     real_time_quotes_all_stocks_df = common_service_fun_api.classify_symbol(real_time_quotes_all_stocks_df)
 
     # for rows with classification S or K, append '.SH' to symbol; otherwise append '.SZ'
@@ -71,24 +60,22 @@ def sync_all_stock(trade_date_list_df):
         axis=1
     )
 
-    real_time_quotes_all_stocks_df = real_time_quotes_all_stocks_df.sort_values(by=['chg'], ascending=True)
-
     real_time_quotes_all_stocks_df['number'] = real_time_quotes_all_stocks_df['chg'].rank(method='first').astype(int)
 
     for stock_one in real_time_quotes_all_stocks_df.itertuples():
         classification = stock_one.classification
         if classification == 'X':
-            db_name =
+            db_name = 'one_minute_k_line_bfq_bj'
         elif classification == 'S':
-            db_name =
+            db_name = 'one_minute_k_line_bfq_s'
+
         elif classification == 'H':
             db_name = 'one_minute_k_line_bfq_h'
         elif classification == 'K':
             db_name = 'one_minute_k_line_bfq_k'
         elif classification == 'C':
             db_name = 'one_minute_k_line_bfq_c'
-
-        create_index(mongodbUtilV2, db_name)
+
         trade_date_list_df_copy = trade_date_list_df.copy()
 
         list_date = stock_one.list_date
@@ -107,37 +94,21 @@ def sync_all_stock(trade_date_list_df):
                 new_df = trade_date_list_df_copy.iloc[i:i + 28]
                 first_df = new_df.iloc[0]  # first row of the window
                 last_df = new_df.iloc[-1]  # last row of the window
-                begin_date = first_df.trade_date + ' 09:
+                begin_date = first_df.trade_date + ' 09:20:00'
                 end_date = last_df.trade_date + ' 15:00:00'
 
-
-                if classification in ['K', 'H']:
-                    symbol_quest = '1.' + symbol[0:6]
-                else:
-                    symbol_quest = '0.' + symbol[0:6]
-
-                df = get_minute_data(symbol_quest, '1', begin_date, end_date)
+                df = get_minute_data(symbol, '1min', begin_date, end_date)
                 if data_frame_util.is_not_empty(df):
-                    df
-
-
-                    "
-
-
-
-
-
-                    ]]
-                    df['str_day'] = df['time'].str[:10] #
-                    df = df.loc[df['str_day'].isin(trade_date_list)]
-                    if data_frame_util.is_not_empty(df):
-                        df['time_tick'] = df['time'].str[11:19]
-                        df = df.loc[df['time_tick'] <= '15:00:00']
-                        del df['time_tick']
-                        del df['str_day']
-                        df['_id'] = df['symbol'] + '_' + df['time']
-                        mongodbUtilV2.insert_mongo(df, db_name)
+                    df = df.rename(columns={
+                        "trade_time": "time",
+                        "ts_code": "symbol",
+                        "vol": "volume",
+                    })
+                    df['time_tick'] = df['time'].str[11:19]
+                    df = df.loc[df['time_tick'] <= '15:00:00']
+                    del df['time_tick']
+                    df['_id'] = df['symbol'] + '_' + df['time']
+                    mongodbUtilV2.insert_mongo(df, db_name)
             except BaseException as e:
                 time.sleep(2)
                 first_df = new_df.iloc[0]  # first row of the window
@@ -150,7 +121,7 @@ def sync_all_stock(trade_date_list_df):
                              'db_name': db_name
                              }
                 fail_df = pd.DataFrame(fail_dict, index=[1])
-                mongodbUtilV2.insert_mongo(fail_df, '
+                mongodbUtilV2.insert_mongo(fail_df, 'one_minute_k_line_bfq_fail')
 
                 logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
         logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)
@@ -159,7 +130,7 @@ def sync_all_stock(trade_date_list_df):
 if __name__ == '__main__':
     # get_minute_data('833284.BJ', '1min', '2025-02-28 09:30:00', '2025-02-28 15:00:00')
 
-    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-
+    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
     trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
 
     sync_all_stock(trade_date_list_df_all)
mns_scheduler/extraIncome/temp/tu_share_zhi_shu_sync_api.py (new file)
@@ -0,0 +1,107 @@
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)

import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import pandas as pd
from loguru import logger
import time
import mns_common.utils.data_frame_util as data_frame_util
from mns_common.db.MongodbUtil import MongodbUtil
import tushare as ts
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index

mongodb_util_27017 = MongodbUtil('27017')
mongodbUtilV2_27019 = MongodbUtilV2('27019', 'extraIncome')

pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')


def get_minute_data(symbol, freq, start_date, end_date):
    # fetch historical minute bars for one symbol (e.g. SPDB 600000.SH)
    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
    return df


def sync_zhi_shu_one_minute(symbol_list_param, trade_date_list_df):
    db_name = 'one_minute_k_line_bfq_main_index'
    db_create_index.create_index(mongodbUtilV2_27019, db_name)
    for symbol in symbol_list_param:

        trade_date_list_df_copy = trade_date_list_df.copy()

        for i in range(0, len(trade_date_list_df_copy), 28):
            try:
                new_df = trade_date_list_df_copy.iloc[i:i + 28]
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'

                df = get_minute_data(symbol, '1min', begin_date, end_date)
                if data_frame_util.is_not_empty(df):
                    df = df.rename(columns={
                        "trade_time": "time",
                        "ts_code": "symbol",
                        "vol": "volume",
                    })
                    df['time_tick'] = df['time'].str[11:19]
                    df = df.loc[df['time_tick'] <= '15:00:00']
                    del df['time_tick']
                    df['_id'] = df['symbol'] + '_' + df['time']
                    mongodbUtilV2_27019.insert_mongo(df, db_name)
            except BaseException as e:
                time.sleep(2)
                first_df = new_df.iloc[0]  # first row of the window
                last_df = new_df.iloc[-1]  # last row of the window
                begin_date = first_df.trade_date + ' 09:20:00'
                end_date = last_df.trade_date + ' 15:00:00'
                fail_dict = {'begin_date': begin_date,
                             'end_date': end_date,
                             'symbol': symbol,
                             'db_name': db_name
                             }
                fail_df = pd.DataFrame(fail_dict, index=[1])
                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')

                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
        logger.info("同步完数据:{}", symbol)


def classify_symbol(debt_real_time_quotes_df):
    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
        lambda market: classify_symbol_one(market))
    return debt_real_time_quotes_df


# classify a single symbol
def classify_symbol_one(market):
    if market == 0:
        return 'S'
    else:
        return 'H'


if __name__ == '__main__':
    # file path
    file_path = r"H:\data\1min\指数\指数列表.xlsx"

    df = pd.read_excel(file_path)
    df = df.rename(columns={
        "指数代码": "symbol"
    })
    new_symbol_list = df['symbol']
    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
    sync_zhi_shu_one_minute(new_symbol_list, trade_date_list_df_all)
mns_scheduler/zz_task/data_sync_task.py (modified)
@@ -56,6 +56,7 @@ import mns_scheduler.zt.zt_pool.update_null_zt_reason_api as update_null_zt_reas
 import mns_scheduler.trade.tfp.stock_tfp_info_sync as stock_tfp_info_sync
 import mns_scheduler.industry.ths.ths_industry_sync_service as ths_industry_sync_service
 import mns_scheduler.db.task_handle_service as task_handle_service
+import mns_scheduler.extraIncome.one_minute.one_minute_sync_task as one_minute_sync_task
 
 
 # trade-date sync task finished
@@ -418,6 +419,11 @@ def sync_ths_industry_info():
     logger.info("同步同花顺行业信息完成")
 
 
+def sync_one_minute_data():
+    logger.info("同步1分钟交易数据")
+    one_minute_sync_task.sync_one_minute_data()
+
+
 # # define the BlockingScheduler
 blockingScheduler = BlockingScheduler()
 # sync_trade_date: sync trade dates
@@ -542,11 +548,6 @@ blockingScheduler.add_job(real_time_sync_task_close, 'cron', hour='09', minute='
 blockingScheduler.add_job(real_time_sync_task_close, 'cron', hour='11', minute='31')
 blockingScheduler.add_job(real_time_sync_task_close, 'cron', hour='15', minute='01')
 
-
-
-
-
-
 # open the trading client
 blockingScheduler.add_job(trader_client_auto_login, 'cron', hour='08,12', minute='30')
 # sync company remark info
@@ -564,6 +565,9 @@ blockingScheduler.add_job(update_null_zt_reason, 'cron', hour='16,17,18,19,20,21
 # sync THS industry info
 blockingScheduler.add_job(sync_ths_industry_info, 'cron', hour='17,22', minute='38')
 
+# sync one-minute trading data
+blockingScheduler.add_job(sync_one_minute_data, 'cron', hour='17,22', minute='55')
+
 # sync new announcement info; probably not worth syncing, just query it directly
 # blockingScheduler.add_job(sync_company_announce, 'cron', hour='07,18,23', minute='33')
 
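For context, the new job rides on the package's existing APScheduler `BlockingScheduler`, firing at 17:55 and 22:55. A minimal standalone sketch of that registration pattern follows; the task body here is a stand-in, not the package's function.

from apscheduler.schedulers.blocking import BlockingScheduler


def sync_one_minute_data():
    # stand-in for one_minute_sync_task.sync_one_minute_data()
    print("sync one-minute k-line data")


scheduler = BlockingScheduler()
# cron trigger at 17:55 and 22:55, matching the job added in data_sync_task.py
scheduler.add_job(sync_one_minute_data, 'cron', hour='17,22', minute='55')

if __name__ == '__main__':
    scheduler.start()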
mns_scheduler-1.2.7.5.dist-info/RECORD (modified; some removed entries are truncated in the diff viewer)
@@ -52,11 +52,24 @@ mns_scheduler/debt/kzz_bond_info_sync.py,sha256=3o0Y4FBxP3AOXwf7Z7jVO1N_DcqxeOVq
 mns_scheduler/dt/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/dt/stock_dt_pool_sync.py,sha256=5ivRUOnFtOapZniwTbujf1lVq3y4btm2Cmd5R6JJAVo,3466
 mns_scheduler/extraIncome/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
-mns_scheduler/extraIncome/
-mns_scheduler/extraIncome/
-mns_scheduler/extraIncome/
-mns_scheduler/extraIncome/
-mns_scheduler/extraIncome/
+mns_scheduler/extraIncome/one_minute/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/one_minute_sync_task.py,sha256=qw09DyhYRqWO4U48T3CiIikhCBTOEZ7Y1nkAoBKWgKw,1557
+mns_scheduler/extraIncome/one_minute/common/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/common/db_create_index.py,sha256=WhCAaLNrrc4nuGpa_GW6qrWyml6I-Fk4E2jJRSbUUZw,323
+mns_scheduler/extraIncome/one_minute/common/symbol_handle_util.py,sha256=W5BbmAghEXag7yv6Cwdi0augJww2uMbHn0Z7mUW7O9Y,1275
+mns_scheduler/extraIncome/one_minute/etf/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/etf/etf_one_minute_sync_task.py,sha256=yHyBL3y2jlHuX6wR9kW3-ZPP-BTOdgY-53rK789p6cc,3700
+mns_scheduler/extraIncome/one_minute/index/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/index/main_index_sync_task.py,sha256=AbfHfbQznNa5w6zoElmYAI3iZpFtQGMdsLQZz-_7wF8,2649
+mns_scheduler/extraIncome/one_minute/kzz/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/kzz/kzz_one_minute_sync_task.py,sha256=3hbLesQkce_TeHkcIKqoNRR0TKcESo_g0tL7e_ZaeUk,3722
+mns_scheduler/extraIncome/one_minute/stock/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/one_minute/stock/stock_one_minute_sync_task.py,sha256=II7yqa3FFaTZxnjV-9UTh0AD5EopQq_w-Xp4ZWAQerE,4594
+mns_scheduler/extraIncome/temp/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/temp/tu_share_data_etf_sync.py,sha256=XBsLl4o1Ras1zUQBkJ2BAgWtPebqAf9VUu_kLEisGmQ,4464
+mns_scheduler/extraIncome/temp/tu_share_data_kzz_sync.py,sha256=A2Aa4TB2mgTHiDlW9_UpB0mdRCR_1sOTaPZKs-IBbXc,4850
+mns_scheduler/extraIncome/temp/tu_share_data_stock_sync.py,sha256=s3KufWFevrnoCRfAWfOgLCqbBAbhueQelG-2Dy016cY,6248
+mns_scheduler/extraIncome/temp/tu_share_zhi_shu_sync_api.py,sha256=sAvfsIObHWsapgJP8o2YTL4D1XZiWa2tGguM6B6bgyQ,4169
 mns_scheduler/finance/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/finance/em_financial_asset_liability_sync_service_api.py,sha256=kEZQZkxB7RF7UPH4DmHoRWfEKgI61ZN8BcNOzmBUoV0,19417
 mns_scheduler/finance/em_financial_profit_sync_service_api.py,sha256=A_ONxC-1giGUWUhMJG1fE6jem52uJYtzlewzxPZtdd0,14270
@@ -161,8 +174,8 @@ mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py,sha256=A5YiAWYdbAxhlTTJ8pOStZrBb
 mns_scheduler/zt/zt_pool/ths_zt_pool_sync_api.py,sha256=3OGBmWEX6G-fTuONgoB6Lu5xX0JNaWiP5FWMHIlIf-Y,10647
 mns_scheduler/zt/zt_pool/update_null_zt_reason_api.py,sha256=1uoiR2Uw46kDfjkvNg2US5rd_4OIkYO3872gIJOufUY,2135
 mns_scheduler/zz_task/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
-mns_scheduler/zz_task/data_sync_task.py,sha256=
-mns_scheduler-1.2.7.
-mns_scheduler-1.2.7.
-mns_scheduler-1.2.7.
-mns_scheduler-1.2.7.
+mns_scheduler/zz_task/data_sync_task.py,sha256=MA2RfrM_neT3ElejjF34XXq3fUA8B01lVyJBuOHOYrc,23507
+mns_scheduler-1.2.7.5.dist-info/METADATA,sha256=4vB2ulao4-_Qg867LgPB8rl-aOm2o6Sierj6KFWqNZM,64
+mns_scheduler-1.2.7.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+mns_scheduler-1.2.7.5.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
+mns_scheduler-1.2.7.5.dist-info/RECORD,,
File without changes (×6: the six +0 -0 entries listed above, i.e. the moved __init__.py files, WHEEL, and top_level.txt)