mns-scheduler 1.3.0.8__py3-none-any.whl → 1.3.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mns-scheduler might be problematic; consult the registry's advisory page for details.
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/etf/etf_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/index/main_index_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/kzz/kzz_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/one_minute_sync_task.py +4 -4
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/stock/stock_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/a_stock/one_minute/upload/__init__.py +7 -0
- mns_scheduler/extraIncome/us/daily/__init__.py +7 -0
- mns_scheduler/extraIncome/us/{us_stock_qfq_daily_k_line.py → daily/us_stock_qfq_daily_k_line.py} +8 -0
- mns_scheduler/extraIncome/us/one_minute/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/api/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/api/alpha_vantage_api.py +34 -0
- mns_scheduler/extraIncome/us/one_minute/api/y_finance_api.py +47 -0
- mns_scheduler/extraIncome/us/one_minute/etf/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/down_load_stock_his_2024_01.py +143 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/down_load_stock_his_2024_02.py +151 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/etf/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/etf/down_load_ETF_his_2024.py +150 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/us_stock_one_minute_his.py +199 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/us_stock_one_minute_his_2024.py +212 -0
- mns_scheduler/extraIncome/us/one_minute/stock/us_stock_one_minute_task.py +26 -0
- mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py +23 -10
- mns_scheduler/zz_task/data_sync_task.py +3 -3
- {mns_scheduler-1.3.0.8.dist-info → mns_scheduler-1.3.1.0.dist-info}/METADATA +1 -1
- {mns_scheduler-1.3.0.8.dist-info → mns_scheduler-1.3.1.0.dist-info}/RECORD +39 -22
- /mns_scheduler/extraIncome/{one_minute → a_stock}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/common → a_stock/one_minute}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/etf → a_stock/one_minute/common}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/common/db_create_index.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/common/symbol_handle_util.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/index → a_stock/one_minute/etf}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/kzz → a_stock/one_minute/index}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/stock → a_stock/one_minute/kzz}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/upload → a_stock/one_minute/stock}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/upload/upload_to_baidu_task.py +0 -0
- {mns_scheduler-1.3.0.8.dist-info → mns_scheduler-1.3.1.0.dist-info}/WHEEL +0 -0
- {mns_scheduler-1.3.0.8.dist-info → mns_scheduler-1.3.1.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
import math
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
|
|
21
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
22
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
23
|
+
from datetime import datetime
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def sync_us_stock_one_minute(now_year, now_month):
    """Download one-minute bars for US ETFs for one month and export them to CSV.

    Args:
        now_year: calendar year being synced (int); used to build the export dir.
        now_month: month key in 'YYYY-MM' form, passed to the Alpha Vantage API.

    Side effects: creates F:\\us_etf\\one_minute\\<year>\\<month>\\<symbol>.csv
    files and records failures in the 'us_stock_one_minute_down_load_fail'
    Mongo collection.

    Fixes vs. original: paths are built with os.path.join instead of the
    non-raw '\\{}' string (invalid escape sequence), and `symbol` is bound
    before the try block so the except handler can never hit a NameError.
    """
    all_us = em_stock_info_api.get_us_stock_info()
    # flow_mv == 0 appears to identify ETFs in this data source -- TODO confirm
    etf_df = all_us.loc[all_us['flow_mv'] == 0]
    etf_df = etf_df.sort_values(by=['amount'], ascending=False)
    # keep only liquid ETFs: turnover of at least 50 million
    etf_df = etf_df.loc[etf_df['amount'] >= 50000000]

    # build <root>\<year>\<month> export directory on demand
    path = os.path.join(r'F:\us_etf\one_minute', str(now_year))
    if not os.path.exists(path):
        os.makedirs(path)
    path = os.path.join(path, now_month)
    if not os.path.exists(path):
        os.makedirs(path)

    # skip symbols whose CSV already exists in the month folder
    done_symbols = find_exist_file(path)
    etf_df = etf_df.loc[~(etf_df['symbol'].isin(done_symbols))]

    for stock_one in etf_df.itertuples():
        symbol = stock_one.symbol
        try:
            list_date = stock_one.list_date
            # list_date may be NaN for some symbols; only filter when present
            if not math.isnan(list_date):
                list_date = str(stock_one.list_date)
                list_date_year = int(list_date[0:4])
                list_month = int(list_date[4:6])
                now_month_int = int(now_month[5:7])
                # skip symbols not yet listed in the month being synced
                if (list_date_year > now_year) or (
                        (list_date_year == now_year) and (list_month > now_month_int)):
                    continue
            # back off around known API rate-limit minutes (sleep 5 minutes)
            if net_work_check(datetime.now()):
                time.sleep(5 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df = df.fillna(0)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            export_original_data(df.copy(), symbol, path)
        except BaseException as e:
            # record the failure so the symbol/month can be retried later
            time.sleep(1)
            fail_df = pd.DataFrame({
                '_id': symbol + '_' + now_month,
                'type': "ETF",
                'path': path,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }, index=[1])
            mongodb_util_27017.save_mongo(fail_df, 'us_stock_one_minute_down_load_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完成:{},{}", stock_one.symbol, stock_one.name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def export_original_data(df, symbol, path):
    """Write one symbol's minute bars to <path>/<symbol>.csv.

    Rows without an '_id' are discarded, and the internal bookkeeping columns
    ('str_day', 'minute', '_id', 'symbol') are removed before export.

    Fix vs. original: the file name is built with os.path.join instead of the
    non-raw '\\{}.csv' string, which is an invalid escape sequence.
    """
    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        df = df.dropna(subset=['_id'])
        del df['str_day']
        del df['minute']
        del df['_id']
        del df['symbol']
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def net_work_check(now_date):
    """Return True when now_date falls on a minute where downloads must pause.

    The (hour, minute) pairs below mark the points in the schedule where the
    caller sleeps before continuing (API rate-limit windows -- TODO confirm).
    """
    pause_points = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in pause_points
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def sync_by_year(begin_year):
    """Sync every month of begin_year, walking backwards from December to January."""
    for month in range(12, 0, -1):
        str_month = '{}-{:02d}'.format(begin_year, month)
        sync_us_stock_one_minute(begin_year, str_month)
        logger.error("同步完成月份:{}", str_month)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def find_exist_file(folder_path):
    """Return the basenames (without extension) of all *.csv files in folder_path.

    Fix vs. original: the missing-directory branch used to fall through and
    implicitly return None, which crashed callers doing `.isin(result)`;
    it now returns an empty list.
    """
    if not os.path.exists(folder_path):
        logger.error("错误:目录不存在:{}", folder_path)
        return []
    return [f.stem for f in Path(folder_path).glob("*.csv")]
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
if __name__ == '__main__':
    # Backfill every month of 2024.
    sync_by_year(2024)
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
from functools import lru_cache
|
|
19
|
+
|
|
20
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
21
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def sync_us_stock_one_minute(now_year, now_month, begin_year):
    """Download one-minute bars for US common stocks for one month.

    Bars are exported to CSV and persisted into the per-year minute k-line
    collection, split into pre-market / regular-session / post-market slices.

    Args:
        now_year: year being synced; selects the Mongo collection suffix.
        now_month: month key 'YYYY-MM' passed to the Alpha Vantage API.
        begin_year: unused in the body; kept for caller compatibility.
    """
    us_df = em_stock_info_api.get_us_stock_info()
    # flow_mv != 0 keeps common stocks (ETFs carry flow_mv == 0) -- TODO confirm
    stock_df = us_df.loc[us_df['flow_mv'] != 0]
    stock_df = stock_df.sort_values(by=['amount'], ascending=False)

    # one collection per year, e.g. <base>_2025  (todo in original: rename)
    col_name = extra_income_db_name.US_STOCK_MINUTE_K_LINE_BFQ + '_' + str(now_year)
    # make sure the per-year collection is indexed before bulk inserts
    db_create_index.create_index(mongodb_util_27017, col_name)

    for stock_one in stock_df.itertuples():
        symbol = stock_one.symbol
        # skip symbols listed after the year being synced
        list_date = str(stock_one.list_date)
        if int(list_date[0:4]) > now_year:
            continue
        try:
            # pause around known API rate-limit minutes (sleep 6 minutes)
            if net_work_check(datetime.now()):
                time.sleep(6 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            export_original_data(df.copy(), symbol, now_year, now_month)
            handle_pan_qian_or_hou_data(df, symbol, col_name)
        except BaseException as e:
            # record the failure so the symbol/month can be retried later
            time.sleep(1)
            fail_df = pd.DataFrame({
                '_id': symbol + '_' + now_month,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }, index=[1])
            mongodb_util_27017.insert_mongo(fail_df, 'us_stock_one_minute_k_line_bfq_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完整:{},{}", stock_one.symbol, stock_one.name)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def handle_pan_qian_or_hou_data(df, symbol, col_name):
    """Persist one symbol's minute bars into col_name in three slices:
    post-market (folded into the 16:00 bar), pre-market (folded into the
    09:30 bar) and the regular session. Each helper receives its own copy
    so none of them can mutate the caller's frame.
    """
    hand_pan_hou_data(df.copy(), symbol, col_name)
    hand_pan_qian_data(df.copy(), col_name)
    handle_middle_data(df.copy(), col_name)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def handle_middle_data(df_copy, col_name):
    """Persist the regular-session bars (strictly between 09:30:00 and 16:00:00)."""
    session_df = df_copy.copy()
    in_session = (session_df['minute'] > '09:30:00') & (session_df['minute'] < '16:00:00')
    mongodb_util_27017.insert_mongo(session_df.loc[in_session], col_name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def hand_pan_qian_data(df_copy, col_name):
    """Collapse pre-market volume into the 09:30:00 bar and persist it.

    The 09:30 row keeps its price fields but its volume is replaced by the
    sum of all volume traded at or before 09:30 on the same trading day.
    """
    minute_df = df_copy.copy()
    open_rows = minute_df.loc[minute_df['minute'] == '09:30:00']
    del open_rows['volume']
    # total pre-market volume per trading day (everything up to 09:30)
    pre_market = minute_df.loc[minute_df['minute'] <= '09:30:00']
    volume_per_day = pre_market.groupby('str_day')['volume'].sum().reset_index()

    volume_per_day = volume_per_day.set_index(['str_day'], drop=True)
    open_rows = open_rows.set_index(['str_day'], drop=False)

    merged = pd.merge(volume_per_day, open_rows,
                      how='outer',
                      left_index=True, right_index=True)

    mongodb_util_27017.insert_mongo(merged, col_name)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def hand_pan_hou_data(df_copy, symbol, col_name):
    """Derive the post-market volume for the 16:00:00 bar and persist it.

    Post-market volume is estimated as (daily k-line volume) minus the sum
    of all minute volume before 16:00 on the same day, then attached to the
    16:00 row. Assumes the daily QFQ k-line covers the same dates -- TODO confirm.
    """
    minute_df = df_copy.copy()
    close_rows = minute_df.loc[minute_df['minute'] == '16:00:00']
    # daily volume from the QFQ daily k-line, restricted to the days present here
    daily_df = query_k_line(symbol)
    trade_days = list(close_rows['str_day'])

    daily_df = daily_df.loc[daily_df['date'].isin(trade_days)]

    daily_df = daily_df.rename(columns={'volume': "k_line_volume"})

    # 收盘前的 -- total intraday volume before the close
    before_close = minute_df.loc[minute_df['minute'] < '16:00:00']
    intraday_sum = before_close.groupby('str_day')['volume'].sum().reset_index()
    intraday_sum = intraday_sum.rename(columns={'volume': "total_volume"})

    intraday_sum = intraday_sum.set_index(['str_day'], drop=False)

    daily_df = daily_df.set_index(['date'], drop=True)

    diff_df = pd.merge(intraday_sum, daily_df,
                       how='outer',
                       left_index=True, right_index=True)

    # post-market volume = daily total minus intraday total
    diff_df['volume'] = diff_df['k_line_volume'] - diff_df['total_volume']

    del close_rows['volume']

    diff_df = diff_df.set_index(['str_day'], drop=True)
    del diff_df['k_line_volume']
    del diff_df['total_volume']

    close_rows = close_rows.set_index(['str_day'], drop=False)

    result_df = pd.merge(diff_df, close_rows,
                         how='outer',
                         left_index=True, right_index=True)
    mongodb_util_27017.insert_mongo(result_df, col_name)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def export_original_data(df, symbol, year, now_month):
    """Write one symbol's raw minute bars to
    H:\\us_stock\\one_minute\\<year>\\<month>\\<symbol>.csv.

    Creates the year/month directories on demand and strips the internal
    bookkeeping columns before export.

    Fix vs. original: paths are built with os.path.join instead of the
    non-raw '\\{}' string, which is an invalid escape sequence.
    """
    path = os.path.join(r'H:\us_stock\one_minute', str(year))
    if not os.path.exists(path):
        os.makedirs(path)

    path = os.path.join(path, now_month)
    if not os.path.exists(path):
        os.makedirs(path)

    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        del df['str_day']
        del df['minute']
        del df['_id']
        del df['symbol']
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@lru_cache()
def query_k_line(symbol):
    """Fetch (date, volume) daily QFQ k-line rows for symbol, cached per symbol.

    NOTE(review): the cached value is a DataFrame; callers must not mutate it
    in place (current callers only reassign filtered/renamed copies).
    """
    query = {'symbol': symbol}
    query_field = {"volume": 1, 'date': 1, '_id': 0}
    return mongodbUtilV2_27019.find_query_data_choose_field(extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE, query,
                                                            query_field)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def net_work_check(now_date):
    """Return True when now_date is one of the scheduled pause minutes.

    The caller sleeps at these (hour, minute) marks to stay under the data
    provider's rate limit -- TODO confirm the exact schedule.
    """
    throttle_schedule = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in throttle_schedule
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
if __name__ == '__main__':
    # One-off backfill: April 2025 (begin_year argument is unused by the sync).
    sync_us_stock_one_minute(2025, '2025-04', 2000)
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
from functools import lru_cache
|
|
19
|
+
|
|
20
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
21
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def sync_us_stock_one_minute(now_year, now_month):
    """Download one-minute bars for US common stocks for one month (2024 variant).

    Bars are exported to CSV and persisted into the per-year minute k-line
    collection, split into pre-market / regular-session / post-market slices.

    Args:
        now_year: year being synced; selects the Mongo collection suffix.
        now_month: month key 'YYYY-MM' passed to the Alpha Vantage API.
    """
    us_df = em_stock_info_api.get_us_stock_info()
    # flow_mv != 0 keeps common stocks (ETFs carry flow_mv == 0) -- TODO confirm
    stock_df = us_df.loc[us_df['flow_mv'] != 0]
    stock_df = stock_df.sort_values(by=['amount'], ascending=False)

    # one collection per year, e.g. <base>_2024  (todo in original: rename)
    col_name = extra_income_db_name.US_STOCK_MINUTE_K_LINE_BFQ + '_' + str(now_year)
    # make sure the per-year collection is indexed before bulk inserts
    db_create_index.create_index(mongodb_util_27017, col_name)

    for stock_one in stock_df.itertuples():
        symbol = stock_one.symbol
        # skip symbols listed after the year being synced
        list_date = str(stock_one.list_date)
        if int(list_date[0:4]) > now_year:
            continue
        try:
            # pause around known API rate-limit minutes (sleep 5 minutes)
            if net_work_check(datetime.now()):
                time.sleep(5 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            export_original_data(df.copy(), symbol, now_year, now_month)
            handle_pan_qian_or_hou_data(df, symbol, col_name)
        except BaseException as e:
            # record the failure so the symbol/month can be retried later
            time.sleep(1)
            fail_df = pd.DataFrame({
                '_id': symbol + '_' + now_month,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }, index=[1])
            mongodb_util_27017.insert_mongo(fail_df, 'us_stock_one_minute_k_line_bfq_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完成:{},{}", stock_one.symbol, stock_one.name)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def handle_pan_qian_or_hou_data(df, symbol, col_name):
    """Persist one symbol's minute bars into col_name in three slices:
    post-market (folded into the 16:00 bar), pre-market (folded into the
    09:30 bar) and the regular session. Each helper receives its own copy
    so none of them can mutate the caller's frame.
    """
    hand_pan_hou_data(df.copy(), symbol, col_name)
    hand_pan_qian_data(df.copy(), col_name)
    handle_middle_data(df.copy(), col_name)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def handle_middle_data(df_copy, col_name):
    """Persist the regular-session bars (strictly between 09:30:00 and 16:00:00)."""
    bars = df_copy.copy()
    regular_session = (bars['minute'] > '09:30:00') & (bars['minute'] < '16:00:00')
    mongodb_util_27017.insert_mongo(bars.loc[regular_session], col_name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def hand_pan_qian_data(df_copy, col_name):
    """Collapse pre-market volume into the 09:30:00 bar and persist it.

    The 09:30 row keeps its price fields but its volume is replaced by the
    sum of all volume traded at or before 09:30 on the same trading day.
    """
    bars = df_copy.copy()
    opening_rows = bars.loc[bars['minute'] == '09:30:00']
    del opening_rows['volume']
    # total pre-market volume per trading day (everything up to 09:30)
    pre_open = bars.loc[bars['minute'] <= '09:30:00']
    daily_pre_volume = pre_open.groupby('str_day')['volume'].sum().reset_index()

    daily_pre_volume = daily_pre_volume.set_index(['str_day'], drop=True)
    opening_rows = opening_rows.set_index(['str_day'], drop=False)

    combined = pd.merge(daily_pre_volume, opening_rows,
                        how='outer',
                        left_index=True, right_index=True)

    mongodb_util_27017.insert_mongo(combined, col_name)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def hand_pan_hou_data(df_copy, symbol, col_name):
    """Derive the post-market volume for the 16:00:00 bar and persist it.

    Post-market volume is estimated as (daily k-line volume) minus the sum
    of all minute volume before 16:00 on the same day, then attached to the
    16:00 row. Assumes the daily QFQ k-line covers the same dates -- TODO confirm.
    """
    bars = df_copy.copy()
    closing_rows = bars.loc[bars['minute'] == '16:00:00']
    # daily volume from the QFQ daily k-line, restricted to the days present here
    daily_kline = query_k_line(symbol)
    session_days = list(closing_rows['str_day'])

    daily_kline = daily_kline.loc[daily_kline['date'].isin(session_days)]

    daily_kline = daily_kline.rename(columns={'volume': "k_line_volume"})

    # 收盘前的 -- total intraday volume before the close
    pre_close = bars.loc[bars['minute'] < '16:00:00']
    intraday_total = pre_close.groupby('str_day')['volume'].sum().reset_index()
    intraday_total = intraday_total.rename(columns={'volume': "total_volume"})

    intraday_total = intraday_total.set_index(['str_day'], drop=False)

    daily_kline = daily_kline.set_index(['date'], drop=True)

    volume_diff = pd.merge(intraday_total, daily_kline,
                           how='outer',
                           left_index=True, right_index=True)

    # post-market volume = daily total minus intraday total
    volume_diff['volume'] = volume_diff['k_line_volume'] - volume_diff['total_volume']

    del closing_rows['volume']

    volume_diff = volume_diff.set_index(['str_day'], drop=True)
    del volume_diff['k_line_volume']
    del volume_diff['total_volume']

    closing_rows = closing_rows.set_index(['str_day'], drop=False)

    final_rows = pd.merge(volume_diff, closing_rows,
                          how='outer',
                          left_index=True, right_index=True)
    mongodb_util_27017.insert_mongo(final_rows, col_name)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def export_original_data(df, symbol, year, now_month):
    """Write one symbol's raw minute bars to
    H:\\us_stock\\one_minute\\<year>\\<month>\\<symbol>.csv.

    Creates the year/month directories on demand and strips the internal
    bookkeeping columns before export.

    Fix vs. original: paths are built with os.path.join instead of the
    non-raw '\\{}' string, which is an invalid escape sequence.
    """
    path = os.path.join(r'H:\us_stock\one_minute', str(year))
    if not os.path.exists(path):
        os.makedirs(path)

    path = os.path.join(path, now_month)
    if not os.path.exists(path):
        os.makedirs(path)

    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        del df['str_day']
        del df['minute']
        del df['_id']
        del df['symbol']
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@lru_cache()
def query_k_line(symbol):
    """Fetch (date, volume) daily QFQ k-line rows for symbol, cached per symbol.

    NOTE(review): the cached value is a DataFrame; callers must not mutate it
    in place (current callers only reassign filtered/renamed copies).
    """
    query = {'symbol': symbol}
    query_field = {"volume": 1, 'date': 1, '_id': 0}
    return mongodbUtilV2_27019.find_query_data_choose_field(extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE, query,
                                                            query_field)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def net_work_check(now_date):
    """Return True when now_date is one of the scheduled pause minutes.

    The caller sleeps at these (hour, minute) marks to stay under the data
    provider's rate limit -- TODO confirm the exact schedule.
    """
    pause_at = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in pause_at
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def sync_by_year(begin_year):
    """Sync every month of begin_year, walking backwards from December to January."""
    for month in range(12, 0, -1):
        str_month = '{}-{:02d}'.format(begin_year, month)
        sync_us_stock_one_minute(begin_year, str_month)
        logger.error("同步完成月份:{}", str_month)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
if __name__ == '__main__':
    # Backfill every month of 2024.
    sync_by_year(2024)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.symbol_handle_util as symbol_handle_util
|
|
14
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
15
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
16
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
17
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
18
|
+
import pandas as pd
|
|
19
|
+
from functools import lru_cache
|
|
20
|
+
|
|
21
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
22
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
23
|
+
def sync_us_stock_one_minute(now_year, now_month, begin_year):
|
|
24
|
+
real_time_quotes_all_us = em_stock_info_api.get_us_stock_info()
|
|
25
|
+
real_time_quotes_all_us_stocks = real_time_quotes_all_us.loc[real_time_quotes_all_us['flow_mv'] != 0]
|
|
26
|
+
real_time_quotes_all_us_stocks = real_time_quotes_all_us_stocks.sort_values(by=['amount'], ascending=False)
|
|
@@ -16,7 +16,6 @@ import mns_common.component.common_service_fun_api as common_service_fun_api
|
|
|
16
16
|
import mns_common.component.trade_date.trade_date_common_service_api as trade_date_common_service_api
|
|
17
17
|
import mns_common.api.ths.zt.ths_stock_zt_pool_v2_api as ths_stock_zt_pool_v2_api
|
|
18
18
|
import mns_common.component.zt.zt_common_service_api as zt_common_service_api
|
|
19
|
-
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
20
19
|
import mns_common.component.em.em_real_time_quotes_api as em_real_time_quotes_api
|
|
21
20
|
from datetime import datetime
|
|
22
21
|
import mns_common.api.ths.company.ths_company_info_api as ths_company_info_api
|
|
@@ -56,15 +55,15 @@ def save_zt_info(str_day):
|
|
|
56
55
|
date_handle_util.no_slash_date(str_day))
|
|
57
56
|
|
|
58
57
|
# fix 涨停池没有的股票
|
|
59
|
-
stock_em_zt_pool_df_data = sync_miss_zt_data(stock_em_zt_pool_df_data, str_day)
|
|
58
|
+
stock_em_zt_pool_df_data = sync_miss_zt_data(stock_em_zt_pool_df_data.copy(), str_day)
|
|
60
59
|
|
|
61
|
-
stock_em_zt_pool_df_data = common_service_fun_api.total_mv_classification(stock_em_zt_pool_df_data)
|
|
60
|
+
stock_em_zt_pool_df_data = common_service_fun_api.total_mv_classification(stock_em_zt_pool_df_data.copy())
|
|
62
61
|
|
|
63
|
-
stock_em_zt_pool_df_data = common_service_fun_api.classify_symbol(stock_em_zt_pool_df_data)
|
|
62
|
+
stock_em_zt_pool_df_data = common_service_fun_api.classify_symbol(stock_em_zt_pool_df_data.copy())
|
|
64
63
|
|
|
65
|
-
stock_em_zt_pool_df_data = common_service_fun_api.symbol_amount_simple(stock_em_zt_pool_df_data)
|
|
64
|
+
stock_em_zt_pool_df_data = common_service_fun_api.symbol_amount_simple(stock_em_zt_pool_df_data.copy())
|
|
66
65
|
|
|
67
|
-
stock_em_zt_pool_df_data = company_common_service_api.amendment_industry(stock_em_zt_pool_df_data)
|
|
66
|
+
stock_em_zt_pool_df_data = company_common_service_api.amendment_industry(stock_em_zt_pool_df_data.copy())
|
|
68
67
|
|
|
69
68
|
# 上个交易交易日涨停股票
|
|
70
69
|
last_trade_day_zt_df = zt_common_service_api.get_last_trade_day_zt(str_day)
|
|
@@ -79,8 +78,8 @@ def save_zt_info(str_day):
|
|
|
79
78
|
try:
|
|
80
79
|
|
|
81
80
|
# 设置连板数目
|
|
82
|
-
stock_em_zt_pool_df_data = set_connected_boards_numbers(stock_em_zt_pool_df_data,
|
|
83
|
-
stock_one.symbol, last_trade_day_zt_df)
|
|
81
|
+
stock_em_zt_pool_df_data = set_connected_boards_numbers(stock_em_zt_pool_df_data.copy(),
|
|
82
|
+
stock_one.symbol, last_trade_day_zt_df.copy())
|
|
84
83
|
|
|
85
84
|
ths_zt_pool_one_df = ths_zt_pool_df_data.loc[ths_zt_pool_df_data['symbol'] == stock_one.symbol]
|
|
86
85
|
if data_frame_util.is_empty(ths_zt_pool_one_df):
|
|
@@ -173,13 +172,27 @@ def sync_miss_zt_data(stock_em_zt_pool_df_data, str_day):
|
|
|
173
172
|
real_time_quotes_all_stocks_df = em_real_time_quotes_api.get_real_time_quotes_now(None, None)
|
|
174
173
|
real_time_quotes_all_stocks_df = real_time_quotes_all_stocks_df.loc[
|
|
175
174
|
(real_time_quotes_all_stocks_df['wei_bi'] == 100) & (real_time_quotes_all_stocks_df['chg'] >= 9)]
|
|
176
|
-
|
|
175
|
+
miss_zt_data_df_copy = real_time_quotes_all_stocks_df.loc[~(
|
|
177
176
|
real_time_quotes_all_stocks_df['symbol'].isin(stock_em_zt_pool_df_data['symbol']))]
|
|
177
|
+
miss_zt_data_df = miss_zt_data_df_copy.copy()
|
|
178
178
|
if data_frame_util.is_not_empty(miss_zt_data_df):
|
|
179
179
|
miss_zt_data_df['buy_1_num'] = miss_zt_data_df['buy_1_num'].astype(float)
|
|
180
180
|
miss_zt_data_df['now_price'] = miss_zt_data_df['now_price'].astype(float)
|
|
181
181
|
miss_zt_data_df['closure_funds'] = round(miss_zt_data_df['buy_1_num'] * 100 * miss_zt_data_df['now_price'],
|
|
182
182
|
2)
|
|
183
|
+
|
|
184
|
+
company_info_industry_df = company_common_service_api.get_company_info_name()
|
|
185
|
+
company_info_industry_df = company_info_industry_df.loc[
|
|
186
|
+
company_info_industry_df['_id'].isin(miss_zt_data_df['symbol'])]
|
|
187
|
+
|
|
188
|
+
company_info_industry_df = company_info_industry_df[['_id', 'industry', 'name']]
|
|
189
|
+
|
|
190
|
+
company_info_industry_df = company_info_industry_df.set_index(['_id'], drop=True)
|
|
191
|
+
miss_zt_data_df = miss_zt_data_df.set_index(['symbol'], drop=False)
|
|
192
|
+
|
|
193
|
+
miss_zt_data_df = pd.merge(miss_zt_data_df, company_info_industry_df, how='outer',
|
|
194
|
+
left_index=True, right_index=True)
|
|
195
|
+
|
|
183
196
|
miss_zt_data_df = miss_zt_data_df[[
|
|
184
197
|
'symbol',
|
|
185
198
|
'name',
|
|
@@ -207,7 +220,7 @@ def sync_miss_zt_data(stock_em_zt_pool_df_data, str_day):
|
|
|
207
220
|
|
|
208
221
|
|
|
209
222
|
if __name__ == '__main__':
|
|
210
|
-
save_zt_info('2025-
|
|
223
|
+
save_zt_info('2025-05-06')
|
|
211
224
|
# from datetime import datetime
|
|
212
225
|
#
|
|
213
226
|
# if __name__ == '__main__':
|