mns-scheduler 1.3.0.9__py3-none-any.whl → 1.3.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mns-scheduler might be problematic. Click here for more details.
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/etf/etf_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/index/main_index_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/kzz/kzz_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/one_minute_sync_task.py +4 -4
- mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/stock/stock_one_minute_sync_task.py +2 -2
- mns_scheduler/extraIncome/a_stock/one_minute/upload/__init__.py +7 -0
- mns_scheduler/extraIncome/us/daily/__init__.py +7 -0
- mns_scheduler/extraIncome/us/{us_stock_qfq_daily_k_line.py → daily/us_stock_qfq_daily_k_line.py} +8 -0
- mns_scheduler/extraIncome/us/one_minute/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/api/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/api/alpha_vantage_api.py +34 -0
- mns_scheduler/extraIncome/us/one_minute/api/y_finance_api.py +47 -0
- mns_scheduler/extraIncome/us/one_minute/etf/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/down_load_stock_his_2024_01.py +145 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/down_load_stock_his_2024_02.py +153 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/etf/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/etf/down_load_ETF_his_2024.py +152 -0
- mns_scheduler/extraIncome/us/one_minute/stock/down_load/etf/handle_down_load_fail_ETF.py +67 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/__init__.py +7 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/us_stock_one_minute_his.py +199 -0
- mns_scheduler/extraIncome/us/one_minute/stock/his/us_stock_one_minute_his_2024.py +212 -0
- mns_scheduler/extraIncome/us/one_minute/stock/us_stock_one_minute_task.py +26 -0
- mns_scheduler/self_choose/ths_self_choose_service.py +13 -6
- mns_scheduler/zz_task/data_sync_task.py +3 -3
- {mns_scheduler-1.3.0.9.dist-info → mns_scheduler-1.3.1.2.dist-info}/METADATA +1 -1
- {mns_scheduler-1.3.0.9.dist-info → mns_scheduler-1.3.1.2.dist-info}/RECORD +40 -22
- /mns_scheduler/extraIncome/{one_minute → a_stock}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/common → a_stock/one_minute}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/etf → a_stock/one_minute/common}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/common/db_create_index.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/common/symbol_handle_util.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/index → a_stock/one_minute/etf}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/kzz → a_stock/one_minute/index}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/stock → a_stock/one_minute/kzz}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute/upload → a_stock/one_minute/stock}/__init__.py +0 -0
- /mns_scheduler/extraIncome/{one_minute → a_stock/one_minute}/upload/upload_to_baidu_task.py +0 -0
- {mns_scheduler-1.3.0.9.dist-info → mns_scheduler-1.3.1.2.dist-info}/WHEEL +0 -0
- {mns_scheduler-1.3.0.9.dist-info → mns_scheduler-1.3.1.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
import math
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
|
|
21
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
22
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
23
|
+
from datetime import datetime
|
|
24
|
+
|
|
25
|
+
no_choose_symbol = ['FNGA', 'MSTU', 'SPYU']
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def sync_us_stock_one_minute(now_year, now_month):
    """Download one month of one-minute bars for liquid US ETFs and export
    each symbol to a per-month CSV folder.

    now_year: calendar year of the target month (export folder name).
    now_month: month key formatted 'YYYY-MM' (alpha-vantage month parameter).

    ETFs are selected as rows with flow_mv == 0 -- presumably that marks
    non-common-stock instruments in this feed; TODO confirm against
    em_stock_info_api. Failures are recorded in the
    'us_stock_one_minute_down_load_fail' collection for a later retry run.
    """
    real_time_quotes_all_us = em_stock_info_api.get_us_stock_info()
    etf_df = real_time_quotes_all_us.loc[real_time_quotes_all_us['flow_mv'] == 0]
    etf_df = etf_df.sort_values(by=['amount'], ascending=False)
    # Drop symbols explicitly excluded from the sync
    etf_df = etf_df.loc[~etf_df['symbol'].isin(no_choose_symbol)]
    # Keep only liquid ETFs (turnover >= 50M)
    etf_df = etf_df.loc[etf_df['amount'] >= 50000000]

    # os.path.join avoids the invalid '\{' escape of the original string
    # concatenation; makedirs(exist_ok=True) replaces the exists/makedirs pair.
    path = os.path.join(r'F:\us_etf\one_minute', str(now_year), str(now_month))
    os.makedirs(path, exist_ok=True)

    # Resume support: skip symbols whose CSV already exists in this month folder
    stock_name_list = find_exist_file(path)
    etf_df = etf_df.loc[~(etf_df['symbol'].isin(stock_name_list))]
    for stock_one in etf_df.itertuples():
        # Bound outside the try so the except block can always log it
        symbol = stock_one.symbol
        try:
            list_date = stock_one.list_date
            # Skip symbols that were not yet listed in the requested month
            if not math.isnan(list_date):
                list_date = str(stock_one.list_date)
                list_date_year = int(list_date[0:4])
                list_month = int(list_date[4:6])
                now_month_int = int(now_month[5:7])
                if (list_date_year > now_year) or ((list_date_year == now_year) and (list_month > now_month_int)):
                    continue
            # Pause 5 minutes around known API rate-limit windows
            now_date = datetime.now()
            if net_work_check(now_date):
                time.sleep(5 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df = df.fillna(0)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            export_original_data(df.copy(), symbol, path)
        # Exception (not BaseException) so Ctrl-C / SystemExit still stop the job
        except Exception as e:
            time.sleep(1)
            # Persist the failure so handle_down_load_fail_ETF can retry it
            fail_dict = {
                '_id': symbol + '_' + now_month,
                'type': "ETF",
                'path': path,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }
            fail_df = pd.DataFrame(fail_dict, index=[1])
            mongodb_util_27017.save_mongo(fail_df, 'us_stock_one_minute_down_load_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完成:{},{}", stock_one.symbol, stock_one.name)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def export_original_data(df, symbol, path):
    """Write the minute bars in *df* to <path>/<symbol>.csv.

    Rows without an '_id' are dropped; the bookkeeping columns ('str_day',
    'minute', '_id', 'symbol') are stripped before writing so the CSV holds
    only the raw bar data. Empty frames are skipped entirely (no file).
    """
    # os.path.join fixes the invalid '\{' escape in the original '\{}.csv'
    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        df = df.dropna(subset=['_id'])
        # drop(columns=...) replaces four separate `del` statements
        df = df.drop(columns=['str_day', 'minute', '_id', 'symbol'])
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def net_work_check(now_date):
    """Return True when *now_date* falls on one of the scheduled pause
    minutes (presumably API rate-limit windows -- confirm with the caller,
    which sleeps several minutes when this returns True).
    """
    pause_points = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in pause_points
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def sync_by_year(begin_year):
    """Back-fill one-minute ETF data for every month of *begin_year*,
    newest month first (December down to January).
    """
    for month in range(12, 0, -1):
        str_month = '{}-{:02d}'.format(begin_year, month)
        sync_us_stock_one_minute(begin_year, str_month)
        logger.error("同步完成月份:{}", str_month)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def find_exist_file(folder_path):
    """Return the stem names (no extension) of all CSV files in *folder_path*.

    Returns an empty list when the directory does not exist -- the original
    implicitly returned None in that branch, which crashed the caller's
    `symbol.isin(...)` filter.
    """
    if not os.path.exists(folder_path):
        logger.error("错误:目录不存在:{}", folder_path)
        return []
    folder = Path(folder_path)
    return [f.stem for f in folder.glob("*.csv")]
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
if __name__ == '__main__':
|
|
151
|
+
# k_line_df = query_k_line('TSLA')
|
|
152
|
+
sync_by_year(2024)
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
11
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
12
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
13
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
14
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
15
|
+
|
|
16
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
17
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def handle_fail_data():
    """Retry every ETF one-minute download recorded in the
    'us_stock_one_minute_down_load_fail' collection; entries that succeed
    are removed from the collection, entries that fail again stay for the
    next run.
    """
    query = {"type": "ETF", }
    fail_df = mongodb_util_27017.find_query_data('us_stock_one_minute_down_load_fail', query)

    for stock_one in fail_df.itertuples():
        # Bound before the try so the except-logger can never hit an
        # unbound name (the original assigned these inside the try).
        now_year = stock_one.now_year
        now_month = stock_one.now_month
        symbol = stock_one.symbol
        try:
            id_key = symbol + '_' + now_month
            # os.path.join fixes the invalid '\{' escapes of the original
            # concatenation; exist_ok collapses the exists/makedirs pairs.
            path = os.path.join(r'F:\us_etf\one_minute', str(now_year), str(now_month))
            os.makedirs(path, exist_ok=True)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df = df.fillna(0)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            export_original_data(df.copy(), symbol, path)
            logger.info("同步股票分钟票数据完成:{}", stock_one.symbol)
            # Success: drop the retry record
            mongodb_util_27017.remove_data({"_id": id_key}, 'us_stock_one_minute_down_load_fail')
        # Exception (not BaseException) so Ctrl-C still stops the job
        except Exception as e:
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def export_original_data(df, symbol, path):
    """Write the minute bars in *df* to <path>/<symbol>.csv.

    Rows without an '_id' are dropped; the bookkeeping columns ('str_day',
    'minute', '_id', 'symbol') are stripped so the CSV holds only the raw
    bar data. Empty frames are skipped entirely (no file is written).
    """
    # os.path.join fixes the invalid '\{' escape in the original '\{}.csv'
    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        df = df.dropna(subset=['_id'])
        df = df.drop(columns=['str_day', 'minute', '_id', 'symbol'])
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
if __name__ == '__main__':
|
|
67
|
+
handle_fail_data()
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
from functools import lru_cache
|
|
19
|
+
|
|
20
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
21
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def sync_us_stock_one_minute(now_year, now_month, begin_year):
    """Download one-minute bars for all US common stocks for *now_month*,
    export them to CSV and insert session-split rows into Mongo.

    now_year: calendar year of the target month (collection suffix, export
        folder and listing-date filter).
    now_month: month key formatted 'YYYY-MM' (alpha-vantage month parameter).
    begin_year: unused in this body -- kept for caller compatibility.
    """
    real_time_quotes_all_us = em_stock_info_api.get_us_stock_info()
    # flow_mv != 0 selects common stocks (the ETF job uses flow_mv == 0)
    real_time_quotes_all_us_stocks = real_time_quotes_all_us.loc[real_time_quotes_all_us['flow_mv'] != 0]
    real_time_quotes_all_us_stocks = real_time_quotes_all_us_stocks.sort_values(by=['amount'], ascending=False)

    # TODO: rename the target collection
    col_name = extra_income_db_name.US_STOCK_MINUTE_K_LINE_BFQ
    col_name = col_name + '_' + str(now_year)
    # Ensure indexes exist on the per-year collection before inserting
    db_create_index.create_index(mongodb_util_27017, col_name)

    for stock_one in real_time_quotes_all_us_stocks.itertuples():

        symbol = stock_one.symbol
        # simple_symbol = int(stock_one.simple_symbol)
        # code = str(simple_symbol) + '.' + symbol
        list_date = str(stock_one.list_date)
        list_date_year = int(list_date[0:4])
        # Skip symbols listed after the target year
        if list_date_year > now_year:
            continue
        try:

            now_date = datetime.now()
            if net_work_check(now_date):
                # Sleep 6 minutes around known API rate-limit windows
                time.sleep(6 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            df_export_df = df.copy()
            export_original_data(df_export_df, symbol, now_year, now_month)
            # Split pre-market / regular / after-hours rows and store them
            handle_pan_qian_or_hou_data(df, symbol, col_name)

        except BaseException as e:
            # NOTE(review): BaseException also swallows KeyboardInterrupt --
            # consider narrowing to Exception
            time.sleep(1)
            # Record the failure for a later retry pass
            fail_dict = {
                '_id': symbol + '_' + now_month,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }
            fail_df = pd.DataFrame(fail_dict, index=[1])

            mongodb_util_27017.insert_mongo(fail_df, 'us_stock_one_minute_k_line_bfq_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完整:{},{}", stock_one.symbol, stock_one.name)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def handle_pan_qian_or_hou_data(df, symbol, col_name):
    """Insert one month of minute bars into *col_name*, split into
    after-hours (16:00 bar), pre-market (09:30 bar) and regular-session
    rows by the three helpers. Each helper receives its own copy so none
    of them can mutate another's input.
    """
    hand_pan_hou_data(df.copy(), symbol, col_name)
    hand_pan_qian_data(df.copy(), col_name)
    handle_middle_data(df.copy(), col_name)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def handle_middle_data(df_copy, col_name):
    """Insert the regular-session bars -- minutes strictly between
    09:30:00 and 16:00:00 -- into *col_name*.
    """
    session_df = df_copy.copy()
    in_session = (session_df['minute'] > '09:30:00') & (session_df['minute'] < '16:00:00')
    mongodb_util_27017.insert_mongo(session_df.loc[in_session], col_name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def hand_pan_qian_data(df_copy, col_name):
    """Collapse each day's pre-market rows (minute <= 09:30:00) into the
    day's 09:30:00 bar, whose volume becomes the summed pre-market volume,
    and insert those bars into *col_name*.
    """
    df = df_copy.copy()
    # The 09:30:00 bar is the template row; its own volume is replaced below
    df_pan_qian_init = df.loc[df['minute'] == '09:30:00']
    del df_pan_qian_init['volume']
    # Everything up to and including 09:30:00 counts as pre-market volume
    pan_qian_data = df.loc[df['minute'] <= '09:30:00']
    pan_qian_data_group_df = pan_qian_data.groupby('str_day')['volume'].sum().reset_index()

    # Index both frames by day so the merge aligns per trading day
    pan_qian_data_group_df = pan_qian_data_group_df.set_index(['str_day'], drop=True)
    df_pan_qian_init = df_pan_qian_init.set_index(['str_day'], drop=False)

    # Attach the summed volume back onto the 09:30 template rows
    df_pan_qian_init = pd.merge(pan_qian_data_group_df, df_pan_qian_init,
                                how='outer',
                                left_index=True, right_index=True)

    mongodb_util_27017.insert_mongo(df_pan_qian_init, col_name)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def hand_pan_hou_data(df_copy, symbol, col_name):
    """Derive each day's after-hours volume and store it on that day's
    16:00:00 bar in *col_name*.

    After-hours volume = daily k-line volume - sum of minute volumes before
    16:00 -- presumably because the minute feed does not carry post-close
    trades itself; TODO confirm against the data source.
    """
    df = df_copy.copy()
    # Template rows: one 16:00:00 bar per trading day
    df_pan_hou_init = df.loc[df['minute'] == '16:00:00']
    k_line_trade_df = query_k_line(symbol)
    trade_date_list = list(df_pan_hou_init['str_day'])

    # Daily bars restricted to the days present in this month's minute data
    k_line_trade_df = k_line_trade_df.loc[k_line_trade_df['date'].isin(trade_date_list)]

    k_line_trade_df = k_line_trade_df.rename(columns={'volume': "k_line_volume"})

    # Intraday (pre-close) minute volume per day
    df = df.loc[df['minute'] < '16:00:00']
    total_volume_df = df.groupby('str_day')['volume'].sum().reset_index()
    total_volume_df = total_volume_df.rename(columns={'volume': "total_volume"})

    # drop=False keeps 'str_day' as a column for the later re-index
    total_volume_df = total_volume_df.set_index(['str_day'], drop=False)

    k_line_trade_df = k_line_trade_df.set_index(['date'], drop=True)

    # Align intraday totals with daily totals per day
    pan_hou_diff_df = pd.merge(total_volume_df, k_line_trade_df,
                               how='outer',
                               left_index=True, right_index=True)

    # After-hours volume = daily total - intraday total
    pan_hou_diff_df['volume'] = pan_hou_diff_df['k_line_volume'] - pan_hou_diff_df['total_volume']

    # The template rows' own volume is replaced by the derived value
    del df_pan_hou_init['volume']

    pan_hou_diff_df = pan_hou_diff_df.set_index(['str_day'], drop=True)
    del pan_hou_diff_df['k_line_volume']
    del pan_hou_diff_df['total_volume']

    df_pan_hou_init = df_pan_hou_init.set_index(['str_day'], drop=False)

    # Attach the derived after-hours volume onto the 16:00 template rows
    df_pan_hou_result = pd.merge(pan_hou_diff_df, df_pan_hou_init,
                                 how='outer',
                                 left_index=True, right_index=True)
    mongodb_util_27017.insert_mongo(df_pan_hou_result, col_name)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def export_original_data(df, symbol, year, now_month):
    """Write the minute bars in *df* to
    H:\\us_stock\\one_minute\\<year>\\<now_month>\\<symbol>.csv,
    creating the folders as needed. The bookkeeping columns ('str_day',
    'minute', '_id', 'symbol') are stripped before writing; empty frames
    are skipped entirely (no file is written).
    """
    # os.path.join fixes the invalid '\{' escapes of the original string
    # concatenation; makedirs(exist_ok=True) replaces the exists/makedirs pairs.
    path = os.path.join(r'H:\us_stock\one_minute', str(year), str(now_month))
    os.makedirs(path, exist_ok=True)

    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        df = df.drop(columns=['str_day', 'minute', '_id', 'symbol'])
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@lru_cache()
def query_k_line(symbol):
    """Return the daily QFQ k-line rows (date and volume only) for *symbol*.

    NOTE(review): the lru_cache is unbounded and holds results for the
    process lifetime, so k-line updates made after the first call for a
    symbol are never seen.
    """
    query = {'symbol': symbol}
    query_field = {"volume": 1, 'date': 1, '_id': 0}
    return mongodbUtilV2_27019.find_query_data_choose_field(extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE, query,
                                                           query_field)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def net_work_check(now_date):
    """Return True when *now_date* falls on one of the scheduled pause
    minutes (presumably API rate-limit windows -- confirm with the caller,
    which sleeps several minutes when this returns True).
    """
    pause_points = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in pause_points
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
if __name__ == '__main__':
|
|
198
|
+
# k_line_df = query_k_line('TSLA')
|
|
199
|
+
sync_us_stock_one_minute(2025, '2025-04', 2000)
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
14
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
15
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
16
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
17
|
+
import pandas as pd
|
|
18
|
+
from functools import lru_cache
|
|
19
|
+
|
|
20
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
21
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def sync_us_stock_one_minute(now_year, now_month):
    """Download one-minute bars for all US common stocks for *now_month*,
    export them to CSV and insert session-split rows into Mongo.

    now_year: calendar year of the target month (collection suffix, export
        folder and listing-date filter).
    now_month: month key formatted 'YYYY-MM' (alpha-vantage month parameter).
    """
    real_time_quotes_all_us = em_stock_info_api.get_us_stock_info()
    # flow_mv != 0 selects common stocks (the ETF job uses flow_mv == 0)
    real_time_quotes_all_us_stocks = real_time_quotes_all_us.loc[real_time_quotes_all_us['flow_mv'] != 0]
    real_time_quotes_all_us_stocks = real_time_quotes_all_us_stocks.sort_values(by=['amount'], ascending=False)

    # TODO: rename the target collection
    col_name = extra_income_db_name.US_STOCK_MINUTE_K_LINE_BFQ
    col_name = col_name + '_' + str(now_year)
    # Ensure indexes exist on the per-year collection before inserting
    db_create_index.create_index(mongodb_util_27017, col_name)

    for stock_one in real_time_quotes_all_us_stocks.itertuples():

        symbol = stock_one.symbol
        # simple_symbol = int(stock_one.simple_symbol)
        # code = str(simple_symbol) + '.' + symbol
        list_date = str(stock_one.list_date)
        list_date_year = int(list_date[0:4])
        # Skip symbols listed after the target year
        if list_date_year > now_year:
            continue
        try:

            now_date = datetime.now()
            if net_work_check(now_date):
                # Sleep ~5 minutes around known API rate-limit windows
                # (original comment said 6 minutes -- the code sleeps 5)
                time.sleep(5 * 60)

            df = alpha_vantage_api.sync_one_minute_data(symbol, now_month)
            df['time'] = df['time'].dt.strftime('%Y-%m-%d %H:%M:%S')
            df['str_day'] = df['time'].str.slice(0, 10)
            df['minute'] = df['time'].str.slice(11, 19)
            df['_id'] = symbol + "_" + df['time']
            df['symbol'] = symbol
            df_export_df = df.copy()
            export_original_data(df_export_df, symbol, now_year, now_month)
            # Split pre-market / regular / after-hours rows and store them
            handle_pan_qian_or_hou_data(df, symbol, col_name)

        except BaseException as e:
            # NOTE(review): BaseException also swallows KeyboardInterrupt --
            # consider narrowing to Exception
            time.sleep(1)
            # Record the failure for a later retry pass
            fail_dict = {
                '_id': symbol + '_' + now_month,
                'symbol': symbol,
                'now_year': now_year,
                'now_month': now_month
            }
            fail_df = pd.DataFrame(fail_dict, index=[1])

            mongodb_util_27017.insert_mongo(fail_df, 'us_stock_one_minute_k_line_bfq_fail')
            logger.error("同步股票分钟数据出现异常:,{},{},{}", e, symbol, now_month)
        logger.info("同步股票分钟票数据完成:{},{}", stock_one.symbol, stock_one.name)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def handle_pan_qian_or_hou_data(df, symbol, col_name):
    """Insert one month of minute bars into *col_name*, split into
    after-hours (16:00 bar), pre-market (09:30 bar) and regular-session
    rows by the three helpers. Each helper receives its own copy so none
    of them can mutate another's input.
    """
    hand_pan_hou_data(df.copy(), symbol, col_name)
    hand_pan_qian_data(df.copy(), col_name)
    handle_middle_data(df.copy(), col_name)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def handle_middle_data(df_copy, col_name):
    """Insert the regular-session bars -- minutes strictly between
    09:30:00 and 16:00:00 -- into *col_name*.
    """
    session_df = df_copy.copy()
    in_session = (session_df['minute'] > '09:30:00') & (session_df['minute'] < '16:00:00')
    mongodb_util_27017.insert_mongo(session_df.loc[in_session], col_name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def hand_pan_qian_data(df_copy, col_name):
    """Collapse each day's pre-market rows (minute <= 09:30:00) into the
    day's 09:30:00 bar, whose volume becomes the summed pre-market volume,
    and insert those bars into *col_name*.
    """
    df = df_copy.copy()
    # The 09:30:00 bar is the template row; its own volume is replaced below
    df_pan_qian_init = df.loc[df['minute'] == '09:30:00']
    del df_pan_qian_init['volume']
    # Everything up to and including 09:30:00 counts as pre-market volume
    pan_qian_data = df.loc[df['minute'] <= '09:30:00']
    pan_qian_data_group_df = pan_qian_data.groupby('str_day')['volume'].sum().reset_index()

    # Index both frames by day so the merge aligns per trading day
    pan_qian_data_group_df = pan_qian_data_group_df.set_index(['str_day'], drop=True)
    df_pan_qian_init = df_pan_qian_init.set_index(['str_day'], drop=False)

    # Attach the summed volume back onto the 09:30 template rows
    df_pan_qian_init = pd.merge(pan_qian_data_group_df, df_pan_qian_init,
                                how='outer',
                                left_index=True, right_index=True)

    mongodb_util_27017.insert_mongo(df_pan_qian_init, col_name)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def hand_pan_hou_data(df_copy, symbol, col_name):
    """Derive each day's after-hours volume and store it on that day's
    16:00:00 bar in *col_name*.

    After-hours volume = daily k-line volume - sum of minute volumes before
    16:00 -- presumably because the minute feed does not carry post-close
    trades itself; TODO confirm against the data source.
    """
    df = df_copy.copy()
    # Template rows: one 16:00:00 bar per trading day
    df_pan_hou_init = df.loc[df['minute'] == '16:00:00']
    k_line_trade_df = query_k_line(symbol)
    trade_date_list = list(df_pan_hou_init['str_day'])

    # Daily bars restricted to the days present in this month's minute data
    k_line_trade_df = k_line_trade_df.loc[k_line_trade_df['date'].isin(trade_date_list)]

    k_line_trade_df = k_line_trade_df.rename(columns={'volume': "k_line_volume"})

    # Intraday (pre-close) minute volume per day
    df = df.loc[df['minute'] < '16:00:00']
    total_volume_df = df.groupby('str_day')['volume'].sum().reset_index()
    total_volume_df = total_volume_df.rename(columns={'volume': "total_volume"})

    # drop=False keeps 'str_day' as a column for the later re-index
    total_volume_df = total_volume_df.set_index(['str_day'], drop=False)

    k_line_trade_df = k_line_trade_df.set_index(['date'], drop=True)

    # Align intraday totals with daily totals per day
    pan_hou_diff_df = pd.merge(total_volume_df, k_line_trade_df,
                               how='outer',
                               left_index=True, right_index=True)

    # After-hours volume = daily total - intraday total
    pan_hou_diff_df['volume'] = pan_hou_diff_df['k_line_volume'] - pan_hou_diff_df['total_volume']

    # The template rows' own volume is replaced by the derived value
    del df_pan_hou_init['volume']

    pan_hou_diff_df = pan_hou_diff_df.set_index(['str_day'], drop=True)
    del pan_hou_diff_df['k_line_volume']
    del pan_hou_diff_df['total_volume']

    df_pan_hou_init = df_pan_hou_init.set_index(['str_day'], drop=False)

    # Attach the derived after-hours volume onto the 16:00 template rows
    df_pan_hou_result = pd.merge(pan_hou_diff_df, df_pan_hou_init,
                                 how='outer',
                                 left_index=True, right_index=True)
    mongodb_util_27017.insert_mongo(df_pan_hou_result, col_name)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def export_original_data(df, symbol, year, now_month):
    """Write the minute bars in *df* to
    H:\\us_stock\\one_minute\\<year>\\<now_month>\\<symbol>.csv,
    creating the folders as needed. The bookkeeping columns ('str_day',
    'minute', '_id', 'symbol') are stripped before writing; empty frames
    are skipped entirely (no file is written).
    """
    # os.path.join fixes the invalid '\{' escapes of the original string
    # concatenation; makedirs(exist_ok=True) replaces the exists/makedirs pairs.
    path = os.path.join(r'H:\us_stock\one_minute', str(year), str(now_month))
    os.makedirs(path, exist_ok=True)

    file_name = os.path.join(path, '{}.csv'.format(symbol))
    if data_frame_util.is_not_empty(df):
        df = df.drop(columns=['str_day', 'minute', '_id', 'symbol'])
        df.to_csv(file_name, index=False, encoding='utf-8')
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@lru_cache()
def query_k_line(symbol):
    """Return the daily QFQ k-line rows (date and volume only) for *symbol*.

    NOTE(review): the lru_cache is unbounded and holds results for the
    process lifetime, so k-line updates made after the first call for a
    symbol are never seen.
    """
    query = {'symbol': symbol}
    query_field = {"volume": 1, 'date': 1, '_id': 0}
    return mongodbUtilV2_27019.find_query_data_choose_field(extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE, query,
                                                           query_field)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def net_work_check(now_date):
    """Return True when *now_date* falls on one of the scheduled pause
    minutes (presumably API rate-limit windows -- confirm with the caller,
    which sleeps several minutes when this returns True).
    """
    pause_points = {
        (7, 34), (9, 59), (10, 29), (10, 59), (12, 49),
        (13, 28), (13, 58), (14, 28), (15, 1),
    }
    return (now_date.hour, now_date.minute) in pause_points
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def sync_by_year(begin_year):
    """Back-fill one-minute stock data for every month of *begin_year*,
    newest month first (December down to January).
    """
    for month in range(12, 0, -1):
        str_month = '{}-{:02d}'.format(begin_year, month)
        sync_us_stock_one_minute(begin_year, str_month)
        logger.error("同步完成月份:{}", str_month)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
if __name__ == '__main__':
|
|
211
|
+
# k_line_df = query_k_line('TSLA')
|
|
212
|
+
sync_by_year(2024)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 16
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import mns_common.component.em.em_stock_info_api as em_stock_info_api
|
|
9
|
+
from loguru import logger
|
|
10
|
+
import time
|
|
11
|
+
import mns_common.utils.data_frame_util as data_frame_util
|
|
12
|
+
from mns_common.db.MongodbUtil import MongodbUtil
|
|
13
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.symbol_handle_util as symbol_handle_util
|
|
14
|
+
from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
|
|
15
|
+
import mns_scheduler.extraIncome.a_stock.one_minute.common.db_create_index as db_create_index
|
|
16
|
+
import mns_common.constant.extra_income_db_name as extra_income_db_name
|
|
17
|
+
import mns_scheduler.extraIncome.us.one_minute.api.alpha_vantage_api as alpha_vantage_api
|
|
18
|
+
import pandas as pd
|
|
19
|
+
from functools import lru_cache
|
|
20
|
+
|
|
21
|
+
mongodb_util_27017 = MongodbUtil('27017')
|
|
22
|
+
mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
|
|
23
|
+
def sync_us_stock_one_minute(now_year, now_month, begin_year):
|
|
24
|
+
real_time_quotes_all_us = em_stock_info_api.get_us_stock_info()
|
|
25
|
+
real_time_quotes_all_us_stocks = real_time_quotes_all_us.loc[real_time_quotes_all_us['flow_mv'] != 0]
|
|
26
|
+
real_time_quotes_all_us_stocks = real_time_quotes_all_us_stocks.sort_values(by=['amount'], ascending=False)
|