mns-scheduler 1.3.0.3__py3-none-any.whl → 1.3.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mns-scheduler might be problematic.

@@ -6,11 +6,9 @@ end = file_path.index('mns') + 16
 project_path = file_path[0:end]
 sys.path.append(project_path)

-import mns_common.utils.data_frame_util as data_frame_util
 from mns_scheduler.db.script.sync.remote_mongo_util import RemoteMongodbUtil
 from mns_scheduler.db.script.sync.local_mongo_util import LocalMongodbUtil
 from loguru import logger
-import numpy as np

 remote_mongodb_util = RemoteMongodbUtil('27017')
 local_mongodb_util = LocalMongodbUtil('27017')
@@ -33,37 +31,39 @@ def sync_k_line(str_day):
     try:
         local_mongodb_util.save_mongo(data_df, db_name)
     except BaseException as e:
+        logger.error("出现异常:{}", e)
         pass


 if __name__ == '__main__':

-    str_day_01 = '2025-02-18'
+    str_day_01 = '2025-02-27'
+    sync_real_time_data(str_day_01, 1, 2000)
     sync_k_line(str_day_01)
     # sync_k_line(str_day_01)
     # sync_k_line('2024-11-13')
     # sync_k_line('2024-11-12')
     # sync_k_line('2024-11-15') 232
-    sync_real_time_data(str_day_01, 1, 2000)
-    # sync_real_time_data('2024-11-15', 10, 1010)

-    sync_k_line('2024-12-20')
-    sync_k_line('2024-12-19')
-    sync_k_line('2024-12-18')
-    sync_k_line('2024-12-17')
-    sync_k_line('2024-12-16')
-
-    sync_k_line('2024-12-13')
-    sync_k_line('2024-12-12')
-    sync_k_line('2024-12-11')
-    sync_k_line('2024-12-10')
-    sync_k_line('2024-12-09')
+    # sync_real_time_data('2024-11-15', 10, 1010)

-    sync_k_line('2024-12-06')
-    sync_k_line('2024-12-05')
-    sync_k_line('2024-12-04')
-    sync_k_line('2024-12-03')
-    sync_k_line('2024-12-02')
+    # sync_k_line('2024-12-20')
+    # sync_k_line('2024-12-19')
+    # sync_k_line('2024-12-18')
+    # sync_k_line('2024-12-17')
+    # sync_k_line('2024-12-16')
+    #
+    # sync_k_line('2024-12-13')
+    # sync_k_line('2024-12-12')
+    # sync_k_line('2024-12-11')
+    # sync_k_line('2024-12-10')
+    # sync_k_line('2024-12-09')
+    #
+    # sync_k_line('2024-12-06')
+    # sync_k_line('2024-12-05')
+    # sync_k_line('2024-12-04')
+    # sync_k_line('2024-12-03')
+    # sync_k_line('2024-12-02')

     # sync_real_time_data('2024-12-20', 1000, 2000)
     # sync_real_time_data('2024-11-13', 10, 1010)
@@ -37,33 +37,32 @@ def sync_k_line(str_day):


 if __name__ == '__main__':
-
-    str_day_01 = '2025-02-18'
-    # sync_k_line(str_day_01)
+    str_day_01 = '2025-02-27'
+    sync_k_line(str_day_01)
     # sync_k_line(str_day_01)
     # sync_k_line('2024-11-13')
     # sync_k_line('2024-11-12')
     # sync_k_line('2024-11-15') 232
-    sync_real_time_data(str_day_01, 436, 2000)
+    # sync_real_time_data(str_day_01, 1, 2000)
     # sync_real_time_data('2024-11-15', 10, 1010)
-
-    sync_k_line('2024-12-20')
-    sync_k_line('2024-12-19')
-    sync_k_line('2024-12-18')
-    sync_k_line('2024-12-17')
-    sync_k_line('2024-12-16')
-
-    sync_k_line('2024-12-13')
-    sync_k_line('2024-12-12')
-    sync_k_line('2024-12-11')
-    sync_k_line('2024-12-10')
-    sync_k_line('2024-12-09')
-
-    sync_k_line('2024-12-06')
-    sync_k_line('2024-12-05')
-    sync_k_line('2024-12-04')
-    sync_k_line('2024-12-03')
-    sync_k_line('2024-12-02')
+    #
+    # sync_k_line('2024-12-20')
+    # sync_k_line('2024-12-19')
+    # sync_k_line('2024-12-18')
+    # sync_k_line('2024-12-17')
+    # sync_k_line('2024-12-16')
+    #
+    # sync_k_line('2024-12-13')
+    # sync_k_line('2024-12-12')
+    # sync_k_line('2024-12-11')
+    # sync_k_line('2024-12-10')
+    # sync_k_line('2024-12-09')
+    #
+    # sync_k_line('2024-12-06')
+    # sync_k_line('2024-12-05')
+    # sync_k_line('2024-12-04')
+    # sync_k_line('2024-12-03')
+    # sync_k_line('2024-12-02')

     # sync_real_time_data('2024-12-20', 1000, 2000)
     # sync_real_time_data('2024-11-13', 10, 1010)
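Aside from the updated dates and the flip from live `sync_k_line(...)` backfills to commented-out ones, the only behavioral change in the two sync_hui_ce scripts is that the bare `except BaseException: pass` now logs the error before swallowing it. Loguru fills `{}` placeholders from positional arguments, so the added pattern can be reproduced in isolation. The sketch below is illustrative only; `save_mongo` stands in for whichever call may raise:

    from loguru import logger

    def save_quietly(save_mongo, data_df, db_name):
        # same shape as the new except branch: log the exception, then continue
        try:
            save_mongo(data_df, db_name)
        except BaseException as e:
            # loguru substitutes positional args into '{}' placeholders
            logger.error("save failed: {}", e)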
@@ -0,0 +1,102 @@
+import os
+import sys
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+
+from mns_common.db.MongodbUtil import MongodbUtil
+from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
+import mns_common.constant.extra_income_db_name as extra_income_db_name
+import mns_common.constant.db_name_constant as db_name_constant
+import akshare as ak
+from loguru import logger
+import mns_scheduler.baidu.baidu_yun_pan_handle_service as baidu_yun_pan_handle_service
+import mns_scheduler.hk.hk_company_info_sync_service_api as hk_company_info_sync_service_api
+
+mongodb_util_27017 = MongodbUtil('27017')
+mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
+
+
+def hk_stock_daily_qfq_sync():
+    hk_stock_path = '/港股/qfq/'
+    hk_stock_exist_df = baidu_yun_pan_handle_service.get_file_folder(hk_stock_path)
+    hk_stock_exist_df['symbol'] = hk_stock_exist_df['name'].astype(str).str[:5]
+
+    hk_stock_info_df = mongodb_util_27017.find_all_data(db_name_constant.EM_HK_STOCK_INFO)
+
+
+    hk_stock_info_df = hk_stock_info_df.sort_values(by=['amount'], ascending=False)
+    del hk_stock_info_df['_id']
+    # upload the HK stock list
+    baidu_yun_pan_handle_service.upload_to_baidu('港股列表', hk_stock_path, hk_stock_info_df)
+    hk_ggt_component_df = hk_company_info_sync_service_api.get_hk_ggt_component()
+    # upload the HK Stock Connect list
+    hk_stock_info_ggt_df = hk_stock_info_df.loc[hk_stock_info_df['symbol'].isin(hk_ggt_component_df['symbol'])]
+    baidu_yun_pan_handle_service.upload_to_baidu('港股通列表', hk_stock_path, hk_stock_info_ggt_df)
+    hk_stock_info_df = hk_stock_info_df.loc[~(hk_stock_info_df['symbol'].isin(hk_stock_exist_df['symbol']))]
+    fail_list = []
+
+    for hk_stock_one in hk_stock_info_df.itertuples():
+        symbol = hk_stock_one.symbol
+        name = hk_stock_one.name
+        try:
+            save_one_symbol(symbol, hk_stock_path)
+
+        except BaseException as e:
+            logger.error("同步出现异常:{},{},{}", e, symbol, name)
+            fail_list.append(symbol)
+    # retry the failed symbols
+    for symbol_fail in fail_list:
+        try:
+            save_one_symbol(symbol_fail, hk_stock_path)
+        except BaseException as e:
+            logger.error("同步出现异常:{},{},{}", e, symbol, name)
+
+
+def save_one_symbol(symbol, hk_stock_path):
+    hk_stock_k_line_df = hk_stock_k_line_api(symbol, k_line_period='daily', start_date='18000101',
+                                             end_date='22220101', fq='qfq')
+
+    hk_stock_k_line_df["date"] = hk_stock_k_line_df["date"].astype(str)
+
+    hk_stock_k_line_df['_id'] = hk_stock_k_line_df['date'] + '_' + symbol
+    hk_stock_k_line_df['symbol'] = symbol
+
+    query = {'symbol': symbol}
+    if mongodbUtilV2_27019.remove_data(query, extra_income_db_name.HK_STOCK_DAILY_QFQ_K_LINE).acknowledged > 0:
+        mongodbUtilV2_27019.insert_mongo(hk_stock_k_line_df, extra_income_db_name.HK_STOCK_DAILY_QFQ_K_LINE)
+    del hk_stock_k_line_df['_id']
+    del hk_stock_k_line_df['symbol']
+    # upload the data
+    baidu_yun_pan_handle_service.upload_to_baidu(symbol, hk_stock_path, hk_stock_k_line_df)
+
+
+def hk_stock_k_line_api(symbol='00001', k_line_period='daily', start_date='18000101',
+                        end_date='22220101', fq='qfq'):
+    stock_hk_hist_df = ak.stock_hk_hist(symbol=symbol,
+                                        period=k_line_period,
+                                        start_date=start_date,
+                                        end_date=end_date,
+                                        adjust=fq)
+    stock_hk_hist_df = stock_hk_hist_df.rename(columns={
+        "日期": "date",
+        "涨跌额": "change_price",
+        "涨跌幅": "chg",
+        "开盘": "open",
+        "最高": "high",
+        "最低": "low",
+        "收盘": "close",
+        "成交量": "volume",
+        "成交额": "amount",
+        "振幅": "pct_chg",
+        "换手率": "exchange"
+    })
+
+    return stock_hk_hist_df
+
+
+if __name__ == '__main__':
+    # us_stock_k_line_api()
+    hk_stock_daily_qfq_sync()
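The new hk_stock_qfq_daily_k_line module pulls forward-adjusted (qfq) daily bars per symbol through akshare's `stock_hk_hist`, renames the Chinese column headers, replaces the symbol's documents in the `HK_STOCK_DAILY_QFQ_K_LINE` collection, and uploads the frame to Baidu Netdisk. A minimal standalone sketch of the fetch-and-rename step, assuming only that akshare is installed; '00700' is an arbitrary example symbol, not one the package references:

    import akshare as ak

    # fetch qfq daily bars the same way hk_stock_k_line_api does
    df = ak.stock_hk_hist(symbol='00700', period='daily',
                          start_date='18000101', end_date='22220101', adjust='qfq')
    # a subset of the module's rename map
    df = df.rename(columns={"日期": "date", "收盘": "close", "成交额": "amount"})
    print(df[['date', 'close', 'amount']].tail())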
@@ -0,0 +1,7 @@
+import sys
+import os
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
@@ -0,0 +1,95 @@
+import os
+import sys
+
+file_path = os.path.abspath(__file__)
+end = file_path.index('mns') + 16
+project_path = file_path[0:end]
+sys.path.append(project_path)
+
+from mns_common.db.MongodbUtil import MongodbUtil
+from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
+import mns_common.constant.extra_income_db_name as extra_income_db_name
+import mns_common.constant.db_name_constant as db_name_constant
+import akshare as ak
+from loguru import logger
+import mns_scheduler.baidu.baidu_yun_pan_handle_service as baidu_yun_pan_handle_service
+
+mongodb_util_27017 = MongodbUtil('27017')
+mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
+
+
+def us_stock_daily_qfq_sync():
+    us_stock_path = '/美股/qfq/'
+    em_us_stock_info_df = mongodb_util_27017.find_all_data(db_name_constant.EM_US_STOCK_INFO)
+    # convert the column to a string and take the first three characters
+    em_us_stock_info_df["simple_symbol"] = em_us_stock_info_df["simple_symbol"].astype(str).str[:3]
+    em_us_stock_info_df = em_us_stock_info_df.sort_values(by=['amount'], ascending=False)
+    # upload the US stock list
+    baidu_yun_pan_handle_service.upload_to_baidu('美股列表', us_stock_path, em_us_stock_info_df)
+    fail_list = []
+    for us_stock_one in em_us_stock_info_df.itertuples():
+        symbol = us_stock_one.symbol
+        name = us_stock_one.name
+        try:
+            save_one_symbol(us_stock_one, us_stock_path, symbol)
+        except BaseException as e:
+            logger.error("同步出现异常:{},{},{}", e, symbol, name)
+            fail_list.append(symbol)
+    fail_stock_df = em_us_stock_info_df.loc[em_us_stock_info_df['symbol'].isin(fail_list)]
+
+    for us_stock_fail_one in fail_stock_df.itertuples():
+        symbol = us_stock_fail_one.symbol
+        name = us_stock_fail_one.name
+        try:
+            save_one_symbol(us_stock_fail_one, us_stock_path, symbol)
+        except BaseException as e:
+            logger.error("同步出现异常:{},{},{}", e, symbol, name)
+            fail_list.append(symbol)
+
+
+def save_one_symbol(us_stock_one, us_stock_path, symbol):
+    simple_symbol = us_stock_one.simple_symbol
+    code = simple_symbol + '.' + symbol
+
+    us_stock_k_line_df = us_stock_k_line_api(code, k_line_period='daily', start_date='18000101',
+                                             end_date='22220101', fq='qfq')
+    us_stock_k_line_df['_id'] = us_stock_k_line_df['date'] + '_' + symbol
+    us_stock_k_line_df['symbol'] = symbol
+
+    query = {'symbol': symbol}
+    if mongodbUtilV2_27019.remove_data(query, extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE).acknowledged > 0:
+        mongodbUtilV2_27019.insert_mongo(us_stock_k_line_df, extra_income_db_name.US_STOCK_DAILY_QFQ_K_LINE)
+    del us_stock_k_line_df['_id']
+    del us_stock_k_line_df['symbol']
+
+    # upload the data
+    baidu_yun_pan_handle_service.upload_to_baidu(symbol, us_stock_path, us_stock_k_line_df)
+
+
+def us_stock_k_line_api(symbol='106.GE', k_line_period='daily', start_date='18000101',
+                        end_date='22220101', fq='hfq'):
+    stock_us_hist_df = ak.stock_us_hist(symbol=symbol,
+                                        period=k_line_period,
+                                        start_date=start_date,
+                                        end_date=end_date,
+                                        adjust=fq)
+    stock_us_hist_df = stock_us_hist_df.rename(columns={
+        "日期": "date",
+        "涨跌额": "change_price",
+        "涨跌幅": "chg",
+        "开盘": "open",
+        "最高": "high",
+        "最低": "low",
+        "收盘": "close",
+        "成交量": "volume",
+        "成交额": "amount",
+        "振幅": "pct_chg",
+        "换手率": "exchange"
+    })
+
+    return stock_us_hist_df
+
+
+if __name__ == '__main__':
+    # us_stock_k_line_api()
+    us_stock_daily_qfq_sync()
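us_stock_qfq_daily_k_line mirrors the HK module, but the Eastmoney-style code passed to `ak.stock_us_hist` is assembled from the market prefix stored in `simple_symbol` plus the ticker ('106.GE' in the function default). Note that `us_stock_k_line_api` defaults to `fq='hfq'`, while `save_one_symbol` explicitly passes `'qfq'`. A small sketch of the code assembly, with the prefix and ticker chosen only for illustration:

    import akshare as ak

    simple_symbol, symbol = '106', 'GE'      # market prefix + ticker, as in save_one_symbol
    code = simple_symbol + '.' + symbol      # -> '106.GE'
    df = ak.stock_us_hist(symbol=code, period='daily',
                          start_date='18000101', end_date='22220101', adjust='qfq')
    print(df.head())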
@@ -99,4 +99,5 @@ def sync_hk_company_info():


 if __name__ == '__main__':
+    get_hk_ggt_component()
     sync_hk_company_info()
@@ -63,6 +63,9 @@ import mns_scheduler.industry.ths.ths_industry_sync_service as ths_industry_sync
 import mns_scheduler.k_line.year_quarter.year_quarter_line_sync as year_quarter_line_sync
 import mns_common.component.task.real_time_data_sync_check as real_time_data_sync_check
 import mns_scheduler.extraIncome.one_minute.one_minute_sync_task as one_minute_sync_task
+import mns_scheduler.extraIncome.hk.hk_stock_qfq_daily_k_line as hk_stock_qfq_daily_k_line
+import mns_scheduler.extraIncome.us.us_stock_qfq_daily_k_line as us_stock_qfq_daily_k_line
+import mns_scheduler.extraIncome.one_minute.upload.upload_to_baidu_task as upload_to_baidu_task


 # trade-date sync task completed
@@ -443,12 +446,30 @@ def sync_all_em_stock_info():
     sync_em_stock_info_sync.sync_all_em_stock_info()


+# sync extra-income data
+def sync_extra_income_data():
+    # upload one-minute data to Baidu Netdisk
+    upload_to_baidu_task.upload_stock_to_baidu()
+    # HK forward-adjusted (qfq) k-line data
+    hk_stock_qfq_daily_k_line.hk_stock_daily_qfq_sync()
+    # US forward-adjusted (qfq) k-line data
+    us_stock_qfq_daily_k_line.us_stock_daily_qfq_sync()
+
+
 # # define the BlockingScheduler
 blockingScheduler = BlockingScheduler()

 # sync Eastmoney a,etf,kzz,us,hk info
 blockingScheduler.add_job(sync_all_em_stock_info, 'cron', hour='07', minute='31')
 blockingScheduler.add_job(sync_all_em_stock_info, 'cron', hour='15', minute='20')
+# sync extra-income data
+blockingScheduler.add_job(
+    sync_extra_income_data,
+    'cron',
+    day_of_week='sat,sun',  # Saturday and Sunday
+    hour='07,18',  # at 07:00 and 18:00
+    minute='37'  # minute 37
+)

 # before market open, sync the k-line data needed for the day's trading
 blockingScheduler.add_job(sync_today_trade_k_line_info, 'cron', hour='07', minute='50')
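The scheduler change wires the three new sync steps into one weekend job: APScheduler's cron trigger fires `sync_extra_income_data` on Saturday and Sunday at 07:37 and 18:37. A minimal sketch of just that trigger, assuming APScheduler 3.x, with a stub in place of the real job:

    from apscheduler.schedulers.blocking import BlockingScheduler

    def sync_extra_income_data():
        # stand-in for the real job defined in data_sync_task.py
        print("upload minute data, then sync HK/US qfq k-lines")

    scheduler = BlockingScheduler()
    # same field values as the added add_job call above
    scheduler.add_job(sync_extra_income_data, 'cron',
                      day_of_week='sat,sun', hour='07,18', minute='37')
    # scheduler.start()  # blocks and runs the job Sat/Sun at 07:37 and 18:37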
@@ -1,4 +1,4 @@
 Metadata-Version: 2.1
 Name: mns-scheduler
-Version: 1.3.0.3
+Version: 1.3.0.7

@@ -45,8 +45,8 @@ mns_scheduler/db/script/sync/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb9
 mns_scheduler/db/script/sync/local_mongo_util.py,sha256=MFG-S7aUBLKe4tkhKNklUzpiZef2n078YXd39dfOMy0,7540
 mns_scheduler/db/script/sync/remote_data_sync_to_local.py,sha256=gLJORdtt0YospxUwNIS5gusV9NtV7xmP1dm-M08neZ4,2789
 mns_scheduler/db/script/sync/remote_mongo_util.py,sha256=-BCR2zeQ9z0zeZg6wO0aCS4bGnsGIohFRH7QR8XXJSo,10966
-mns_scheduler/db/script/sync/sync_hui_ce_test_data.py,sha256=p0NrTWGsOVfecTBuSW96LKa_3eKevN_bll7RA1PuWcM,2090
-mns_scheduler/db/script/sync/sync_hui_ce_test_data_01.py,sha256=cQIxTpfi-wWVpY8aMQrcvig2KG_VykUHBeEmxYIx5y4,2094
+mns_scheduler/db/script/sync/sync_hui_ce_test_data.py,sha256=e4n9kjY-eonjRS1yU5n9D7B2tOP846dNTyvvtLP_zpA,2096
+mns_scheduler/db/script/sync/sync_hui_ce_test_data_01.py,sha256=ywJxst91X2hvE5lvtco_18BpXoP7gKndTEvZ1o4B0jk,2135
 mns_scheduler/db/script/update/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/db/script/update/update_col_field.py,sha256=2XeiKodVgR19IgJKMXZmM7MOLgeUyli15qo58gtYHY8,1274
 mns_scheduler/debt/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
@@ -54,6 +54,8 @@ mns_scheduler/debt/kzz_bond_info_sync.py,sha256=Y4K-cYl3g8IeP_coB-3OoGsvgurcsvg_
 mns_scheduler/dt/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
 mns_scheduler/dt/stock_dt_pool_sync.py,sha256=5ivRUOnFtOapZniwTbujf1lVq3y4btm2Cmd5R6JJAVo,3466
 mns_scheduler/extraIncome/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/hk/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/hk/hk_stock_qfq_daily_k_line.py,sha256=myy7X09gwyPqKp8Dv_nbITok-oKUXFA5USBJSwDyCTY,4236
 mns_scheduler/extraIncome/one_minute/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/extraIncome/one_minute/one_minute_sync_task.py,sha256=qw09DyhYRqWO4U48T3CiIikhCBTOEZ7Y1nkAoBKWgKw,1557
 mns_scheduler/extraIncome/one_minute/common/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
@@ -69,18 +71,15 @@ mns_scheduler/extraIncome/one_minute/stock/__init__.py,sha256=wEg73KlZo-dU0yKGwp
 mns_scheduler/extraIncome/one_minute/stock/stock_one_minute_sync_task.py,sha256=iB3RWD-Pp9e0an46xoPbn-YkgIlVFxZwQYyQ0Cfad7E,4576
 mns_scheduler/extraIncome/one_minute/upload/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/extraIncome/one_minute/upload/upload_to_baidu_task.py,sha256=_AjUYUSqnnq8IST0C0e1cjREBLoI4OWRI-MnPN3avr4,3772
-mns_scheduler/extraIncome/temp/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
-mns_scheduler/extraIncome/temp/tu_share_data_etf_sync.py,sha256=XBsLl4o1Ras1zUQBkJ2BAgWtPebqAf9VUu_kLEisGmQ,4464
-mns_scheduler/extraIncome/temp/tu_share_data_kzz_sync.py,sha256=A2Aa4TB2mgTHiDlW9_UpB0mdRCR_1sOTaPZKs-IBbXc,4850
-mns_scheduler/extraIncome/temp/tu_share_data_stock_sync.py,sha256=XOEYxjq0zvhUi9EE9T9LCIlv2B8EXVOrj2h35stNLQM,6232
-mns_scheduler/extraIncome/temp/tu_share_zhi_shu_sync_api.py,sha256=sAvfsIObHWsapgJP8o2YTL4D1XZiWa2tGguM6B6bgyQ,4169
+mns_scheduler/extraIncome/us/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
+mns_scheduler/extraIncome/us/us_stock_qfq_daily_k_line.py,sha256=6X0w1rYA93PNF_4uYyPi9Uqf1moxte54VB71FqADqQ4,3926
 mns_scheduler/finance/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/finance/em_financial_asset_liability_sync_service_api.py,sha256=kEZQZkxB7RF7UPH4DmHoRWfEKgI61ZN8BcNOzmBUoV0,19417
 mns_scheduler/finance/em_financial_profit_sync_service_api.py,sha256=BMOS0UIUTdDlbsKJlqEJkei1Uhz_PF2n3xgUvzZHlac,14514
 mns_scheduler/finance/finance_common_api.py,sha256=CUC_6GPpf_sLn1ZrT476qNBzxgJWDaxIi8LxdNaKFkE,2453
 mns_scheduler/finance/sync_financial_report_service_api.py,sha256=7ttWeBJOhA76CIz-K0K0jxN5z2-0-_c6-AWEFGFyMIQ,6632
 mns_scheduler/hk/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
-mns_scheduler/hk/hk_company_info_sync_service_api.py,sha256=vIxsl41rJPVvPVw2BTE7b0VZHFiuBecc3i3Gn0TKrFs,3696
+mns_scheduler/hk/hk_company_info_sync_service_api.py,sha256=z8dFxIiGc9aILs-8WKducch_uXKkLS6mBuPWt2zzNyg,3724
 mns_scheduler/hk/hk_industry_info_sync_service_api.py,sha256=Pf2uRGqjhibsBoOFJlUqj2WxujmIPysTAVOeffj3iWM,2275
 mns_scheduler/industry/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
 mns_scheduler/industry/ths/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
@@ -180,8 +179,8 @@ mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py,sha256=CAB1XCclO92dRd4K0ZGCNggB3
 mns_scheduler/zt/zt_pool/ths_zt_pool_sync_api.py,sha256=u0IvwPuI2hnjTOrwwe8EhBAMv8NbQTENpRbb-_5lDlM,9803
 mns_scheduler/zt/zt_pool/update_null_zt_reason_api.py,sha256=1uoiR2Uw46kDfjkvNg2US5rd_4OIkYO3872gIJOufUY,2135
 mns_scheduler/zz_task/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
-mns_scheduler/zz_task/data_sync_task.py,sha256=ls25q6bINT5jaBukjM8A4EllBoHa8_37j3zadCXOpgE,23490
-mns_scheduler-1.3.0.3.dist-info/METADATA,sha256=UYtPRiAgduV8TYS3H8hawsO81t9HliWmRltkSAWhalQ,64
-mns_scheduler-1.3.0.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-mns_scheduler-1.3.0.3.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
-mns_scheduler-1.3.0.3.dist-info/RECORD,,
+mns_scheduler/zz_task/data_sync_task.py,sha256=sLFnSRh3NYuMHQsSupnRGHZ2Dda-Veh-RQp0jSlDrXU,24310
+mns_scheduler-1.3.0.7.dist-info/METADATA,sha256=mvw79fFfBZr_wnZJaCc-RFKh_Q_etn0sbtjG-xpCGUM,64
+mns_scheduler-1.3.0.7.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+mns_scheduler-1.3.0.7.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
+mns_scheduler-1.3.0.7.dist-info/RECORD,,
@@ -1,103 +0,0 @@
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-import mns_common.api.em.east_money_etf_api as east_money_etf_api
-import pandas as pd
-from loguru import logger
-import time
-import mns_common.utils.data_frame_util as data_frame_util
-from mns_common.db.MongodbUtil import MongodbUtil
-import tushare as ts
-from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
-import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
-import mns_common.constant.extra_income_db_name as extra_income_db_name
-
-mongodb_util_27017 = MongodbUtil('27017')
-mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
-pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')
-
-
-def get_minute_data(symbol, freq, start_date, end_date):
-    # fetch historical minute data (the original example is SPD Bank 60000.SH)
-    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
-    return df
-
-
-def sync_etf_one_minute(trade_date_list_df):
-    etf_real_time_quotes_df = east_money_etf_api.get_etf_real_time_quotes()
-    etf_real_time_quotes_df = classify_symbol(etf_real_time_quotes_df)
-    etf_real_time_quotes_df['symbol'] = etf_real_time_quotes_df.apply(
-        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
-        else row['symbol'] + '.BJ' if row['classification'] in ['X']
-        else row['symbol'] + '.SH',
-        axis=1
-    )
-
-    db_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_ETF
-    db_create_index.create_index(mongodbUtilV2_27019, db_name)
-    for stock_one in etf_real_time_quotes_df.itertuples():
-        trade_date_list_df_copy = trade_date_list_df.copy()
-        symbol = stock_one.symbol
-        for i in range(0, len(trade_date_list_df_copy), 28):
-            try:
-                new_df = trade_date_list_df_copy.iloc[i:i + 28]
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-
-                df = get_minute_data(symbol, '1min', begin_date, end_date)
-                if data_frame_util.is_not_empty(df):
-                    df = df.rename(columns={
-                        "trade_time": "time",
-                        "ts_code": "symbol",
-                        "vol": "volume",
-                    })
-                    df['time_tick'] = df['time'].str[11:19]
-                    df = df.loc[df['time_tick'] <= '15:00:00']
-                    del df['time_tick']
-                    df['_id'] = df['symbol'] + '_' + df['time']
-                    mongodbUtilV2_27019.insert_mongo(df, db_name)
-            except BaseException as e:
-                time.sleep(2)
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-                fail_dict = {'begin_date': begin_date,
-                             'end_date': end_date,
-                             'symbol': symbol,
-                             'db_name': db_name
-                             }
-                fail_df = pd.DataFrame(fail_dict, index=[1])
-                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')
-
-                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
-        logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)
-
-    return etf_real_time_quotes_df
-
-
-def classify_symbol(debt_real_time_quotes_df):
-    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
-        lambda market: classify_symbol_one(market))
-    return debt_real_time_quotes_df
-
-
-# classify a single symbol
-def classify_symbol_one(market):
-    if market == 0:
-        return 'S'
-    else:
-        return 'H'
-
-
-if __name__ == '__main__':
-    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
-    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
-
-    sync_etf_one_minute(trade_date_list_df_all)
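All four removed temp scripts share one batching pattern: the trade-date list is walked in windows of 28 trading days, and each window becomes a single `pro.stk_mins` call bounded by 09:20 on the first day and 15:00 on the last. A standalone sketch of just that windowing, with made-up dates and no Tushare call:

    import pandas as pd

    def iter_date_windows(trade_date_list_df, window=28):
        # yield (begin, end) bounds the way the deleted scripts built them
        for i in range(0, len(trade_date_list_df), window):
            chunk = trade_date_list_df.iloc[i:i + window]
            begin_date = chunk.iloc[0].trade_date + ' 09:20:00'
            end_date = chunk.iloc[-1].trade_date + ' 15:00:00'
            yield begin_date, end_date

    dates = pd.DataFrame({'trade_date': ['2025-03-10', '2025-03-11', '2025-03-12']})
    for begin, end in iter_date_windows(dates, window=2):
        print(begin, end)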
@@ -1,113 +0,0 @@
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-
-import mns_common.api.em.east_money_debt_api as east_money_debt_api
-import pandas as pd
-from loguru import logger
-import time
-import mns_common.utils.data_frame_util as data_frame_util
-from mns_common.db.MongodbUtil import MongodbUtil
-import tushare as ts
-from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
-import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
-import mns_common.constant.extra_income_db_name as extra_income_db_name
-mongodb_util_27017 = MongodbUtil('27017')
-mongodbUtilV2_27019 = MongodbUtilV2('27019', extra_income_db_name.EXTRA_INCOME)
-pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')
-
-
-def get_minute_data(symbol, freq, start_date, end_date):
-    # fetch historical minute data (the original example is SPD Bank 60000.SH)
-    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
-    return df
-
-
-def sync_kzz_one_minute(trade_date_list_df):
-    debt_real_time_quotes = east_money_debt_api.get_debt_real_time_quotes()
-    debt_real_time_quotes = classify_symbol(debt_real_time_quotes)
-    debt_real_time_quotes['symbol'] = debt_real_time_quotes.apply(
-        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
-        else row['symbol'] + '.BJ' if row['classification'] in ['X']
-        else row['symbol'] + '.SH',
-        axis=1
-    )
-    # assume the number format is YYYYMMDD
-    # debt_real_time_quotes['list_date'] = pd.to_datetime(debt_real_time_quotes['list_date'],
-    #                                                     format='%Y%m%d').dt.strftime('%Y-%m-%d')
-
-    db_name = extra_income_db_name.ONE_MINUTE_K_LINE_BFQ_KZZ
-    db_create_index.create_index(mongodbUtilV2_27019, db_name)
-
-    for stock_one in debt_real_time_quotes.itertuples():
-
-        trade_date_list_df_copy = trade_date_list_df.copy()
-
-        # list_date = stock_one.list_date
-        # trade_date_list_df_copy = trade_date_list_df_copy.loc[trade_date_list_df_copy['_id'] >= list_date]
-
-        symbol = stock_one.symbol
-
-        for i in range(0, len(trade_date_list_df_copy), 28):
-            try:
-                new_df = trade_date_list_df_copy.iloc[i:i + 28]
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-
-                df = get_minute_data(symbol, '1min', begin_date, end_date)
-                if data_frame_util.is_not_empty(df):
-                    df = df.rename(columns={
-                        "trade_time": "time",
-                        "ts_code": "symbol",
-                        "vol": "volume",
-                    })
-                    df['time_tick'] = df['time'].str[11:19]
-                    df = df.loc[df['time_tick'] <= '15:00:00']
-                    del df['time_tick']
-                    df['_id'] = df['symbol'] + '_' + df['time']
-                    mongodbUtilV2_27019.insert_mongo(df, db_name)
-            except BaseException as e:
-                time.sleep(2)
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-                fail_dict = {'begin_date': begin_date,
-                             'end_date': end_date,
-                             'symbol': symbol,
-                             'db_name': db_name
-                             }
-                fail_df = pd.DataFrame(fail_dict, index=[1])
-                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')
-
-                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
-        logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)
-
-    return debt_real_time_quotes
-
-
-def classify_symbol(debt_real_time_quotes_df):
-    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
-        lambda market: classify_symbol_one(market))
-    return debt_real_time_quotes_df
-
-
-# classify a single symbol
-def classify_symbol_one(market):
-    if market == 0:
-        return 'S'
-    else:
-        return 'H'
-
-
-if __name__ == '__main__':
-    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
-    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
-
-    sync_kzz_one_minute(trade_date_list_df_all)
@@ -1,137 +0,0 @@
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-
-import tushare as ts
-from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
-from mns_common.db.MongodbUtil import MongodbUtil
-import mns_common.component.common_service_fun_api as common_service_fun_api
-
-import pandas as pd
-from loguru import logger
-import mns_common.utils.data_frame_util as data_frame_util
-import mns_common.component.em.em_stock_info_api as em_stock_info_api
-import time
-
-mongodb_util_27017 = MongodbUtil('27017')
-mongodbUtilV2 = MongodbUtilV2('27019', 'extraIncome')
-
-pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')
-
-
-def get_minute_data(symbol, freq, start_date, end_date):
-    # fetch historical minute data (the original example is SPD Bank 60000.SH)
-    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
-    return df
-
-
-def sync_all_stock(trade_date_list_df):
-    de_list_stock_df = mongodb_util_27017.find_all_data('de_list_stock')
-    de_list_stock_df = common_service_fun_api.classify_symbol(de_list_stock_df)
-    de_list_stock_df = de_list_stock_df.loc[
-        de_list_stock_df['classification'].isin(['K', 'C', 'S', 'H', 'X'])]
-    # for rows whose classification is S or K, append '.SH' to symbol; otherwise append '.SZ'
-
-    de_list_stock_df['symbol'] = de_list_stock_df.apply(
-        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
-        else row['symbol'] + '.BJ' if row['classification'] in ['X']
-        else row['symbol'] + '.SH',
-        axis=1
-    )
-
-    real_time_quotes_all_stocks_df = em_stock_info_api.get_a_stock_info()
-
-    # assume the number format is YYYYMMDD
-    real_time_quotes_all_stocks_df['list_date'] = pd.to_datetime(real_time_quotes_all_stocks_df['list_date'],
-                                                                 format='%Y%m%d')
-
-    # format the date as a string (YYYY-MM-DD)
-    real_time_quotes_all_stocks_df['list_date'] = real_time_quotes_all_stocks_df['list_date'].dt.strftime('%Y-%m-%d')
-    real_time_quotes_all_stocks_df = common_service_fun_api.classify_symbol(real_time_quotes_all_stocks_df)
-
-    # for rows whose classification is S or K, append '.SH' to symbol; otherwise append '.SZ'
-    real_time_quotes_all_stocks_df['symbol'] = real_time_quotes_all_stocks_df.apply(
-        lambda row: row['symbol'] + '.SZ' if row['classification'] in ['S', 'C']
-        else row['symbol'] + '.BJ' if row['classification'] in ['X']
-        else row['symbol'] + '.SH',
-        axis=1
-    )

-    real_time_quotes_all_stocks_df['number'] = real_time_quotes_all_stocks_df['chg'].rank(method='first').astype(int)
-
-    for stock_one in real_time_quotes_all_stocks_df.itertuples():
-        classification = stock_one.classification
-        if classification == 'X':
-            db_name = 'one_minute_k_line_bfq_bj'
-        elif classification == 'S':
-            db_name = 'one_minute_k_line_bfq_s'
-
-        elif classification == 'H':
-            db_name = 'one_minute_k_line_bfq_h'
-        elif classification == 'K':
-            db_name = 'one_minute_k_line_bfq_k'
-        elif classification == 'C':
-            db_name = 'one_minute_k_line_bfq_c'
-
-        trade_date_list_df_copy = trade_date_list_df.copy()
-
-        list_date = stock_one.list_date
-        trade_date_list_df_copy = trade_date_list_df_copy.loc[trade_date_list_df_copy['_id'] >= list_date]
-
-        symbol = stock_one.symbol
-
-        de_list_stock_df_one = de_list_stock_df.loc[de_list_stock_df['symbol'] == symbol]
-
-        if data_frame_util.is_not_empty(de_list_stock_df_one):
-            de_list_date = list(de_list_stock_df_one['de_list_date'])[0]
-            trade_date_list_df_copy = trade_date_list_df_copy.loc[trade_date_list_df_copy['_id'] <= de_list_date]
-
-        for i in range(0, len(trade_date_list_df_copy), 28):
-            try:
-                new_df = trade_date_list_df_copy.iloc[i:i + 28]
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-
-                df = get_minute_data(symbol, '1min', begin_date, end_date)
-                if data_frame_util.is_not_empty(df):
-                    df = df.rename(columns={
-                        "trade_time": "time",
-                        "ts_code": "symbol",
-                        "vol": "volume",
-                    })
-                    df['time_tick'] = df['time'].str[11:19]
-                    df = df.loc[df['time_tick'] <= '15:00:00']
-                    del df['time_tick']
-                    df['_id'] = df['symbol'] + '_' + df['time']
-                    mongodbUtilV2.insert_mongo(df, db_name)
-            except BaseException as e:
-                time.sleep(2)
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-                fail_dict = {'begin_date': begin_date,
-                             'end_date': end_date,
-                             'symbol': symbol,
-                             'db_name': db_name
-                             }
-                fail_df = pd.DataFrame(fail_dict, index=[1])
-                mongodbUtilV2.insert_mongo(fail_df, 'one_minute_k_line_bfq_fail')
-
-                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
-        logger.info("同步完数据:{},{}", stock_one.symbol, stock_one.name)
-
-
-if __name__ == '__main__':
-    # get_minute_data('833284.BJ', '1min', '2025-02-28 09:30:00', '2025-02-28 15:00:00')
-
-    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-18"}}]}
-    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
-
-    sync_all_stock(trade_date_list_df_all)
@@ -1,107 +0,0 @@
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-
-import sys
-import os
-
-file_path = os.path.abspath(__file__)
-end = file_path.index('mns') + 16
-project_path = file_path[0:end]
-sys.path.append(project_path)
-import pandas as pd
-from loguru import logger
-import time
-import mns_common.utils.data_frame_util as data_frame_util
-from mns_common.db.MongodbUtil import MongodbUtil
-import tushare as ts
-from mns_common.db.v2.MongodbUtilV2 import MongodbUtilV2
-import mns_scheduler.extraIncome.one_minute.common.db_create_index as db_create_index
-
-mongodb_util_27017 = MongodbUtil('27017')
-mongodbUtilV2_27019 = MongodbUtilV2('27019', 'extraIncome')
-
-pro = ts.pro_api('782213d20640249f1dbae50a7f56b22684b8e915a61e435e015579a1')
-
-
-def get_minute_data(symbol, freq, start_date, end_date):
-    # fetch historical minute data (the original example is SPD Bank 60000.SH)
-    df = pro.stk_mins(ts_code=symbol, freq=freq, start_date=start_date, end_date=end_date)
-    return df
-
-
-def sync_zhi_shu_one_minute(symbol_list_param, trade_date_list_df):
-    db_name = 'one_minute_k_line_bfq_main_index'
-    db_create_index.create_index(mongodbUtilV2_27019, db_name)
-    for symbol in symbol_list_param:
-
-        trade_date_list_df_copy = trade_date_list_df.copy()
-
-        for i in range(0, len(trade_date_list_df_copy), 28):
-            try:
-                new_df = trade_date_list_df_copy.iloc[i:i + 28]
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-
-                df = get_minute_data(symbol, '1min', begin_date, end_date)
-                if data_frame_util.is_not_empty(df):
-                    df = df.rename(columns={
-                        "trade_time": "time",
-                        "ts_code": "symbol",
-                        "vol": "volume",
-                    })
-                    df['time_tick'] = df['time'].str[11:19]
-                    df = df.loc[df['time_tick'] <= '15:00:00']
-                    del df['time_tick']
-                    df['_id'] = df['symbol'] + '_' + df['time']
-                    mongodbUtilV2_27019.insert_mongo(df, db_name)
-            except BaseException as e:
-                time.sleep(2)
-                first_df = new_df.iloc[0]  # first row
-                last_df = new_df.iloc[-1]  # last row
-                begin_date = first_df.trade_date + ' 09:20:00'
-                end_date = last_df.trade_date + ' 15:00:00'
-                fail_dict = {'begin_date': begin_date,
-                             'end_date': end_date,
-                             'symbol': symbol,
-                             'db_name': db_name
-                             }
-                fail_df = pd.DataFrame(fail_dict, index=[1])
-                mongodbUtilV2_27019.insert_mongo(fail_df, db_name + '_fail')
-
-                logger.error("同步数据出现异常:{},{},{},{}", e, symbol, begin_date, end_date)
-        logger.info("同步完数据:{}", symbol)
-
-
-def classify_symbol(debt_real_time_quotes_df):
-    debt_real_time_quotes_df['classification'] = debt_real_time_quotes_df['market'].apply(
-        lambda market: classify_symbol_one(market))
-    return debt_real_time_quotes_df
-
-
-# classify a single symbol
-def classify_symbol_one(market):
-    if market == 0:
-        return 'S'
-    else:
-        return 'H'
-
-
-if __name__ == '__main__':
-    # file path
-    file_path = r"H:\data\1min\指数\指数列表.xlsx"
-
-    df = pd.read_excel(file_path)
-    df = df.rename(columns={
-        "指数代码": "symbol"
-    })
-    new_symbol_list = df['symbol']
-    query_trade = {"$and": [{"trade_date": {"$gte": "2025-03-08"}}, {"trade_date": {"$lte": "2025-03-16"}}]}
-    trade_date_list_df_all = mongodb_util_27017.find_query_data('trade_date_list', query_trade)
-    sync_zhi_shu_one_minute(new_symbol_list, trade_date_list_df_all)