mns-scheduler 1.2.5.1__py3-none-any.whl → 1.2.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mns-scheduler might be problematic. See the registry's release details page for more information.

@@ -70,7 +70,7 @@ def sync_col_move(str_day):
70
70
  try:
71
71
  db_export('127.0.0.1:27017', str_day)
72
72
  db_import('127.0.0.1:27019', str_day)
73
- # delete_exist_data(str_day)
73
+ delete_exist_data(str_day)
74
74
  except BaseException as e:
75
75
  logger.error("备份数据出现错误:{}", e)
76
76
 
@@ -115,5 +115,5 @@ if __name__ == '__main__':
115
115
 
116
116
  trade_date_list = mongodb_util.find_query_data('trade_date_list', query_trade)
117
117
  for trade_one in trade_date_list.itertuples():
118
- trade_date = trade_one.trade_date
119
- sync_col_move(trade_date)
118
+ trade_date_move = trade_one.trade_date
119
+ sync_col_move(trade_date_move)
@@ -0,0 +1,131 @@
1
+ import sys
2
+ import os
3
+
4
+ file_path = os.path.abspath(__file__)
5
+ end = file_path.index('mns') + 16
6
+ project_path = file_path[0:end]
7
+ sys.path.append(project_path)
8
+ from bypy import ByPy
9
+ from mns_common.db.MongodbUtil import MongodbUtil
10
+ import tempfile
11
+ from loguru import logger
12
+ import akshare as ak
13
+
14
+ mongodb_util = MongodbUtil('27017')
15
+
16
+ import subprocess
17
+
18
+
19
+ def get_file_list(path):
20
+ """
21
+ 获取百度网盘指定路径下的文件列表
22
+ :param path: 百度网盘中的路径,例如 '/我的资源'
23
+ :return: 文件列表
24
+ """
25
+ try:
26
+ # 调用 bypy list 命令
27
+ result = subprocess.run(['bypy', 'list', path], capture_output=True, text=True, check=True)
28
+
29
+ # 输出结果
30
+ if result.returncode == 0:
31
+ file_list = result.stdout.splitlines() # 按行分割结果
32
+ return file_list
33
+ else:
34
+ logger.error("获取文件路径异常:{}", result.stderr)
35
+ return []
36
+ except subprocess.CalledProcessError as e:
37
+ logger.error("获取文件路径异常:{}", e)
38
+ return []
39
+
40
+
41
+ def upload_to_baidu(file_name, folder_name, data_df):
42
+ bp = ByPy()
43
+ file_name = file_name + '.csv'
44
+ with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as temp_file:
45
+ data_df.to_csv(temp_file, index=False)
46
+ temp_file_path = temp_file.name # 获取临时文件的路径
47
+
48
+ # 上传临时文件到百度云
49
+ remote_path = f'/{folder_name}/{file_name}'
50
+ result = bp.upload(temp_file_path, remote_path)
51
+ if result == 0:
52
+ logger.info("上传成功:{}", file_name)
53
+ else:
54
+ logger.error("上传失败:{}", file_name)
55
+ return result
56
+
57
+
58
+ def mkdir_baidu_new_folder(remote_path):
59
+ bp = ByPy()
60
+ try:
61
+ # 调用 mkdir 方法创建文件夹
62
+ result = bp.mkdir(remote_path)
63
+
64
+ if result == 0:
65
+ logger.info("成功创建文件夹:{}", remote_path)
66
+ else:
67
+ logger.error("创建文件夹失败:{}", result)
68
+
69
+ except Exception as e:
70
+ logger.error("创建文件夹失败:{}", e)
71
+
72
+
73
+ def del_baidu_old_folder(remote_path):
74
+ bp = ByPy()
75
+ try:
76
+ # 调用 mkdir 方法创建文件夹
77
+ result = bp.delete(remote_path)
78
+
79
+ if result == 0:
80
+ logger.info("成功删除文件夹:{}", remote_path)
81
+ else:
82
+ logger.error("删除文件夹失败:{}", result)
83
+
84
+ except Exception as e:
85
+ logger.error("删除文件夹失败:{}", e)
86
+
87
+
88
+ if __name__ == '__main__':
89
+ folder_name1 = '/美股/不复权日线'
90
+ mkdir_baidu_new_folder(folder_name1)
91
+ # get_file_list(folder_name1)
92
+ stock_us_spot_em_df = ak.stock_us_spot_em()
93
+ stock_us_spot_em_df = stock_us_spot_em_df.rename(columns={
94
+ "序号": "index",
95
+ "代码": "symbol",
96
+ "名称": "name",
97
+ "涨跌额": "change_price",
98
+ "涨跌幅": "chg",
99
+ "开盘价": "open",
100
+ "最高价": "high",
101
+ "最低价": "low",
102
+ "最新价": "now_price",
103
+ "昨收价": "last_price",
104
+ "总市值": "total_mv",
105
+ "市盈率": "pe",
106
+ "成交量": "volume",
107
+ "成交额": "amount",
108
+ "振幅": "pct_chg",
109
+ "换手率": "exchange"
110
+ })
111
+ stock_us_spot_em_df = stock_us_spot_em_df.sort_values(by=['amount'], ascending=False)
112
+ stock_us_spot_em_df = stock_us_spot_em_df.fillna(0)
113
+ stock_us_spot_em_df = stock_us_spot_em_df.loc[stock_us_spot_em_df['total_mv'] != 0]
114
+ for stock_one in stock_us_spot_em_df.itertuples():
115
+ try:
116
+ symbol = stock_one.symbol
117
+ name = stock_one.name
118
+ query = {'symbol': symbol, 'amount': {"$gt": 0}}
119
+ us_stock_bfq_daily_df_one = mongodb_util.find_query_data('us_stock_bfq_daily', query)
120
+ del us_stock_bfq_daily_df_one['_id']
121
+ del us_stock_bfq_daily_df_one['name']
122
+ file_name_one = name + '_' + symbol
123
+ upload_to_baidu(file_name_one, folder_name1, us_stock_bfq_daily_df_one)
124
+
125
+ except BaseException as e:
126
+ logger.error("同步数据发生异常:{}", e)
127
+
128
+ # data_df = mongodb_util.find_query_data('us_stock_bfq_daily', query={'name': file_name1})
129
+ # upload_to_baidu(file_name1, folder_name1, data_df)
130
+
131
+
@@ -0,0 +1,91 @@
1
+ import akshare as ak
2
+ import pandas as pd
3
+ from mns_common.db.MongodbUtil import MongodbUtil
4
+
5
+ mongodb_util = MongodbUtil('27017')
6
+
7
+
8
+ def us_stock():
9
+ # 输入参数
10
+ symbol = input("请输入股票代码(all:全量(时间很长),特定代码:106.TTE):")
11
+ start_date = input("请输入开始日期(格式:YYYYMMDD):")
12
+ end_date = input("请输入结束日期(格式:YYYYMMDD):")
13
+ fq = input("请输入复权信息(前复权:qfq,不复权:bfq,后复权:hfq):")
14
+ k_line_period = input("请输入k线周期(日线:daily,周线:weekly,月线:monthly):")
15
+ db_name = "us_stock_" + fq + "_" + k_line_period
16
+ if fq == 'bfq':
17
+ fq = ''
18
+ if symbol != 'all':
19
+ # 获取股票历史数据
20
+ stock_us_hist_df = ak.stock_us_hist(symbol=symbol,
21
+ period=k_line_period,
22
+ start_date=start_date,
23
+ end_date=end_date,
24
+ adjust=fq)
25
+ # 保存数据到 CSV 文件
26
+ stock_us_hist_df.to_csv(f"{symbol}_historical_data.csv", index=False)
27
+ print(f"数据已保存到 {symbol}_historical_data.csv")
28
+ else:
29
+ stock_us_spot_em_df = ak.stock_us_spot_em()
30
+ stock_us_spot_em_df = stock_us_spot_em_df.rename(columns={
31
+ "序号": "index",
32
+ "代码": "symbol",
33
+ "名称": "name",
34
+ "涨跌额": "change_price",
35
+ "涨跌幅": "chg",
36
+ "开盘价": "open",
37
+ "最高价": "high",
38
+ "最低价": "low",
39
+ "最新价": "now_price",
40
+ "昨收价": "last_price",
41
+ "总市值": "total_mv",
42
+ "市盈率": "pe",
43
+ "成交量": "volume",
44
+ "成交额": "amount",
45
+ "振幅": "pct_chg",
46
+ "换手率": "exchange"
47
+ })
48
+ stock_us_spot_em_df = stock_us_spot_em_df.sort_values(by=['amount'], ascending=False)
49
+ stock_us_spot_em_df = stock_us_spot_em_df.fillna(0)
50
+ stock_us_spot_em_df = stock_us_spot_em_df.loc[stock_us_spot_em_df['total_mv'] != 0]
51
+
52
+ k_line_result = pd.DataFrame()
53
+
54
+ for stock_us_one in stock_us_spot_em_df.itertuples():
55
+ try:
56
+ # 获取股票历史数据
57
+ stock_us_hist_df = ak.stock_us_hist(symbol=stock_us_one.symbol,
58
+ period=k_line_period,
59
+ start_date=start_date,
60
+ end_date=end_date,
61
+ adjust=fq)
62
+ stock_us_hist_df = stock_us_hist_df.rename(columns={
63
+ "日期": "date",
64
+ "涨跌额": "change_price",
65
+ "涨跌幅": "chg",
66
+ "开盘": "open",
67
+ "最高": "high",
68
+ "最低": "low",
69
+ "收盘": "close",
70
+ "成交量": "volume",
71
+ "成交额": "amount",
72
+ "振幅": "pct_chg",
73
+ "换手率": "exchange"
74
+ })
75
+
76
+ k_line_result = pd.concat([stock_us_hist_df, k_line_result])
77
+ stock_us_hist_df['_id'] = stock_us_one.symbol + '_' + stock_us_hist_df['date']
78
+ stock_us_hist_df['symbol'] = stock_us_one.symbol
79
+ stock_us_hist_df['name'] = stock_us_one.name
80
+ mongodb_util.insert_mongo(stock_us_hist_df, db_name)
81
+ print(f"同步k线数据到: {stock_us_one.name}")
82
+ except BaseException as e:
83
+ print(f"同步数据发生异常: {stock_us_one.name}, {e}")
84
+
85
+ # 保存数据到 CSV 文件
86
+ k_line_result.to_csv(f"{symbol}_historical_data.csv", index=False)
87
+ print(f"数据已保存到 {symbol}_historical_data.csv")
88
+
89
+
90
+ if __name__ == "__main__":
91
+ us_stock()
@@ -0,0 +1,39 @@
1
+ import sys
2
+ import os
3
+
4
+ file_path = os.path.abspath(__file__)
5
+ end = file_path.index('mns') + 16
6
+ project_path = file_path[0:end]
7
+ sys.path.append(project_path)
8
+ import akshare as ak
9
+
10
+
11
+ def sync_us_company_info():
12
+ stock_us_spot_em_df = ak.stock_us_spot_em()
13
+ stock_us_spot_em_df = stock_us_spot_em_df.rename(columns={
14
+ "序号": "index",
15
+ "代码": "symbol",
16
+ "名称": "name",
17
+ "涨跌额": "change_price",
18
+ "涨跌幅": "chg",
19
+ "开盘价": "open",
20
+ "最高价": "high",
21
+ "最低价": "low",
22
+ "最新价": "now_price",
23
+ "昨收价": "last_price",
24
+ "总市值": "total_mv",
25
+ "市盈率": "pe",
26
+ "成交量": "volume",
27
+ "成交额": "amount",
28
+ "振幅": "pct_chg",
29
+ "换手率": "exchange"
30
+ })
31
+ stock_us_spot_em_df = stock_us_spot_em_df.sort_values(by=['amount'], ascending=False)
32
+ stock_us_spot_em_df = stock_us_spot_em_df.fillna(0)
33
+ stock_us_spot_em_df = stock_us_spot_em_df.loc[stock_us_spot_em_df['total_mv']!=0]
34
+ stock_us_spot_em_df.to_csv('us_stock.csv', index=False)
35
+ return stock_us_spot_em_df
36
+
37
+
38
+ if __name__ == '__main__':
39
+ sync_us_company_info()
@@ -81,6 +81,8 @@ def sync_stock_zb_pool():
81
81
  if trade_date_common_service_api.is_trade_day(str_now_day):
82
82
  stock_zb_pool_sync_api.sync_stock_zb_pool(str_now_day)
83
83
  logger.info("同步炸板信息任务执行成功:{}", str_now_day)
84
+ # 同步停复牌信息
85
+ sync_stock_tfp()
84
86
 
85
87
 
86
88
  # 定时同步每周交易行情数据(前复权)
@@ -349,8 +351,6 @@ def sync_all_interactive_questions():
349
351
  # 数据同步状态check
350
352
  def real_time_task_check_status():
351
353
  real_time_task_check_api.run_check_real_time_data_sync_status()
352
- # 同步停复牌信息
353
- sync_stock_tfp()
354
354
 
355
355
 
356
356
  # 打开交易客户端
@@ -495,9 +495,38 @@ blockingScheduler.add_job(sync_high_risk_stocks, 'cron', hour='0,09,12,16', minu
495
495
  # 同步互动回答
496
496
  blockingScheduler.add_job(sync_all_interactive_questions, 'cron', hour='08,12,17', minute='05')
497
497
 
498
- # 实时数据状态同步check
499
- blockingScheduler.add_job(real_time_task_check_status, 'interval', seconds=10, max_instances=4)
500
-
498
+ # 添加第一个时间段的任务:9:25 到 11:30,每 10 秒执行一次
499
+ blockingScheduler.add_job(
500
+ real_time_task_check_status,
501
+ 'cron',
502
+ hour='9-11', # 9 点到 11 点
503
+ minute='25-59', # 25 分到 59 分(9:25-9:59)
504
+ second='*/10', # 每 10 秒
505
+ max_instances=4
506
+ )
507
+
508
+ # 添加第二个时间段的任务:10:00 到 11:30,每 10 秒执行一次
509
+ blockingScheduler.add_job(
510
+ real_time_task_check_status,
511
+ 'cron',
512
+ hour='10-11', # 10 点到 11 点
513
+ minute='*', # 每分钟
514
+ second='*/10', # 每 10 秒
515
+ max_instances=4
516
+ )
517
+
518
+ # 添加第三个时间段的任务:13:00 到 15:00,每 10 秒执行一次
519
+ blockingScheduler.add_job(
520
+ real_time_task_check_status,
521
+ 'cron',
522
+ hour='13-15', # 13 点到 15 点
523
+ minute='*', # 每分钟
524
+ second='*/10', # 每 10 秒
525
+ max_instances=4
526
+ )
527
+
528
+ # 实时数据状态同步check,使用 'cron' 触发器来限制任务在 9 点到 15 点之间执行
529
+ blockingScheduler.add_job(real_time_task_check_status, 'cron', hour='9-15', second='*/10', max_instances=4)
501
530
  # 打开交易客户端
502
531
  blockingScheduler.add_job(trader_client_auto_login, 'cron', hour='08,12', minute='30')
503
532
  # 同步公司备注信息
@@ -1,4 +1,4 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mns-scheduler
3
- Version: 1.2.5.1
3
+ Version: 1.2.5.3
4
4
 
@@ -1,6 +1,4 @@
1
1
  mns_scheduler/__init__.py,sha256=_nhtk1b00OsMAiqRATNrb3HD44RmgjSG5jqS-QLNMrQ,130
2
- mns_scheduler/2014-2015-test/2014_2015_chg_statistics.py,sha256=u7QaSs764ZGXi3iLf0tX2VJzKWVlhmOG-3iPX8sZsnQ,3889
3
- mns_scheduler/2014-2015-test/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
4
2
  mns_scheduler/big_deal/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
5
3
  mns_scheduler/big_deal/ths_big_deal_sync.py,sha256=aMFj-_pLprh4vGjSSzmr_tlYoPA0L4Lm0SkLRkQwIiw,4564
6
4
  mns_scheduler/company_info/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
@@ -33,7 +31,7 @@ mns_scheduler/concept/ths/update_concept_info/__init__.py,sha256=QWBdZwBCvQw8aS4
33
31
  mns_scheduler/concept/ths/update_concept_info/sync_one_concept_all_symbols_api.py,sha256=wwuLfjj9AnFcHP-oQPC5AhpwgZ8IsPiNUh-Z6swcngA,1380
34
32
  mns_scheduler/concept/ths/update_concept_info/sync_one_symbol_all_concepts_api.py,sha256=3KT-FTG337KWufTrllp_4QZv3U-UBEomUcx77t0x_eQ,9410
35
33
  mns_scheduler/db/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
36
- mns_scheduler/db/col_move_service.py,sha256=tyWth-68MVs3lc6PTW-2SygD98_4_qUaCYZ6tX1JUp8,4230
34
+ mns_scheduler/db/col_move_service.py,sha256=9pOrHJ6rQuMPQdGFho1IJMEy2xvpQ20iku3p20MuAhg,4238
37
35
  mns_scheduler/db/db_status.py,sha256=e5eW5ZSm5J7tHvmxxhFmFdbZb2_oB_SAcdcFqc4KDmw,733
38
36
  mns_scheduler/db/real_time_task_check.py,sha256=bf3Ov6fLL332u7yWyDC3ZLvVWhM6W9i47LapkfP1w7c,4902
39
37
  mns_scheduler/db/script/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
@@ -127,6 +125,10 @@ mns_scheduler/trade/task/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImy
127
125
  mns_scheduler/trade/task/trader_task_service.py,sha256=xBqByakfCO2ruWXuWWsRPJtZ_oCLXLgaOHDiLdD4SVw,1560
128
126
  mns_scheduler/trade/tfp/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
129
127
  mns_scheduler/trade/tfp/stock_tfp_info_sync.py,sha256=KqZlnzKvAGVJXHJfy_aP0KhP91A5wB2C7Sa98QP3_3o,2440
128
+ mns_scheduler/us/__init__.py,sha256=wEg73KlZo-dU0yKGwpA1C2y6LZm4IBb94tNda1tqLeg,163
129
+ mns_scheduler/us/baidu_yun_pan_handle_service.py,sha256=XAEDmyFMnQyD0qQKbA1axQTzQh0kd9mTSvMyeIglgF4,4219
130
+ mns_scheduler/us/k_line.py,sha256=0F6IR1WCtREGFjLfL6Mcu5gAhNY2yaAshRlXTUE-0Sg,4100
131
+ mns_scheduler/us/us_company_info_sync_service_api.py,sha256=fyCtBb1OX6NdD9OhgMU_Dy80dEepJ3w4BF2g9S5j9yc,1227
130
132
  mns_scheduler/zb/__init__.py,sha256=Tyvi_iQlv3jz59EdH67Mycnt9CSixcWPQoJwu55bOq0,165
131
133
  mns_scheduler/zb/stock_zb_pool_sync.py,sha256=LLL7TgqdhEwGsfOmox5VJt9eveVvWsD4jJqC5LO0oGY,1974
132
134
  mns_scheduler/zt/__init__.py,sha256=Rzolrn2R5RZIj-eOmu9KcL6oZBY9Wjz_uBFXPlzirQc,1641
@@ -149,8 +151,8 @@ mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py,sha256=A5YiAWYdbAxhlTTJ8pOStZrBb
149
151
  mns_scheduler/zt/zt_pool/ths_zt_pool_sync_api.py,sha256=Sy39T-yFwLSIIoSZqQzS-6-W1RlaFWvYpksEXKQVFdI,10456
150
152
  mns_scheduler/zt/zt_pool/update_null_zt_reason_api.py,sha256=1uoiR2Uw46kDfjkvNg2US5rd_4OIkYO3872gIJOufUY,2135
151
153
  mns_scheduler/zz_task/__init__.py,sha256=QWBdZwBCvQw8aS4hnL9_pg3U3ZiNLUXzlImyy9WhUcI,163
152
- mns_scheduler/zz_task/data_sync_task.py,sha256=Muq8lILGn6ShNoaJc97f5lU65Ql8nW63NlxUjGnIZOA,21669
153
- mns_scheduler-1.2.5.1.dist-info/METADATA,sha256=D0U49bXPUdkY2wcsByluL6nkOLcMBA2_EFsG0FixBtQ,64
154
- mns_scheduler-1.2.5.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
155
- mns_scheduler-1.2.5.1.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
156
- mns_scheduler-1.2.5.1.dist-info/RECORD,,
154
+ mns_scheduler/zz_task/data_sync_task.py,sha256=_lxpa8dJVThjwx9eu8SCLiUTAsjqjUJLlRgAbRdJxZ0,22626
155
+ mns_scheduler-1.2.5.3.dist-info/METADATA,sha256=8uDSKTf2yXxdKmYNPvhIvyh5q3UG53X7N93pZt6wwWc,64
156
+ mns_scheduler-1.2.5.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
157
+ mns_scheduler-1.2.5.3.dist-info/top_level.txt,sha256=PXQDFBGR1pWmsUbH5yiLAh71P5HZODTRED0zJ8CCgOc,14
158
+ mns_scheduler-1.2.5.3.dist-info/RECORD,,
@@ -1,87 +0,0 @@
1
- import sys
2
- import os
3
-
4
- file_path = os.path.abspath(__file__)
5
- end = file_path.index('mns') + 16
6
- project_path = file_path[0:end]
7
- sys.path.append(project_path)
8
- import mns_common.api.em.east_money_stock_api as east_money_stock_api
9
- import mns_common.constant.db_name_constant as db_name_constant
10
- from mns_common.db.MongodbUtil import MongodbUtil
11
- from loguru import logger
12
- import mns_common.component.company.company_common_service_new_api as company_common_service_new_api
13
- import mns_common.utils.date_handle_util as date_handle_util
14
- import mns_common.api.akshare.k_line_api as k_line_api
15
-
16
- mongodb_util = MongodbUtil('27017')
17
-
18
-
19
- def history_high_chg(symbol_param):
20
- company_info_df = company_common_service_new_api.get_company_info_info()
21
- begin_date = '20140722'
22
- end_date = '20150615'
23
-
24
- real_time_quotes_all_stocks_df = east_money_stock_api.get_real_time_quotes_all_stocks()
25
- if symbol_param is not None:
26
- real_time_quotes_all_stocks_df = real_time_quotes_all_stocks_df.loc[
27
- real_time_quotes_all_stocks_df['symbol'] == symbol_param]
28
- real_time_quotes_all_stocks_list_date_before = real_time_quotes_all_stocks_df.loc[
29
- real_time_quotes_all_stocks_df['list_date'] < 20150615]
30
-
31
- for company_one in real_time_quotes_all_stocks_list_date_before.itertuples():
32
- try:
33
-
34
- symbol = company_one.symbol
35
- stock_qfq_daily_df = k_line_api.stock_zh_a_hist(symbol=symbol, period='daily',
36
- start_date=date_handle_util.no_slash_date(begin_date),
37
- end_date=date_handle_util.no_slash_date(end_date),
38
- adjust='hfq')
39
-
40
- if stock_qfq_daily_df.shape[0] < 100:
41
- continue
42
- logger.error("新股或者交易时间不足{}:{}", symbol, company_one.name)
43
- stock_qfq_daily_df = stock_qfq_daily_df.sort_values(by=['date'], ascending=True)
44
- first_row = stock_qfq_daily_df.iloc[0]
45
-
46
- open_price = first_row.open
47
-
48
- last_row = stock_qfq_daily_df.iloc[-1]
49
-
50
- close_price = last_row.close
51
-
52
- sum_chg = round((close_price - open_price) * 100 / open_price, 2)
53
-
54
- company_info_df_one = company_info_df.loc[company_info_df['_id'] == symbol]
55
- if company_info_df_one.shape[0] > 0:
56
- company_info_df_one['sum_chg'] = sum_chg
57
- company_info_df_one['name'] = company_one.name
58
- company_info_df_one = company_info_df_one[
59
- ['_id',
60
- 'name',
61
- 'sum_chg',
62
- 'industry',
63
- 'first_sw_industry',
64
- 'second_sw_industry',
65
- 'third_sw_industry',
66
- 'em_industry',
67
- 'list_date',
68
- 'ths_concept_list_info',
69
- 'kpl_plate_name',
70
- 'kpl_plate_list_info',
71
- 'company_type']]
72
- mongodb_util.save_mongo(company_info_df_one, '2014-2015-chg-statistics')
73
- else:
74
- logger.error("该股票已经退市{}:{}", symbol, company_one.name)
75
- except BaseException as e:
76
- logger.error("出现异常{}:{}", symbol, e)
77
-
78
-
79
- if __name__ == '__main__':
80
- symbol_test = None
81
-
82
- # qfq_k_line_df = k_line_api.stock_zh_a_hist(symbol=symbol_test, period='daily',
83
- # start_date=date_handle_util.no_slash_date('1990-12-19'),
84
- # end_date=date_handle_util.no_slash_date('2990-12-19'),
85
- # adjust='hfq')
86
-
87
- history_high_chg(symbol_test)
File without changes