hikyuu 2.6.5__py3-none-win_amd64.whl → 2.6.6__py3-none-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hikyuu/__init__.py +6 -0
- hikyuu/__init__.pyi +548 -545
- hikyuu/analysis/__init__.pyi +519 -518
- hikyuu/analysis/analysis.pyi +520 -519
- hikyuu/core.pyi +521 -520
- hikyuu/cpp/__init__.pyi +2 -2
- hikyuu/cpp/boost_date_time-mt.dll +0 -0
- hikyuu/cpp/boost_serialization-mt.dll +0 -0
- hikyuu/cpp/boost_wserialization-mt.dll +0 -0
- hikyuu/cpp/core310.pyd +0 -0
- hikyuu/cpp/core310.pyi +35 -25
- hikyuu/cpp/core311.pyd +0 -0
- hikyuu/cpp/core311.pyi +35 -25
- hikyuu/cpp/core312.pyd +0 -0
- hikyuu/cpp/core312.pyi +35 -25
- hikyuu/cpp/core313.pyd +0 -0
- hikyuu/cpp/core313.pyi +35 -25
- hikyuu/cpp/core39.pyd +0 -0
- hikyuu/cpp/core39.pyi +35 -25
- hikyuu/cpp/hikyuu.dll +0 -0
- hikyuu/cpp/hikyuu.lib +0 -0
- hikyuu/cpp/i18n/__init__.py +0 -0
- hikyuu/cpp/i18n/zh_CN.mo +0 -0
- hikyuu/cpp/sqlite3.dll +0 -0
- hikyuu/data/clickhouse_upgrade/__init__.py +1 -0
- hikyuu/data/clickhouse_upgrade/createdb.sql +1085 -0
- hikyuu/data/common_clickhouse.py +512 -0
- hikyuu/data/em_block_to_clickhouse.py +120 -0
- hikyuu/data/hku_config_template.py +58 -3
- hikyuu/data/pytdx_finance_to_clickhouse.py +107 -0
- hikyuu/data/pytdx_to_clickhouse.py +841 -0
- hikyuu/data/pytdx_to_mysql.py +4 -4
- hikyuu/data/pytdx_weight_to_clickhouse.py +191 -0
- hikyuu/data/tdx_to_clickhouse.py +448 -0
- hikyuu/data/zh_bond10_to_clickhouse.py +49 -0
- hikyuu/draw/drawplot/__init__.pyi +9 -9
- hikyuu/draw/drawplot/bokeh_draw.pyi +537 -534
- hikyuu/draw/drawplot/common.pyi +1 -1
- hikyuu/draw/drawplot/echarts_draw.pyi +539 -536
- hikyuu/draw/drawplot/matplotlib_draw.pyi +549 -546
- hikyuu/draw/elder.pyi +11 -11
- hikyuu/draw/kaufman.pyi +18 -18
- hikyuu/draw/volume.pyi +10 -10
- hikyuu/extend.pyi +527 -526
- hikyuu/fetcher/stock/zh_stock_a_pytdx.py +9 -20
- hikyuu/fetcher/stock/zh_stock_a_qmt.py +4 -5
- hikyuu/fetcher/stock/zh_stock_a_sina_qq.py +16 -60
- hikyuu/flat/Spot.py +96 -200
- hikyuu/gui/HikyuuTDX.py +132 -3
- hikyuu/gui/data/ImportBlockInfoTask.py +11 -0
- hikyuu/gui/data/ImportHistoryFinanceTask.py +15 -1
- hikyuu/gui/data/ImportPytdxTimeToH5Task.py +11 -1
- hikyuu/gui/data/ImportPytdxToH5Task.py +13 -1
- hikyuu/gui/data/ImportPytdxTransToH5Task.py +11 -1
- hikyuu/gui/data/ImportTdxToH5Task.py +13 -1
- hikyuu/gui/data/ImportWeightToSqliteTask.py +14 -1
- hikyuu/gui/data/ImportZhBond10Task.py +11 -0
- hikyuu/gui/data/MainWindow.py +76 -12
- hikyuu/gui/data/UsePytdxImportToH5Thread.py +45 -26
- hikyuu/gui/data/UseTdxImportToH5Thread.py +19 -1
- hikyuu/gui/dataserver.py +12 -4
- hikyuu/gui/spot_server.py +30 -40
- hikyuu/gui/start_qmt.py +20 -3
- hikyuu/hub.pyi +6 -6
- hikyuu/include/hikyuu/DataType.h +11 -0
- hikyuu/include/hikyuu/StockManager.h +8 -0
- hikyuu/include/hikyuu/data_driver/kdata/mysql/KRecordTable.h +1 -0
- hikyuu/include/hikyuu/global/GlobalSpotAgent.h +1 -1
- hikyuu/include/hikyuu/global/SpotRecord.h +15 -31
- hikyuu/include/hikyuu/global/agent/spot_generated.h +48 -232
- hikyuu/include/hikyuu/global/schedule/scheduler.h +1 -1
- hikyuu/include/hikyuu/plugin/KDataToHdf5Importer.h +3 -0
- hikyuu/include/hikyuu/plugin/dataserver.h +26 -1
- hikyuu/include/hikyuu/plugin/device.h +2 -1
- hikyuu/include/hikyuu/plugin/interface/DataDriverPluginInterface.h +27 -0
- hikyuu/include/hikyuu/plugin/interface/DataServerPluginInterface.h +2 -1
- hikyuu/include/hikyuu/plugin/interface/DevicePluginInterface.h +1 -1
- hikyuu/include/hikyuu/plugin/interface/ImportKDataToHdf5PluginInterface.h +3 -0
- hikyuu/include/hikyuu/plugin/interface/plugins.h +2 -0
- hikyuu/include/hikyuu/strategy/Strategy.h +0 -9
- hikyuu/include/hikyuu/utilities/config.h +1 -1
- hikyuu/include/hikyuu/utilities/mo/mo.h +30 -14
- hikyuu/include/hikyuu/utilities/os.h +6 -0
- hikyuu/include/hikyuu/version.h +4 -4
- hikyuu/plugin/backtest.dll +0 -0
- hikyuu/plugin/clickhousedriver.dll +0 -0
- hikyuu/plugin/dataserver.dll +0 -0
- hikyuu/plugin/device.dll +0 -0
- hikyuu/plugin/extind.dll +0 -0
- hikyuu/plugin/import2hdf5.dll +0 -0
- hikyuu/plugin/tmreport.dll +0 -0
- hikyuu/trade_manage/__init__.pyi +537 -534
- hikyuu/trade_manage/broker.pyi +3 -3
- hikyuu/trade_manage/broker_easytrader.pyi +1 -1
- hikyuu/trade_manage/trade.pyi +537 -534
- hikyuu/util/__init__.py +1 -0
- hikyuu/util/__init__.pyi +4 -3
- hikyuu/util/check.py +8 -0
- hikyuu/util/check.pyi +5 -1
- hikyuu/util/singleton.pyi +1 -1
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/METADATA +2 -2
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/RECORD +106 -95
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/top_level.txt +2 -2
- hikyuu/include/hikyuu/global/agent/hikyuu/__init__.py +0 -1
- hikyuu/include/hikyuu/global/agent/hikyuu/flat/__init__.py +0 -1
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/LICENSE +0 -0
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/WHEEL +0 -0
- {hikyuu-2.6.5.dist-info → hikyuu-2.6.6.dist-info}/entry_points.txt +0 -0
hikyuu/data/pytdx_to_clickhouse.py
@@ -0,0 +1,841 @@
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2010-2019 fasiondog/hikyuu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import sys
import math
import datetime
from pytdx.hq import TDXParams

from hikyuu.util import hku_error, hku_debug, hku_run_ignore_exception

from hikyuu import Datetime
from hikyuu.data.common import *
# NOTE: names such as MARKET, STOCKTYPE, get_stktype_list, the akshare module `ak`,
# hku_catch and hku_info used below are expected to be re-exported via this wildcard import.
from hikyuu.data.common_pytdx import to_pytdx_market, pytdx_get_day_trans
from hikyuu.data.common_clickhouse import (
    create_database,
    get_codepre_list,
    get_stock_list,
    get_table,
    get_lastdatetime,
    get_last_krecord,
    update_extern_data,
)


def ProgressBar(cur, total):
    percent = "{:.0%}".format(cur / total)
    sys.stdout.write("\r")
    sys.stdout.write("[%-50s] %s" % ("=" * int(math.floor(cur * 50 / total)), percent))
    sys.stdout.flush()

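# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the packaged file): every importer in
# this module takes `connect`, a clickhouse_connect client, and most also take a
# connected pytdx TdxHq_API, exactly as the __main__ block at the bottom does.
# Host, credentials and the TDX server address below are placeholders.
def _demo_connect():
    import clickhouse_connect
    from pytdx.hq import TdxHq_API
    client = clickhouse_connect.get_client(host="localhost", username="default", password="")
    api = TdxHq_API()
    api.connect("180.101.48.170", 7709)  # TDX server from the __main__ example below
    return client, api
# ----------------------------------------------------------------------------
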
@hku_catch(ret=0, trace=True)
def import_index_name(connect):
    """
    Import the complete index code table.

    :param connect: ClickHouse connection instance
    :return: number of indices imported
    """
    index_list = get_index_code_name_list()
    if not index_list:
        return 0

    sql = f"select market, code, name, type, valid, startDate, endDate from hku_base.stock where type={STOCKTYPE.INDEX}"
    a = connect.query(sql)
    a = a.result_rows
    oldStockDict = {}
    for oldstock in a:
        oldcode = f"{oldstock[0]}{oldstock[1]}"
        oldStockDict[oldcode] = oldstock

    today = datetime.date.today()
    today = today.year * 10000 + today.month * 100 + today.day
    insert_records = []
    for index in index_list:
        market_code = index["market_code"]
        market, code = market_code[:2], market_code[2:]
        if market_code in oldStockDict:
            old = oldStockDict[market_code]
            if old[4] == 0:
                # connect.command(
                #     f"alter table hku_base.stock update valid=1, name='{index['name']}' where market='{market}' and code='{code}'")
                connect.command(
                    f"delete from hku_base.stock where market='{market}' and code='{code}'")
                insert_records.append((market, code, index["name"], STOCKTYPE.INDEX, 1, today, 99999999))
        else:
            insert_records.append((market, code, index["name"], STOCKTYPE.INDEX, 1, today, 99999999))

    if insert_records:
        ic = connect.create_insert_context(table='stock', database='hku_base',
                                           column_names=['market', 'code', 'name',
                                                         'type', 'valid', 'startDate', 'endDate'],
                                           data=insert_records)
        connect.insert(context=ic)
    return len(index_list)


@hku_catch(ret=0, trace=True)
def import_stock_name(connect, api, market, quotations=None):
    """Update each stock's name, current validity, start date and end date.
    A code that no longer appears in the imported code table is treated as delisted.

    :param connect: ClickHouse connection instance
    :param api: pytdx API instance; must be connected by the caller
    :param market: 'SH' | 'SZ'
    :param quotations: quotation categories to import, None means all: 'stock' | 'fund' | 'bond' | None
    """
    deSet = set()  # delisted securities
    if market == MARKET.SH:
        df = ak.stock_info_sh_delist()
        l = df[['公司代码', '公司简称']].to_dict(orient='records') if not df.empty else []
        for stock in l:
            code = str(stock['公司代码'])
            deSet.add(code)
    elif market == MARKET.SZ:
        for t in ['暂停上市公司', '终止上市公司']:
            df = ak.stock_info_sz_delist(t)
            l = df[['证券代码', '证券简称']].to_dict(orient='records') if not df.empty else []
            for stock in l:
                code = str(stock['证券代码'])
                deSet.add(code)

    newStockDict = {}
    stk_list = get_stk_code_name_list(market)
    if not stk_list:
        hku_error("获取 {} 股票代码表失败", market)
        return 0

    if not quotations or "fund" in [v.lower() for v in quotations]:
        stk_list.extend(get_fund_code_name_list(market))
    for stock in stk_list:
        code = str(stock["code"])
        if code not in deSet:
            newStockDict[code] = stock["name"]

    stktype_list = get_stktype_list(quotations)
    stktype_list = list(stktype_list)
    stktype_list.remove(STOCKTYPE.INDEX)  # drop the index type
    stktype_list = tuple(stktype_list)
    sql = f"select market, code, name, type, valid, startDate, endDate from hku_base.stock where market='{market}' and type in {stktype_list}"
    a = connect.query(sql)
    a = a.result_rows
    oldStockDict = {}
    insert_records = []
    for oldstock in a:
        oldcode = oldstock[1]
        oldStockDict[oldcode] = oldstock

        oldname, oldtype, oldvalid, oldstartDate, oldendDate = oldstock[2], oldstock[3], oldstock[4], oldstock[5], oldstock[6]

        # Not present in the new code table (or explicitly delisted): mark as invalid
        # if (oldvalid == 1) and (oldcode not in newStockDict):
        if (oldvalid == 1) and ((oldcode not in newStockDict) or oldcode in deSet):
            sql = f"delete from hku_base.stock where market='{market}' and code='{oldcode}'"
            connect.command(sql)
            insert_records.append((market, oldcode, oldname, oldtype, 0, oldstartDate, oldendDate))

        # Name changed: update the name; if previously invalid, mark as valid again
        if oldcode in newStockDict:
            if oldname != newStockDict[oldcode] or oldvalid == 0:
                sql = f"delete from hku_base.stock where market='{market}' and code='{oldcode}'"
                connect.command(sql)
                insert_records.append((market, oldcode, newStockDict[oldcode], oldtype, 1, oldstartDate, 99999999))

    # Handle newly listed stocks
    codepre_list = get_codepre_list(connect, market, quotations)

    today = datetime.date.today()
    today = today.year * 10000 + today.month * 100 + today.day
    count = 0
    for code in newStockDict:
        if code not in oldStockDict:
            for codepre in codepre_list:
                length = len(codepre[0])
                if code[:length] == codepre[0]:
                    count += 1
                    insert_records.append((market, code, newStockDict[code], codepre[1], 1, today, 99999999))
                    break

    if insert_records:
        ic = connect.create_insert_context(table='stock', database='hku_base',
                                           column_names=['market', 'code', 'name',
                                                         'type', 'valid', 'startDate', 'endDate'],
                                           data=insert_records)
        connect.insert(context=ic)
    # print('%s新增股票数:%i' % (market.upper(), count))
    return count

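# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the packaged file): how the code-table
# importers above are typically driven, mirroring the commented-out block in
# __main__ at the bottom of this module. `client` is a clickhouse_connect client
# and `api` a connected pytdx TdxHq_API; the quotation categories are examples.
def _demo_update_code_tables(client, api):
    count = import_index_name(client)
    for mkt in ('SH', 'SZ', 'BJ'):
        count += import_stock_name(client, api, mkt, ['stock', 'fund'])
    return count
# ----------------------------------------------------------------------------
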
def guess_day_n_step(last_datetime):
    last_date = int(last_datetime // 10000)
    today = datetime.date.today()

    last_y = last_date // 10000
    n = int((today.year - last_y + 1) * 250 // 800)

    step = 800
    if n < 1:
        last_m = last_date // 100 - last_y * 100
        last_d = last_date - (last_y * 10000 + last_m * 100)
        step = (today - datetime.date(last_y, last_m, last_d)).days + 1
        if step > 800:
            n = 1
            step = 800
    return (n, step)


def guess_1min_n_step(last_datetime):
    last_date = int(last_datetime // 10000)
    today = datetime.date.today()

    last_y = last_date // 10000
    last_m = last_date // 100 - last_y * 100
    last_d = last_date - (last_y * 10000 + last_m * 100)

    n = int(((today - datetime.date(last_y, last_m, last_d)).days * 240 + 1) // 800)
    step = 800
    if n < 1:
        step = (today - datetime.date(last_y, last_m, last_d)).days * 240 + 1
    elif n > 99:
        n = 99

    return (n, step)


def guess_5min_n_step(last_datetime):
    last_date = int(last_datetime // 10000)
    today = datetime.date.today()

    last_y = last_date // 10000
    last_m = last_date // 100 - last_y * 100
    last_d = last_date - (last_y * 10000 + last_m * 100)

    n = int(((today - datetime.date(last_y, last_m, last_d)).days * 48 + 1) // 800)
    step = 800
    if n < 1:
        step = (today - datetime.date(last_y, last_m, last_d)).days * 48 + 1
    elif n > 99:
        n = 99

    return (n, step)

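# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the packaged file): the guess_*_n_step
# helpers take the last stored bar time as a yyyymmddHHMM integer and return
# (n, step); import_one_stock_data below then issues n+1 pytdx requests, paging
# backwards in 800-bar blocks and asking for at most `step` bars per request.
def _demo_guess_paging(last_datetime=202401021500):  # hypothetical last stored bar
    n, step = guess_day_n_step(last_datetime)
    print(f"DAY : {n + 1} request(s) of up to {step} bars")
    n, step = guess_5min_n_step(last_datetime)
    print(f"5MIN: {n + 1} request(s) of up to {step} bars")
    n, step = guess_1min_n_step(last_datetime)
    print(f"1MIN: {n + 1} request(s) of up to {step} bars")
# ----------------------------------------------------------------------------
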
def import_one_stock_data(
    connect, api, market, ktype, stock_record, startDate=199012191500
):
    market = market.upper()
    pytdx_market = to_pytdx_market(market)

    market, code, valid, stktype = stock_record[:5]
    hku_debug("{}{}".format(market, code))
    table = get_table(connect, market, code, ktype)
    last_krecord = get_last_krecord(connect, table)
    last_datetime = startDate if last_krecord is None else last_krecord[0]

    today = datetime.date.today()
    if ktype == "DAY":
        n, step = guess_day_n_step(last_datetime)
        pytdx_kline_type = TDXParams.KLINE_TYPE_RI_K
        today_datetime = (today.year * 10000 + today.month * 100 + today.day) * 10000

    elif ktype == "1MIN":
        n, step = guess_1min_n_step(last_datetime)
        pytdx_kline_type = TDXParams.KLINE_TYPE_1MIN
        today_datetime = (
            today.year * 10000 + today.month * 100 + today.day
        ) * 10000 + 1500

    elif ktype == "5MIN":
        n, step = guess_5min_n_step(last_datetime)
        pytdx_kline_type = TDXParams.KLINE_TYPE_5MIN
        today_datetime = (
            today.year * 10000 + today.month * 100 + today.day
        ) * 10000 + 1500
    else:
        return 0

    if today_datetime <= last_datetime:
        return 0

    get_bars = (
        api.get_index_bars if stktype == STOCKTYPE.INDEX else api.get_security_bars
    )

    buf = []
    while n >= 0:
        bar_list = get_bars(pytdx_kline_type, pytdx_market, code, n * 800, step)
        n -= 1
        if bar_list is None:
            # print(code, "invalid!!")
            continue

        for bar in bar_list:
            try:
                if ktype == "DAY":
                    tmp = datetime.date(bar["year"], bar["month"], bar["day"])
                    bar_datetime = (tmp.year * 10000 + tmp.month * 100 + tmp.day) * 10000
                else:
                    tmp = datetime.datetime(bar["year"], bar["month"], bar["day"], bar['hour'], bar['minute'])
                    bar_datetime = (tmp.year * 10000 + tmp.month * 100 + tmp.day) * \
                        10000 + bar["hour"] * 100 + bar["minute"]
            except Exception as e:
                hku_error("Failed translate datetime: {}, from {}! {}".format(bar, api.ip, e))
                continue

            if last_krecord is not None and bar_datetime == last_datetime:
                if abs(last_krecord[1] - bar["open"]) / last_krecord[1] > 0.01:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord open: {last_krecord[1]}, bar: {bar['open']}")
                    return 0
                if abs(last_krecord[2] - bar["high"]) / last_krecord[2] > 0.01:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord high: {last_krecord[2]}, bar: {bar['high']}")
                    return 0
                if abs(last_krecord[3] - bar["low"]) / last_krecord[3] > 0.01:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord low: {last_krecord[3]}, bar: {bar['low']}")
                    return 0
                if abs(last_krecord[4] - bar["close"]) / last_krecord[4] > 0.01:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord close: {last_krecord[4]}, bar: {bar['close']}")
                    return 0
                if ktype == 'DAY' and last_krecord[5] != 0.0 and abs(last_krecord[5] - bar["amount"]*0.001) / last_krecord[5] > 0.1:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord amount: {last_krecord[5]}, bar: {bar['amount']*0.001}")
                    return 0
                if ktype == 'DAY' and last_krecord[5] != 0.0 and abs(last_krecord[6] - bar["vol"]) / last_krecord[6] > 0.1:
                    hku_error(
                        f"fetch data from tdx error! {bar_datetime} {ktype} {market}{code} last_krecord count: {last_krecord[6]}, bar: {bar['vol']}")
                    return 0
                continue

            if (
                today_datetime >= bar_datetime > last_datetime
                and bar["high"] >= bar["open"] >= bar["low"] > 0
                and bar["high"] >= bar["close"] >= bar["low"] > 0
                and bar["vol"] >= 0
                and bar["amount"] >= 0
            ):
                try:
                    buf.append(
                        (
                            table[1], table[2],
                            Datetime(bar_datetime).timestamp_utc()//1000000,
                            bar["open"],
                            bar["high"],
                            bar["low"],
                            bar["close"],
                            bar["amount"] * 0.001,
                            bar["vol"]
                            # bar['vol'] if stktype == 2 else round(bar['vol'] * 0.01)
                        )
                    )
                except Exception as e:
                    hku_error("Can't trans record({}), {}".format(bar, e))
                last_datetime = bar_datetime

    if len(buf) > 0:
        ic = connect.create_insert_context(table=table[0],
                                           data=buf)
        connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})

    return len(buf)

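# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the packaged file): the date handling used
# above. Bars are compared as yyyymmddHHMM integers, while the K-line tables
# store epoch seconds derived from hikyuu's Datetime (whose timestamp_utc()
# appears to be in microseconds, hence the // 1000000). Values are examples.
def _demo_bar_timestamp(bar_datetime=202401021500):  # hypothetical yyyymmddHHMM value
    seconds = Datetime(bar_datetime).timestamp_utc() // 1000000
    # round-trip back to a Datetime, as update_stock_info() below does
    return seconds, Datetime.from_timestamp_utc(seconds * 1000000)
# ----------------------------------------------------------------------------
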
def update_stock_info(connect, market):
    sql = f"SELECT a.code, a.valid, a.startDate, a.endDate, b.min_date, b.max_date FROM hku_base.stock a JOIN (SELECT market, code, toInt32(min(date)) AS min_date, toInt32(max(date)) AS max_date FROM hku_data.day_k WHERE market = '{market}' GROUP BY market, code) b ON a.market = b.market AND a.code = b.code"
    # hku_info(sql)
    a = connect.query(sql)
    a = a.result_rows
    ticks = 1000000
    for v in a:
        code, valid, startDate, endDate, min_date, max_date = v
        now_start = Datetime.from_timestamp_utc(min_date*ticks).ymd
        now_end = Datetime.from_timestamp_utc(max_date*ticks).ymd
        if valid == 1 and now_start != startDate:
            sql = f"alter table hku_base.stock update startDate={now_start}, endDate=99999999 where market='{market}' and code='{code}'"
            connect.command(sql)
        elif valid == 0 and now_end != endDate:
            sql = f"alter table hku_base.stock update startDate={now_start}, endDate={now_end} where market='{market}' and code='{code}'"
            connect.command(sql)
        if ((code == "000001" and market == MARKET.SH)
                or (code == "399001" and market == MARKET.SZ)
                or (code == "830799" and market == MARKET.BJ)):
            sql = f"alter table hku_base.market update lastDate={now_end} where market='{market}'"
            connect.command(sql)


def clear_extern_data(connect, market, data_type):
    if data_type == 'DAY':
        index_list = ('week', 'month', 'quarter', 'halfyear', 'year')
        lastdate = connect.command(
            f"select toInt32(max(date)) from hku_data.day_k where market='SH' and code='000001'")
        for index_type in index_list:
            sql = f"delete from hku_data.{index_type}_k where market='{market}' and date>={lastdate}"
            # hku_info(
            #     f"delete from hku_data.{index_type}_k where market='{market}' and date>={Datetime.from_timestamp_utc(lastdate*1000000)}")
            connect.command(sql, settings={"mutations_sync": 0})
            # hku_run_ignore_exception(connect.command, f"OPTIMIZE TABLE hku_data.{index_type}_k", settings={
            #     "mutations_sync": 0})
        hku_info(f"清理 {market} {data_type} 线扩展数据完毕")

    else:
        index_list = ('min15', 'min30', 'min60', 'hour2')
        lastdate = connect.command(
            f"select toInt32(max(date)) from hku_data.min5_k where market='SH' and code='000001'")
        lastdate = Datetime.from_timestamp_utc(lastdate*1000000).start_of_day()
        last_timestamp = Datetime(lastdate).timestamp_utc()//1000000
        for index_type in index_list:
            sql = f"delete from hku_data.{index_type}_k where market='{market}' and date>={last_timestamp}"
            # hku_info(f"delete from hku_data.{index_type}_k where market='{market}' and date>={lastdate}")
            connect.command(sql, settings={"mutations_sync": 0})
            # hku_run_ignore_exception(connect.command, f"OPTIMIZE TABLE hku_data.{index_type}_k", settings={
            #     "mutations_sync": 0})
        hku_info(f"清理 {market} {data_type} 线扩展数据完毕")

@hku_catch(trace=True, re_raise=True)
def import_data(
    connect,
    market,
    ktype,
    quotations,
    api,
    dest_dir,
    startDate=199012190000,
    progress=ProgressBar,
):
    """Import K-line data from the specified TDX end-of-day data directory.
    Note: only stocks already present in the base-info database are imported.

    :param connect   : ClickHouse connection instance
    :param market    : 'SH' | 'SZ'
    :param ktype     : 'DAY' | '1MIN' | '5MIN'
    :param quotations: 'stock' | 'fund' | 'bond'
    :param src_dir   : end-of-day K-line data path, e.g. SH 5-minute lines: D:\\Tdx\\vipdoc\\sh\\fzline
    :param dest_dir  : directory holding the HDF5 data files
    :param progress  : progress display function
    :return: number of imported records
    """
    add_record_count = 0
    market = market.upper()

    stock_list = get_stock_list(connect, market, quotations)

    if ktype in ("DAY", "5MIN"):
        clear_extern_data(connect, market, ktype)
        if ktype == "DAY":
            index_list = ('week', 'month', 'quarter', 'halfyear', 'year')
            update_data = {}
            for index_type in index_list:
                update_data[index_type] = []
        elif ktype == "5MIN":
            index_list = ('min15', 'min30', 'min60', 'hour2')
            update_data = {}
            for index_type in index_list:
                update_data[index_type] = []

    total = len(stock_list)
    # market, code, valid, type
    for i, stock in enumerate(stock_list):
        if stock[2] == 0 or len(stock[1]) != 6:
            if progress:
                progress(i, total)
            continue

        this_count = import_one_stock_data(
            connect, api, market, ktype, stock, startDate
        )
        add_record_count += this_count
        if ktype in ("DAY", "5MIN"):
            if ktype == "DAY":
                index_data = update_extern_data(connect, market, stock[1], "DAY")
            elif ktype == "5MIN":
                index_data = update_extern_data(connect, market, stock[1], "5MIN")
            for index_type in index_list:
                update_data[index_type].extend(index_data[index_type])
                if len(update_data[index_type]) > 200000:
                    index_table = get_table(connect, market, stock[1], index_type)
                    # hku_info(f"写入 {market} {index_table[0]} 扩展数据: {len(update_data[index_type])} ...")
                    ic = connect.create_insert_context(table=index_table[0],
                                                       data=update_data[index_type])
                    connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
                    update_data[index_type].clear()

        if progress:
            progress(i, total)

    if ktype in ("DAY", "5MIN"):
        for index_type in index_list:
            if len(update_data[index_type]) > 0:
                index_table = get_table(connect, market, stock[1], index_type)
                # hku_info(f"写入 {market} {index_table[0]} 扩展数据: {len(update_data[index_type])} ...")
                ic = connect.create_insert_context(table=index_table[0],
                                                   data=update_data[index_type])
                connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
                update_data[index_type].clear()
        update_data.clear()

    cur_year = Datetime.today().year
    if ktype == "DAY":
        update_stock_info(connect, market)
        hku_info(f"更新 {market} 股票信息完毕")
        connect.command(f"OPTIMIZE TABLE hku_data.year_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        connect.command(f"OPTIMIZE TABLE hku_data.halfyear_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        connect.command(f"OPTIMIZE TABLE hku_data.quarter_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        connect.command(f"OPTIMIZE TABLE hku_data.month_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        connect.command(f"OPTIMIZE TABLE hku_data.week_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        connect.command(f"OPTIMIZE TABLE hku_data.day_k PARTITION '{market}' FINAL", settings={
                        "mutations_sync": 0})
        hku_info(f"优化 {market} 日线及扩展表数据完毕")
    if ktype == "5MIN":
        hku_run_ignore_exception(connect.command, f"OPTIMIZE TABLE hku_data.hour2_k PARTITION '{market}' FINAL", settings={
                                 "mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_run_ignore_exception(connect.command,
                                 f"OPTIMIZE TABLE hku_data.min30_k PARTITION ('{market}', {cur_year - cur_year % 10}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_run_ignore_exception(connect.command,
                                 f"OPTIMIZE TABLE hku_data.min15_k PARTITION ('{market}', {cur_year - cur_year % 10}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_run_ignore_exception(connect.command,
                                 f"OPTIMIZE TABLE hku_data.min5_k PARTITION ('{market}', {cur_year - cur_year % 10}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_run_ignore_exception(connect.command, f"OPTIMIZE TABLE hku_data.min60_k PARTITION '{market}' FINAL", settings={
                                 "mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_info(f"优化 {market} 5分钟线及扩展表完毕")
    if ktype == "1MIN":
        hku_run_ignore_exception(connect.command,
                                 f"OPTIMIZE TABLE hku_data.min_k PARTITION ('{market}', {cur_year - cur_year % 10}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
        hku_info(f"优化 {market} 1分钟线表数据完毕")
    return add_record_count

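# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the packaged file): a typical daily
# K-line refresh for one market, mirroring the commented-out call in __main__
# below. `client` is a clickhouse_connect client, `api` a connected TdxHq_API.
def _demo_daily_refresh(client, api):
    return import_data(client, "SH", "DAY", ["stock", "fund"], api, "", progress=ProgressBar)
# ----------------------------------------------------------------------------
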
@hku_catch(trace=True, re_raise=True)
def get_trans_table(connect, market, code):
    return get_table(connect, market, code, 'transdata')


@hku_catch(ret=[], trace=True)
def import_on_stock_trans(connect, api, market, stock_record, max_days):
    market = market.upper()
    pytdx_market = to_pytdx_market(market)

    market, code, valid, stktype = stock_record
    hku_debug("{}{}".format(market, code))
    table = get_trans_table(connect, market, code)
    last_datetime = get_lastdatetime(connect, table)

    today = datetime.date.today()
    if last_datetime is not None:
        # yyyymmddHHMMSS
        last_y = last_datetime.year
        last_m = last_datetime.month
        last_d = last_datetime.day
        last_date = datetime.date(last_y, last_m, last_d)
        need_days = (today - last_date).days
    else:
        need_days = max_days

    date_list = []
    for i in range(need_days):
        cur_date = today - datetime.timedelta(i)
        if cur_date.weekday() not in (5, 6):
            date_list.append(
                cur_date.year * 10000 + cur_date.month * 100 + cur_date.day
            )
    date_list.reverse()

    trans_buf = []
    for cur_date in date_list:
        buf = pytdx_get_day_trans(api, pytdx_market, code, cur_date)
        if not buf:
            continue

        second = 2
        pre_minute = 900

        for record in buf:
            try:
                minute = int(record["time"][0:2]) * 100 + int(record["time"][3:])
                if minute != pre_minute:
                    second = 0 if minute == 1500 else 2
                    pre_minute = minute
                else:
                    second += 3
                if second > 59:
                    continue

                trans_buf.append(
                    (
                        market, code,
                        Datetime(cur_date * 1000000 + minute * 100 + second).timestamp_utc()//1000000,
                        record["price"],
                        record["vol"],
                        record["buyorsell"],
                    )
                )
            except Exception as e:
                hku_error("Failed trans to record! {}", e)

    return trans_buf


def import_trans(
    connect, market, quotations, api, dest_dir, max_days=30, progress=ProgressBar
):
    add_record_count = 0
    market = market.upper()

    stock_list = get_stock_list(connect, market, quotations)
    total = len(stock_list)
    a_stktype_list = get_a_stktype_list()
    buf = []
    for i, stock in enumerate(stock_list):
        market, code, valid, stype = stock
        if valid == 0 or len(code) != 6 or stype not in a_stktype_list:
            if progress:
                progress(i, total)
            continue

        data = import_on_stock_trans(connect, api, market, stock, max_days)
        add_record_count += len(data)
        if len(data) > 0:
            buf.extend(data)
            if len(buf) > 500000:
                table = get_trans_table(connect, market, code)
                ic = connect.create_insert_context(table=table[0], data=buf)
                connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
                buf.clear()
                # hku_info(f"写入 {market} {table[0]} 分笔数据: {len(buf)} ...")

        if progress:
            progress(i, total)

    if len(buf) > 0:
        ic = connect.create_insert_context(table='hku_data.transdata', data=buf)
        connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
        buf.clear()
        # hku_info(f"写入 {market} hku_data.transdata 分笔数据: {len(buf)} ...")

    cur_year = Datetime.today().year
    hku_run_ignore_exception(connect.command,
                             f"OPTIMIZE TABLE hku_data.transdata PARTITION ('{market}', {cur_year}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
    return add_record_count

def get_time_table(connect, market, code):
    return get_table(connect, market, code, 'timeline')


@hku_catch(ret=[], trace=True)
def import_on_stock_time(connect, api, market, stock_record, max_days):
    market = market.upper()
    pytdx_market = to_pytdx_market(market)

    market, code, valid, type = stock_record
    hku_debug("{}{}".format(market, code))
    table = get_time_table(connect, market, code)
    last_datetime = get_lastdatetime(connect, table)

    today = datetime.date.today()
    if last_datetime is not None:
        # yyyymmddHHMM
        last_y = last_datetime.year
        last_m = last_datetime.month
        last_d = last_datetime.day
        last_date = datetime.date(last_y, last_m, last_d)
        need_days = (today - last_date).days
    else:
        need_days = max_days

    date_list = []
    for i in range(need_days):
        cur_date = today - datetime.timedelta(i)
        if cur_date.weekday() not in (5, 6):
            date_list.append(cur_date.year * 10000 + cur_date.month * 100 + cur_date.day)
    date_list.reverse()

    ticks = 1000000
    time_buf = []
    for cur_date in date_list:
        buf = api.get_history_minute_time_data(pytdx_market, code, cur_date)
        if buf is None or len(buf) != 240:
            # print(cur_date, "获取的分时线长度不为240!", stock_record[1], stock_record[2])
            continue
        this_date = cur_date * 10000
        time = 930
        for record in buf:
            if time == 960:
                time = 1000
            elif time == 1060:
                time = 1100
            elif time == 1130:
                time = 1300
            elif time == 1360:
                time = 1400
            try:
                time_buf.append((market, code, Datetime(this_date + time).timestamp_utc() //
                                 ticks, record['price'], record['vol']))
                time += 1
            except Exception as e:
                hku_error("Failed trans record {}! {}".format(record, e))

    return time_buf


def import_time(connect, market, quotations, api, dest_dir, max_days=9000, progress=ProgressBar):
    add_record_count = 0
    market = market.upper()

    # market, code, valid, type
    stock_list = get_stock_list(connect, market, quotations)
    total = len(stock_list)
    buf = []
    for i, stock in enumerate(stock_list):
        market, code, valid, stype = stock
        if valid == 0 or len(code) != 6:
            if progress:
                progress(i, total)
            continue

        data = import_on_stock_time(connect, api, market, stock, max_days)
        add_record_count += len(data)
        if len(data) > 0:
            buf.extend(data)
            if len(buf) > 500000:
                table = get_time_table(connect, market, code)
                ic = connect.create_insert_context(table=table[0], data=buf)
                connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
                buf.clear()
                # hku_info(f"写入 {market} {table[0]} 分时数据: {len(buf)} ...")

        if progress:
            progress(i, total)

    if len(buf) > 0:
        ic = connect.create_insert_context(table='hku_data.timeline', data=buf)
        connect.insert(context=ic, settings={"prefer_warmed_unmerged_parts_seconds": 86400})
        buf.clear()
        # hku_info(f"写入 {market} hku_data.timeline 分时数据: {len(buf)} ...")

    cur_year = Datetime.today().year
    hku_run_ignore_exception(connect.command,
                             f"OPTIMIZE TABLE hku_data.timeline PARTITION ('{market}', {cur_year - cur_year % 10}) FINAL", settings={"mutations_sync": 0, "optimize_skip_merged_partitions": 1})
    return add_record_count

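# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the packaged file): refreshing tick
# (transdata) and per-minute (timeline) data for one market with the two
# importers above. Argument values are examples only.
def _demo_refresh_trans_and_time(client, api):
    n = import_trans(client, 'SH', ['stock', 'fund'], api, '', max_days=30, progress=ProgressBar)
    n += import_time(client, 'SH', ['stock', 'fund'], api, '', max_days=9000, progress=ProgressBar)
    return n
# ----------------------------------------------------------------------------
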
if __name__ == "__main__":
    import os
    from configparser import ConfigParser
    dev_config = ConfigParser()
    dev_config.read(os.path.expanduser("~") + '/workspace/dev.ini')
    db = 'clickhouse54-http'
    user = dev_config.get(db, 'user')
    password = dev_config.get(db, 'pwd')
    host = dev_config.get(db, 'host')
    port = dev_config.getint(db, 'port')

    import clickhouse_connect
    client = clickhouse_connect.get_client(
        host=host, username=user, password=password)

    tdx_server = "180.101.48.170"
    tdx_port = 7709
    quotations = ["stock", "fund"]

    create_database(client)

    from pytdx.hq import TdxHq_API, TDXParams

    api = TdxHq_API()
    api.connect(tdx_server, tdx_port)

    import time
    starttime = time.time()

    add_count = 0

    # print("导入股票代码表")
    # # add_count = import_index_name(client)
    # add_count = import_stock_name(client, api, 'SH', quotations)
    # add_count += import_stock_name(client, api, 'SZ', quotations)
    # add_count += import_stock_name(client, api, 'BJ', quotations)
    # print("新增股票数:", add_count)

    print("\n导入上证日线数据")
    # add_count = import_data(client, "SH", "DAY", quotations, api, "", progress=ProgressBar)
    print("\n导入数量:", add_count)
    """
    print("\n导入深证日线数据")
    add_count = import_data(connect, 'SZ', 'DAY', quotations, api, dest_dir, progress=ProgressBar)
    print("\n导入数量:", add_count)

    print("\n导入上证5分钟线数据")
    add_count = import_data(connect, 'SH', '5MIN', quotations, api, dest_dir, progress=ProgressBar)
    print("\n导入数量:", add_count)

    print("\n导入深证5分钟线数据")
    add_count = import_data(connect, 'SZ', '5MIN', quotations, api, dest_dir, progress=ProgressBar)
    print("\n导入数量:", add_count)

    print("\n导入上证分钟线数据")
    add_count = import_data(connect, 'SH', '1MIN', quotations, api, dest_dir, progress=ProgressBar)
    print("\n导入数量:", add_count)

    print("\n导入深证分钟线数据")
    add_count = import_data(connect, 'SZ', '1MIN', quotations, api, dest_dir, progress=ProgressBar)
    print("\n导入数量:", add_count)

    print("\n导入权息数据")
    print("正在下载权息数据...")
    import urllib.request
    net_file = urllib.request.urlopen('http://www.qianlong.com.cn/download/history/weight.rar', timeout=60)
    dest_filename = dest_dir + '/weight.rar'
    with open(dest_filename, 'wb') as file:
        file.write(net_file.read())

    print("下载完成,正在解压...")
    os.system('unrar x -o+ -inul {} {}'.format(dest_filename, dest_dir))

    print("解压完成,正在导入...")
    add_count = qianlong_import_weight(connect, dest_dir + '/weight', 'SH')
    add_count += qianlong_import_weight(connect, dest_dir + '/weight', 'SZ')
    print("导入数量:", add_count)
    """

    # for i in range(10):
    #     x = api.get_history_transaction_data(TDXParams.MARKET_SZ, '000001', (9-i)*2000, 2000, 20181112)
    #     x = api.get_transaction_data(TDXParams.MARKET_SZ, '000001', (9-i)*800, 800)
    #     if x is not None and len(x) > 0:
    #         print(i, len(x), x[0], x[-1])

    api.disconnect()
    client.close()

    endtime = time.time()
    print("\nTotal time:")
    print("%.2fs" % (endtime - starttime))
    print("%.2fm" % ((endtime - starttime) / 60))